From: gnunet
Subject: [GNUnet-SVN] [taler-anastasis] 01/01: initial basic setup
Date: Tue, 18 Jun 2019 16:47:19 +0200

This is an automated email from the git hooks/post-receive script.

grothoff pushed a commit to branch master
in repository anastasis.

commit d134b3e61ec1da9a5ff03c2e69a2e50c1213f45b
Author: Christian Grothoff <address@hidden>
AuthorDate: Tue Jun 18 16:47:09 2019 +0200

    initial basic setup
---
 AUTHORS                                   |     1 +
 COPYING.AGPL                              |   661 ++
 ChangeLog                                 |     2 +
 Doxyfile                                  |   245 +
 INSTALL                                   |   368 +
 Makefile.am                               |    24 +
 NEWS                                      |     0
 README                                    |    36 +
 bootstrap                                 |    12 +
 configure.ac                              |   309 +
 contrib/gnunet.tag                        |    55 +
 doc/Makefile.am                           |    71 +
 doc/Makefile.in                           |  1041 +++
 doc/agpl.texi                             |   698 ++
 doc/brown-paper.css                       |    63 +
 doc/config.sh                             |   116 +
 doc/configuration-format.texi             |    73 +
 doc/docstyle.css                          |    76 +
 doc/fdl-1.3.texi                          |   506 ++
 doc/highlight.pack.js                     |     2 +
 doc/lgpl.texi                             |   549 ++
 doc/manual.info                           |   Bin 0 -> 140832 bytes
 doc/manual.texi                           |  1296 +++
 doc/mdate-sh                              |   228 +
 doc/merchant-api.content.texi             |  1068 +++
 doc/stamp-1                               |     4 +
 doc/stamp-2                               |     4 +
 doc/stamp-3                               |     4 +
 doc/stamp-vti                             |     4 +
 doc/syntax.texi                           |    44 +
 doc/taler-config.texi                     |    47 +
 doc/texinfo.tex                           | 11727 ++++++++++++++++++++++++++++
 doc/version-manual.texi                   |     4 +
 m4/ax_lib_postgresql.m4                   |   155 +
 m4/ax_prog_doxygen.m4                     |   586 ++
 m4/libcurl.m4                             |   251 +
 m4/libgnurl.m4                            |   250 +
 m4/libtool.m4                             |  8388 ++++++++++++++++++++
 m4/ltoptions.m4                           |   437 ++
 m4/ltsugar.m4                             |   124 +
 m4/ltversion.m4                           |    23 +
 m4/lt~obsolete.m4                         |    99 +
 src/Makefile.am                           |     3 +
 src/backup-db/Makefile.am                 |    26 +
 src/backup-db/plugin_anastasis_postgres.c |  2758 +++++++
 src/backup/Makefile.am                    |    26 +
 src/backup/Makefile.in                    |   869 +++
 src/backup/anastasis-httpd.c              |   705 ++
 src/backup/anastasis-httpd.h              |   142 +
 src/backup/anastasis-httpd_mhd.c          |   156 +
 src/backup/anastasis-httpd_parsing.c      |   272 +
 src/backup/anastasis-httpd_parsing.h      |    93 +
 src/backup/anastasis-httpd_responses.c    |   407 +
 src/backup/anastasis.conf                 |    30 +
 src/include/Makefile.am                   |     9 +
 src/include/Makefile.in                   |   622 ++
 src/include/anastasis_database_plugin.h   |   119 +
 src/include/anastasis_service.h           |    45 +
 src/include/platform.h                    |    60 +
 src/lib/Makefile.am                       |    32 +
 src/lib/anastasis_api_salt.c              |    58 +
 61 files changed, 36083 insertions(+)
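
Note on building the new tree: the files added in this commit follow the
standard GNU autotools layout (bootstrap, configure.ac, Makefile.am and the
m4/ macros), so building a fresh checkout should roughly follow the generic
recipe from the INSTALL file included below.  This is a sketch only; the
exact dependencies (GNUnet, libcurl/libgnurl, PostgreSQL, suggested by the
m4/ macros and contrib/gnunet.tag in the file list) are assumptions, not a
tested build guide for anastasis:

     ./bootstrap       # regenerate configure etc. (see the bootstrap script in this commit)
     ./configure       # guess system-dependent settings; add --prefix=DIR to change the install location
     make              # compile
     make install      # install programs, headers and documentation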

diff --git a/AUTHORS b/AUTHORS
new file mode 100644
index 0000000..eb41b69
--- /dev/null
+++ b/AUTHORS
@@ -0,0 +1 @@
+Christian Grothoff
diff --git a/COPYING.AGPL b/COPYING.AGPL
new file mode 100644
index 0000000..dba13ed
--- /dev/null
+++ b/COPYING.AGPL
@@ -0,0 +1,661 @@
+                    GNU AFFERO GENERAL PUBLIC LICENSE
+                       Version 3, 19 November 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU Affero General Public License is a free, copyleft license for
+software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.
+
+  The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works.  By contrast,
+our General Public Licenses are intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+  Developers that use our General Public Licenses protect your rights
+with two steps: (1) assert copyright on the software, and (2) offer
+you this License which gives you legal permission to copy, distribute
+and/or modify the software.
+
+  A secondary benefit of defending all users' freedom is that
+improvements made in alternate versions of the program, if they
+receive widespread use, become available for other developers to
+incorporate.  Many developers of free software are heartened and
+encouraged by the resulting cooperation.  However, in the case of
+software used on network servers, this result may fail to come about.
+The GNU General Public License permits making a modified version and
+letting the public access it on a server without ever releasing its
+source code to the public.
+
+  The GNU Affero General Public License is designed specifically to
+ensure that, in such cases, the modified source code becomes available
+to the community.  It requires the operator of a network server to
+provide the source code of the modified version running there to the
+users of that server.  Therefore, public use of a modified version, on
+a publicly accessible server, gives the public access to the source
+code of the modified version.
+
+  An older license, called the Affero General Public License and
+published by Affero, was designed to accomplish similar goals.  This is
+a different license, not a version of the Affero GPL, but Affero has
+released a new version of the Affero GPL which permits relicensing under
+this license.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+                       TERMS AND CONDITIONS
+
+  0. Definitions.
+
+  "This License" refers to version 3 of the GNU Affero General Public License.
+
+  "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+  "The Program" refers to any copyrightable work licensed under this
+License.  Each licensee is addressed as "you".  "Licensees" and
+"recipients" may be individuals or organizations.
+
+  To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy.  The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+  A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+  To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy.  Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+  To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies.  Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+  An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License.  If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+  1. Source Code.
+
+  The "source code" for a work means the preferred form of the work
+for making modifications to it.  "Object code" means any non-source
+form of a work.
+
+  A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+  The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form.  A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+  The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities.  However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work.  For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+  The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+  The Corresponding Source for a work in source code form is that
+same work.
+
+  2. Basic Permissions.
+
+  All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met.  This License explicitly affirms your unlimited
+permission to run the unmodified Program.  The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work.  This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+  You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force.  You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright.  Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+  Conveying under any other circumstances is permitted solely under
+the conditions stated below.  Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+  3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+  No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+  When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+  4. Conveying Verbatim Copies.
+
+  You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+  You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+  5. Conveying Modified Source Versions.
+
+  You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+    a) The work must carry prominent notices stating that you modified
+    it, and giving a relevant date.
+
+    b) The work must carry prominent notices stating that it is
+    released under this License and any conditions added under section
+    7.  This requirement modifies the requirement in section 4 to
+    "keep intact all notices".
+
+    c) You must license the entire work, as a whole, under this
+    License to anyone who comes into possession of a copy.  This
+    License will therefore apply, along with any applicable section 7
+    additional terms, to the whole of the work, and all its parts,
+    regardless of how they are packaged.  This License gives no
+    permission to license the work in any other way, but it does not
+    invalidate such permission if you have separately received it.
+
+    d) If the work has interactive user interfaces, each must display
+    Appropriate Legal Notices; however, if the Program has interactive
+    interfaces that do not display Appropriate Legal Notices, your
+    work need not make them do so.
+
+  A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit.  Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+  6. Conveying Non-Source Forms.
+
+  You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+    a) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by the
+    Corresponding Source fixed on a durable physical medium
+    customarily used for software interchange.
+
+    b) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by a
+    written offer, valid for at least three years and valid for as
+    long as you offer spare parts or customer support for that product
+    model, to give anyone who possesses the object code either (1) a
+    copy of the Corresponding Source for all the software in the
+    product that is covered by this License, on a durable physical
+    medium customarily used for software interchange, for a price no
+    more than your reasonable cost of physically performing this
+    conveying of source, or (2) access to copy the
+    Corresponding Source from a network server at no charge.
+
+    c) Convey individual copies of the object code with a copy of the
+    written offer to provide the Corresponding Source.  This
+    alternative is allowed only occasionally and noncommercially, and
+    only if you received the object code with such an offer, in accord
+    with subsection 6b.
+
+    d) Convey the object code by offering access from a designated
+    place (gratis or for a charge), and offer equivalent access to the
+    Corresponding Source in the same way through the same place at no
+    further charge.  You need not require recipients to copy the
+    Corresponding Source along with the object code.  If the place to
+    copy the object code is a network server, the Corresponding Source
+    may be on a different server (operated by you or a third party)
+    that supports equivalent copying facilities, provided you maintain
+    clear directions next to the object code saying where to find the
+    Corresponding Source.  Regardless of what server hosts the
+    Corresponding Source, you remain obligated to ensure that it is
+    available for as long as needed to satisfy these requirements.
+
+    e) Convey the object code using peer-to-peer transmission, provided
+    you inform other peers where the object code and Corresponding
+    Source of the work are being offered to the general public at no
+    charge under subsection 6d.
+
+  A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+  A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling.  In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage.  For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product.  A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+  "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source.  The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+  If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information.  But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+  The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed.  Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+  Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+  7. Additional Terms.
+
+  "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law.  If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+  When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it.  (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.)  You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+  Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+    a) Disclaiming warranty or limiting liability differently from the
+    terms of sections 15 and 16 of this License; or
+
+    b) Requiring preservation of specified reasonable legal notices or
+    author attributions in that material or in the Appropriate Legal
+    Notices displayed by works containing it; or
+
+    c) Prohibiting misrepresentation of the origin of that material, or
+    requiring that modified versions of such material be marked in
+    reasonable ways as different from the original version; or
+
+    d) Limiting the use for publicity purposes of names of licensors or
+    authors of the material; or
+
+    e) Declining to grant rights under trademark law for use of some
+    trade names, trademarks, or service marks; or
+
+    f) Requiring indemnification of licensors and authors of that
+    material by anyone who conveys the material (or modified versions of
+    it) with contractual assumptions of liability to the recipient, for
+    any liability that these contractual assumptions directly impose on
+    those licensors and authors.
+
+  All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10.  If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term.  If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+  If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+  Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+  8. Termination.
+
+  You may not propagate or modify a covered work except as expressly
+provided under this License.  Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+  However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+  Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+  Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License.  If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+  9. Acceptance Not Required for Having Copies.
+
+  You are not required to accept this License in order to receive or
+run a copy of the Program.  Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance.  However,
+nothing other than this License grants you permission to propagate or
+modify any covered work.  These actions infringe copyright if you do
+not accept this License.  Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+  10. Automatic Licensing of Downstream Recipients.
+
+  Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License.  You are not responsible
+for enforcing compliance by third parties with this License.
+
+  An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations.  If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+  You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License.  For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+  11. Patents.
+
+  A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based.  The
+work thus licensed is called the contributor's "contributor version".
+
+  A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version.  For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+  Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+  In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement).  To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+  If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients.  "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+  If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+  A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License.  You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+  Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+  12. No Surrender of Others' Freedom.
+
+  If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all.  For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+  13. Remote Network Interaction; Use with the GNU General Public License.
+
+  Notwithstanding any other provision of this License, if you modify the
+Program, your modified version must prominently offer all users
+interacting with it remotely through a computer network (if your version
+supports such interaction) an opportunity to receive the Corresponding
+Source of your version by providing access to the Corresponding Source
+from a network server at no charge, through some standard or customary
+means of facilitating copying of software.  This Corresponding Source
+shall include the Corresponding Source for any work covered by version 3
+of the GNU General Public License that is incorporated pursuant to the
+following paragraph.
+
+  Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU General Public License into a single
+combined work, and to convey the resulting work.  The terms of this
+License will continue to apply to the part which is the covered work,
+but the work with which it is combined will remain governed by version
+3 of the GNU General Public License.
+
+  14. Revised Versions of this License.
+
+  The Free Software Foundation may publish revised and/or new versions of
+the GNU Affero General Public License from time to time.  Such new versions
+will be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+  Each version is given a distinguishing version number.  If the
+Program specifies that a certain numbered version of the GNU Affero General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation.  If the Program does not specify a version number of the
+GNU Affero General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+  If the Program specifies that a proxy can decide which future
+versions of the GNU Affero General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+  Later license versions may give you additional or different
+permissions.  However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+  15. Disclaimer of Warranty.
+
+  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. Limitation of Liability.
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU Affero General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU Affero General Public License for more details.
+
+    You should have received a copy of the GNU Affero General Public License
+    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source.  For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code.  There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+<http://www.gnu.org/licenses/>.
diff --git a/ChangeLog b/ChangeLog
new file mode 100644
index 0000000..823d0fa
--- /dev/null
+++ b/ChangeLog
@@ -0,0 +1,2 @@
+Tue 18 Jun 2019 04:19:29 PM CEST
+       Initial project setup. -CG
diff --git a/Doxyfile b/Doxyfile
new file mode 100644
index 0000000..8770791
--- /dev/null
+++ b/Doxyfile
@@ -0,0 +1,245 @@
+# Doxyfile 1.5.5
+
+#---------------------------------------------------------------------------
+# Project related configuration options
+#---------------------------------------------------------------------------
+DOXYFILE_ENCODING      = UTF-8
+PROJECT_NAME           = "Anastasis"
+PROJECT_NUMBER         = 0.0
+OUTPUT_DIRECTORY       = doxygen-doc/
+CREATE_SUBDIRS         = YES
+OUTPUT_LANGUAGE        = English
+BRIEF_MEMBER_DESC      = YES
+REPEAT_BRIEF           = YES
+ABBREVIATE_BRIEF       = "The $name class" \
+                         "The $name widget" \
+                         "The $name file" \
+                         is \
+                         provides \
+                         specifies \
+                         contains \
+                         represents \
+                         a \
+                         an \
+                         the
+ALWAYS_DETAILED_SEC    = NO
+INLINE_INHERITED_MEMB  = NO
+FULL_PATH_NAMES        = YES
+STRIP_FROM_PATH        = .
+STRIP_FROM_INC_PATH    = src/include
+SHORT_NAMES            = NO
+JAVADOC_AUTOBRIEF      = NO
+QT_AUTOBRIEF           = NO
+MULTILINE_CPP_IS_BRIEF = NO
+INHERIT_DOCS           = NO
+SEPARATE_MEMBER_PAGES  = NO
+TAB_SIZE               = 8
+ALIASES                =
+OPTIMIZE_OUTPUT_FOR_C  = YES
+OPTIMIZE_OUTPUT_JAVA   = NO
+OPTIMIZE_FOR_FORTRAN   = NO
+OPTIMIZE_OUTPUT_VHDL   = NO
+BUILTIN_STL_SUPPORT    = NO
+CPP_CLI_SUPPORT        = NO
+SIP_SUPPORT            = NO
+DISTRIBUTE_GROUP_DOC   = NO
+SUBGROUPING            = YES
+TYPEDEF_HIDES_STRUCT   = NO
+#---------------------------------------------------------------------------
+# Build related configuration options
+#---------------------------------------------------------------------------
+EXTRACT_ALL            = YES
+EXTRACT_PRIVATE        = NO
+EXTRACT_STATIC         = YES
+EXTRACT_LOCAL_CLASSES  = NO
+EXTRACT_LOCAL_METHODS  = YES
+EXTRACT_ANON_NSPACES   = NO
+HIDE_UNDOC_MEMBERS     = NO
+HIDE_UNDOC_CLASSES     = NO
+HIDE_FRIEND_COMPOUNDS  = NO
+HIDE_IN_BODY_DOCS      = NO
+INTERNAL_DOCS          = NO
+CASE_SENSE_NAMES       = YES
+HIDE_SCOPE_NAMES       = NO
+SHOW_INCLUDE_FILES     = YES
+INLINE_INFO            = YES
+SORT_MEMBER_DOCS       = YES
+SORT_BRIEF_DOCS        = NO
+SORT_GROUP_NAMES       = NO
+SORT_BY_SCOPE_NAME     = NO
+GENERATE_TODOLIST      = NO
+GENERATE_TESTLIST      = NO
+GENERATE_BUGLIST       = NO
+GENERATE_DEPRECATEDLIST= NO
+ENABLED_SECTIONS       =
+MAX_INITIALIZER_LINES  = 30
+SHOW_USED_FILES        = YES
+FILE_VERSION_FILTER    =
+#---------------------------------------------------------------------------
+# configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+QUIET                  = YES
+WARNINGS               = YES
+WARN_IF_UNDOCUMENTED   = YES
+WARN_IF_DOC_ERROR      = YES
+WARN_NO_PARAMDOC       = YES
+WARN_FORMAT            = "$file:$line: $text"
+WARN_LOGFILE           =
+#---------------------------------------------------------------------------
+# configuration options related to the input files
+#---------------------------------------------------------------------------
+INPUT                  = src/
+INPUT_ENCODING         = UTF-8
+FILE_PATTERNS          = *.c \
+                         *.h
+RECURSIVE              = YES
+EXCLUDE                =
+EXCLUDE_SYMLINKS       = NO
+EXCLUDE_PATTERNS       = */test_* */.git/* */perf_* */tls_test_* anastasis_config.h
+EXCLUDE_SYMBOLS        = GNUNET_* JSON_*
+EXAMPLE_PATH           =
+EXAMPLE_PATTERNS       = *
+EXAMPLE_RECURSIVE      = NO
+IMAGE_PATH             =
+INPUT_FILTER           =
+FILTER_PATTERNS        =
+FILTER_SOURCE_FILES    = NO
+#---------------------------------------------------------------------------
+# configuration options related to source browsing
+#---------------------------------------------------------------------------
+SOURCE_BROWSER         = YES
+INLINE_SOURCES         = NO
+STRIP_CODE_COMMENTS    = YES
+REFERENCED_BY_RELATION = YES
+REFERENCES_RELATION    = YES
+REFERENCES_LINK_SOURCE = YES
+USE_HTAGS              = NO
+VERBATIM_HEADERS       = NO
+#---------------------------------------------------------------------------
+# configuration options related to the alphabetical class index
+#---------------------------------------------------------------------------
+ALPHABETICAL_INDEX     = YES
+COLS_IN_ALPHA_INDEX    = 5
+IGNORE_PREFIX          = ANASTASIS_
+#---------------------------------------------------------------------------
+# configuration options related to the HTML output
+#---------------------------------------------------------------------------
+#GENERATE_HTML          = YES
+HTML_OUTPUT            = html
+HTML_FILE_EXTENSION    = .html
+HTML_HEADER            =
+HTML_FOOTER            =
+HTML_STYLESHEET        =
+GENERATE_HTMLHELP      = NO
+GENERATE_DOCSET        = NO
+DOCSET_FEEDNAME        = "Doxygen generated docs"
+DOCSET_BUNDLE_ID       = org.doxygen.Project
+HTML_DYNAMIC_SECTIONS  = NO
+CHM_FILE               =
+HHC_LOCATION           =
+GENERATE_CHI           = NO
+BINARY_TOC             = NO
+TOC_EXPAND             = NO
+DISABLE_INDEX          = NO
+ENUM_VALUES_PER_LINE   = 4
+GENERATE_TREEVIEW      = YES
+TREEVIEW_WIDTH         = 250
+#---------------------------------------------------------------------------
+# configuration options related to the LaTeX output
+#---------------------------------------------------------------------------
+#GENERATE_LATEX         = YES
+LATEX_OUTPUT           = latex
+LATEX_CMD_NAME         = latex
+MAKEINDEX_CMD_NAME     = makeindex
+COMPACT_LATEX          = YES
+PAPER_TYPE             = a4wide
+EXTRA_PACKAGES         =
+LATEX_HEADER           =
+PDF_HYPERLINKS         = YES
+USE_PDFLATEX           = YES
+LATEX_BATCHMODE        = NO
+LATEX_HIDE_INDICES     = NO
+#---------------------------------------------------------------------------
+# configuration options related to the RTF output
+#---------------------------------------------------------------------------
+#GENERATE_RTF           = NO
+RTF_OUTPUT             = rtf
+COMPACT_RTF            = YES
+RTF_HYPERLINKS         = NO
+RTF_STYLESHEET_FILE    =
+RTF_EXTENSIONS_FILE    =
+#---------------------------------------------------------------------------
+# configuration options related to the man page output
+#---------------------------------------------------------------------------
+GENERATE_MAN           = NO
+MAN_OUTPUT             = man
+MAN_EXTENSION          = .3
+MAN_LINKS              = NO
+#---------------------------------------------------------------------------
+# configuration options related to the XML output
+#---------------------------------------------------------------------------
+#GENERATE_XML           = NO
+XML_OUTPUT             = xml
+XML_PROGRAMLISTING     = YES
+#---------------------------------------------------------------------------
+# configuration options for the AutoGen Definitions output
+#---------------------------------------------------------------------------
+GENERATE_AUTOGEN_DEF   = NO
+#---------------------------------------------------------------------------
+# configuration options related to the Perl module output
+#---------------------------------------------------------------------------
+GENERATE_PERLMOD       = NO
+PERLMOD_LATEX          = NO
+PERLMOD_PRETTY         = YES
+PERLMOD_MAKEVAR_PREFIX =
+#---------------------------------------------------------------------------
+# Configuration options related to the preprocessor
+#---------------------------------------------------------------------------
+ENABLE_PREPROCESSING   = YES
+MACRO_EXPANSION        = NO
+EXPAND_ONLY_PREDEF     = NO
+SEARCH_INCLUDES        = YES
+INCLUDE_PATH           =
+INCLUDE_FILE_PATTERNS  =
+PREDEFINED             =
+EXPAND_AS_DEFINED      =
+SKIP_FUNCTION_MACROS   = YES
+#---------------------------------------------------------------------------
+# Configuration::additions related to external references
+#---------------------------------------------------------------------------
+TAGFILES               = contrib/gnunet.tag
+GENERATE_TAGFILE       = contrib/anastasis.tag
+ALLEXTERNALS           = NO
+EXTERNAL_GROUPS        = YES
+PERL_PATH              = /usr/bin/perl
+#---------------------------------------------------------------------------
+# Configuration options related to the dot tool
+#---------------------------------------------------------------------------
+CLASS_DIAGRAMS         = NO
+MSCGEN_PATH            =
+HIDE_UNDOC_RELATIONS   = YES
+HAVE_DOT               = YES
+CLASS_GRAPH            = NO
+COLLABORATION_GRAPH    = NO
+GROUP_GRAPHS           = NO
+UML_LOOK               = NO
+TEMPLATE_RELATIONS     = NO
+INCLUDE_GRAPH          = YES
+INCLUDED_BY_GRAPH      = YES
+CALL_GRAPH             = YES
+CALLER_GRAPH           = YES
+GRAPHICAL_HIERARCHY    = NO
+DIRECTORY_GRAPH        = YES
+DOT_IMAGE_FORMAT       = png
+DOT_PATH               =
+DOTFILE_DIRS           =
+DOT_GRAPH_MAX_NODES    = 100
+MAX_DOT_GRAPH_DEPTH    = 2
+DOT_TRANSPARENT        = YES
+DOT_MULTI_TARGETS      = NO
+GENERATE_LEGEND        = YES
+DOT_CLEANUP            = YES
+#---------------------------------------------------------------------------
+# Configuration::additions related to the search engine
+#---------------------------------------------------------------------------
+SEARCHENGINE           = YES
diff --git a/INSTALL b/INSTALL
new file mode 100644
index 0000000..8865734
--- /dev/null
+++ b/INSTALL
@@ -0,0 +1,368 @@
+Installation Instructions
+*************************
+
+   Copyright (C) 1994-1996, 1999-2002, 2004-2016 Free Software
+Foundation, Inc.
+
+   Copying and distribution of this file, with or without modification,
+are permitted in any medium without royalty provided the copyright
+notice and this notice are preserved.  This file is offered as-is,
+without warranty of any kind.
+
+Basic Installation
+==================
+
+   Briefly, the shell command './configure && make && make install'
+should configure, build, and install this package.  The following
+more-detailed instructions are generic; see the 'README' file for
+instructions specific to this package.  Some packages provide this
+'INSTALL' file but do not implement all of the features documented
+below.  The lack of an optional feature in a given package is not
+necessarily a bug.  More recommendations for GNU packages can be found
+in *note Makefile Conventions: (standards)Makefile Conventions.
+
+   The 'configure' shell script attempts to guess correct values for
+various system-dependent variables used during compilation.  It uses
+those values to create a 'Makefile' in each directory of the package.
+It may also create one or more '.h' files containing system-dependent
+definitions.  Finally, it creates a shell script 'config.status' that
+you can run in the future to recreate the current configuration, and a
+file 'config.log' containing compiler output (useful mainly for
+debugging 'configure').
+
+   It can also use an optional file (typically called 'config.cache' and
+enabled with '--cache-file=config.cache' or simply '-C') that saves the
+results of its tests to speed up reconfiguring.  Caching is disabled by
+default to prevent problems with accidental use of stale cache files.
+
+   If you need to do unusual things to compile the package, please try
+to figure out how 'configure' could check whether to do them, and mail
+diffs or instructions to the address given in the 'README' so they can
+be considered for the next release.  If you are using the cache, and at
+some point 'config.cache' contains results you don't want to keep, you
+may remove or edit it.
+
+   The file 'configure.ac' (or 'configure.in') is used to create
+'configure' by a program called 'autoconf'.  You need 'configure.ac' if
+you want to change it or regenerate 'configure' using a newer version of
+'autoconf'.
+
+   The simplest way to compile this package is:
+
+  1. 'cd' to the directory containing the package's source code and type
+     './configure' to configure the package for your system.
+
+     Running 'configure' might take a while.  While running, it prints
+     some messages telling which features it is checking for.
+
+  2. Type 'make' to compile the package.
+
+  3. Optionally, type 'make check' to run any self-tests that come with
+     the package, generally using the just-built uninstalled binaries.
+
+  4. Type 'make install' to install the programs and any data files and
+     documentation.  When installing into a prefix owned by root, it is
+     recommended that the package be configured and built as a regular
+     user, and only the 'make install' phase executed with root
+     privileges.
+
+  5. Optionally, type 'make installcheck' to repeat any self-tests, but
+     this time using the binaries in their final installed location.
+     This target does not install anything.  Running this target as a
+     regular user, particularly if the prior 'make install' required
+     root privileges, verifies that the installation completed
+     correctly.
+
+  6. You can remove the program binaries and object files from the
+     source code directory by typing 'make clean'.  To also remove the
+     files that 'configure' created (so you can compile the package for
+     a different kind of computer), type 'make distclean'.  There is
+     also a 'make maintainer-clean' target, but that is intended mainly
+     for the package's developers.  If you use it, you may have to get
+     all sorts of other programs in order to regenerate files that came
+     with the distribution.
+
+  7. Often, you can also type 'make uninstall' to remove the installed
+     files again.  In practice, not all packages have tested that
+     uninstallation works correctly, even though it is required by the
+     GNU Coding Standards.
+
+  8. Some packages, particularly those that use Automake, provide 'make
+     distcheck', which can by used by developers to test that all other
+     targets like 'make install' and 'make uninstall' work correctly.
+     This target is generally not run by end users.
+
+Compilers and Options
+=====================
+
+   Some systems require unusual options for compilation or linking that
+the 'configure' script does not know about.  Run './configure --help'
+for details on some of the pertinent environment variables.
+
+   You can give 'configure' initial values for configuration parameters
+by setting variables in the command line or in the environment.  Here is
+an example:
+
+     ./configure CC=c99 CFLAGS=-g LIBS=-lposix
+
+   *Note Defining Variables::, for more details.
+
+Compiling For Multiple Architectures
+====================================
+
+   You can compile the package for more than one kind of computer at the
+same time, by placing the object files for each architecture in their
+own directory.  To do this, you can use GNU 'make'.  'cd' to the
+directory where you want the object files and executables to go and run
+the 'configure' script.  'configure' automatically checks for the source
+code in the directory that 'configure' is in and in '..'.  This is known
+as a "VPATH" build.
+
+   With a non-GNU 'make', it is safer to compile the package for one
+architecture at a time in the source code directory.  After you have
+installed the package for one architecture, use 'make distclean' before
+reconfiguring for another architecture.
+
+   On MacOS X 10.5 and later systems, you can create libraries and
+executables that work on multiple system types--known as "fat" or
+"universal" binaries--by specifying multiple '-arch' options to the
+compiler but only a single '-arch' option to the preprocessor.  Like
+this:
+
+     ./configure CC="gcc -arch i386 -arch x86_64 -arch ppc -arch ppc64" \
+                 CXX="g++ -arch i386 -arch x86_64 -arch ppc -arch ppc64" \
+                 CPP="gcc -E" CXXCPP="g++ -E"
+
+   This is not guaranteed to produce working output in all cases, you
+may have to build one architecture at a time and combine the results
+using the 'lipo' tool if you have problems.
+
+Installation Names
+==================
+
+   By default, 'make install' installs the package's commands under
+'/usr/local/bin', include files under '/usr/local/include', etc.  You
+can specify an installation prefix other than '/usr/local' by giving
+'configure' the option '--prefix=PREFIX', where PREFIX must be an
+absolute file name.
+
+   You can specify separate installation prefixes for
+architecture-specific files and architecture-independent files.  If you
+pass the option '--exec-prefix=PREFIX' to 'configure', the package uses
+PREFIX as the prefix for installing programs and libraries.
+Documentation and other data files still use the regular prefix.
+
+   In addition, if you use an unusual directory layout you can give
+options like '--bindir=DIR' to specify different values for particular
+kinds of files.  Run 'configure --help' for a list of the directories
+you can set and what kinds of files go in them.  In general, the default
+for these options is expressed in terms of '${prefix}', so that
+specifying just '--prefix' will affect all of the other directory
+specifications that were not explicitly provided.
+
+   The most portable way to affect installation locations is to pass the
+correct locations to 'configure'; however, many packages provide one or
+both of the following shortcuts of passing variable assignments to the
+'make install' command line to change installation locations without
+having to reconfigure or recompile.
+
+   The first method involves providing an override variable for each
+affected directory.  For example, 'make install
+prefix=/alternate/directory' will choose an alternate location for all
+directory configuration variables that were expressed in terms of
+'${prefix}'.  Any directories that were specified during 'configure',
+but not in terms of '${prefix}', must each be overridden at install time
+for the entire installation to be relocated.  The approach of makefile
+variable overrides for each directory variable is required by the GNU
+Coding Standards, and ideally causes no recompilation.  However, some
+platforms have known limitations with the semantics of shared libraries
+that end up requiring recompilation when using this method, particularly
+noticeable in packages that use GNU Libtool.
+
+   The second method involves providing the 'DESTDIR' variable.  For
+example, 'make install DESTDIR=/alternate/directory' will prepend
+'/alternate/directory' before all installation names.  The approach of
+'DESTDIR' overrides is not required by the GNU Coding Standards, and
+does not work on platforms that have drive letters.  On the other hand,
+it does better at avoiding recompilation issues, and works well even
+when some directory options were not specified in terms of '${prefix}'
+at 'configure' time.
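+
+   For example, to stage this package into a scratch directory, say for
+packaging (the path is only illustrative):
+
+     make install DESTDIR=/tmp/anastasis-stage
+
+The installed files then appear under '/tmp/anastasis-stage' followed by
+the configured prefix, e.g. '/tmp/anastasis-stage/usr/local/bin'.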
+
+Optional Features
+=================
+
+   If the package supports it, you can cause programs to be installed
+with an extra prefix or suffix on their names by giving 'configure' the
+option '--program-prefix=PREFIX' or '--program-suffix=SUFFIX'.
+
+   Some packages pay attention to '--enable-FEATURE' options to
+'configure', where FEATURE indicates an optional part of the package.
+They may also pay attention to '--with-PACKAGE' options, where PACKAGE
+is something like 'gnu-as' or 'x' (for the X Window System).  The
+'README' should mention any '--enable-' and '--with-' options that the
+package recognizes.
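+
+   For this package, the 'configure.ac' included later in this commit
+defines options such as '--with-gnunet', '--with-exchange',
+'--with-microhttpd', '--with-twister' and '--enable-experimental', so a
+typical invocation could look like (the prefixes are only illustrative):
+
+     ./configure --with-gnunet=/usr/local --with-microhttpd=/usr/local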
+
+   For packages that use the X Window System, 'configure' can usually
+find the X include and library files automatically, but if it doesn't,
+you can use the 'configure' options '--x-includes=DIR' and
+'--x-libraries=DIR' to specify their locations.
+
+   Some packages offer the ability to configure how verbose the
+execution of 'make' will be.  For these packages, running './configure
+--enable-silent-rules' sets the default to minimal output, which can be
+overridden with 'make V=1'; while running './configure
+--disable-silent-rules' sets the default to verbose, which can be
+overridden with 'make V=0'.
+
+Particular systems
+==================
+
+   On HP-UX, the default C compiler is not ANSI C compatible.  If GNU CC
+is not installed, it is recommended to use the following options in
+order to use an ANSI C compiler:
+
+     ./configure CC="cc -Ae -D_XOPEN_SOURCE=500"
+
+and if that doesn't work, install pre-built binaries of GCC for HP-UX.
+
+   HP-UX 'make' updates targets which have the same time stamps as their
+prerequisites, which makes it generally unusable when shipped generated
+files such as 'configure' are involved.  Use GNU 'make' instead.
+
+   On OSF/1 a.k.a. Tru64, some versions of the default C compiler cannot
+parse its '<wchar.h>' header file.  The option '-nodtk' can be used as a
+workaround.  If GNU CC is not installed, it is therefore recommended to
+try
+
+     ./configure CC="cc"
+
+and if that doesn't work, try
+
+     ./configure CC="cc -nodtk"
+
+   On Solaris, don't put '/usr/ucb' early in your 'PATH'.  This
+directory contains several dysfunctional programs; working variants of
+these programs are available in '/usr/bin'.  So, if you need '/usr/ucb'
+in your 'PATH', put it _after_ '/usr/bin'.
+
+   On Haiku, software installed for all users goes in '/boot/common',
+not '/usr/local'.  It is recommended to use the following options:
+
+     ./configure --prefix=/boot/common
+
+Specifying the System Type
+==========================
+
+   There may be some features 'configure' cannot figure out
+automatically, but needs to determine by the type of machine the package
+will run on.  Usually, assuming the package is built to be run on the
+_same_ architectures, 'configure' can figure that out, but if it prints
+a message saying it cannot guess the machine type, give it the
+'--build=TYPE' option.  TYPE can either be a short name for the system
+type, such as 'sun4', or a canonical name which has the form:
+
+     CPU-COMPANY-SYSTEM
+
+where SYSTEM can have one of these forms:
+
+     OS
+     KERNEL-OS
+
+   See the file 'config.sub' for the possible values of each field.  If
+'config.sub' isn't included in this package, then this package doesn't
+need to know the machine type.
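+
+   For example, a canonical name for a 64-bit GNU/Linux PC is
+'x86_64-pc-linux-gnu', so the build type could be given explicitly as:
+
+     ./configure --build=x86_64-pc-linux-gnu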
+
+   If you are _building_ compiler tools for cross-compiling, you should
+use the option '--target=TYPE' to select the type of system they will
+produce code for.
+
+   If you want to _use_ a cross compiler, that generates code for a
+platform different from the build platform, you should specify the
+"host" platform (i.e., that on which the generated programs will
+eventually be run) with '--host=TYPE'.
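+
+   For example, assuming an 'aarch64-linux-gnu' cross toolchain is
+installed (the triplet and compiler name are only illustrative), a cross
+build could be configured as:
+
+     ./configure --build=x86_64-pc-linux-gnu --host=aarch64-linux-gnu \
+                 CC=aarch64-linux-gnu-gcc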
+
+Sharing Defaults
+================
+
+   If you want to set default values for 'configure' scripts to share,
+you can create a site shell script called 'config.site' that gives
+default values for variables like 'CC', 'cache_file', and 'prefix'.
+'configure' looks for 'PREFIX/share/config.site' if it exists, then
+'PREFIX/etc/config.site' if it exists.  Or, you can set the
+'CONFIG_SITE' environment variable to the location of the site script.
+A warning: not all 'configure' scripts look for a site script.
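+
+   As a minimal sketch (all values purely illustrative), such a site
+script could contain:
+
+     # PREFIX/share/config.site
+     CC=gcc
+     CFLAGS="-g -O2"
+     prefix=/opt/anastasis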
+
+Defining Variables
+==================
+
+   Variables not defined in a site shell script can be set in the
+environment passed to 'configure'.  However, some packages may run
+configure again during the build, and the customized values of these
+variables may be lost.  In order to avoid this problem, you should set
+them in the 'configure' command line, using 'VAR=value'.  For example:
+
+     ./configure CC=/usr/local2/bin/gcc
+
+causes the specified 'gcc' to be used as the C compiler (unless it is
+overridden in the site shell script).
+
+Unfortunately, this technique does not work for 'CONFIG_SHELL' due to an
+Autoconf limitation.  Until the limitation is lifted, you can use this
+workaround:
+
+     CONFIG_SHELL=/bin/bash ./configure CONFIG_SHELL=/bin/bash
+
+'configure' Invocation
+======================
+
+   'configure' recognizes the following options to control how it
+operates.
+
+'--help'
+'-h'
+     Print a summary of all of the options to 'configure', and exit.
+
+'--help=short'
+'--help=recursive'
+     Print a summary of the options unique to this package's
+     'configure', and exit.  The 'short' variant lists options used only
+     in the top level, while the 'recursive' variant lists options also
+     present in any nested packages.
+
+'--version'
+'-V'
+     Print the version of Autoconf used to generate the 'configure'
+     script, and exit.
+
+'--cache-file=FILE'
+     Enable the cache: use and save the results of the tests in FILE,
+     traditionally 'config.cache'.  FILE defaults to '/dev/null' to
+     disable caching.
+
+'--config-cache'
+'-C'
+     Alias for '--cache-file=config.cache'.
+
+'--quiet'
+'--silent'
+'-q'
+     Do not print messages saying which checks are being made.  To
+     suppress all normal output, redirect it to '/dev/null' (any error
+     messages will still be shown).
+
+'--srcdir=DIR'
+     Look for the package's source code in directory DIR.  Usually
+     'configure' can determine that directory automatically.
+
+'--prefix=DIR'
+     Use DIR as the installation prefix.  *note Installation Names:: for
+     more details, including other options available for fine-tuning the
+     installation locations.
+
+'--no-create'
+'-n'
+     Run the configure checks, but stop before creating any output
+     files.
+
+'configure' also accepts some other, not widely useful, options.  Run
+'configure --help' for more details.
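+
+   Putting a few of these together, a cached, quiet run of this
+package's 'configure' might look like (the prefix is only illustrative):
+
+     ./configure -C -q --prefix=$HOME/anastasis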
diff --git a/Makefile.am b/Makefile.am
new file mode 100644
index 0000000..221dcd7
--- /dev/null
+++ b/Makefile.am
@@ -0,0 +1,24 @@
+# This Makefile is in the public domain
+
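+# Build only the documentation when configured with --enable-only-doc;
+# otherwise descend into src and (unless --disable-doc was given) doc.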
+if DOC_ONLY
+if ENABLE_DOC
+  SUBDIRS = . doc
+else
+  SUBDIRS = .
+endif
+else
+if ENABLE_DOC
+  SUBDIRS = . src doc
+else
+  SUBDIRS = . src
+endif
+endif
+
+@DX_RULES@
+
+ACLOCAL_AMFLAGS = -I m4
+EXTRA_DIST = \
+ AUTHORS \
+ COPYING.AGPL \
+ contrib/gnunet.tag \
+ Doxyfile
diff --git a/NEWS b/NEWS
new file mode 100644
index 0000000..e69de29
diff --git a/README b/README
new file mode 100644
index 0000000..326a1d9
--- /dev/null
+++ b/README
@@ -0,0 +1,36 @@
+TALER MERCHANT
+
+1. DESCRIPTION
+
+The 'Taler merchant' component has two main objectives: one is to implement a
+common helper (called 'backend') for all existing merchants willing to
+integrate Taler into their online shops; the other is to implement some
+demonstrator shops which use that backend.
+
+2. INNOVATIVE ASPECTS
+
+This component allows merchants to receive payments without invading the
+customers' privacy. Of course, this applies mostly to digital goods, as the
+merchant does not need to know the customer's physical address.
+
+3. PLANNING/PERSPECTIVES
+
+Since the main component is the backend, and it is supposed to work with
+preexisting shops, extensive testing of the backend against existing shop
+portals should be accomplished.
+
+
+4. DEPENDENCIES
+
+libjansson: MIT License, AGPL- and LGPL-Compatible, owned by Petri Lehtinen
+and other individuals
+libgcrypt: LGPL, owned by Free Software Foundation
+postgresql: PostgreSQL License, AGPL- and LGPL-Compatible, owned by The
+PostgreSQL Global Development Group
+libgnunetutil (in all of its variants): GPLv3+, owned by GNUnet e.V.
+PHP:  PHP License, AGPL- and LGPL-Compatible, owned by The PHP Group
+
+5. DIRECTORY STRUCTURE
+
+- 'src/' contains the "backend", a piece of C software which implements crypto
+  routines and communication with the 'exchange'
+- 'doc/' contains documentation files, like TeXinfo
diff --git a/bootstrap b/bootstrap
new file mode 100755
index 0000000..373e5f5
--- /dev/null
+++ b/bootstrap
@@ -0,0 +1,12 @@
+#!/bin/sh
+
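+# Run this script from a fresh git checkout to generate the build system.
+# It assumes git, autoconf, automake and libtool are installed, since
+# 'autoreconf -if' below relies on the autotools.
+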
+if ! git --version >/dev/null; then
+  echo "git not installed"
+  exit 1
+fi
+
+echo "$0: Updating submodules"
+echo | git submodule update --init
+
+echo "$0: Running autoreconf"
+autoreconf -if
diff --git a/configure.ac b/configure.ac
new file mode 100644
index 0000000..6580692
--- /dev/null
+++ b/configure.ac
@@ -0,0 +1,309 @@
+#                                               -*- Autoconf -*-
+# Process this file with autoconf to produce a configure script.
+#
+# This configure file is in the public domain
+
+AC_PREREQ([2.69])
+AC_INIT([anastasis], [0.5.0], [address@hidden])
+AC_CONFIG_SRCDIR([src/backup/anastasis-httpd.c])
+AC_CONFIG_HEADERS([anastasis_config.h])
+# support for non-recursive builds
+AM_INIT_AUTOMAKE([subdir-objects 1.9 tar-pax])
+
+# pretty build rules
+AM_SILENT_RULES([yes])
+
+AC_CONFIG_MACRO_DIR([m4])
+
+LT_INIT
+
+DX_INIT_DOXYGEN([anastasis],,,
+DX_PS_FEATURE(OFF),
+DX_PDF_FEATURE(OFF),
+DX_RTF_FEATURE(OFF),
+DX_CHI_FEATURE(OFF),
+DX_XML_FEATURE(OFF))
+
+AC_MSG_CHECKING([whether to compile documentation ONLY])
+AC_ARG_ENABLE([only-doc],
+  [AS_HELP_STRING([--enable-only-doc], [only compile Taler documentation])],
+  [doc_only=${enableval}],
+  [doc_only=no])
+AC_MSG_RESULT($doc_only)
+AM_CONDITIONAL([DOC_ONLY], [test "$doc_only" = "yes"])
+
+
+# Not indented as it covers most of the file...
+AS_IF([test "x$doc_only" != xyes],[
+
+
+# Checks for programs.
+AC_PROG_CC
+AC_PROG_CC_C99
+
+CFLAGS="-Wall $CFLAGS"
+
+# Checks for header files.
+AC_CHECK_HEADERS([stdint.h stdlib.h string.h unistd.h])
+
+# Check for GNUnet's libgnunetutil.
+libgnunetutil=0
+AC_MSG_CHECKING([for libgnunetutil])
+AC_ARG_WITH(gnunet,
+            [AS_HELP_STRING([--with-gnunet=PFX], [base of GNUnet installation])],
+            [AC_MSG_RESULT([given as $with_gnunet])],
+            [AC_MSG_RESULT(not given)
+             with_gnunet=yes])
+AS_CASE([$with_gnunet],
+        [yes], [],
+        [no], [AC_MSG_ERROR([--with-gnunet is required])],
+        [LDFLAGS="-L$with_gnunet/lib $LDFLAGS"
+         CPPFLAGS="-I$with_gnunet/include $CPPFLAGS"])
+AC_CHECK_HEADERS([gnunet/platform.h gnunet/gnunet_util_lib.h],
+ [AC_CHECK_LIB([gnunetutil], [GNUNET_SCHEDULER_run], libgnunetutil=1)],
+  [], [#ifdef HAVE_GNUNET_PLATFORM_H
+        #include <gnunet/platform.h>
+       #endif])
+AS_IF([test $libgnunetutil != 1],
+  [AC_MSG_ERROR([[
+***
+*** You need libgnunetutil to build this program.
+*** This library is part of GNUnet, available at
+***   https://gnunet.org
+*** ]])])
+
+
+# test for postgres
+AX_LIB_POSTGRESQL([9.3])
+AS_IF([test "x$found_postgresql" = "xyes"],[postgres=true])
+AM_CONDITIONAL(HAVE_POSTGRESQL, test x$postgres = xtrue)
+
+# Check for Taler's libtalerpq
+libtalerpq=0
+AC_MSG_CHECKING([for libtalerpq])
+AC_ARG_WITH(exchange,
+            [AS_HELP_STRING([--with-exchange=PFX], [base of Taler EXCHANGE installation])],
+            [AC_MSG_RESULT([given as $with_exchange])],
+            [AC_MSG_RESULT(not given)
+             with_exchange=yes])
+AS_CASE([$with_exchange],
+        [yes], [],
+        [no], [AC_MSG_ERROR([--with-exchange is required])],
+        [LDFLAGS="-L$with_exchange/lib $LDFLAGS"
+         CPPFLAGS="-I$with_exchange/include $CPPFLAGS $POSTGRESQL_CPPFLAGS"])
+
+CPPFLAGS="$CPPFLAGS $POSTGRESQL_CPPFLAGS"
+LDFLAGS="$LDFLAGS -L/usr/local/lib"
+
+AC_CHECK_HEADERS([gnunet/gnunet_pq_lib.h],
+ [AC_CHECK_LIB([gnunetpq], [GNUNET_PQ_connect_with_cfg], libgnunetpq=1)],
+   [], [#ifdef HAVE_GNUNET_PLATFORM_H
+        #include <gnunet/platform.h>
+       #endif])
+AM_CONDITIONAL(HAVE_GNUNETPQ, test x$libgnunetpq = x1)
+
+# check for libmicrohttpd
+microhttpd=0
+AC_MSG_CHECKING([for microhttpd])
+AC_ARG_WITH([microhttpd],
+            [AS_HELP_STRING([--with-microhttpd=PFX], [base of microhttpd installation])],
+            [AC_MSG_RESULT([given as $with_microhttpd])],
+            [AC_MSG_RESULT([not given])
+             with_microhttpd=yes])
+AS_CASE([$with_microhttpd],
+        [yes], [],
+        [no], [AC_MSG_ERROR([--with-microhttpd is required])],
+        [LDFLAGS="-L$with_microhttpd/lib $LDFLAGS"
+         CPPFLAGS="-I$with_microhttpd/include $CPPFLAGS"])
+AC_CHECK_LIB(microhttpd,MHD_start_daemon,
+  [AC_CHECK_HEADER([microhttpd.h],[microhttpd=1])])
+AS_IF([test $microhttpd = 0],
+  [AC_MSG_ERROR([[
+***
+*** You need libmicrohttpd to build this program.
+*** ]])])
+
+jansson=0
+PKG_CHECK_MODULES([JANSSON], [jansson >= 2.3],
+                  [LDFLAGS="$JANSSON_LIBS $LDFLAGS"
+                   CPPFLAGS="$JANSSON_CFLAGS $CPPFLAGS"],
+                  [AC_MSG_ERROR([[
+***
+*** You need libjansson to build this program.
+***]])])
+
+# check for libgnurl
+# libgnurl
+LIBGNURL_CHECK_CONFIG(,7.34.0,gnurl=1,gnurl=0)
+AS_IF([test "x$gnurl" = x1],[
+       AM_CONDITIONAL(HAVE_LIBGNURL, true)
+       AC_DEFINE([HAVE_LIBGNURL],[1],[Have libgnurl])
+],[
+       AM_CONDITIONAL(HAVE_LIBGNURL, false)
+])
+
+# libcurl-gnutls
+LIBCURL_CHECK_CONFIG(,7.34.0,[curl=true],[curl=false])
+AS_IF([test "x$curl" = xtrue],
+      [LDFLAGS="-L$with_libcurl/lib $LDFLAGS"
+       CPPFLAGS="-I$with_libcurl/include $CPPFLAGS"
+       AC_CHECK_HEADERS([curl/curl.h],
+                        [AC_CHECK_DECLS(CURLINFO_TLS_SESSION,
+                         [curl=true],
+                         [curl=false],
+                         [[#include <curl/curl.h>]])],
+                        [curl=false])
+       # need libcurl-gnutls.so, everything else is not acceptable
+       AC_CHECK_LIB([curl-gnutls],
+                    [curl_easy_getinfo],,
+                    [curl=false])])
+       # cURL must support CURLINFO_TLS_SESSION, version >= 7.34
+
+# Check for curl/curl.h and gnurl/curl.h so we can use #ifdef
+# HAVE_CURL_CURL_H later (the above LIBCURL_CHECK_CONFIG accepted
+# *either* header set).
+AC_CHECK_HEADERS([curl/curl.h],,
+  curl=false
+  AC_CHECK_HEADERS([gnurl/curl.h],,
+  gnurl=false))
+
+# libgnurl
+AS_IF([test "x$gnurl" = "x0"],
+      [AS_IF([test "x$curl" = "x0"],
+              [AC_MSG_NOTICE([NOTICE: libgnurl not found. taler-bank support will not be compiled.])],
+              [AC_MSG_NOTICE([WARNING: libgnurl not found, trying to use libcurl-gnutls instead.])])])
+
+AS_IF([test x$curl = xfalse],
+       [AM_CONDITIONAL(HAVE_LIBCURL, false)
+        AS_IF([test "x$gnurl" = "x0"],
+              [AC_MSG_WARN([GNU Taler requires libcurl-gnutls  >= 7.34])])],
+       [AM_CONDITIONAL(HAVE_LIBCURL, true)
+       AC_DEFINE([HAVE_LIBCURL],[1],[Have CURL])])
+
+# check for libtalertwistertesting
+twistertesting=0
+AC_MSG_CHECKING([for talertwistertesting])
+AC_ARG_WITH([twister],
+            [AS_HELP_STRING([--with-twister=PFX], [base of libtalertwistertesting])],
+            [AC_MSG_RESULT([given as $with_twister])],
+            [AC_MSG_RESULT([not given])
+             with_twister=yes])
+AS_CASE([$with_twister],
+        [yes], [],
+        [no], [AC_MSG_WARN([no twister-testing will be compiled])],
+        [LDFLAGS="-L$with_twister/lib $LDFLAGS"
+         CPPFLAGS="-I$with_twister/include $CPPFLAGS"])
+
+AC_CHECK_HEADERS([taler/taler_twister_testing_lib.h],
+ [AC_CHECK_LIB([talertwistertesting], [TALER_TESTING_run_twister], twistertesting=1,, [-ltalerexchange -ltalerbank])],
+   [], [#ifdef HAVE_GNUNET_PLATFORM_H
+        #include <gnunet/platform.h>
+       #endif])
+AM_CONDITIONAL(HAVE_TWISTER, test x$twistertesting = x1)
+
+# gcov compilation
+AC_MSG_CHECKING(whether to compile with support for code coverage analysis)
+AC_ARG_ENABLE([coverage],
+              AS_HELP_STRING([--enable-coverage],
+                             [compile the library with code coverage support]),
+              [use_gcov=${enableval}],
+              [use_gcov=no])
+AC_MSG_RESULT($use_gcov)
+AM_CONDITIONAL([USE_COVERAGE], [test "x$use_gcov" = "xyes"])
+
+# Require minimum libgcrypt version
+need_libgcrypt_version=1.6.1
+AC_DEFINE_UNQUOTED([NEED_LIBGCRYPT_VERSION], ["$need_libgcrypt_version"],
+                                             [minimum version of libgcrypt 
required])
+AM_PATH_LIBGCRYPT([$need_libgcrypt_version])
+
+# logging
+extra_logging=0
+AC_ARG_ENABLE([logging],
+   AS_HELP_STRING([--enable-logging@<:@=value@:>@],[Enable logging calls. Possible values: yes,no,verbose ('yes' is the default)]),
+   [AS_IF([test "x$enableval" = "xyes"], [],
+          [test "x$enableval" = "xno"], [AC_DEFINE([GNUNET_CULL_LOGGING],[],[Define to cull all logging calls])],
+          [test "x$enableval" = "xverbose"], [extra_logging=1]
+          [test "x$enableval" = "xveryverbose"], [extra_logging=2])
+   ], [])
+AC_DEFINE_UNQUOTED([GNUNET_EXTRA_LOGGING],[$extra_logging],[1 if extra logging is enabled, 2 for very verbose extra logging, 0 otherwise])
+
+# version info
+AC_PATH_PROG(gitcommand, git)
+AC_MSG_CHECKING(for source being under a VCS)
+git_version=
+AS_IF([test ! "X$gitcommand" = "X"],
+[
+  git_version=$(cd $srcdir ; git rev-list --full-history --all --abbrev-commit | head -n 1 2>/dev/null)
+])
+AS_IF([test "X$git_version" = "X"],
+  [
+    vcs_name="no"
+    vcs_version="\"release\""
+  ],
+  [
+    vcs_name="yes, git-svn"
+    vcs_version="\"git-$git_version\""
+  ])
+AC_MSG_RESULT($vcs_name)
+
+AC_MSG_CHECKING(VCS version)
+AC_MSG_RESULT($vcs_version)
+AC_DEFINE_UNQUOTED(VCS_VERSION, [$vcs_version], [VCS revision/hash or tarball version])
+
+# Checks for typedefs, structures, and compiler characteristics.
+AC_TYPE_PID_T
+AC_TYPE_SIZE_T
+AC_TYPE_UINT16_T
+AC_TYPE_UINT32_T
+AC_TYPE_UINT64_T
+AC_TYPE_INTMAX_T
+AC_TYPE_UINTMAX_T
+
+# Checks for library functions.
+AC_CHECK_FUNCS([strdup])
+
+
+AC_ARG_ENABLE([[doc]],
+  [AS_HELP_STRING([[--disable-doc]], [do not build any documentation])], ,
+    [enable_doc=yes])
+test "x$enable_doc" = "xno" || enable_doc=yes
+AM_CONDITIONAL([ENABLE_DOC], [test "x$enable_doc" = "xyes"])
+
+
+],[  # this is about the doc-only if on top of the file
+
+# logic if doc_only is set, make sure conditionals are still defined
+AM_CONDITIONAL([HAVE_GNUNETPQ], [false])
+AM_CONDITIONAL([HAVE_POSTGRESQL], [false])
+AM_CONDITIONAL([HAVE_LIBCURL], [false])
+AM_CONDITIONAL([HAVE_LIBGNURL], [false])
+AM_CONDITIONAL([HAVE_TSC], [false])
+AM_CONDITIONAL([USE_COVERAGE], [false])
+AM_CONDITIONAL([ENABLE_DOC], [true])
+AM_CONDITIONAL([HAVE_TWISTER], [true])
+
+
+# end of 'doc_only'
+])
+
+
+# should experimental code be compiled (code that may not yet compile / have passing test cases)?
+AC_MSG_CHECKING(whether to compile experimental code)
+AC_ARG_ENABLE([experimental],
+   [AS_HELP_STRING([--enable-experimental], [enable compiling experimental code])],
+   [enable_experimental=${enableval}],
+   [enable_experimental=no])
+AC_MSG_RESULT($enable_experimental)
+AM_CONDITIONAL([HAVE_EXPERIMENTAL], [test "x$enable_experimental" = "xyes"])
+
+
+AC_CONFIG_FILES([Makefile
+doc/Makefile
+src/Makefile
+src/backup-db/Makefile
+src/backup/Makefile
+src/include/Makefile
+src/lib/Makefile
+])
+AC_OUTPUT
diff --git a/contrib/gnunet.tag b/contrib/gnunet.tag
new file mode 100644
index 0000000..1cf4f0d
--- /dev/null
+++ b/contrib/gnunet.tag
@@ -0,0 +1,55 @@
+<?xml version='1.0' encoding='UTF-8' standalone='yes' ?>
+<tagfile>
+  <compound kind="file">
+    <name>gnunet_util_lib.h</name>
+    <path></path>
+    <filename>gnunet_util_lib.h</filename>
+    <member kind="define">
+      <type>#define</type>
+      <name>GNUNET_YES</name>
+      <anchorfile>gnunet_util_lib.h</anchorfile>
+      <arglist></arglist>
+    </member>
+    <member kind="define">
+      <type>#define</type>
+      <name>GNUNET_OK</name>
+      <anchorfile>gnunet_util_lib.h</anchorfile>
+      <arglist></arglist>
+    </member>
+    <member kind="define">
+      <type>#define</type>
+      <name>GNUNET_NO</name>
+      <anchorfile>gnunet_util_lib.h</anchorfile>
+      <arglist></arglist>
+    </member>
+    <member kind="define">
+      <type>#define</type>
+      <name>GNUNET_SYSERR</name>
+      <anchorfile>gnunet_util_lib.h</anchorfile>
+      <arglist></arglist>
+    </member>
+    <member kind="define">
+      <type>#define</type>
+      <name>GNUNET_TIME_UNIT_FOREVER_ABS</name>
+      <anchorfile>gnunet_util_lib.h</anchorfile>
+      <arglist></arglist>
+    </member>
+  </compound>
+  <compound kind="file">
+    <name>gnunet_pq_lib.h</name>
+    <path></path>
+    <filename>gnunet_pq_lib.h</filename>
+    <member kind="define">
+      <type>#define</type>
+      <name>GNUNET_PQ_query_param_end</name>
+      <anchorfile>gnunet_pq_lib.h</anchorfile>
+      <arglist></arglist>
+    </member>
+    <member kind="typedef">
+      <type>int</type>
+      <name>GNUNET_PQ_ResultConverter</name>
+      <anchorfile>gnunet_pq_lib.h</anchorfile>
+      <arglist>)(void *cls, PGresult *result, int row, const char *fname, size_t *dst_size, void *dst)</arglist>
+    </member>
+  </compound>
+</tagfile>
diff --git a/doc/Makefile.am b/doc/Makefile.am
new file mode 100644
index 0000000..f135953
--- /dev/null
+++ b/doc/Makefile.am
@@ -0,0 +1,71 @@
+all: manual.pdf manual.html
+
+manual.pdf: arch.pdf manual.texi
+manual.html: arch.png manual.texi
+
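+# These pattern rules render the .dot graphs (e.g. arch.dot) with the
+# Graphviz 'dot' tool, which is assumed to be installed.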
+%.png: %.dot
+       dot -Tpng $< > $@
+%.pdf: %.dot
+       dot -Tpdf $< > $@
+
+clean-local:
+       -rm -f arch.png arch.pdf arch-api.png arch-api.pdf
+
+merchant-api-curl.pdf: merchant-api.content.texi arch-api.pdf
+merchant-api-python.pdf: merchant-api.content.texi arch-api.pdf
+merchant-api-curl.html: merchant-api.content.texi arch-api.png
+merchant-api-python.html: merchant-api.content.texi arch-api.png
+
+# NOTE: While GNU makeinfo 6.5 supports --css-ref=URL,
+# makeinfo 4.8 (in NetBSD 8.0, macOS, and maybe other
+# base) does only support --css-include=FILE.
+# The only difference is a shorter html output and
+# in 6.5 the ability to use refs instead of include.
+# We prefer not to break builds in this case, so
+# we use the include version which is backwards compatible
+# and upwards compatible, while the ref variant is neither.
+
+AM_MAKEINFOHTMLFLAGS = --no-split --css-include=docstyle.css --css-include=brown-paper.css
+
+man_MANS = \
+  taler-merchant-benchmark.1 \
+  taler-merchant-httpd.1
+
+info_TEXINFOS = \
+  manual.texi \
+  merchant-api-python.texi \
+  merchant-api-curl.texi \
+  merchant-api-php.texi
+
+manual_TEXINFOS = \
+  version-manual.texi \
+  merchant-api.content.texi
+
+merchant_api_python_TEXINFOS = \
+  version-merchant-api-python.texi \
+  merchant-api.content.texi
+
+merchant_api_curl_TEXINFOS = \
+  version-merchant-api-curl.texi \
+  merchant-api.content.texi
+
+merchant_api_php_TEXINFOS = \
+  version-merchant-api-php.texi \
+  merchant-api.content.texi
+
+extra_TEXINFOS = \
+  fdl-1.3.texi \
+  agpl.texi \
+  syntax.texi \
+  merchant-api.content.texi
+
+EXTRA_DIST = \
+  arch.dot \
+  $(extra_TEXINFOS) \
+  docstyle.css \
+  brown-paper.css \
+  $(man_MANS)
+
+DISTCLEANFILES = \
+  manual.cps \
+  manual.dvi
diff --git a/doc/Makefile.in b/doc/Makefile.in
new file mode 100644
index 0000000..a66aca2
--- /dev/null
+++ b/doc/Makefile.in
@@ -0,0 +1,1041 @@
+# Makefile.in generated by automake 1.16.1 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994-2018 Free Software Foundation, Inc.
+
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+VPATH = @srcdir@
+am__is_gnu_make = { \
+  if test -z '$(MAKELEVEL)'; then \
+    false; \
+  elif test -n '$(MAKE_HOST)'; then \
+    true; \
+  elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
+    true; \
+  else \
+    false; \
+  fi; \
+}
+am__make_running_with_option = \
+  case $${target_option-} in \
+      ?) ;; \
+      *) echo "am__make_running_with_option: internal error: invalid" \
+              "target option '$${target_option-}' specified" >&2; \
+         exit 1;; \
+  esac; \
+  has_opt=no; \
+  sane_makeflags=$$MAKEFLAGS; \
+  if $(am__is_gnu_make); then \
+    sane_makeflags=$$MFLAGS; \
+  else \
+    case $$MAKEFLAGS in \
+      *\\[\ \  ]*) \
+        bs=\\; \
+        sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
+          | sed "s/$$bs$$bs[$$bs $$bs  ]*//g"`;; \
+    esac; \
+  fi; \
+  skip_next=no; \
+  strip_trailopt () \
+  { \
+    flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
+  }; \
+  for flg in $$sane_makeflags; do \
+    test $$skip_next = yes && { skip_next=no; continue; }; \
+    case $$flg in \
+      *=*|--*) continue;; \
+        -*I) strip_trailopt 'I'; skip_next=yes;; \
+      -*I?*) strip_trailopt 'I';; \
+        -*O) strip_trailopt 'O'; skip_next=yes;; \
+      -*O?*) strip_trailopt 'O';; \
+        -*l) strip_trailopt 'l'; skip_next=yes;; \
+      -*l?*) strip_trailopt 'l';; \
+      -[dEDm]) skip_next=yes;; \
+      -[JT]) skip_next=yes;; \
+    esac; \
+    case $$flg in \
+      *$$target_option*) has_opt=yes; break;; \
+    esac; \
+  done; \
+  test $$has_opt = yes
+am__make_dryrun = (target_option=n; $(am__make_running_with_option))
+am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
+pkgdatadir = $(datadir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkglibexecdir = $(libexecdir)/@PACKAGE@
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+subdir = doc
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_lib_postgresql.m4 \
+       $(top_srcdir)/m4/ax_prog_doxygen.m4 \
+       $(top_srcdir)/m4/libcurl.m4 $(top_srcdir)/m4/libgnurl.m4 \
+       $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \
+       $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \
+       $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+       $(ACLOCAL_M4)
+DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/version-manual.texi \
+       $(srcdir)/stamp-vti $(srcdir)/version-merchant-api-python.texi \
+       $(srcdir)/stamp-1 $(srcdir)/version-merchant-api-curl.texi \
+       $(srcdir)/stamp-2 $(srcdir)/version-merchant-api-php.texi \
+       $(srcdir)/stamp-3 $(am__DIST_COMMON)
+mkinstalldirs = $(install_sh) -d
+CONFIG_HEADER = $(top_builddir)/taler_merchant_config.h
+CONFIG_CLEAN_FILES =
+CONFIG_CLEAN_VPATH_FILES =
+AM_V_P = $(am__v_P_@AM_V@)
+am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
+am__v_P_0 = false
+am__v_P_1 = :
+AM_V_GEN = $(am__v_GEN_@AM_V@)
+am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
+am__v_GEN_0 = @echo "  GEN     " $@;
+am__v_GEN_1 = 
+AM_V_at = $(am__v_at_@AM_V@)
+am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
+am__v_at_0 = @
+am__v_at_1 = 
+SOURCES =
+DIST_SOURCES =
+AM_V_DVIPS = $(am__v_DVIPS_@AM_V@)
+am__v_DVIPS_ = $(am__v_DVIPS_@AM_DEFAULT_V@)
+am__v_DVIPS_0 = @echo "  DVIPS   " $@;
+am__v_DVIPS_1 = 
+AM_V_MAKEINFO = $(am__v_MAKEINFO_@AM_V@)
+am__v_MAKEINFO_ = $(am__v_MAKEINFO_@AM_DEFAULT_V@)
+am__v_MAKEINFO_0 = @echo "  MAKEINFO" $@;
+am__v_MAKEINFO_1 = 
+AM_V_INFOHTML = $(am__v_INFOHTML_@AM_V@)
+am__v_INFOHTML_ = $(am__v_INFOHTML_@AM_DEFAULT_V@)
+am__v_INFOHTML_0 = @echo "  INFOHTML" $@;
+am__v_INFOHTML_1 = 
+AM_V_TEXI2DVI = $(am__v_TEXI2DVI_@AM_V@)
+am__v_TEXI2DVI_ = $(am__v_TEXI2DVI_@AM_DEFAULT_V@)
+am__v_TEXI2DVI_0 = @echo "  TEXI2DVI" $@;
+am__v_TEXI2DVI_1 = 
+AM_V_TEXI2PDF = $(am__v_TEXI2PDF_@AM_V@)
+am__v_TEXI2PDF_ = $(am__v_TEXI2PDF_@AM_DEFAULT_V@)
+am__v_TEXI2PDF_0 = @echo "  TEXI2PDF" $@;
+am__v_TEXI2PDF_1 = 
+AM_V_texinfo = $(am__v_texinfo_@AM_V@)
+am__v_texinfo_ = $(am__v_texinfo_@AM_DEFAULT_V@)
+am__v_texinfo_0 = -q
+am__v_texinfo_1 = 
+AM_V_texidevnull = $(am__v_texidevnull_@AM_V@)
+am__v_texidevnull_ = $(am__v_texidevnull_@AM_DEFAULT_V@)
+am__v_texidevnull_0 = > /dev/null
+am__v_texidevnull_1 = 
+INFO_DEPS = $(srcdir)/manual.info $(srcdir)/merchant-api-python.info \
+       $(srcdir)/merchant-api-curl.info \
+       $(srcdir)/merchant-api-php.info
+am__TEXINFO_TEX_DIR = $(srcdir)
+DVIS = manual.dvi merchant-api-python.dvi merchant-api-curl.dvi \
+       merchant-api-php.dvi
+PDFS = manual.pdf merchant-api-python.pdf merchant-api-curl.pdf \
+       merchant-api-php.pdf
+PSS = manual.ps merchant-api-python.ps merchant-api-curl.ps \
+       merchant-api-php.ps
+HTMLS = manual.html merchant-api-python.html merchant-api-curl.html \
+       merchant-api-php.html
+TEXINFOS = manual.texi merchant-api-python.texi merchant-api-curl.texi \
+       merchant-api-php.texi
+TEXI2DVI = texi2dvi
+TEXI2PDF = $(TEXI2DVI) --pdf --batch
+MAKEINFOHTML = $(MAKEINFO) --html
+DVIPS = dvips
+am__can_run_installinfo = \
+  case $$AM_UPDATE_INFO_DIR in \
+    n|no|NO) false;; \
+    *) (install-info --version) >/dev/null 2>&1;; \
+  esac
+am__installdirs = "$(DESTDIR)$(infodir)" "$(DESTDIR)$(man1dir)"
+am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
+am__vpath_adj = case $$p in \
+    $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
+    *) f=$$p;; \
+  esac;
+am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
+am__install_max = 40
+am__nobase_strip_setup = \
+  srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
+am__nobase_strip = \
+  for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
+am__nobase_list = $(am__nobase_strip_setup); \
+  for p in $$list; do echo "$$p $$p"; done | \
+  sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
+  $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
+    if (++n[$$2] == $(am__install_max)) \
+      { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
+    END { for (dir in files) print dir, files[dir] }'
+am__base_list = \
+  sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
+  sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
+am__uninstall_files_from_dir = { \
+  test -z "$$files" \
+    || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
+    || { echo " ( cd '$$dir' && rm -f" $$files ")"; \
+         $(am__cd) "$$dir" && rm -f $$files; }; \
+  }
+man1dir = $(mandir)/man1
+NROFF = nroff
+MANS = $(man_MANS)
+am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
+am__DIST_COMMON = $(manual_TEXINFOS) $(merchant_api_curl_TEXINFOS) \
+       $(merchant_api_php_TEXINFOS) $(merchant_api_python_TEXINFOS) \
+       $(srcdir)/Makefile.in mdate-sh texinfo.tex
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+ACLOCAL = @ACLOCAL@
+AMTAR = @AMTAR@
+AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@
+AR = @AR@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+DLLTOOL = @DLLTOOL@
+DOXYGEN_PAPER_SIZE = @DOXYGEN_PAPER_SIZE@
+DSYMUTIL = @DSYMUTIL@
+DUMPBIN = @DUMPBIN@
+DX_CONFIG = @DX_CONFIG@
+DX_CONFIG2 = @DX_CONFIG2@
+DX_CONFIG3 = @DX_CONFIG3@
+DX_CONFIG4 = @DX_CONFIG4@
+DX_DOCDIR = @DX_DOCDIR@
+DX_DOCDIR2 = @DX_DOCDIR2@
+DX_DOCDIR3 = @DX_DOCDIR3@
+DX_DOCDIR4 = @DX_DOCDIR4@
+DX_DOT = @DX_DOT@
+DX_DOXYGEN = @DX_DOXYGEN@
+DX_DVIPS = @DX_DVIPS@
+DX_EGREP = @DX_EGREP@
+DX_ENV = @DX_ENV@
+DX_FLAG_chi = @DX_FLAG_chi@
+DX_FLAG_chm = @DX_FLAG_chm@
+DX_FLAG_doc = @DX_FLAG_doc@
+DX_FLAG_dot = @DX_FLAG_dot@
+DX_FLAG_html = @DX_FLAG_html@
+DX_FLAG_man = @DX_FLAG_man@
+DX_FLAG_pdf = @DX_FLAG_pdf@
+DX_FLAG_ps = @DX_FLAG_ps@
+DX_FLAG_rtf = @DX_FLAG_rtf@
+DX_FLAG_xml = @DX_FLAG_xml@
+DX_HHC = @DX_HHC@
+DX_LATEX = @DX_LATEX@
+DX_MAKEINDEX = @DX_MAKEINDEX@
+DX_PDFLATEX = @DX_PDFLATEX@
+DX_PERL = @DX_PERL@
+DX_PROJECT = @DX_PROJECT@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+FGREP = @FGREP@
+GREP = @GREP@
+INSTALL = @INSTALL@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JANSSON_CFLAGS = @JANSSON_CFLAGS@
+JANSSON_LIBS = @JANSSON_LIBS@
+LD = @LD@
+LDFLAGS = @LDFLAGS@
+LIBCURL = @LIBCURL@
+LIBCURL_CPPFLAGS = @LIBCURL_CPPFLAGS@
+LIBGCRYPT_CFLAGS = @LIBGCRYPT_CFLAGS@
+LIBGCRYPT_CONFIG = @LIBGCRYPT_CONFIG@
+LIBGCRYPT_LIBS = @LIBGCRYPT_LIBS@
+LIBGNURL = @LIBGNURL@
+LIBGNURL_CPPFLAGS = @LIBGNURL_CPPFLAGS@
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LIPO = @LIPO@
+LN_S = @LN_S@
+LTLIBOBJS = @LTLIBOBJS@
+LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@
+MAKEINFO = @MAKEINFO@
+MANIFEST_TOOL = @MANIFEST_TOOL@
+MKDIR_P = @MKDIR_P@
+NM = @NM@
+NMEDIT = @NMEDIT@
+OBJDUMP = @OBJDUMP@
+OBJEXT = @OBJEXT@
+OTOOL = @OTOOL@
+OTOOL64 = @OTOOL64@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_URL = @PACKAGE_URL@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+PG_CONFIG = @PG_CONFIG@
+PKG_CONFIG = @PKG_CONFIG@
+PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@
+PKG_CONFIG_PATH = @PKG_CONFIG_PATH@
+POSTGRESQL_CPPFLAGS = @POSTGRESQL_CPPFLAGS@
+POSTGRESQL_LDFLAGS = @POSTGRESQL_LDFLAGS@
+POSTGRESQL_VERSION = @POSTGRESQL_VERSION@
+RANLIB = @RANLIB@
+SED = @SED@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+STRIP = @STRIP@
+VERSION = @VERSION@
+_libcurl_config = @_libcurl_config@
+_libgnurl_config = @_libgnurl_config@
+abs_builddir = @abs_builddir@
+abs_srcdir = @abs_srcdir@
+abs_top_builddir = @abs_top_builddir@
+abs_top_srcdir = @abs_top_srcdir@
+ac_ct_AR = @ac_ct_AR@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+builddir = @builddir@
+datadir = @datadir@
+datarootdir = @datarootdir@
+docdir = @docdir@
+dvidir = @dvidir@
+exec_prefix = @exec_prefix@
+gitcommand = @gitcommand@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+htmldir = @htmldir@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localedir = @localedir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+oldincludedir = @oldincludedir@
+pdfdir = @pdfdir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+psdir = @psdir@
+runstatedir = @runstatedir@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+srcdir = @srcdir@
+sysconfdir = @sysconfdir@
+target_alias = @target_alias@
+top_build_prefix = @top_build_prefix@
+top_builddir = @top_builddir@
+top_srcdir = @top_srcdir@
+tsc = @tsc@
+
+# NOTE: While GNU makeinfo 6.5 supports --css-ref=URL,
+# makeinfo 4.8 (in NetBSD 8.0, macOS, and maybe other
+# base) does only support --css-include=FILE.
+# The only difference is a shorter html output and
+# in 6.5 the ability to use refs instead of include.
+# We prefer not to break builds in this case, so
+# we use the include version which is backwards compatible
+# and upwards compatible, while the ref variant is neither.
+AM_MAKEINFOHTMLFLAGS = --no-split --css-include=docstyle.css --css-include=brown-paper.css
+man_MANS = \
+  taler-merchant-benchmark.1 \
+  taler-merchant-httpd.1
+
+info_TEXINFOS = \
+  manual.texi \
+  merchant-api-python.texi \
+  merchant-api-curl.texi \
+  merchant-api-php.texi
+
+manual_TEXINFOS = \
+  version-manual.texi \
+  merchant-api.content.texi
+
+merchant_api_python_TEXINFOS = \
+  version-merchant-api-python.texi \
+  merchant-api.content.texi
+
+merchant_api_curl_TEXINFOS = \
+  version-merchant-api-curl.texi \
+  merchant-api.content.texi
+
+merchant_api_php_TEXINFOS = \
+  version-merchant-api-php.texi \
+  merchant-api.content.texi
+
+extra_TEXINFOS = \
+  fdl-1.3.texi \
+  agpl.texi \
+  syntax.texi \
+  merchant-api.content.texi
+
+EXTRA_DIST = \
+  arch.dot \
+  $(extra_TEXINFOS) \
+  docstyle.css \
+  brown-paper.css \
+  $(man_MANS)
+
+DISTCLEANFILES = \
+  manual.cps \
+  manual.dvi
+
+all: all-am
+
+.SUFFIXES:
+.SUFFIXES: .dvi .html .info .pdf .ps .texi
+$(srcdir)/Makefile.in:  $(srcdir)/Makefile.am  $(am__configure_deps)
+       @for dep in $?; do \
+         case '$(am__configure_deps)' in \
+           *$$dep*) \
+             ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
+               && { if test -f $@; then exit 0; else break; fi; }; \
+             exit 1;; \
+         esac; \
+       done; \
+       echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu doc/Makefile'; \
+       $(am__cd) $(top_srcdir) && \
+         $(AUTOMAKE) --gnu doc/Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+       @case '$?' in \
+         *config.status*) \
+           cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
+         *) \
+           echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ 
$(am__maybe_remake_depfiles)'; \
+           cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ 
$(am__maybe_remake_depfiles);; \
+       esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure 
$(CONFIG_STATUS_DEPENDENCIES)
+       cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+$(top_srcdir)/configure:  $(am__configure_deps)
+       cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(ACLOCAL_M4):  $(am__aclocal_m4_deps)
+       cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(am__aclocal_m4_deps):
+
+mostlyclean-libtool:
+       -rm -f *.lo
+
+clean-libtool:
+       -rm -rf .libs _libs
+
+.texi.info:
+       $(AM_V_MAKEINFO)restore=: && backupdir="$(am__leading_dot)am$$$$" && \
+       am__cwd=`pwd` && $(am__cd) $(srcdir) && \
+       rm -rf $$backupdir && mkdir $$backupdir && \
+       if ($(MAKEINFO) --version) >/dev/null 2>&1; then \
+         for f in $@ $@-[0-9] $@-[0-9][0-9] $(@:.info=).i[0-9] 
$(@:.info=).i[0-9][0-9]; do \
+           if test -f $$f; then mv $$f $$backupdir; restore=mv; else :; fi; \
+         done; \
+       else :; fi && \
+       cd "$$am__cwd"; \
+       if $(MAKEINFO) $(AM_MAKEINFOFLAGS) $(MAKEINFOFLAGS) -I $(srcdir) \
+        -o $@ $<; \
+       then \
+         rc=0; \
+         $(am__cd) $(srcdir); \
+       else \
+         rc=$$?; \
+         $(am__cd) $(srcdir) && \
+         $$restore $$backupdir/* `echo "./$@" | sed 's|[^/]*$$||'`; \
+       fi; \
+       rm -rf $$backupdir; exit $$rc
+
+.texi.dvi:
+       
$(AM_V_TEXI2DVI)TEXINPUTS="$(am__TEXINFO_TEX_DIR)$(PATH_SEPARATOR)$$TEXINPUTS" \
+       MAKEINFO='$(MAKEINFO) $(AM_MAKEINFOFLAGS) $(MAKEINFOFLAGS) -I 
$(srcdir)' \
+       $(TEXI2DVI) $(AM_V_texinfo) --build-dir=$(@:.dvi=.t2d) -o $@ 
$(AM_V_texidevnull) \
+       $<
+
+.texi.pdf:
+       
$(AM_V_TEXI2PDF)TEXINPUTS="$(am__TEXINFO_TEX_DIR)$(PATH_SEPARATOR)$$TEXINPUTS" \
+       MAKEINFO='$(MAKEINFO) $(AM_MAKEINFOFLAGS) $(MAKEINFOFLAGS) -I 
$(srcdir)' \
+       $(TEXI2PDF) $(AM_V_texinfo) --build-dir=$(@:.pdf=.t2p) -o $@ 
$(AM_V_texidevnull) \
+       $<
+
+.texi.html:
+       $(AM_V_MAKEINFO)rm -rf $(@:.html=.htp)
+       $(AM_V_at)if $(MAKEINFOHTML) $(AM_MAKEINFOHTMLFLAGS) $(MAKEINFOFLAGS) 
-I $(srcdir) \
+        -o $(@:.html=.htp) $<; \
+       then \
+         rm -rf $@ && mv $(@:.html=.htp) $@; \
+       else \
+         rm -rf $(@:.html=.htp); exit 1; \
+       fi
+$(srcdir)/manual.info: manual.texi $(srcdir)/version-manual.texi 
$(manual_TEXINFOS)
+manual.dvi: manual.texi $(srcdir)/version-manual.texi $(manual_TEXINFOS)
+$(srcdir)/version-manual.texi:  $(srcdir)/stamp-vti
+$(srcdir)/stamp-vti: manual.texi $(top_srcdir)/configure
+       @(dir=.; test -f ./manual.texi || dir=$(srcdir); \
+       set `$(SHELL) $(srcdir)/mdate-sh $$dir/manual.texi`; \
+       echo "@set UPDATED $$1 $$2 $$3"; \
+       echo "@set UPDATED-MONTH $$2 $$3"; \
+       echo "@set EDITION $(VERSION)"; \
+       echo "@set VERSION $(VERSION)") > vti.tmp$$$$ && \
+       (cmp -s vti.tmp$$$$ $(srcdir)/version-manual.texi \
+         || (echo "Updating $(srcdir)/version-manual.texi" && \
+             cp vti.tmp$$$$ $(srcdir)/version-manual.texi.tmp$$$$ && \
+             mv $(srcdir)/version-manual.texi.tmp$$$$ 
$(srcdir)/version-manual.texi)) && \
+       rm -f vti.tmp$$$$ $(srcdir)/version-manual.texi.$$$$
+       @cp $(srcdir)/version-manual.texi $@
+
+mostlyclean-vti:
+       -rm -f vti.tmp* $(srcdir)/version-manual.texi.tmp*
+
+maintainer-clean-vti:
+       -rm -f $(srcdir)/stamp-vti $(srcdir)/version-manual.texi
+$(srcdir)/merchant-api-python.info: merchant-api-python.texi 
$(srcdir)/version-merchant-api-python.texi $(merchant_api_python_TEXINFOS)
+merchant-api-python.dvi: merchant-api-python.texi 
$(srcdir)/version-merchant-api-python.texi $(merchant_api_python_TEXINFOS)
+$(srcdir)/version-merchant-api-python.texi:  $(srcdir)/stamp-1
+$(srcdir)/stamp-1: merchant-api-python.texi $(top_srcdir)/configure
+       @(dir=.; test -f ./merchant-api-python.texi || dir=$(srcdir); \
+       set `$(SHELL) $(srcdir)/mdate-sh $$dir/merchant-api-python.texi`; \
+       echo "@set UPDATED $$1 $$2 $$3"; \
+       echo "@set UPDATED-MONTH $$2 $$3"; \
+       echo "@set EDITION $(VERSION)"; \
+       echo "@set VERSION $(VERSION)") > 1.tmp$$$$ && \
+       (cmp -s 1.tmp$$$$ $(srcdir)/version-merchant-api-python.texi \
+         || (echo "Updating $(srcdir)/version-merchant-api-python.texi" && \
+             cp 1.tmp$$$$ $(srcdir)/version-merchant-api-python.texi.tmp$$$$ 
&& \
+             mv $(srcdir)/version-merchant-api-python.texi.tmp$$$$ 
$(srcdir)/version-merchant-api-python.texi)) && \
+       rm -f 1.tmp$$$$ $(srcdir)/version-merchant-api-python.texi.$$$$
+       @cp $(srcdir)/version-merchant-api-python.texi $@
+
+mostlyclean-1:
+       -rm -f 1.tmp* $(srcdir)/version-merchant-api-python.texi.tmp*
+
+maintainer-clean-1:
+       -rm -f $(srcdir)/stamp-1 $(srcdir)/version-merchant-api-python.texi
+$(srcdir)/merchant-api-curl.info: merchant-api-curl.texi 
$(srcdir)/version-merchant-api-curl.texi $(merchant_api_curl_TEXINFOS)
+merchant-api-curl.dvi: merchant-api-curl.texi 
$(srcdir)/version-merchant-api-curl.texi $(merchant_api_curl_TEXINFOS)
+$(srcdir)/version-merchant-api-curl.texi:  $(srcdir)/stamp-2
+$(srcdir)/stamp-2: merchant-api-curl.texi $(top_srcdir)/configure
+       @(dir=.; test -f ./merchant-api-curl.texi || dir=$(srcdir); \
+       set `$(SHELL) $(srcdir)/mdate-sh $$dir/merchant-api-curl.texi`; \
+       echo "@set UPDATED $$1 $$2 $$3"; \
+       echo "@set UPDATED-MONTH $$2 $$3"; \
+       echo "@set EDITION $(VERSION)"; \
+       echo "@set VERSION $(VERSION)") > 2.tmp$$$$ && \
+       (cmp -s 2.tmp$$$$ $(srcdir)/version-merchant-api-curl.texi \
+         || (echo "Updating $(srcdir)/version-merchant-api-curl.texi" && \
+             cp 2.tmp$$$$ $(srcdir)/version-merchant-api-curl.texi.tmp$$$$ && \
+             mv $(srcdir)/version-merchant-api-curl.texi.tmp$$$$ 
$(srcdir)/version-merchant-api-curl.texi)) && \
+       rm -f 2.tmp$$$$ $(srcdir)/version-merchant-api-curl.texi.$$$$
+       @cp $(srcdir)/version-merchant-api-curl.texi $@
+
+mostlyclean-2:
+       -rm -f 2.tmp* $(srcdir)/version-merchant-api-curl.texi.tmp*
+
+maintainer-clean-2:
+       -rm -f $(srcdir)/stamp-2 $(srcdir)/version-merchant-api-curl.texi
+$(srcdir)/merchant-api-php.info: merchant-api-php.texi 
$(srcdir)/version-merchant-api-php.texi $(merchant_api_php_TEXINFOS)
+merchant-api-php.dvi: merchant-api-php.texi 
$(srcdir)/version-merchant-api-php.texi $(merchant_api_php_TEXINFOS)
+merchant-api-php.pdf: merchant-api-php.texi 
$(srcdir)/version-merchant-api-php.texi $(merchant_api_php_TEXINFOS)
+merchant-api-php.html: merchant-api-php.texi 
$(srcdir)/version-merchant-api-php.texi $(merchant_api_php_TEXINFOS)
+$(srcdir)/version-merchant-api-php.texi:  $(srcdir)/stamp-3
+$(srcdir)/stamp-3: merchant-api-php.texi $(top_srcdir)/configure
+       @(dir=.; test -f ./merchant-api-php.texi || dir=$(srcdir); \
+       set `$(SHELL) $(srcdir)/mdate-sh $$dir/merchant-api-php.texi`; \
+       echo "@set UPDATED $$1 $$2 $$3"; \
+       echo "@set UPDATED-MONTH $$2 $$3"; \
+       echo "@set EDITION $(VERSION)"; \
+       echo "@set VERSION $(VERSION)") > 3.tmp$$$$ && \
+       (cmp -s 3.tmp$$$$ $(srcdir)/version-merchant-api-php.texi \
+         || (echo "Updating $(srcdir)/version-merchant-api-php.texi" && \
+             cp 3.tmp$$$$ $(srcdir)/version-merchant-api-php.texi.tmp$$$$ && \
+             mv $(srcdir)/version-merchant-api-php.texi.tmp$$$$ 
$(srcdir)/version-merchant-api-php.texi)) && \
+       rm -f 3.tmp$$$$ $(srcdir)/version-merchant-api-php.texi.$$$$
+       @cp $(srcdir)/version-merchant-api-php.texi $@
+
+mostlyclean-3:
+       -rm -f 3.tmp* $(srcdir)/version-merchant-api-php.texi.tmp*
+
+maintainer-clean-3:
+       -rm -f $(srcdir)/stamp-3 $(srcdir)/version-merchant-api-php.texi
+.dvi.ps:
+       
$(AM_V_DVIPS)TEXINPUTS="$(am__TEXINFO_TEX_DIR)$(PATH_SEPARATOR)$$TEXINPUTS" \
+       $(DVIPS) $(AM_V_texinfo) -o $@ $<
+
+uninstall-dvi-am:
+       @$(NORMAL_UNINSTALL)
+       @list='$(DVIS)'; test -n "$(dvidir)" || list=; \
+       for p in $$list; do \
+         $(am__strip_dir) \
+         echo " rm -f '$(DESTDIR)$(dvidir)/$$f'"; \
+         rm -f "$(DESTDIR)$(dvidir)/$$f"; \
+       done
+
+uninstall-html-am:
+       @$(NORMAL_UNINSTALL)
+       @list='$(HTMLS)'; test -n "$(htmldir)" || list=; \
+       for p in $$list; do \
+         $(am__strip_dir) \
+         echo " rm -rf '$(DESTDIR)$(htmldir)/$$f'"; \
+         rm -rf "$(DESTDIR)$(htmldir)/$$f"; \
+       done
+
+uninstall-info-am:
+       @$(PRE_UNINSTALL)
+       @if test -d '$(DESTDIR)$(infodir)' && $(am__can_run_installinfo); then \
+         list='$(INFO_DEPS)'; \
+         for file in $$list; do \
+           relfile=`echo "$$file" | sed 's|^.*/||'`; \
+           echo " install-info --info-dir='$(DESTDIR)$(infodir)' --remove 
'$(DESTDIR)$(infodir)/$$relfile'"; \
+           if install-info --info-dir="$(DESTDIR)$(infodir)" --remove 
"$(DESTDIR)$(infodir)/$$relfile"; \
+           then :; else test ! -f "$(DESTDIR)$(infodir)/$$relfile" || exit 1; 
fi; \
+         done; \
+       else :; fi
+       @$(NORMAL_UNINSTALL)
+       @list='$(INFO_DEPS)'; \
+       for file in $$list; do \
+         relfile=`echo "$$file" | sed 's|^.*/||'`; \
+         relfile_i=`echo "$$relfile" | sed 's|\.info$$||;s|$$|.i|'`; \
+         (if test -d "$(DESTDIR)$(infodir)" && cd "$(DESTDIR)$(infodir)"; then 
\
+            echo " cd '$(DESTDIR)$(infodir)' && rm -f $$relfile 
$$relfile-[0-9] $$relfile-[0-9][0-9] $$relfile_i[0-9] $$relfile_i[0-9][0-9]"; \
+            rm -f $$relfile $$relfile-[0-9] $$relfile-[0-9][0-9] 
$$relfile_i[0-9] $$relfile_i[0-9][0-9]; \
+          else :; fi); \
+       done
+
+uninstall-pdf-am:
+       @$(NORMAL_UNINSTALL)
+       @list='$(PDFS)'; test -n "$(pdfdir)" || list=; \
+       for p in $$list; do \
+         $(am__strip_dir) \
+         echo " rm -f '$(DESTDIR)$(pdfdir)/$$f'"; \
+         rm -f "$(DESTDIR)$(pdfdir)/$$f"; \
+       done
+
+uninstall-ps-am:
+       @$(NORMAL_UNINSTALL)
+       @list='$(PSS)'; test -n "$(psdir)" || list=; \
+       for p in $$list; do \
+         $(am__strip_dir) \
+         echo " rm -f '$(DESTDIR)$(psdir)/$$f'"; \
+         rm -f "$(DESTDIR)$(psdir)/$$f"; \
+       done
+
+dist-info: $(INFO_DEPS)
+       @srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; \
+       list='$(INFO_DEPS)'; \
+       for base in $$list; do \
+         case $$base in \
+           $(srcdir)/*) base=`echo "$$base" | sed "s|^$$srcdirstrip/||"`;; \
+         esac; \
+         if test -f $$base; then d=.; else d=$(srcdir); fi; \
+         base_i=`echo "$$base" | sed 's|\.info$$||;s|$$|.i|'`; \
+         for file in $$d/$$base $$d/$$base-[0-9] $$d/$$base-[0-9][0-9] 
$$d/$$base_i[0-9] $$d/$$base_i[0-9][0-9]; do \
+           if test -f $$file; then \
+             relfile=`expr "$$file" : "$$d/\(.*\)"`; \
+             test -f "$(distdir)/$$relfile" || \
+               cp -p $$file "$(distdir)/$$relfile"; \
+           else :; fi; \
+         done; \
+       done
+
+mostlyclean-aminfo:
+       -rm -rf manual.t2d manual.t2p merchant-api-python.t2d \
+         merchant-api-python.t2p merchant-api-curl.t2d \
+         merchant-api-curl.t2p merchant-api-php.t2d \
+         merchant-api-php.t2p
+
+clean-aminfo:
+       -test -z "manual.dvi manual.pdf manual.ps manual.html 
merchant-api-python.dvi \
+         merchant-api-python.pdf merchant-api-python.ps \
+         merchant-api-python.html merchant-api-curl.dvi \
+         merchant-api-curl.pdf merchant-api-curl.ps \
+         merchant-api-curl.html merchant-api-php.dvi \
+         merchant-api-php.pdf merchant-api-php.ps \
+         merchant-api-php.html" \
+       || rm -rf manual.dvi manual.pdf manual.ps manual.html 
merchant-api-python.dvi \
+         merchant-api-python.pdf merchant-api-python.ps \
+         merchant-api-python.html merchant-api-curl.dvi \
+         merchant-api-curl.pdf merchant-api-curl.ps \
+         merchant-api-curl.html merchant-api-php.dvi \
+         merchant-api-php.pdf merchant-api-php.ps \
+         merchant-api-php.html
+
+maintainer-clean-aminfo:
+       @list='$(INFO_DEPS)'; for i in $$list; do \
+         i_i=`echo "$$i" | sed 's|\.info$$||;s|$$|.i|'`; \
+         echo " rm -f $$i $$i-[0-9] $$i-[0-9][0-9] $$i_i[0-9] 
$$i_i[0-9][0-9]"; \
+         rm -f $$i $$i-[0-9] $$i-[0-9][0-9] $$i_i[0-9] $$i_i[0-9][0-9]; \
+       done
+install-man1: $(man_MANS)
+       @$(NORMAL_INSTALL)
+       @list1=''; \
+       list2='$(man_MANS)'; \
+       test -n "$(man1dir)" \
+         && test -n "`echo $$list1$$list2`" \
+         || exit 0; \
+       echo " $(MKDIR_P) '$(DESTDIR)$(man1dir)'"; \
+       $(MKDIR_P) "$(DESTDIR)$(man1dir)" || exit 1; \
+       { for i in $$list1; do echo "$$i"; done;  \
+       if test -n "$$list2"; then \
+         for i in $$list2; do echo "$$i"; done \
+           | sed -n '/\.1[a-z]*$$/p'; \
+       fi; \
+       } | while read p; do \
+         if test -f $$p; then d=; else d="$(srcdir)/"; fi; \
+         echo "$$d$$p"; echo "$$p"; \
+       done | \
+       sed -e 'n;s,.*/,,;p;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \
+             -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,' | \
+       sed 'N;N;s,\n, ,g' | { \
+       list=; while read file base inst; do \
+         if test "$$base" = "$$inst"; then list="$$list $$file"; else \
+           echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man1dir)/$$inst'"; \
+           $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man1dir)/$$inst" || exit $$?; 
\
+         fi; \
+       done; \
+       for i in $$list; do echo "$$i"; done | $(am__base_list) | \
+       while read files; do \
+         test -z "$$files" || { \
+           echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man1dir)'"; \
+           $(INSTALL_DATA) $$files "$(DESTDIR)$(man1dir)" || exit $$?; }; \
+       done; }
+
+uninstall-man1:
+       @$(NORMAL_UNINSTALL)
+       @list=''; test -n "$(man1dir)" || exit 0; \
+       files=`{ for i in $$list; do echo "$$i"; done; \
+       l2='$(man_MANS)'; for i in $$l2; do echo "$$i"; done | \
+         sed -n '/\.1[a-z]*$$/p'; \
+       } | sed -e 's,.*/,,;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \
+             -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,'`; \
+       dir='$(DESTDIR)$(man1dir)'; $(am__uninstall_files_from_dir)
+tags TAGS:
+
+ctags CTAGS:
+
+cscope cscopelist:
+
+
+distdir: $(BUILT_SOURCES)
+       $(MAKE) $(AM_MAKEFLAGS) distdir-am
+
+distdir-am: $(DISTFILES)
+       @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+       topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+       list='$(DISTFILES)'; \
+         dist_files=`for file in $$list; do echo $$file; done | \
+         sed -e "s|^$$srcdirstrip/||;t" \
+             -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
+       case $$dist_files in \
+         */*) $(MKDIR_P) `echo "$$dist_files" | \
+                          sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
+                          sort -u` ;; \
+       esac; \
+       for file in $$dist_files; do \
+         if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+         if test -d $$d/$$file; then \
+           dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
+           if test -d "$(distdir)/$$file"; then \
+             find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx 
{} \;; \
+           fi; \
+           if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+             cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
+             find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx 
{} \;; \
+           fi; \
+           cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
+         else \
+           test -f "$(distdir)/$$file" \
+           || cp -p $$d/$$file "$(distdir)/$$file" \
+           || exit 1; \
+         fi; \
+       done
+       $(MAKE) $(AM_MAKEFLAGS) \
+         top_distdir="$(top_distdir)" distdir="$(distdir)" \
+         dist-info
+check-am: all-am
+check: check-am
+all-am: Makefile $(INFO_DEPS) $(MANS)
+installdirs:
+       for dir in "$(DESTDIR)$(infodir)" "$(DESTDIR)$(man1dir)"; do \
+         test -z "$$dir" || $(MKDIR_P) "$$dir"; \
+       done
+install: install-am
+install-exec: install-exec-am
+install-data: install-data-am
+uninstall: uninstall-am
+
+install-am: all-am
+       @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-am
+install-strip:
+       if test -z '$(STRIP)'; then \
+         $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+           install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s 
\
+             install; \
+       else \
+         $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+           install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+           "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
+       fi
+mostlyclean-generic:
+
+clean-generic:
+
+distclean-generic:
+       -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+       -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
+       -test -z "$(DISTCLEANFILES)" || rm -f $(DISTCLEANFILES)
+
+maintainer-clean-generic:
+       @echo "This command is intended for maintainers to use"
+       @echo "it deletes files that may require special tools to rebuild."
+clean: clean-am
+
+clean-am: clean-aminfo clean-generic clean-libtool clean-local \
+       mostlyclean-am
+
+distclean: distclean-am
+       -rm -f Makefile
+distclean-am: clean-am distclean-generic
+
+dvi: dvi-am
+
+dvi-am: $(DVIS)
+
+html: html-am
+
+html-am: $(HTMLS)
+
+info: info-am
+
+info-am: $(INFO_DEPS)
+
+install-data-am: install-info-am install-man
+
+install-dvi: install-dvi-am
+
+install-dvi-am: $(DVIS)
+       @$(NORMAL_INSTALL)
+       @list='$(DVIS)'; test -n "$(dvidir)" || list=; \
+       if test -n "$$list"; then \
+         echo " $(MKDIR_P) '$(DESTDIR)$(dvidir)'"; \
+         $(MKDIR_P) "$(DESTDIR)$(dvidir)" || exit 1; \
+       fi; \
+       for p in $$list; do \
+         if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+         echo "$$d$$p"; \
+       done | $(am__base_list) | \
+       while read files; do \
+         echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(dvidir)'"; \
+         $(INSTALL_DATA) $$files "$(DESTDIR)$(dvidir)" || exit $$?; \
+       done
+install-exec-am:
+
+install-html: install-html-am
+
+install-html-am: $(HTMLS)
+       @$(NORMAL_INSTALL)
+       @list='$(HTMLS)'; list2=; test -n "$(htmldir)" || list=; \
+       if test -n "$$list"; then \
+         echo " $(MKDIR_P) '$(DESTDIR)$(htmldir)'"; \
+         $(MKDIR_P) "$(DESTDIR)$(htmldir)" || exit 1; \
+       fi; \
+       for p in $$list; do \
+         if test -f "$$p" || test -d "$$p"; then d=; else d="$(srcdir)/"; fi; \
+         $(am__strip_dir) \
+         d2=$$d$$p; \
+         if test -d "$$d2"; then \
+           echo " $(MKDIR_P) '$(DESTDIR)$(htmldir)/$$f'"; \
+           $(MKDIR_P) "$(DESTDIR)$(htmldir)/$$f" || exit 1; \
+           echo " $(INSTALL_DATA) '$$d2'/* '$(DESTDIR)$(htmldir)/$$f'"; \
+           $(INSTALL_DATA) "$$d2"/* "$(DESTDIR)$(htmldir)/$$f" || exit $$?; \
+         else \
+           list2="$$list2 $$d2"; \
+         fi; \
+       done; \
+       test -z "$$list2" || { echo "$$list2" | $(am__base_list) | \
+       while read files; do \
+         echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(htmldir)'"; \
+         $(INSTALL_DATA) $$files "$(DESTDIR)$(htmldir)" || exit $$?; \
+       done; }
+install-info: install-info-am
+
+install-info-am: $(INFO_DEPS)
+       @$(NORMAL_INSTALL)
+       @srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; \
+       list='$(INFO_DEPS)'; test -n "$(infodir)" || list=; \
+       if test -n "$$list"; then \
+         echo " $(MKDIR_P) '$(DESTDIR)$(infodir)'"; \
+         $(MKDIR_P) "$(DESTDIR)$(infodir)" || exit 1; \
+       fi; \
+       for file in $$list; do \
+         case $$file in \
+           $(srcdir)/*) file=`echo "$$file" | sed "s|^$$srcdirstrip/||"`;; \
+         esac; \
+         if test -f $$file; then d=.; else d=$(srcdir); fi; \
+         file_i=`echo "$$file" | sed 's|\.info$$||;s|$$|.i|'`; \
+         for ifile in $$d/$$file $$d/$$file-[0-9] $$d/$$file-[0-9][0-9] \
+                      $$d/$$file_i[0-9] $$d/$$file_i[0-9][0-9] ; do \
+           if test -f $$ifile; then \
+             echo "$$ifile"; \
+           else : ; fi; \
+         done; \
+       done | $(am__base_list) | \
+       while read files; do \
+         echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(infodir)'"; \
+         $(INSTALL_DATA) $$files "$(DESTDIR)$(infodir)" || exit $$?; done
+       @$(POST_INSTALL)
+       @if $(am__can_run_installinfo); then \
+         list='$(INFO_DEPS)'; test -n "$(infodir)" || list=; \
+         for file in $$list; do \
+           relfile=`echo "$$file" | sed 's|^.*/||'`; \
+           echo " install-info --info-dir='$(DESTDIR)$(infodir)' '$(DESTDIR)$(infodir)/$$relfile'";\
+           install-info --info-dir="$(DESTDIR)$(infodir)" "$(DESTDIR)$(infodir)/$$relfile" || :;\
+         done; \
+       else : ; fi
+install-man: install-man1
+
+install-pdf: install-pdf-am
+
+install-pdf-am: $(PDFS)
+       @$(NORMAL_INSTALL)
+       @list='$(PDFS)'; test -n "$(pdfdir)" || list=; \
+       if test -n "$$list"; then \
+         echo " $(MKDIR_P) '$(DESTDIR)$(pdfdir)'"; \
+         $(MKDIR_P) "$(DESTDIR)$(pdfdir)" || exit 1; \
+       fi; \
+       for p in $$list; do \
+         if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+         echo "$$d$$p"; \
+       done | $(am__base_list) | \
+       while read files; do \
+         echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pdfdir)'"; \
+         $(INSTALL_DATA) $$files "$(DESTDIR)$(pdfdir)" || exit $$?; done
+install-ps: install-ps-am
+
+install-ps-am: $(PSS)
+       @$(NORMAL_INSTALL)
+       @list='$(PSS)'; test -n "$(psdir)" || list=; \
+       if test -n "$$list"; then \
+         echo " $(MKDIR_P) '$(DESTDIR)$(psdir)'"; \
+         $(MKDIR_P) "$(DESTDIR)$(psdir)" || exit 1; \
+       fi; \
+       for p in $$list; do \
+         if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+         echo "$$d$$p"; \
+       done | $(am__base_list) | \
+       while read files; do \
+         echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(psdir)'"; \
+         $(INSTALL_DATA) $$files "$(DESTDIR)$(psdir)" || exit $$?; done
+installcheck-am:
+
+maintainer-clean: maintainer-clean-am
+       -rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-1 \
+       maintainer-clean-2 maintainer-clean-3 maintainer-clean-aminfo \
+       maintainer-clean-generic maintainer-clean-vti
+
+mostlyclean: mostlyclean-am
+
+mostlyclean-am: mostlyclean-1 mostlyclean-2 mostlyclean-3 \
+       mostlyclean-aminfo mostlyclean-generic mostlyclean-libtool \
+       mostlyclean-vti
+
+pdf: pdf-am
+
+pdf-am: $(PDFS)
+
+ps: ps-am
+
+ps-am: $(PSS)
+
+uninstall-am: uninstall-dvi-am uninstall-html-am uninstall-info-am \
+       uninstall-man uninstall-pdf-am uninstall-ps-am
+
+uninstall-man: uninstall-man1
+
+.MAKE: install-am install-strip
+
+.PHONY: all all-am check check-am clean clean-aminfo clean-generic \
+       clean-libtool clean-local cscopelist-am ctags-am dist-info \
+       distclean distclean-generic distclean-libtool distdir dvi \
+       dvi-am html html-am info info-am install install-am \
+       install-data install-data-am install-dvi install-dvi-am \
+       install-exec install-exec-am install-html install-html-am \
+       install-info install-info-am install-man install-man1 \
+       install-pdf install-pdf-am install-ps install-ps-am \
+       install-strip installcheck installcheck-am installdirs \
+       maintainer-clean maintainer-clean-1 maintainer-clean-2 \
+       maintainer-clean-3 maintainer-clean-aminfo \
+       maintainer-clean-generic maintainer-clean-vti mostlyclean \
+       mostlyclean-1 mostlyclean-2 mostlyclean-3 mostlyclean-aminfo \
+       mostlyclean-generic mostlyclean-libtool mostlyclean-vti pdf \
+       pdf-am ps ps-am tags-am uninstall uninstall-am \
+       uninstall-dvi-am uninstall-html-am uninstall-info-am \
+       uninstall-man uninstall-man1 uninstall-pdf-am uninstall-ps-am
+
+.PRECIOUS: Makefile
+
+all: manual.pdf manual.html
+
+manual.pdf: arch.pdf manual.texi
+manual.html: arch.png manual.texi
+
+%.png: %.dot
+       dot -Tpng $< > $@
+%.pdf: %.dot
+       dot -Tpdf $< > $@
+
+clean-local:
+       -rm -f arch.png arch.pdf arch-api.png arch-api.pdf
+
+merchant-api-curl.pdf: merchant-api.content.texi arch-api.pdf
+merchant-api-python.pdf: merchant-api.content.texi arch-api.pdf
+merchant-api-curl.html: merchant-api.content.texi arch-api.png
+merchant-api-python.html: merchant-api.content.texi arch-api.png
+
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/doc/agpl.texi b/doc/agpl.texi
new file mode 100644
index 0000000..e6ee662
--- /dev/null
+++ b/doc/agpl.texi
@@ -0,0 +1,698 @@
+@c The GNU Affero General Public License.
+@center Version 3, 19 November 2007
+
+@c This file is intended to be included within another document,
+@c hence no sectioning command or @node.
+
+@display
+Copyright @copyright{} 2007 Free Software Foundation, Inc. @url{http://fsf.org/}
+
+Everyone is permitted to copy and distribute verbatim copies of this
+license document, but changing it is not allowed.
+@end display
+
+@heading Preamble
+
+The GNU Affero General Public License is a free, copyleft license
+for software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.
+
+The licenses for most software and other practical works are
+designed to take away your freedom to share and change the works.  By
+contrast, our General Public Licenses are intended to guarantee your
+freedom to share and change all versions of a program--to make sure it
+remains free software for all its users.
+
+When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+Developers that use our General Public Licenses protect your rights
+with two steps: (1) assert copyright on the software, and (2) offer
+you this License which gives you legal permission to copy, distribute
+and/or modify the software.
+
+A secondary benefit of defending all users' freedom is that
+improvements made in alternate versions of the program, if they
+receive widespread use, become available for other developers to
+incorporate.  Many developers of free software are heartened and
+encouraged by the resulting cooperation.  However, in the case of
+software used on network servers, this result may fail to come about.
+The GNU General Public License permits making a modified version and
+letting the public access it on a server without ever releasing its
+source code to the public.
+
+The GNU Affero General Public License is designed specifically to
+ensure that, in such cases, the modified source code becomes available
+to the community.  It requires the operator of a network server to
+provide the source code of the modified version running there to the
+users of that server.  Therefore, public use of a modified version, on
+a publicly accessible server, gives the public access to the source
+code of the modified version.
+
+An older license, called the Affero General Public License and
+published by Affero, was designed to accomplish similar goals.  This is
+a different license, not a version of the Affero GPL, but Affero has
+released a new version of the Affero GPL which permits relicensing under
+this license.
+
+The precise terms and conditions for copying, distribution and
+modification follow.
+
+@heading TERMS AND CONDITIONS
+
+@enumerate 0
+@item Definitions.
+
+``This License'' refers to version 3 of the GNU Affero General Public License.
+
+``Copyright'' also means copyright-like laws that apply to other kinds
+of works, such as semiconductor masks.
+
+``The Program'' refers to any copyrightable work licensed under this
+License.  Each licensee is addressed as ``you''.  ``Licensees'' and
+``recipients'' may be individuals or organizations.
+
+To ``modify'' a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of
+an exact copy.  The resulting work is called a ``modified version'' of
+the earlier work or a work ``based on'' the earlier work.
+
+A ``covered work'' means either the unmodified Program or a work based
+on the Program.
+
+To ``propagate'' a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy.  Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+To ``convey'' a work means any kind of propagation that enables other
+parties to make or receive copies.  Mere interaction with a user
+through a computer network, with no transfer of a copy, is not
+conveying.
+
+An interactive user interface displays ``Appropriate Legal Notices'' to
+the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License.  If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+@item Source Code.
+
+The ``source code'' for a work means the preferred form of the work for
+making modifications to it.  ``Object code'' means any non-source form
+of a work.
+
+A ``Standard Interface'' means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+The ``System Libraries'' of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form.  A
+``Major Component'', in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+The ``Corresponding Source'' for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities.  However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work.  For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+The Corresponding Source need not include anything that users can
+regenerate automatically from other parts of the Corresponding Source.
+
+The Corresponding Source for a work in source code form is that same
+work.
+
+@item Basic Permissions.
+
+All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met.  This License explicitly affirms your unlimited
+permission to run the unmodified Program.  The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work.  This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+You may make, run and propagate covered works that you do not convey,
+without conditions so long as your license otherwise remains in force.
+You may convey covered works to others for the sole purpose of having
+them make modifications exclusively for you, or provide you with
+facilities for running those works, provided that you comply with the
+terms of this License in conveying all material for which you do not
+control copyright.  Those thus making or running the covered works for
+you must do so exclusively on your behalf, under your direction and
+control, on terms that prohibit them from making any copies of your
+copyrighted material outside their relationship with you.
+
+Conveying under any other circumstances is permitted solely under the
+conditions stated below.  Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+@item Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such
+circumvention is effected by exercising rights under this License with
+respect to the covered work, and you disclaim any intention to limit
+operation or modification of the work as a means of enforcing, against
+the work's users, your or third parties' legal rights to forbid
+circumvention of technological measures.
+
+@item Conveying Verbatim Copies.
+
+You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+@item Conveying Modified Source Versions.
+
+You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these
+conditions:
+
+@enumerate a
+@item
+The work must carry prominent notices stating that you modified it,
+and giving a relevant date.
+
+@item
+The work must carry prominent notices stating that it is released
+under this License and any conditions added under section 7.  This
+requirement modifies the requirement in section 4 to ``keep intact all
+notices''.
+
+@item
+You must license the entire work, as a whole, under this License to
+anyone who comes into possession of a copy.  This License will
+therefore apply, along with any applicable section 7 additional terms,
+to the whole of the work, and all its parts, regardless of how they
+are packaged.  This License gives no permission to license the work in
+any other way, but it does not invalidate such permission if you have
+separately received it.
+
+@item
+If the work has interactive user interfaces, each must display
+Appropriate Legal Notices; however, if the Program has interactive
+interfaces that do not display Appropriate Legal Notices, your work
+need not make them do so.
+@end enumerate
+
+A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+``aggregate'' if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit.  Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+@item  Conveying Non-Source Forms.
+
+You may convey a covered work in object code form under the terms of
+sections 4 and 5, provided that you also convey the machine-readable
+Corresponding Source under the terms of this License, in one of these
+ways:
+
+@enumerate a
+@item
+Convey the object code in, or embodied in, a physical product
+(including a physical distribution medium), accompanied by the
+Corresponding Source fixed on a durable physical medium customarily
+used for software interchange.
+
+@item
+Convey the object code in, or embodied in, a physical product
+(including a physical distribution medium), accompanied by a written
+offer, valid for at least three years and valid for as long as you
+offer spare parts or customer support for that product model, to give
+anyone who possesses the object code either (1) a copy of the
+Corresponding Source for all the software in the product that is
+covered by this License, on a durable physical medium customarily used
+for software interchange, for a price no more than your reasonable
+cost of physically performing this conveying of source, or (2) access
+to copy the Corresponding Source from a network server at no charge.
+
+@item
+Convey individual copies of the object code with a copy of the written
+offer to provide the Corresponding Source.  This alternative is
+allowed only occasionally and noncommercially, and only if you
+received the object code with such an offer, in accord with subsection
+6b.
+
+@item
+Convey the object code by offering access from a designated place
+(gratis or for a charge), and offer equivalent access to the
+Corresponding Source in the same way through the same place at no
+further charge.  You need not require recipients to copy the
+Corresponding Source along with the object code.  If the place to copy
+the object code is a network server, the Corresponding Source may be
+on a different server (operated by you or a third party) that supports
+equivalent copying facilities, provided you maintain clear directions
+next to the object code saying where to find the Corresponding Source.
+Regardless of what server hosts the Corresponding Source, you remain
+obligated to ensure that it is available for as long as needed to
+satisfy these requirements.
+
+@item
+Convey the object code using peer-to-peer transmission, provided you
+inform other peers where the object code and Corresponding Source of
+the work are being offered to the general public at no charge under
+subsection 6d.
+
+@end enumerate
+
+A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+A ``User Product'' is either (1) a ``consumer product'', which means any
+tangible personal property which is normally used for personal,
+family, or household purposes, or (2) anything designed or sold for
+incorporation into a dwelling.  In determining whether a product is a
+consumer product, doubtful cases shall be resolved in favor of
+coverage.  For a particular product received by a particular user,
+``normally used'' refers to a typical or common use of that class of
+product, regardless of the status of the particular user or of the way
+in which the particular user actually uses, or expects or is expected
+to use, the product.  A product is a consumer product regardless of
+whether the product has substantial commercial, industrial or
+non-consumer uses, unless such uses represent the only significant
+mode of use of the product.
+
+``Installation Information'' for a User Product means any methods,
+procedures, authorization keys, or other information required to
+install and execute modified versions of a covered work in that User
+Product from a modified version of its Corresponding Source.  The
+information must suffice to ensure that the continued functioning of
+the modified object code is in no case prevented or interfered with
+solely because modification has been made.
+
+If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information.  But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or
+updates for a work that has been modified or installed by the
+recipient, or for the User Product in which it has been modified or
+installed.  Access to a network may be denied when the modification
+itself materially and adversely affects the operation of the network
+or violates the rules and protocols for communication across the
+network.
+
+Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+@item Additional Terms.
+
+``Additional permissions'' are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law.  If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it.  (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.)  You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders
+of that material) supplement the terms of this License with terms:
+
+@enumerate a
+@item
+Disclaiming warranty or limiting liability differently from the terms
+of sections 15 and 16 of this License; or
+
+@item
+Requiring preservation of specified reasonable legal notices or author
+attributions in that material or in the Appropriate Legal Notices
+displayed by works containing it; or
+
+@item
+Prohibiting misrepresentation of the origin of that material, or
+requiring that modified versions of such material be marked in
+reasonable ways as different from the original version; or
+
+@item
+Limiting the use for publicity purposes of names of licensors or
+authors of the material; or
+
+@item
+Declining to grant rights under trademark law for use of some trade
+names, trademarks, or service marks; or
+
+@item
+Requiring indemnification of licensors and authors of that material by
+anyone who conveys the material (or modified versions of it) with
+contractual assumptions of liability to the recipient, for any
+liability that these contractual assumptions directly impose on those
+licensors and authors.
+@end enumerate
+
+All other non-permissive additional terms are considered ``further
+restrictions'' within the meaning of section 10.  If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term.  If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions; the
+above requirements apply either way.
+
+@item Termination.
+
+You may not propagate or modify a covered work except as expressly
+provided under this License.  Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+However, if you cease all violation of this License, then your license
+from a particular copyright holder is reinstated (a) provisionally,
+unless and until the copyright holder explicitly and finally
+terminates your license, and (b) permanently, if the copyright holder
+fails to notify you of the violation by some reasonable means prior to
+60 days after the cessation.
+
+Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License.  If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+@item Acceptance Not Required for Having Copies.
+
+You are not required to accept this License in order to receive or run
+a copy of the Program.  Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance.  However,
+nothing other than this License grants you permission to propagate or
+modify any covered work.  These actions infringe copyright if you do
+not accept this License.  Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+@item Automatic Licensing of Downstream Recipients.
+
+Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License.  You are not responsible
+for enforcing compliance by third parties with this License.
+
+An ``entity transaction'' is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations.  If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License.  For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+@item Patents.
+
+A ``contributor'' is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based.  The
+work thus licensed is called the contributor's ``contributor version''.
+
+A contributor's ``essential patent claims'' are all patent claims owned
+or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version.  For
+purposes of this definition, ``control'' includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+In the following three paragraphs, a ``patent license'' is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement).  To ``grant'' such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients.  ``Knowingly relying'' means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+A patent license is ``discriminatory'' if it does not include within the
+scope of its coverage, prohibits the exercise of, or is conditioned on
+the non-exercise of one or more of the rights that are specifically
+granted under this License.  You may not convey a covered work if you
+are a party to an arrangement with a third party that is in the
+business of distributing software, under which you make payment to the
+third party based on the extent of your activity of conveying the
+work, and under which the third party grants, to any of the parties
+who would receive the covered work from you, a discriminatory patent
+license (a) in connection with copies of the covered work conveyed by
+you (or copies made from those copies), or (b) primarily for and in
+connection with specific products or compilations that contain the
+covered work, unless you entered into that arrangement, or that patent
+license was granted, prior to 28 March 2007.
+
+Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+@item No Surrender of Others' Freedom.
+
+If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot convey
+a covered work so as to satisfy simultaneously your obligations under
+this License and any other pertinent obligations, then as a
+consequence you may not convey it at all.  For example, if you agree
+to terms that obligate you to collect a royalty for further conveying
+from those to whom you convey the Program, the only way you could
+satisfy both those terms and this License would be to refrain entirely
+from conveying the Program.
+
+@item Remote Network Interaction; Use with the GNU General Public License.
+
+Notwithstanding any other provision of this License, if you modify the
+Program, your modified version must prominently offer all users interacting
+with it remotely through a computer network (if your version supports such
+interaction) an opportunity to receive the Corresponding Source of your
+version by providing access to the Corresponding Source from a network
+server at no charge, through some standard or customary means of
+facilitating copying of software.  This Corresponding Source shall include
+the Corresponding Source for any work covered by version 3 of the GNU
+General Public License that is incorporated pursuant to the following
+paragraph.
+
+Notwithstanding any other provision of this License, you have permission to
+link or combine any covered work with a work licensed under version 3 of
+the GNU General Public License into a single combined work, and to convey
+the resulting work.  The terms of this License will continue to apply to
+the part which is the covered work, but the work with which it is combined
+will remain governed by version 3 of the GNU General Public License.
+
+@item Revised Versions of this License.
+
+The Free Software Foundation may publish revised and/or new versions
+of the GNU Affero General Public License from time to time.  Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number.  If the Program
+specifies that a certain numbered version of the GNU Affero General Public
+License ``or any later version'' applies to it, you have the option of
+following the terms and conditions either of that numbered version or
+of any later version published by the Free Software Foundation.  If
+the Program does not specify a version number of the GNU Affero General
+Public License, you may choose any version ever published by the Free
+Software Foundation.
+
+If the Program specifies that a proxy can decide which future versions
+of the GNU Affero General Public License can be used, that proxy's public
+statement of acceptance of a version permanently authorizes you to
+choose that version for the Program.
+
+Later license versions may give you additional or different
+permissions.  However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+@item Disclaimer of Warranty.
+
+THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM ``AS IS'' WITHOUT
+WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND
+PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE PROGRAM PROVE
+DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR
+CORRECTION.
+
+@item Limitation of Liability.
+
+IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR
+CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES
+ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT
+NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR
+LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM
+TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER
+PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+
+@item Interpretation of Sections 15 and 16.
+
+If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+@end enumerate
+
+@heading END OF TERMS AND CONDITIONS
+
+@heading How to Apply These Terms to Your New Programs
+
+If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these
+terms.
+
+To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the ``copyright'' line and a pointer to where the full notice is found.
+
+@smallexample
+@var{one line to give the program's name and a brief idea of what it does.}
+Copyright (C) @var{year} @var{name of author}
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU Affero General Public License as published by
+the Free Software Foundation, either version 3 of the License, or (at
+your option) any later version.
+
+This program is distributed in the hope that it will be useful, but
+WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+Affero General Public License for more details.
+
+You should have received a copy of the GNU Affero General Public License
+along with this program.  If not, see @url{http://www.gnu.org/licenses/}.
+@end smallexample
+
+Also add information on how to contact you by electronic and paper mail.
+
+If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source.  For example, if your program is a web application, its
+interface could display a ``Source'' link that leads users to an archive
+of the code.  There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+You should also get your employer (if you work as a programmer) or school,
+if any, to sign a ``copyright disclaimer'' for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+@url{http://www.gnu.org/licenses/}.
diff --git a/doc/brown-paper.css b/doc/brown-paper.css
new file mode 100644
index 0000000..65e2e79
--- /dev/null
+++ b/doc/brown-paper.css
@@ -0,0 +1,63 @@
+/*
+
+Brown Paper style from goldblog.com.ua (c) Zaripov Yura <address@hidden>
+
+*/
+
+.hljs {
+  display: block;
+  overflow-x: auto;
+  padding: 0.5em;
+}
+
+.hljs-keyword,
+.hljs-selector-tag,
+.hljs-literal {
+  color:#005599;
+  font-weight:bold;
+}
+
+.hljs,
+.hljs-subst {
+  color: #363c69;
+}
+
+.hljs-string,
+.hljs-title,
+.hljs-section,
+.hljs-type,
+.hljs-attribute,
+.hljs-symbol,
+.hljs-bullet,
+.hljs-built_in,
+.hljs-addition,
+.hljs-variable,
+.hljs-template-tag,
+.hljs-template-variable,
+.hljs-link,
+.hljs-name {
+  color: #2c009f;
+}
+
+.hljs-comment,
+.hljs-quote,
+.hljs-meta,
+.hljs-deletion {
+  color: #802022;
+}
+
+.hljs-keyword,
+.hljs-selector-tag,
+.hljs-literal,
+.hljs-doctag,
+.hljs-title,
+.hljs-section,
+.hljs-type,
+.hljs-name,
+.hljs-strong {
+  font-weight: bold;
+}
+
+.hljs-emphasis {
+  font-style: italic;
+}
diff --git a/doc/config.sh b/doc/config.sh
new file mode 100755
index 0000000..6448b38
--- /dev/null
+++ b/doc/config.sh
@@ -0,0 +1,116 @@
+#!/bin/bash
+
+# This file is part of TALER
+# (C) 2014, 2015, 2016 INRIA
+# TALER is free software; you can redistribute it and/or modify it under the
+# terms of the GNU Affero General Public License as published by the Free Software
+# Foundation; either version 3, or (at your option) any later version.
+# TALER is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
+# You should have received a copy of the GNU General Public License along with
+# TALER; see the file COPYING.  If not, see <http://www.gnu.org/licenses/>
+
+# @author Marcello Stanisci
+# @brief Walkthrough configuration steps
+
+echo -e "\nThis script will set and show all the configuration\n\
+values needed to run an example backend. The backend will listen on port 8888\n\
+and cooperate with exchange at https://exchange.demo.taler.net/.\n\
+Additionally, the script will also generate the backend's private\n\
+key and banking details' file.\n\n"
+
+# FIXME:
+#       1) banking details generator missing
+
+echo -n "Press ENTER to start > "
+read
+echo 
+
+echo -n "Setting section [merchant]<ENTER> "
+read
+echo 
+
+echo -n "taler-config -s merchant -o serve -V TCP<ENTER> "
+read
+echo 
+taler-config -s merchant -o serve -V TCP
+
+echo -n "taler-config -s merchant -o port -V 8888<ENTER> "
+read
+echo 
+taler-config -s merchant -o port -V 8888
+
+echo -n "taler-config -s merchant -o database -V postgres<ENTER> "
+read
+echo 
+taler-config -s merchant -o database -V postgres
+
+echo -n "taler-config -s merchant -o currency -V KUDOS<ENTER> "
+read
+echo 
+taler-config -s merchant -o currency -V KUDOS
+
+echo -n "taler-config -s merchant -o wireformat -V TEST<ENTER> "
+read
+echo 
+taler-config -s merchant -o wireformat -V TEST
+
+echo -n "Setting section [merchant-instance-default]<ENTER> "
+read
+echo 
+
+echo -ne "taler-config -s merchant-instance-default -o keyfile -V \${TALER_DATA_HOME}/key.priv\n\n\
+(The key will be dynamically generated once the backend starts)<ENTER> "
+read
+echo 
+taler-config -s merchant-instance-default -o keyfile -V '${TALER_DATA_HOME}/key.priv'
+
+echo -n "Setting section [merchant-instance-wireformat-default]<ENTER> "
+read
+echo 
+
+echo -n "taler-config -s merchant-instance-wireformat-default -o test_response_file -V \${TALER_DATA_HOME}/test.json<ENTER> "
+read
+echo 
+taler-config -s merchant-instance-wireformat-default -o test_response_file -V '${TALER_DATA_HOME}/test.json'
+
+sleep 1
+echo -ne "Generating test.json..\n\n"
+DEST=$(taler-config -s merchant-instance-wireformat-default -o test_response_file -f)
+echo '{
+  "type": "test",
+  "bank_uri": "https://bank.test.taler.net/",
+  "sig": "MERCHANTSIGNATURE",
+  "account_number": 6,
+  "salt": "SALT"
+  }' > $DEST
+
+echo -n "Setting section [merchantdb-postgres]<ENTER> "
+read
+echo 
+
+echo -n "taler-config -s merchantdb-postgres -o config -V postgres:///donations<ENTER> "
+read
+echo 
+taler-config -s merchantdb-postgres -o config -V "postgres:///donations"
+
+echo -n "Setting section [merchant-demoexchange]<ENTER> "
+read
+echo 
+
+echo -n "taler-config -s merchant-demoexchange -o uri -V https://exchange.demo.taler.net/<ENTER> "
+read
+echo 
+taler-config -s merchant-demoexchange -o uri -V "https://exchange.demo.taler.net/"
+
+echo -n "taler-config -s merchant-demoexchange -o master_key -V CQQZ9DY3MZ1ARMN5K1VKDETS04Y2QCKMMCFHZSWJWWVN82BTTH00<ENTER> "
+read
+echo 
+taler-config -s merchant-demoexchange -o master_key -V "CQQZ9DY3MZ1ARMN5K1VKDETS04Y2QCKMMCFHZSWJWWVN82BTTH00"
+
+echo -ne "Done. Launch the backend with:\n\
+\$ taler-merchant-httpd\n\nTest it with:\n\
+\$ curl http://127.0.0.1:8888/\n\nIf everything worked \
+fine, you should see:\n\n\
+'Hello, I'm a merchant's Taler backend. This HTTP server is not for humans.'\n\n"
diff --git a/doc/configuration-format.texi b/doc/configuration-format.texi
new file mode 100644
index 0000000..6453ba6
--- /dev/null
+++ b/doc/configuration-format.texi
@@ -0,0 +1,73 @@
+@c This file is used both in the exchange and merchant
+@c manuals. Edits should be propagated to both Git repositories!
+
+@node Configuration format
+@section Configuration format
+@cindex configuration
+
+In the Taler realm, every component follows the same pattern to obtain its
+configuration values.  Once a component has been installed, the installation
+deploys default values in @cite{.conf} files under
+@cite{$@{prefix@}/share/taler/config.d/}.  To override these defaults, the
+user can write a custom @cite{.conf} file and either pass it to the component
+at execution time, or name it @cite{taler.conf} and place it under
+@cite{$HOME/.config/}.
+
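+For instance, assuming the component accepts a @code{-c} option naming the
+configuration file to load (a sketch; the exact invocation depends on the
+component), a custom file could be passed at startup as follows:
+
+@example
+$ taler-merchant-httpd -c $HOME/my-taler.conf
+@end example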
+
+A configuration file is a text file containing @cite{sections}, and each
+section contains its @cite{values}.  The format is as follows:
+
+@example
+[section1]
+value1 = string
+value2 = 23
+
+[section2]
+value21 = string
+value22 = /path22
+@end example
+
+Throughout any configuration file, it is possible to use @code{$}-prefixed
+variables, like @code{$VAR}, especially when they represent filesystem paths.
+It is also possible to provide default values for variables that are unset,
+using the syntax @code{$@{VAR:-default@}}.
+There are two ways a user can set such @code{$}-prefixed variables:
+
+by defining them under a @code{[paths]} section, as in the example below,
+
+@example
+[paths]
+TALER_DEPLOYMENT_SHARED = $@{HOME@}/shared-data
+..
+[section-x]
+path-x = $@{TALER_DEPLOYMENT_SHARED@}/x
+@end example
+
+or by setting them in the environment:
+
+@example
+$ export VAR=/x
+@end example
+
+Note that the configuration loader gives precedence to variables set under
+the @code{[paths]} section.
+
+The utility @code{taler-config}, which is installed along with the exchange,
+can get and set configuration values without directly editing the @cite{.conf}
+files.  The @code{-f} option is particularly useful to resolve pathnames that
+use several levels of @code{$}-expanded variables.  See
+@code{taler-config --help}.
+
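+As a short sketch based on the description above (section and option names
+match those used by the walkthrough script @code{doc/config.sh} in this
+repository):
+
+@example
+# Set a value:
+$ taler-config -s merchant -o port -V 8888
+# Read it back:
+$ taler-config -s merchant -o port
+# Resolve a path option through all $-expansions:
+$ taler-config -s merchant-instance-wireformat-default -o test_response_file -f
+@end example
+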
+Note that, at this stage of development, the file @code{$HOME/.config/taler.conf}
+can contain sections for @emph{all} the components.  For example, both an
+exchange and a bank can read values from it.
+
+The repository @code{git://taler.net/deployment} contains examples of the
+configuration files used in our demos.  See under @code{deployment/config}.
+
+@cartouche
+@quotation Note
+As one would expect, some components will not work with default values alone,
+as their operation is often interdependent.  For example, a merchant needs to
+know an exchange URL, or a database name.
+@end quotation
+@end cartouche
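+
+As a hypothetical illustration (the values are borrowed from the walkthrough
+script @code{doc/config.sh}), a minimal override file addressing the note
+above could look like this:
+
+@example
+[merchantdb-postgres]
+config = postgres:///donations
+
+[merchant-demoexchange]
+uri = https://exchange.demo.taler.net/
+@end example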
+
diff --git a/doc/docstyle.css b/doc/docstyle.css
new file mode 100644
index 0000000..c91d3f7
--- /dev/null
+++ b/doc/docstyle.css
@@ -0,0 +1,76 @@
+html, body {
+   font-size: 1em;
+   text-align: left;
+   text-decoration: none;
+}
+html { background-color: #e7e7e7; }
+
+body {
+   max-width: 74.92em;
+   margin: 0 auto;
+   padding: .5em 1em 1em 1em;
+   background-color: white;
+   border: .1em solid #c0c0c0;
+}
+
+h1, h2, h3, h4 { color: #333; }
+h5, h6, dt { color: #222; }
+
+
+a h3 {
+  color: #005090;
+}
+
+a[href] { color: #005090; }
+a[href]:visited { color: #100070; }
+a[href]:active, a[href]:hover {
+   color: #100070;
+   text-decoration: none;
+}
+
+.linkrow {
+  margin: 3em 0;
+}
+
+.linkrow {
+  text-align: center;
+}
+
+div.example { padding: .8em 1.2em .4em; }
+pre.example { padding: .8em 1.2em; }
+div.example, pre.example {
+   margin: 1em 0 1em 3% ;
+   -webkit-border-radius: .3em;
+   -moz-border-radius: .3em;
+   border-radius: .3em;
+   border: 1px solid #d4cbb6;
+   background-color: #f2efe4;
+}
+div.example > pre.example {
+   padding: 0 0 .4em;
+   margin: 0;
+   border: none;
+}
+
+
+/* This makes the very long tables of contents in Gnulib and other
+   manuals easier to read. */
+.contents ul, .shortcontents ul { font-weight: bold; }
+.contents ul ul, .shortcontents ul ul { font-weight: normal; }
+.contents ul { list-style: none; }
+
+/* For colored navigation bars (Emacs manual): make the bar extend
+   across the whole width of the page and give it a decent height. */
+.header, .node { margin: 0 -1em; padding: 0 1em; }
+.header p, .node p { line-height: 2em; }
+
+/* For navigation links */
+.node a, .header a { display: inline-block; line-height: 2em; }
+.node a:hover, .header a:hover { background: #f2efe4; }
+
+table.cartouche {
+  border-collapse: collapse;
+  border-color: darkred;
+  border-style: solid;
+  border-width: 3px;
+}
diff --git a/doc/fdl-1.3.texi b/doc/fdl-1.3.texi
new file mode 100644
index 0000000..8805f1a
--- /dev/null
+++ b/doc/fdl-1.3.texi
@@ -0,0 +1,506 @@
+@c The GNU Free Documentation License.
+@center Version 1.3, 3 November 2008
+
+@c This file is intended to be included within another document,
+@c hence no sectioning command or @node.
+
+@display
+Copyright @copyright{} 2000, 2001, 2002, 2007, 2008 Free Software Foundation, Inc.
+@uref{http://fsf.org/}
+
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+@end display
+
+@enumerate 0
+@item
+PREAMBLE
+
+The purpose of this License is to make a manual, textbook, or other
+functional and useful document @dfn{free} in the sense of freedom: to
+assure everyone the effective freedom to copy and redistribute it,
+with or without modifying it, either commercially or noncommercially.
+Secondarily, this License preserves for the author and publisher a way
+to get credit for their work, while not being considered responsible
+for modifications made by others.
+
+This License is a kind of ``copyleft'', which means that derivative
+works of the document must themselves be free in the same sense.  It
+complements the GNU General Public License, which is a copyleft
+license designed for free software.
+
+We have designed this License in order to use it for manuals for free
+software, because free software needs free documentation: a free
+program should come with manuals providing the same freedoms that the
+software does.  But this License is not limited to software manuals;
+it can be used for any textual work, regardless of subject matter or
+whether it is published as a printed book.  We recommend this License
+principally for works whose purpose is instruction or reference.
+
+@item
+APPLICABILITY AND DEFINITIONS
+
+This License applies to any manual or other work, in any medium, that
+contains a notice placed by the copyright holder saying it can be
+distributed under the terms of this License.  Such a notice grants a
+world-wide, royalty-free license, unlimited in duration, to use that
+work under the conditions stated herein.  The ``Document'', below,
+refers to any such manual or work.  Any member of the public is a
+licensee, and is addressed as ``you''.  You accept the license if you
+copy, modify or distribute the work in a way requiring permission
+under copyright law.
+
+A ``Modified Version'' of the Document means any work containing the
+Document or a portion of it, either copied verbatim, or with
+modifications and/or translated into another language.
+
+A ``Secondary Section'' is a named appendix or a front-matter section
+of the Document that deals exclusively with the relationship of the
+publishers or authors of the Document to the Document's overall
+subject (or to related matters) and contains nothing that could fall
+directly within that overall subject.  (Thus, if the Document is in
+part a textbook of mathematics, a Secondary Section may not explain
+any mathematics.)  The relationship could be a matter of historical
+connection with the subject or with related matters, or of legal,
+commercial, philosophical, ethical or political position regarding
+them.
+
+The ``Invariant Sections'' are certain Secondary Sections whose titles
+are designated, as being those of Invariant Sections, in the notice
+that says that the Document is released under this License.  If a
+section does not fit the above definition of Secondary then it is not
+allowed to be designated as Invariant.  The Document may contain zero
+Invariant Sections.  If the Document does not identify any Invariant
+Sections then there are none.
+
+The ``Cover Texts'' are certain short passages of text that are listed,
+as Front-Cover Texts or Back-Cover Texts, in the notice that says that
+the Document is released under this License.  A Front-Cover Text may
+be at most 5 words, and a Back-Cover Text may be at most 25 words.
+
+A ``Transparent'' copy of the Document means a machine-readable copy,
+represented in a format whose specification is available to the
+general public, that is suitable for revising the document
+straightforwardly with generic text editors or (for images composed of
+pixels) generic paint programs or (for drawings) some widely available
+drawing editor, and that is suitable for input to text formatters or
+for automatic translation to a variety of formats suitable for input
+to text formatters.  A copy made in an otherwise Transparent file
+format whose markup, or absence of markup, has been arranged to thwart
+or discourage subsequent modification by readers is not Transparent.
+An image format is not Transparent if used for any substantial amount
+of text.  A copy that is not ``Transparent'' is called ``Opaque''.
+
+Examples of suitable formats for Transparent copies include plain
+@sc{ascii} without markup, Texinfo input format, La@TeX{} input
+format, @acronym{SGML} or @acronym{XML} using a publicly available
+@acronym{DTD}, and standard-conforming simple @acronym{HTML},
+PostScript or @acronym{PDF} designed for human modification.  Examples
+of transparent image formats include @acronym{PNG}, @acronym{XCF} and
+@acronym{JPG}.  Opaque formats include proprietary formats that can be
+read and edited only by proprietary word processors, @acronym{SGML} or
+@acronym{XML} for which the @acronym{DTD} and/or processing tools are
+not generally available, and the machine-generated @acronym{HTML},
+PostScript or @acronym{PDF} produced by some word processors for
+output purposes only.
+
+The ``Title Page'' means, for a printed book, the title page itself,
+plus such following pages as are needed to hold, legibly, the material
+this License requires to appear in the title page.  For works in
+formats which do not have any title page as such, ``Title Page'' means
+the text near the most prominent appearance of the work's title,
+preceding the beginning of the body of the text.
+
+The ``publisher'' means any person or entity that distributes copies
+of the Document to the public.
+
+A section ``Entitled XYZ'' means a named subunit of the Document whose
+title either is precisely XYZ or contains XYZ in parentheses following
+text that translates XYZ in another language.  (Here XYZ stands for a
+specific section name mentioned below, such as ``Acknowledgements'',
+``Dedications'', ``Endorsements'', or ``History''.)  To ``Preserve the Title''
+of such a section when you modify the Document means that it remains a
+section ``Entitled XYZ'' according to this definition.
+
+The Document may include Warranty Disclaimers next to the notice which
+states that this License applies to the Document.  These Warranty
+Disclaimers are considered to be included by reference in this
+License, but only as regards disclaiming warranties: any other
+implication that these Warranty Disclaimers may have is void and has
+no effect on the meaning of this License.
+
+@item
+VERBATIM COPYING
+
+You may copy and distribute the Document in any medium, either
+commercially or noncommercially, provided that this License, the
+copyright notices, and the license notice saying this License applies
+to the Document are reproduced in all copies, and that you add no other
+conditions whatsoever to those of this License.  You may not use
+technical measures to obstruct or control the reading or further
+copying of the copies you make or distribute.  However, you may accept
+compensation in exchange for copies.  If you distribute a large enough
+number of copies you must also follow the conditions in section 3.
+
+You may also lend copies, under the same conditions stated above, and
+you may publicly display copies.
+
+@item
+COPYING IN QUANTITY
+
+If you publish printed copies (or copies in media that commonly have
+printed covers) of the Document, numbering more than 100, and the
+Document's license notice requires Cover Texts, you must enclose the
+copies in covers that carry, clearly and legibly, all these Cover
+Texts: Front-Cover Texts on the front cover, and Back-Cover Texts on
+the back cover.  Both covers must also clearly and legibly identify
+you as the publisher of these copies.  The front cover must present
+the full title with all words of the title equally prominent and
+visible.  You may add other material on the covers in addition.
+Copying with changes limited to the covers, as long as they preserve
+the title of the Document and satisfy these conditions, can be treated
+as verbatim copying in other respects.
+
+If the required texts for either cover are too voluminous to fit
+legibly, you should put the first ones listed (as many as fit
+reasonably) on the actual cover, and continue the rest onto adjacent
+pages.
+
+If you publish or distribute Opaque copies of the Document numbering
+more than 100, you must either include a machine-readable Transparent
+copy along with each Opaque copy, or state in or with each Opaque copy
+a computer-network location from which the general network-using
+public has access to download using public-standard network protocols
+a complete Transparent copy of the Document, free of added material.
+If you use the latter option, you must take reasonably prudent steps,
+when you begin distribution of Opaque copies in quantity, to ensure
+that this Transparent copy will remain thus accessible at the stated
+location until at least one year after the last time you distribute an
+Opaque copy (directly or through your agents or retailers) of that
+edition to the public.
+
+It is requested, but not required, that you contact the authors of the
+Document well before redistributing any large number of copies, to give
+them a chance to provide you with an updated version of the Document.
+
+@item
+MODIFICATIONS
+
+You may copy and distribute a Modified Version of the Document under
+the conditions of sections 2 and 3 above, provided that you release
+the Modified Version under precisely this License, with the Modified
+Version filling the role of the Document, thus licensing distribution
+and modification of the Modified Version to whoever possesses a copy
+of it.  In addition, you must do these things in the Modified Version:
+
+@enumerate A
+@item
+Use in the Title Page (and on the covers, if any) a title distinct
+from that of the Document, and from those of previous versions
+(which should, if there were any, be listed in the History section
+of the Document).  You may use the same title as a previous version
+if the original publisher of that version gives permission.
+
+@item
+List on the Title Page, as authors, one or more persons or entities
+responsible for authorship of the modifications in the Modified
+Version, together with at least five of the principal authors of the
+Document (all of its principal authors, if it has fewer than five),
+unless they release you from this requirement.
+
+@item
+State on the Title page the name of the publisher of the
+Modified Version, as the publisher.
+
+@item
+Preserve all the copyright notices of the Document.
+
+@item
+Add an appropriate copyright notice for your modifications
+adjacent to the other copyright notices.
+
+@item
+Include, immediately after the copyright notices, a license notice
+giving the public permission to use the Modified Version under the
+terms of this License, in the form shown in the Addendum below.
+
+@item
+Preserve in that license notice the full lists of Invariant Sections
+and required Cover Texts given in the Document's license notice.
+
+@item
+Include an unaltered copy of this License.
+
+@item
+Preserve the section Entitled ``History'', Preserve its Title, and add
+to it an item stating at least the title, year, new authors, and
+publisher of the Modified Version as given on the Title Page.  If
+there is no section Entitled ``History'' in the Document, create one
+stating the title, year, authors, and publisher of the Document as
+given on its Title Page, then add an item describing the Modified
+Version as stated in the previous sentence.
+
+@item
+Preserve the network location, if any, given in the Document for
+public access to a Transparent copy of the Document, and likewise
+the network locations given in the Document for previous versions
+it was based on.  These may be placed in the ``History'' section.
+You may omit a network location for a work that was published at
+least four years before the Document itself, or if the original
+publisher of the version it refers to gives permission.
+
+@item
+For any section Entitled ``Acknowledgements'' or ``Dedications'', Preserve
+the Title of the section, and preserve in the section all the
+substance and tone of each of the contributor acknowledgements and/or
+dedications given therein.
+
+@item
+Preserve all the Invariant Sections of the Document,
+unaltered in their text and in their titles.  Section numbers
+or the equivalent are not considered part of the section titles.
+
+@item
+Delete any section Entitled ``Endorsements''.  Such a section
+may not be included in the Modified Version.
+
+@item
+Do not retitle any existing section to be Entitled ``Endorsements'' or
+to conflict in title with any Invariant Section.
+
+@item
+Preserve any Warranty Disclaimers.
+@end enumerate
+
+If the Modified Version includes new front-matter sections or
+appendices that qualify as Secondary Sections and contain no material
+copied from the Document, you may at your option designate some or all
+of these sections as invariant.  To do this, add their titles to the
+list of Invariant Sections in the Modified Version's license notice.
+These titles must be distinct from any other section titles.
+
+You may add a section Entitled ``Endorsements'', provided it contains
+nothing but endorsements of your Modified Version by various
+parties---for example, statements of peer review or that the text has
+been approved by an organization as the authoritative definition of a
+standard.
+
+You may add a passage of up to five words as a Front-Cover Text, and a
+passage of up to 25 words as a Back-Cover Text, to the end of the list
+of Cover Texts in the Modified Version.  Only one passage of
+Front-Cover Text and one of Back-Cover Text may be added by (or
+through arrangements made by) any one entity.  If the Document already
+includes a cover text for the same cover, previously added by you or
+by arrangement made by the same entity you are acting on behalf of,
+you may not add another; but you may replace the old one, on explicit
+permission from the previous publisher that added the old one.
+
+The author(s) and publisher(s) of the Document do not by this License
+give permission to use their names for publicity for or to assert or
+imply endorsement of any Modified Version.
+
+@item
+COMBINING DOCUMENTS
+
+You may combine the Document with other documents released under this
+License, under the terms defined in section 4 above for modified
+versions, provided that you include in the combination all of the
+Invariant Sections of all of the original documents, unmodified, and
+list them all as Invariant Sections of your combined work in its
+license notice, and that you preserve all their Warranty Disclaimers.
+
+The combined work need only contain one copy of this License, and
+multiple identical Invariant Sections may be replaced with a single
+copy.  If there are multiple Invariant Sections with the same name but
+different contents, make the title of each such section unique by
+adding at the end of it, in parentheses, the name of the original
+author or publisher of that section if known, or else a unique number.
+Make the same adjustment to the section titles in the list of
+Invariant Sections in the license notice of the combined work.
+
+In the combination, you must combine any sections Entitled ``History''
+in the various original documents, forming one section Entitled
+``History''; likewise combine any sections Entitled ``Acknowledgements'',
+and any sections Entitled ``Dedications''.  You must delete all
+sections Entitled ``Endorsements.''
+
+@item
+COLLECTIONS OF DOCUMENTS
+
+You may make a collection consisting of the Document and other documents
+released under this License, and replace the individual copies of this
+License in the various documents with a single copy that is included in
+the collection, provided that you follow the rules of this License for
+verbatim copying of each of the documents in all other respects.
+
+You may extract a single document from such a collection, and distribute
+it individually under this License, provided you insert a copy of this
+License into the extracted document, and follow this License in all
+other respects regarding verbatim copying of that document.
+
+@item
+AGGREGATION WITH INDEPENDENT WORKS
+
+A compilation of the Document or its derivatives with other separate
+and independent documents or works, in or on a volume of a storage or
+distribution medium, is called an ``aggregate'' if the copyright
+resulting from the compilation is not used to limit the legal rights
+of the compilation's users beyond what the individual works permit.
+When the Document is included in an aggregate, this License does not
+apply to the other works in the aggregate which are not themselves
+derivative works of the Document.
+
+If the Cover Text requirement of section 3 is applicable to these
+copies of the Document, then if the Document is less than one half of
+the entire aggregate, the Document's Cover Texts may be placed on
+covers that bracket the Document within the aggregate, or the
+electronic equivalent of covers if the Document is in electronic form.
+Otherwise they must appear on printed covers that bracket the whole
+aggregate.
+
+@item
+TRANSLATION
+
+Translation is considered a kind of modification, so you may
+distribute translations of the Document under the terms of section 4.
+Replacing Invariant Sections with translations requires special
+permission from their copyright holders, but you may include
+translations of some or all Invariant Sections in addition to the
+original versions of these Invariant Sections.  You may include a
+translation of this License, and all the license notices in the
+Document, and any Warranty Disclaimers, provided that you also include
+the original English version of this License and the original versions
+of those notices and disclaimers.  In case of a disagreement between
+the translation and the original version of this License or a notice
+or disclaimer, the original version will prevail.
+
+If a section in the Document is Entitled ``Acknowledgements'',
+``Dedications'', or ``History'', the requirement (section 4) to Preserve
+its Title (section 1) will typically require changing the actual
+title.
+
+@item
+TERMINATION
+
+You may not copy, modify, sublicense, or distribute the Document
+except as expressly provided under this License.  Any attempt
+otherwise to copy, modify, sublicense, or distribute it is void, and
+will automatically terminate your rights under this License.
+
+However, if you cease all violation of this License, then your license
+from a particular copyright holder is reinstated (a) provisionally,
+unless and until the copyright holder explicitly and finally
+terminates your license, and (b) permanently, if the copyright holder
+fails to notify you of the violation by some reasonable means prior to
+60 days after the cessation.
+
+Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License.  If your rights have been terminated and not permanently
+reinstated, receipt of a copy of some or all of the same material does
+not give you any rights to use it.
+
+@item
+FUTURE REVISIONS OF THIS LICENSE
+
+The Free Software Foundation may publish new, revised versions
+of the GNU Free Documentation License from time to time.  Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.  See
+@uref{http://www.gnu.org/copyleft/}.
+
+Each version of the License is given a distinguishing version number.
+If the Document specifies that a particular numbered version of this
+License ``or any later version'' applies to it, you have the option of
+following the terms and conditions either of that specified version or
+of any later version that has been published (not as a draft) by the
+Free Software Foundation.  If the Document does not specify a version
+number of this License, you may choose any version ever published (not
+as a draft) by the Free Software Foundation.  If the Document
+specifies that a proxy can decide which future versions of this
+License can be used, that proxy's public statement of acceptance of a
+version permanently authorizes you to choose that version for the
+Document.
+
+@item
+RELICENSING
+
+``Massive Multiauthor Collaboration Site'' (or ``MMC Site'') means any
+World Wide Web server that publishes copyrightable works and also
+provides prominent facilities for anybody to edit those works.  A
+public wiki that anybody can edit is an example of such a server.  A
+``Massive Multiauthor Collaboration'' (or ``MMC'') contained in the
+site means any set of copyrightable works thus published on the MMC
+site.
+
+``CC-BY-SA'' means the Creative Commons Attribution-Share Alike 3.0
+license published by Creative Commons Corporation, a not-for-profit
+corporation with a principal place of business in San Francisco,
+California, as well as future copyleft versions of that license
+published by that same organization.
+
+``Incorporate'' means to publish or republish a Document, in whole or
+in part, as part of another Document.
+
+An MMC is ``eligible for relicensing'' if it is licensed under this
+License, and if all works that were first published under this License
+somewhere other than this MMC, and subsequently incorporated in whole
+or in part into the MMC, (1) had no cover texts or invariant sections,
+and (2) were thus incorporated prior to November 1, 2008.
+
+The operator of an MMC Site may republish an MMC contained in the site
+under CC-BY-SA on the same site at any time before August 1, 2009,
+provided the MMC is eligible for relicensing.
+
+@end enumerate
+
+@page
+@heading ADDENDUM: How to use this License for your documents
+
+To use this License in a document you have written, include a copy of
+the License in the document and put the following copyright and
+license notices just after the title page:
+
+@smallexample
+@group
+  Copyright (C)  @var{year}  @var{your name}.
+  Permission is granted to copy, distribute and/or modify this document
+  under the terms of the GNU Free Documentation License, Version 1.3
+  or any later version published by the Free Software Foundation;
+  with no Invariant Sections, no Front-Cover Texts, and no Back-Cover
+  Texts.  A copy of the license is included in the section entitled ``GNU
+  Free Documentation License''.
+@end group
+@end smallexample
+
+If you have Invariant Sections, Front-Cover Texts and Back-Cover Texts,
+replace the ``with@dots{}Texts.'' line with this:
+
+@smallexample
+@group
+    with the Invariant Sections being @var{list their titles}, with
+    the Front-Cover Texts being @var{list}, and with the Back-Cover Texts
+    being @var{list}.
+@end group
+@end smallexample
+
+If you have Invariant Sections without Cover Texts, or some other
+combination of the three, merge those two alternatives to suit the
+situation.
+
+If your document contains nontrivial examples of program code, we
+recommend releasing these examples in parallel under your choice of
+free software license, such as the GNU General Public License,
+to permit their use in free software.
+
+@c Local Variables:
+@c ispell-local-pdict: "ispell-dict"
+@c End:
+
diff --git a/doc/highlight.pack.js b/doc/highlight.pack.js
new file mode 100644
index 0000000..b2f9ff2
--- /dev/null
+++ b/doc/highlight.pack.js
@@ -0,0 +1,2 @@
+/*! highlight.js v9.12.0 | BSD3 License | git.io/hljslicense */
+!function(e){var n="object"==typeof window&&window||"object"==typeof 
self&&self;"undefined"!=typeof 
exports?e(exports):n&&(n.hljs=e({}),"function"==typeof 
define&&define.amd&&define([],function(){return n.hljs}))}(function(e){function 
n(e){return 
e.replace(/&/g,"&amp;").replace(/</g,"&lt;").replace(/>/g,"&gt;")}function 
t(e){return e.nodeName.toLowerCase()}function r(e,n){var t=e&&e.exec(n);return 
t&&0===t.index}function a(e){return k.test(e)}function i(e){var 
n,t,r,i,o=e.className+" ";i [...]
\ No newline at end of file
diff --git a/doc/lgpl.texi b/doc/lgpl.texi
new file mode 100644
index 0000000..ab03d6c
--- /dev/null
+++ b/doc/lgpl.texi
@@ -0,0 +1,549 @@
+@c The GNU Lesser General Public License.
+@center Version 2.1, February 1999
+
+@c This file is intended to be included within another document,
+@c hence no sectioning command or @node.
+
+@display
+Copyright @copyright{} 1991, 1999 Free Software Foundation, Inc.
+51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
+
+Everyone is permitted to copy and distribute verbatim copies
+of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL.  It also counts
+as the successor of the GNU Library Public License, version 2, hence the
+version number 2.1.]
+@end display
+
+@subheading Preamble
+
+  The licenses for most software are designed to take away your
+freedom to share and change it.  By contrast, the GNU General Public
+Licenses are intended to guarantee your freedom to share and change
+free software---to make sure the software is free for all its users.
+
+  This license, the Lesser General Public License, applies to some
+specially designated software---typically libraries---of the Free
+Software Foundation and other authors who decide to use it.  You can use
+it too, but we suggest you first think carefully about whether this
+license or the ordinary General Public License is the better strategy to
+use in any particular case, based on the explanations below.
+
+  When we speak of free software, we are referring to freedom of use,
+not price.  Our General Public Licenses are designed to make sure that
+you have the freedom to distribute copies of free software (and charge
+for this service if you wish); that you receive source code or can get
+it if you want it; that you can change the software and use pieces of it
+in new free programs; and that you are informed that you can do these
+things.
+
+  To protect your rights, we need to make restrictions that forbid
+distributors to deny you these rights or to ask you to surrender these
+rights.  These restrictions translate to certain responsibilities for
+you if you distribute copies of the library or if you modify it.
+
+  For example, if you distribute copies of the library, whether gratis
+or for a fee, you must give the recipients all the rights that we gave
+you.  You must make sure that they, too, receive or can get the source
+code.  If you link other code with the library, you must provide
+complete object files to the recipients, so that they can relink them
+with the library after making changes to the library and recompiling
+it.  And you must show them these terms so they know their rights.
+
+  We protect your rights with a two-step method: (1) we copyright the
+library, and (2) we offer you this license, which gives you legal
+permission to copy, distribute and/or modify the library.
+
+  To protect each distributor, we want to make it very clear that
+there is no warranty for the free library.  Also, if the library is
+modified by someone else and passed on, the recipients should know
+that what they have is not the original version, so that the original
+author's reputation will not be affected by problems that might be
+introduced by others.
+
+  Finally, software patents pose a constant threat to the existence of
+any free program.  We wish to make sure that a company cannot
+effectively restrict the users of a free program by obtaining a
+restrictive license from a patent holder.  Therefore, we insist that
+any patent license obtained for a version of the library must be
+consistent with the full freedom of use specified in this license.
+
+  Most GNU software, including some libraries, is covered by the
+ordinary GNU General Public License.  This license, the GNU Lesser
+General Public License, applies to certain designated libraries, and
+is quite different from the ordinary General Public License.  We use
+this license for certain libraries in order to permit linking those
+libraries into non-free programs.
+
+  When a program is linked with a library, whether statically or using
+a shared library, the combination of the two is legally speaking a
+combined work, a derivative of the original library.  The ordinary
+General Public License therefore permits such linking only if the
+entire combination fits its criteria of freedom.  The Lesser General
+Public License permits more lax criteria for linking other code with
+the library.
+
+  We call this license the @dfn{Lesser} General Public License because it
+does @emph{Less} to protect the user's freedom than the ordinary General
+Public License.  It also provides other free software developers Less
+of an advantage over competing non-free programs.  These disadvantages
+are the reason we use the ordinary General Public License for many
+libraries.  However, the Lesser license provides advantages in certain
+special circumstances.
+
+  For example, on rare occasions, there may be a special need to
+encourage the widest possible use of a certain library, so that it becomes
+a de-facto standard.  To achieve this, non-free programs must be
+allowed to use the library.  A more frequent case is that a free
+library does the same job as widely used non-free libraries.  In this
+case, there is little to gain by limiting the free library to free
+software only, so we use the Lesser General Public License.
+
+  In other cases, permission to use a particular library in non-free
+programs enables a greater number of people to use a large body of
+free software.  For example, permission to use the GNU C Library in
+non-free programs enables many more people to use the whole GNU
+operating system, as well as its variant, the GNU/Linux operating
+system.
+
+  Although the Lesser General Public License is Less protective of the
+users' freedom, it does ensure that the user of a program that is
+linked with the Library has the freedom and the wherewithal to run
+that program using a modified version of the Library.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.  Pay close attention to the difference between a
+``work based on the library'' and a ``work that uses the library''.  The
+former contains code derived from the library, whereas the latter must
+be combined with the library in order to run.
+
+@subheading TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+@enumerate 0
+@item
+This License Agreement applies to any software library or other program
+which contains a notice placed by the copyright holder or other
+authorized party saying it may be distributed under the terms of this
+Lesser General Public License (also called ``this License'').  Each
+licensee is addressed as ``you''.
+
+  A ``library'' means a collection of software functions and/or data
+prepared so as to be conveniently linked with application programs
+(which use some of those functions and data) to form executables.
+
+  The ``Library'', below, refers to any such software library or work
+which has been distributed under these terms.  A ``work based on the
+Library'' means either the Library or any derivative work under
+copyright law: that is to say, a work containing the Library or a
+portion of it, either verbatim or with modifications and/or translated
+straightforwardly into another language.  (Hereinafter, translation is
+included without limitation in the term ``modification''.)
+
+  ``Source code'' for a work means the preferred form of the work for
+making modifications to it.  For a library, complete source code means
+all the source code for all modules it contains, plus any associated
+interface definition files, plus the scripts used to control compilation
+and installation of the library.
+
+  Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope.  The act of
+running a program using the Library is not restricted, and output from
+such a program is covered only if its contents constitute a work based
+on the Library (independent of the use of the Library in a tool for
+writing it).  Whether that is true depends on what the Library does
+and what the program that uses the Library does.
+
+@item
+You may copy and distribute verbatim copies of the Library's
+complete source code as you receive it, in any medium, provided that
+you conspicuously and appropriately publish on each copy an
+appropriate copyright notice and disclaimer of warranty; keep intact
+all the notices that refer to this License and to the absence of any
+warranty; and distribute a copy of this License along with the
+Library.
+
+  You may charge a fee for the physical act of transferring a copy,
+and you may at your option offer warranty protection in exchange for a
+fee.
+
+@item
+You may modify your copy or copies of the Library or any portion
+of it, thus forming a work based on the Library, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+@enumerate a
+@item
+The modified work must itself be a software library.
+
+@item
+You must cause the files modified to carry prominent notices
+stating that you changed the files and the date of any change.
+
+@item
+You must cause the whole of the work to be licensed at no
+charge to all third parties under the terms of this License.
+
+@item
+If a facility in the modified Library refers to a function or a
+table of data to be supplied by an application program that uses
+the facility, other than as an argument passed when the facility
+is invoked, then you must make a good faith effort to ensure that,
+in the event an application does not supply such function or
+table, the facility still operates, and performs whatever part of
+its purpose remains meaningful.
+
+(For example, a function in a library to compute square roots has
+a purpose that is entirely well-defined independent of the
+application.  Therefore, Subsection 2d requires that any
+application-supplied function or table used by this function must
+be optional: if the application does not supply it, the square
+root function must still compute square roots.)
+@end enumerate
+
+These requirements apply to the modified work as a whole.  If
+identifiable sections of that work are not derived from the Library,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works.  But when you
+distribute the same sections as part of a whole which is a work based
+on the Library, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote
+it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library
+with the Library (or with a work based on the Library) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+@item
+You may opt to apply the terms of the ordinary GNU General Public
+License instead of this License to a given copy of the Library.  To do
+this, you must alter all the notices that refer to this License, so
+that they refer to the ordinary GNU General Public License, version 2,
+instead of to this License.  (If a newer version than version 2 of the
+ordinary GNU General Public License has appeared, then you can specify
+that version instead if you wish.)  Do not make any other change in
+these notices.
+
+  Once this change is made in a given copy, it is irreversible for
+that copy, so the ordinary GNU General Public License applies to all
+subsequent copies and derivative works made from that copy.
+
+  This option is useful when you wish to copy part of the code of
+the Library into a program that is not a library.
+
+@item
+You may copy and distribute the Library (or a portion or
+derivative of it, under Section 2) in object code or executable form
+under the terms of Sections 1 and 2 above provided that you accompany
+it with the complete corresponding machine-readable source code, which
+must be distributed under the terms of Sections 1 and 2 above on a
+medium customarily used for software interchange.
+
+  If distribution of object code is made by offering access to copy
+from a designated place, then offering equivalent access to copy the
+source code from the same place satisfies the requirement to
+distribute the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+@item
+A program that contains no derivative of any portion of the
+Library, but is designed to work with the Library by being compiled or
+linked with it, is called a ``work that uses the Library''.  Such a
+work, in isolation, is not a derivative work of the Library, and
+therefore falls outside the scope of this License.
+
+  However, linking a ``work that uses the Library'' with the Library
+creates an executable that is a derivative of the Library (because it
+contains portions of the Library), rather than a ``work that uses the
+library''.  The executable is therefore covered by this License.
+Section 6 states terms for distribution of such executables.
+
+  When a ``work that uses the Library'' uses material from a header file
+that is part of the Library, the object code for the work may be a
+derivative work of the Library even though the source code is not.
+Whether this is true is especially significant if the work can be
+linked without the Library, or if the work is itself a library.  The
+threshold for this to be true is not precisely defined by law.
+
+  If such an object file uses only numerical parameters, data
+structure layouts and accessors, and small macros and small inline
+functions (ten lines or less in length), then the use of the object
+file is unrestricted, regardless of whether it is legally a derivative
+work.  (Executables containing this object code plus portions of the
+Library will still fall under Section 6.)
+
+  Otherwise, if the work is a derivative of the Library, you may
+distribute the object code for the work under the terms of Section 6.
+Any executables containing that work also fall under Section 6,
+whether or not they are linked directly with the Library itself.
+
+@item
+As an exception to the Sections above, you may also combine or
+link a ``work that uses the Library'' with the Library to produce a
+work containing portions of the Library, and distribute that work
+under terms of your choice, provided that the terms permit
+modification of the work for the customer's own use and reverse
+engineering for debugging such modifications.
+
+  You must give prominent notice with each copy of the work that the
+Library is used in it and that the Library and its use are covered by
+this License.  You must supply a copy of this License.  If the work
+during execution displays copyright notices, you must include the
+copyright notice for the Library among them, as well as a reference
+directing the user to the copy of this License.  Also, you must do one
+of these things:
+
+@enumerate a
+@item
+Accompany the work with the complete corresponding
+machine-readable source code for the Library including whatever
+changes were used in the work (which must be distributed under
+Sections 1 and 2 above); and, if the work is an executable linked
+with the Library, with the complete machine-readable ``work that
+uses the Library'', as object code and/or source code, so that the
+user can modify the Library and then relink to produce a modified
+executable containing the modified Library.  (It is understood
+that the user who changes the contents of definitions files in the
+Library will not necessarily be able to recompile the application
+to use the modified definitions.)
+
+@item
+Use a suitable shared library mechanism for linking with the Library.  A
+suitable mechanism is one that (1) uses at run time a copy of the
+library already present on the user's computer system, rather than
+copying library functions into the executable, and (2) will operate
+properly with a modified version of the library, if the user installs
+one, as long as the modified version is interface-compatible with the
+version that the work was made with.
+
+@item
+Accompany the work with a written offer, valid for at
+least three years, to give the same user the materials
+specified in Subsection 6a, above, for a charge no more
+than the cost of performing this distribution.
+
+@item
+If distribution of the work is made by offering access to copy
+from a designated place, offer equivalent access to copy the above
+specified materials from the same place.
+
+@item
+Verify that the user has already received a copy of these
+materials or that you have already sent this user a copy.
+@end enumerate
+
+  For an executable, the required form of the ``work that uses the
+Library'' must include any data and utility programs needed for
+reproducing the executable from it.  However, as a special exception,
+the materials to be distributed need not include anything that is
+normally distributed (in either source or binary form) with the major
+components (compiler, kernel, and so on) of the operating system on
+which the executable runs, unless that component itself accompanies the
+executable.
+
+  It may happen that this requirement contradicts the license
+restrictions of other proprietary libraries that do not normally
+accompany the operating system.  Such a contradiction means you cannot
+use both them and the Library together in an executable that you
+distribute.
+
+@item
+You may place library facilities that are a work based on the
+Library side-by-side in a single library together with other library
+facilities not covered by this License, and distribute such a combined
+library, provided that the separate distribution of the work based on
+the Library and of the other library facilities is otherwise
+permitted, and provided that you do these two things:
+
+@enumerate a
+@item
+Accompany the combined library with a copy of the same work
+based on the Library, uncombined with any other library
+facilities.  This must be distributed under the terms of the
+Sections above.
+
+@item
+Give prominent notice with the combined library of the fact
+that part of it is a work based on the Library, and explaining
+where to find the accompanying uncombined form of the same work.
+@end enumerate
+
+@item
+You may not copy, modify, sublicense, link with, or distribute
+the Library except as expressly provided under this License.  Any
+attempt otherwise to copy, modify, sublicense, link with, or
+distribute the Library is void, and will automatically terminate your
+rights under this License.  However, parties who have received copies,
+or rights, from you under this License will not have their licenses
+terminated so long as such parties remain in full compliance.
+
+@item
+You are not required to accept this License, since you have not
+signed it.  However, nothing else grants you permission to modify or
+distribute the Library or its derivative works.  These actions are
+prohibited by law if you do not accept this License.  Therefore, by
+modifying or distributing the Library (or any work based on the
+Library), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Library or works based on it.
+
+@item
+Each time you redistribute the Library (or any work based on the
+Library), the recipient automatically receives a license from the
+original licensor to copy, distribute, link with or modify the Library
+subject to these terms and conditions.  You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties with
+this License.
+
+@item
+If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Library at all.  For example, if a patent
+license would not permit royalty-free redistribution of the Library by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any
+particular circumstance, the balance of the section is intended to apply,
+and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system which is
+implemented by public license practices.  Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+@item
+If the distribution and/or use of the Library is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Library under this License may add
+an explicit geographical distribution limitation excluding those countries,
+so that distribution is permitted only in or among countries not thus
+excluded.  In such case, this License incorporates the limitation as if
+written in the body of this License.
+
+@item
+The Free Software Foundation may publish revised and/or new
+versions of the Lesser General Public License from time to time.
+Such new versions will be similar in spirit to the present version,
+but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number.  If the Library
+specifies a version number of this License which applies to it and
+``any later version'', you have the option of following the terms and
+conditions either of that version or of any later version published by
+the Free Software Foundation.  If the Library does not specify a
+license version number, you may choose any version ever published by
+the Free Software Foundation.
+
+@item
+If you wish to incorporate parts of the Library into other free
+programs whose distribution conditions are incompatible with these,
+write to the author to ask for permission.  For software which is
+copyrighted by the Free Software Foundation, write to the Free
+Software Foundation; we sometimes make exceptions for this.  Our
+decision will be guided by the two goals of preserving the free status
+of all derivatives of our free software and of promoting the sharing
+and reuse of software generally.
+
+@center @b{NO WARRANTY}
+
+@item
+BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+OTHER PARTIES PROVIDE THE LIBRARY ``AS IS'' WITHOUT WARRANTY OF ANY
+KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+LIBRARY IS WITH YOU.  SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+@item
+IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGES.
+@end enumerate
+
+@subheading END OF TERMS AND CONDITIONS
+
+@page
+@subheading How to Apply These Terms to Your New Libraries
+
+  If you develop a new library, and you want it to be of the greatest
+possible use to the public, we recommend making it free software that
+everyone can redistribute and change.  You can do so by permitting
+redistribution under these terms (or, alternatively, under the terms of the
+ordinary General Public License).
+
+  To apply these terms, attach the following notices to the library.  It is
+safest to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least the
+``copyright'' line and a pointer to where the full notice is found.
+
+@smallexample
+@var{one line to give the library's name and an idea of what it does.}
+Copyright (C) @var{year}  @var{name of author}
+
+This library is free software; you can redistribute it and/or modify it
+under the terms of the GNU Lesser General Public License as published by
+the Free Software Foundation; either version 2.1 of the License, or (at
+your option) any later version.
+
+This library is distributed in the hope that it will be useful, but
+WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+Lesser General Public License for more details.
+
+You should have received a copy of the GNU Lesser General Public
+License along with this library; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
+USA.
+@end smallexample
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a ``copyright disclaimer'' for the library, if
+necessary.  Here is a sample; alter the names:
+
+@smallexample
+Yoyodyne, Inc., hereby disclaims all copyright interest in the library
+`Frob' (a library for tweaking knobs) written by James Random Hacker.
+
+@var{signature of Ty Coon}, 1 April 1990
+Ty Coon, President of Vice
+@end smallexample
+
+That's all there is to it!
diff --git a/doc/manual.info b/doc/manual.info
new file mode 100644
index 0000000..9665880
Binary files /dev/null and b/doc/manual.info differ
diff --git a/doc/manual.texi b/doc/manual.texi
new file mode 100644
index 0000000..b37ad61
--- /dev/null
+++ b/doc/manual.texi
@@ -0,0 +1,1296 @@
+\input texinfo @c -*-texinfo-*-
+@c %**start of header
+@c Too generic, should be renamed to avoid system conflicts.
+@c probably: manual.info -> taler-merchant.info
+@setfilename manual.info
+@include version-manual.texi
+@settitle The GNU Taler merchant backend operator tutorial @value{VERSION}
+
+@include syntax.texi
+
+@c Define a new index for options.
+@defcodeindex op
+@c Combine everything into one index (arbitrarily chosen to be the
+@c concept index).
+@syncodeindex op cp
+@c %**end of header
+
+@copying
+This manual is for the GNU Taler merchant backend
+(version @value{VERSION}, @value{UPDATED}).
+
+Copyright @copyright{} 2016, 2017, 2019 Taler Systems SA
+
+@quotation
+Permission is granted to copy, distribute and/or modify this document
+under the terms of the GNU Free Documentation License, Version 1.3 or
+any later version published by the Free Software Foundation; with no
+Invariant Sections, with no Front-Cover Texts, and with no Back-Cover
+Texts.  A copy of the license is included in the section entitled
+``GNU Free Documentation License''.
+@end quotation
+@end copying
+@c If your manual is published on paper by the FSF, it should include
+@c The standard FSF Front-Cover and Back-Cover Texts, as given in
+@c maintain.texi.
+@c
+@c Titlepage
+@c
+@titlepage
+@title The GNU Taler merchant backend operator tutorial
+@subtitle Version @value{VERSION}
+@subtitle @value{UPDATED}
+@author Marcello Stanisci (@email{marcello.stanisci@@inria.fr})
+@author Christian Grothoff (@email{christian.grothoff@@inria.fr})
+@page
+@vskip 0pt plus 1filll
+@insertcopying
+@end titlepage
+
+@summarycontents
+@contents
+
+@ifnottex
+@node Top
+@top The GNU Taler manual for Web shops
+@insertcopying
+@end ifnottex
+
+@menu
+* Introduction::            Whom this manual is addressed to
+* Installation::            Installing the Merchant backend
+* Configuration::           How to set up the Merchant backend
+* Testing::                 How to test the installed Merchant backend
+* Advanced topics::         Detailed solutions to specific issues
+
+
+Appendices
+
+* GNU-LGPL::                The GNU Lesser General Public License says how you
+                            can use the code of libtalermerchant.so in
+                            your own projects.
+* GNU Affero GPL::          The GNU Affero General Public License says how you
+                            can copy and share the Taler merchant backend.
+* GNU-FDL::                 The GNU Free Documentation License says how you
+                            can copy and share the documentation of GNU Taler.
+
+Indices
+
+* Concept Index::           Index of concepts and programs.
+
+@detailmenu
+ --- The Detailed Node Listing ---
+
+Introduction
+
+* About GNU Taler::
+* About this manual::
+* Architecture overview::
+
+Installation
+
+* Installing Taler using Docker::
+* Generic instructions::
+* Installing Taler on Debian GNU/Linux::
+
+Configuration
+
+* Backend options::
+* Sample backend configuration::
+* Launching the backend::
+
+Testing
+
+Advanced topics
+
+* Configuration format::
+* Using taler-config::
+* Merchant key management::
+* SEPA configuration::
+* Tipping visitors::
+* Generate payments::
+
+@end detailmenu
+@end menu
+
+
+@node Introduction
+@chapter Introduction
+
+@menu
+* About GNU Taler::
+* About this manual::
+* Architecture overview::
+@end menu
+
+@node About GNU Taler
+@section About GNU Taler
+
+GNU Taler is an open protocol for an electronic payment system with a
+free software reference implementation.  GNU Taler offers secure, fast
+and easy payment processing using well understood cryptographic
+techniques.  GNU Taler allows customers to remain anonymous, while
+ensuring that merchants can be held accountable by governments.
+Hence, GNU Taler is compatible with anti-money-laundering (AML) and
+know-your-customer (KYC) regulation, as well as data protection
+regulation (such as GDPR).
+
+GNU Taler is not yet production-ready: after following this manual,
+you will have a backend that can process payments in ``KUDOS'', but
+not regular currencies.  This is not so much because of limitations
+in the backend, but because we are not aware of a Taler exchange
+operator offering regular currencies today.
+
+@node About this manual
+@section About this manual
+
+This tutorial targets system administrators who want to
+install a GNU Taler merchant @emph{backend}.
+
+We expect some moderate familiarity with the compilation and installation
+of free software packages. An understanding of cryptography
+is not required.
+
+This first chapter of the tutorial will give a brief overview of
+the overall Taler architecture, describing the environment in which
+the Taler backend operates.
+The second chapter then explains how to install the software,
+including key dependencies.  The third chapter will explain how to
+configure the backend, including in particular the configuration of the
+bank account details of the merchant.
+
+@c The fourth chapter will explain how to test that the setup worked correctly.
+
+The last chapter gives some additional information about advanced topics
+which will be useful for system administrators but are not necessary for
+operating a basic backend.
+
+@node Architecture overview
+@section Architecture overview
+
+@cindex crypto-currency
+@cindex KUDOS
+Taler is a pure payment system, not a new crypto-currency. As such, it
+operates in a traditional banking context.  In particular, this means
+that in order to receive funds via Taler, the merchant must have a
+regular bank account, and payments can be executed in ordinary
+currencies such as USD or EUR.  For testing purposes, Taler uses a
+special currency ``KUDOS'' and includes its own special bank.
+
+The Taler software stack for a merchant consists of four main components:
+
+@itemize
+@cindex frontend
+@item A frontend which interacts with the customer's browser. The
+  frontend enables the customer to build a shopping cart and place
+  an order.  Upon payment, it triggers the respective business logic
+  to satisfy the order.  This component is not included with Taler,
+  but rather assumed to exist at the merchant. This manual
+  describes how to integrate Taler with Web shop frontends.
+@cindex back office
+@item A back office application that enables the shop operators to
+  view customer orders, match them to financial transfers, and possibly
+  approve refunds if an order cannot be satisfied.  This component is
+  again not included with Taler, but rather assumed to exist at the
+  merchant. This manual will describe how to integrate such a component
+  to handle payments managed by Taler.
+@cindex backend
+@item A Taler-specific payment backend which makes it easy for the
+  frontend to process financial transactions with Taler.  The
+  next two chapters will describe how to install and configure
+  this backend.
+@cindex DBMS
+@cindex Postgres
+@item A DBMS which stores the transaction history for the Taler backend.
+  For now, the GNU Taler reference implementation only supports Postgres,
+  but the code could be easily extended to support another DBMS.
+@end itemize
+
+The following image illustrates the various interactions of these
+key components:
+
+@center @image{arch, 3in}
+
+@cindex RESTful
+Basically, the backend provides the cryptographic protocol support,
+stores Taler-specific financial information in a DBMS and communicates
+with the GNU Taler exchange over the Internet.  The frontend accesses
+the backend via a RESTful API.  As a result, the frontend never has to
+directly communicate with the exchange, and also does not deal with
+sensitive data.  In particular, the merchant's signing keys and bank
+account information are encapsulated within the Taler backend.
+
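+As a quick illustration of this interaction, a frontend (or an
+administrator) can check that the backend is reachable with a plain
+HTTP request.  This is only a sketch: the host and port below are
+placeholders and depend on how the backend is configured (see the
+following chapters).
+
+@smallexample
+# Placeholder address: use the host and port your backend listens on.
+$ curl http://localhost:8888/
+# The backend replies with a simple greeting page.
+@end smallexample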
+
+@node Installation
+@chapter Installation
+
+@menu
+* Installing Taler using Docker::
+* Generic instructions::
+* Installing Taler on Debian GNU/Linux::
+@c * Installing Taler with GNU Guix:: Installing Taler with GNU Guix
+@c * Installing Taler on Arch Linux:: Installing Taler on Arch Linux
+@c * Installing Taler on Windows:: Installing Taler on Windows
+@c * Installing Taler on OS X:: Installing Taler on OS X
+@end menu
+
+This chapter describes how to install the GNU Taler merchant backend.
+
+@node Installing Taler using Docker
+@section Installing Taler using Docker
+
+This section provides instructions for the merchant backend
+installation using Docker.
+
+For security reasons, we run Docker against a VirtualBox instance,
+so the @code{docker} command should connect to a @code{docker-machine}
+instance that uses the VirtualBox driver.
+
+The needed tools are therefore @code{docker}, @code{docker-machine}, and
+@code{docker-compose}.  Please refer to Docker's official
+documentation@footnote{https://docs.docker.com/} in order
+to get those components installed, as that is not in this manual's scope.
+
+Before starting to build the merchant's image, make sure a
+@code{docker-machine} instance is up and running.
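+
+If no such instance exists yet, one (illustrative) way to create and
+select a VirtualBox-backed machine is shown below; the machine name
+@code{taler} is just an example:
+
+@smallexample
+# Create a VirtualBox-backed machine and point docker at it.
+$ docker-machine create --driver virtualbox taler
+$ eval $(docker-machine env taler)
+$ docker-machine ls    # "taler" should be listed as Running
+@end smallexample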
+
+Because all of the Docker source files are kept in our @code{deployment}
+repository, we start by checking out the
+@code{git://taler.net/deployment} codebase:
+
+@smallexample
+$ git clone git://taler.net/deployment
+@end smallexample
+
+Now we actually build the merchant's image.  From the same
+directory as above:
+
+@smallexample
+$ cd deployment/docker/merchant/
+$ docker-compose build
+@end smallexample
+
+If everything worked as expected, the merchant is ready to be
+launched.  From the same directory as the previous step:
+
+@smallexample
+# Recall: the docker-machine should be up and running.
+$ docker-compose up
+@end smallexample
+
+You should see some live logging from all the involved containers.
+At this stage of development, you should also ignore some (harmless)
+error messages from PostgreSQL about already existing roles and databases.
+
+To test if everything worked as expected, it suffices to issue a
+simple request to the merchant, such as:
+
+@smallexample
+$ curl http://$(docker-machine ip)/
+# A greeting message should be returned by the merchant.
+@end smallexample
+
+
+@node Generic instructions
+@section Generic instructions
+
+This section provides generic instructions for the merchant backend
+installation independent of any particular operating system.
+Operating system specific instructions are provided in the following
+sections.  You should follow the operating system specific
+instructions if those are available, and only consult the generic
+instructions if no system-specific instructions are provided for your
+specific operating system.
+
+@menu
+* Installation of dependencies::
+* Installing libgnunetutil::
+* Installing the GNU Taler exchange::
+* Installing the GNU Taler merchant backend::
+@end menu
+
+@node Installation of dependencies
+@subsection Installation of dependencies
+
+The following packages need to be installed before we can compile the
+backend:
+
+@itemize
+@item autoconf >= 2.69
+@item automake >= 1.14
+@item libtool >= 2.4
+@item autopoint >= 0.19
+@item libltdl >= 2.4
+@item libunistring >= 0.9.3
+@item libcurl >= 7.26 (or libgnurl >= 7.26)
+@item GNU libmicrohttpd >= 0.9.39
+@item GNU libgcrypt >= 1.6
+@item libjansson >= 2.7
+@item Postgres >= 9.4, including libpq
+@item libgnunetutil (from Git)
+@item GNU Taler exchange (from Git)
+@end itemize
+
+Except for the last two, these are available in most GNU/Linux
+distributions and should just be installed using the respective
+package manager.
+
+The following sections will provide detailed instructions for
+installing the libgnunetutil and GNU Taler exchange dependencies.
+
+@node Installing libgnunetutil
+@subsection Installing libgnunetutil
+
+@cindex GNUnet
+Before you install libgnunetutil, you must download and install the
+dependencies mentioned in the previous section, otherwise the build
+may succeed but fail to export some of the tooling required by Taler.
+
+To download and install libgnunetutil, proceed as follows:
+
+@example
+$ git clone https://gnunet.org/git/gnunet/
+$ cd gnunet/
+$ ./bootstrap
+$ ./configure [--prefix=GNUNETPFX]
+$ # Each dependency can be fetched from non standard locations via
+$ # the '--with-<LIBNAME>' option. See './configure --help'.
+$ make
+# make install
+@end example
+
+If you did not specify a prefix, GNUnet will install to
+@code{/usr/local}, which requires you to run the last step as
+@code{root}.
+
+@node Installing the GNU Taler exchange
+@subsection Installing the GNU Taler exchange
+
+@cindex exchange
+After installing GNUnet, you can download and install
+the exchange as follows:
+
+@example
+$ git clone git://taler.net/exchange
+$ cd exchange
+$ ./bootstrap
+$ ./configure [--prefix=EXCHANGEPFX] \
+              [--with-gnunet=GNUNETPFX]
+$ # Each dependency can be fetched from non standard locations via
+$ # the '--with-<LIBNAME>' option. See './configure --help'.
+$ make
+# make install
+@end example
+
+If you did not specify a prefix, the exchange will install to
+@code{/usr/local}, which requires you to run the last step as
+@code{root}.  Note that you have to specify
+@code{--with-gnunet=/usr/local} if you installed GNUnet to
+@code{/usr/local} in the previous step.
+
+@node Installing the GNU Taler merchant backend
+@subsection Installing the GNU Taler merchant backend
+
+@cindex backend
+The following steps assume all dependencies are installed.
+
+Use the following commands to download and install the
+merchant backend:
+
+@example
+$ git clone git://taler.net/merchant
+$ cd merchant
+$ ./bootstrap
+$ ./configure [--prefix=PFX] \
+              [--with-gnunet=GNUNETPFX] \
+              [--with-exchange=EXCHANGEPFX]
+$ # Each dependency can be fetched from non standard locations via
+$ # the '--with-<LIBNAME>' option. See './configure --help'.
+$ make
+$ make install
+@end example
+
+Note that you have to specify @code{--with-exchange=/usr/local} and/or
+@code{--with-gnunet=/usr/local} if you installed the exchange and/or
+GNUnet to @code{/usr/local} in the previous steps.
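+
+For example, if both GNUnet and the exchange were installed to
+@code{/usr/local} as in the previous steps, a complete (illustrative)
+invocation would be:
+
+@example
+$ ./configure --prefix=/usr/local \
+              --with-gnunet=/usr/local \
+              --with-exchange=/usr/local
+@end example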
+
+@c @node Installing Taler with GNU Guix
+@c @section Installing Taler with GNU Guix
+
+@c This section has not yet been written.
+
+
+
+
+@node Installing Taler on Debian GNU/Linux
+@section Installing Taler on Debian GNU/Linux
+
+@cindex Wheezy
+@cindex Debian
+Debian wheezy is too old and lacks most of the packages required.
+
+On Debian jessie, only GNU libmicrohttpd needs to be compiled from
+source.  To install dependencies on Debian jessie, run the following
+commands:
+
+@example
+# apt-get install \
+  autoconf \
+  automake \
+  autopoint \
+  libtool \
+  libltdl-dev \
+  libunistring-dev \
+  libcurl4-gnutls-dev \
+  libgcrypt20-dev \
+  libjansson-dev \
+  libpq-dev \
+  postgresql-9.4
+# wget https://ftp.gnu.org/gnu/libmicrohttpd/libmicrohttpd-latest.tar.gz
+# wget https://ftp.gnu.org/gnu/libmicrohttpd/libmicrohttpd-latest.tar.gz.sig
+# gpg -v libmicrohttpd-latest.tar.gz.sig # Should show signed by 939E6BE1E29FC3CC
+# tar xf libmicrohttpd-latest.tar.gz
+# cd libmicrohttpd-0*
+# ./configure
+# make install
+@end example
+
+For more recent versions of Debian, you should instead run:
+
+@example
+# apt-get install \
+  autoconf \
+  automake \
+  autopoint \
+  libtool \
+  libltdl-dev \
+  libunistring-dev \
+  libcurl4-gnutls-dev \
+  libgcrypt20-dev \
+  libjansson-dev \
+  libpq-dev \
+  postgresql-9.5 \
+  libmicrohttpd-dev
+@end example
+
+For the rest of the installation, follow the
+generic installation instructions starting with the installation of
+libgnunetutil.  Note that if you used the Debian jessie instructions
+above, you need to pass
+@code{--with-microhttpd=/usr/local/} to all @code{configure} invocations.
+
+
+
+@c @node Installing Taler on Arch Linux
+@c @section Installing Taler on Arch Linux
+
+@c This section has not yet been written.
+
+
+@c @node Installing Taler on Windows
+@c @section Installing Taler on Windows
+
+@c This section has not yet been written.
+
+
+@c @node Installing Taler on OS X
+@c @section Installing Taler on OS X
+
+@c This section has not yet been written.
+
+
+@node Configuration
+@chapter How to configure the merchant's backend
+
+@cindex taler-config
+@cindex taler.conf
+The installation already provides reasonable defaults for most of the
+configuration options. However, some must be provided, in particular
+the database account and bank account that the backend should use.  By
+default, the file @code{$HOME/.config/taler.conf} is where the Web
+shop administrator specifies configuration values that augment or
+override the defaults.  The format of the configuration file is
+the well-known INI file format.  You can edit the file by hand, or
+use the @code{taler-config} commands given as examples.
+For more information on @code{taler-config}, @pxref{Using taler-config}.
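+
+For orientation, the following (illustrative) excerpt shows how options
+set with @code{taler-config} end up in @code{taler.conf}; the values
+shown are examples only and match the sample configuration given later:
+
+@example
+# Excerpt from $HOME/.config/taler.conf
+[TALER]
+CURRENCY = KUDOS
+
+[MERCHANT]
+SERVE = TCP
+PORT = 8888
+@end example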
+
+@menu
+* Backend options::
+* Sample backend configuration::
+* Launching the backend::
+@end menu
+
+@node Backend options
+@section Backend options
+
+The following table describes the options that commonly need to
+be modified.
+Here, the notation @code{[$section]/$option} denotes the option
+@code{$option} under the section @code{[$section]} in the
+configuration file.
+
+
+@table @asis
+
+@item Service address
+The following option sets the transport layer address used by the merchant backend:
+
+@cindex UNIX domain socket
+@cindex TCP
+@example
+[MERCHANT]/SERVE = TCP | UNIX
+@end example
+
+If given,
+@itemize
+@item @code{TCP}, then we need to set the TCP port in @code{[MERCHANT]/PORT}
+@item @code{UNIX}, then we need to set the unix domain socket path and mode in
+@code{[MERCHANT]/UNIXPATH} and @code{[MERCHANT]/UNIXPATH_MODE}. The latter takes
+the usual permission mask given as a number, e.g. 660 for user/group read-write access.
+@end itemize
+
+The frontend can then connect to the backend over HTTP using the specified address.
+If frontend and backend run within the same operating system, the use of
+a UNIX domain socket is recommended to avoid accidentally exposing the backend
+to the network.
+
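+For example, the following commands configure the backend to listen on
+a UNIX domain socket; the path shown is just an illustration and should
+be adapted to your system:
+
+@example
+$ taler-config -s MERCHANT -o SERVE -V UNIX
+$ taler-config -s MERCHANT -o UNIXPATH \
+  -V /var/run/taler/merchant.http
+$ taler-config -s MERCHANT -o UNIXPATH_MODE -V 660
+@end example
+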
+@c FIXME: we should offer an option to bind the TCP socket to a particular IP address (#4752)
+
+@cindex port
+To run the Taler backend on TCP port 8888, use:
+@example
+$ taler-config -s MERCHANT -o SERVE -V TCP
+$ taler-config -s MERCHANT -o PORT -V 8888
+@end example
+
+
+@item Currency
+Which currency the Web shop deals in, e.g. ``EUR'' or ``USD'', is specified using the option
+
+@cindex currency
+@cindex KUDOS
+@example
+[TALER]/CURRENCY
+@end example
+
+For testing purposes, the currency MUST match ``KUDOS'' so that tests will work
+with the Taler demonstration exchange at @url{https://exchange.demo.taler.net/}:
+
+@example
+$ taler-config -s TALER -o CURRENCY -V KUDOS
+@end example
+
+
+@item Database
+@cindex DBMS
+In principle, it is possible for the backend to support different DBMSs.
+The option
+
+@example
+[MERCHANT]/DB
+@end example
+
+specifies which DBMS is to be used. However, currently only the value
+``postgres'' is supported. This is also the default.
+
+In addition to selecting the DBMS software, the backend requires DBMS-specific options to access the database.
+
+For postgres, you need to provide:
+
+@example
+[merchantdb-postgres]/config
+@end example
+@cindex Postgres
+
+This option specifies a postgres access path using the format
+@code{postgres:///$DBNAME}, where @code{$DBNAME} is the name of the Postgres
+database you want to use.  Suppose @code{$USER} is the name of the
+user who will run the backend process.  Then, you need to first run
+
+@example
+$ sudo -u postgres createuser -d $USER
+@end example
+
+as the Postgres database administrator (usually @code{postgres}) to
+grant @code{$USER} the ability to create new databases.  Next, as
+@code{$USER}, run:
+
+@example
+$ createdb $DBNAME
+@end example
+
+to create the backend's database.  Here, @code{$DBNAME} must match the database name
+given in the configuration file.
+
+To configure the Taler backend to use this database, run:
+@example
+$ taler-config -s MERCHANTDB-postgres -o CONFIG \
+  -V postgres:///$DBNAME
+@end example
+
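+To verify that the database exists and is reachable, you can
+(optionally) connect to it once with @code{psql}:
+
+@example
+$ psql $DBNAME -c 'SELECT 1;'
+@end example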
+
+@item Exchange
+@cindex exchange
+To add an exchange to the list of trusted payment service providers,
+you create a section with a name that starts with ``exchange-''.
+In that section, the following options need to be configured:
+
+@itemize
+
+@item
+The ``url'' option specifies the exchange's base URL.  For example,
+to use the Taler demonstrator use:
+
+@example
+$ taler-config -s EXCHANGE-demo -o URL \
+  -V https://exchange.demo.taler.net/
+@end example
+
+@item
+@cindex master key
+The ``master_key'' option specifies the exchange's master public key in base32 encoding.
+For the Taler demonstrator, use:
+
+@example
+$ taler-config -s EXCHANGE-demo -o master_key \
+  -V CQQZ9DY3MZ1ARMN5K1VKDETS04Y2QCKMMCFHZSWJWWVN82BTTH00
+@end example
+
+Note that multiple exchanges can be added to the system by using different
+tokens in place of @code{demo} in the example above.  All
+of the exchanges must use the same currency.  If you need to support
+multiple currencies, you need to configure a backend per currency.
+
+@end itemize
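+
+For reference, the two commands above result in a configuration section
+roughly like the following (the @code{demo} token is just an example):
+
+@example
+[EXCHANGE-demo]
+URL = https://exchange.demo.taler.net/
+MASTER_KEY = CQQZ9DY3MZ1ARMN5K1VKDETS04Y2QCKMMCFHZSWJWWVN82BTTH00
+@end example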
+
+@c FIXME: In the future, we need to describe specifying auditors here.
+@c @item Auditors
+
+
+@item Instances
+@cindex instance
+The backend allows the user to run multiple instances of shops with
+distinct business entities against a single backend.  Each instance
+uses its own bank accounts and key for signing contracts.  It is
+mandatory to configure a ``default'' instance.
+
+@itemize
+
+@item
+The ``KEYFILE'' option specifies the file containing the instance's
+private signing key.  For example, use:
+
+@example
+$ taler-config -s INSTANCE-default -o KEYFILE \
+  -V '$@{TALER_CONFIG_HOME@}/merchant/instance/default.key'
+@end example
+
+@item
+The ``NAME'' option specifies a human-readable name for the instance.
+For example, use:
+
+@example
+$ taler-config -s INSTANCE-default -o NAME \
+  -V 'Kudos Inc.'
+@end example
+
+
+@item
+The optional ``TIP_EXCHANGE'' and ``TIP_RESERVE_PRIV_FILENAME''
+options are discussed in @ref{Tipping visitors}.
+@end itemize
+
+
+
+@item Accounts
+@cindex wire format
+In order to receive payments, the merchant backend needs to communicate bank
+account details to the exchange.  For this, the configuration must
+include one or more sections named ``ACCOUNT-name'' where @code{name} can be
+replaced by some human-readable word identifying the account.  For
+each section, the following options should be provided:
+
+
+@itemize
+@item
+The ``URL'' option specifies a @code{payto://}-URL for the account of
+the merchant.  For example, use:
+
+@example
+$ taler-config -s ACCOUNT-bank -o URL \
+  -V 'payto://x-taler-bank/bank.demo.taler.net/4'
+@end example
+
+@item
+The ``WIRE_RESPONSE'' option specifies where Taler should store the
+(salted) JSON encoding of the wire account.  The file given will be
+created if it does not exist.  For example, use:
+
+@example
+$ taler-config -s ACCOUNT-bank -o WIRE_RESPONSE \
+  -V '$@{TALER_CONFIG_HOME@}/merchant/bank.json'
+@end example
+
+
+@item
+The ``PLUGIN'' option specifies which wire plugin should be used for
+this account.  The plugin must support the wire method used by the
+URL.  For example, use:
+
+@example
+$ taler-config -s ACCOUNT-bank -o PLUGIN \
+  -V taler_bank
+@end example
+
+@item
+For each @code{instance} that should use this account, you should set
+@code{HONOR_instance} and @code{ACTIVE_instance} to YES.  The first
+option will cause the instance to accept payments to the account (for
+existing contracts), while the second will cause the backend to
+include the account as a possible option for new contracts.
+
+For example, use:
+
+@example
+$ taler-config -s ACCOUNT-bank -o HONOR_default \
+  -V YES
+$ taler-config -s ACCOUNT-bank -o ACTIVE_default \
+  -V YES
+@end example
+
+to use ``account-bank'' for the ``default'' instance.
+
+@end itemize
+
+Depending on which PLUGIN you configured, you may additionally specify
+authentication options to enable the plugin to use the account.
+
+For example, with the @code{taler_bank} plugin, use:
+
+@example
+$ taler-config -s ACCOUNT-bank -o TALER_BANK_AUTH_METHOD \
+  -V basic
+$ taler-config -s ACCOUNT-bank -o USERNAME \
+  -V user42
+$ taler-config -s ACCOUNT-bank -o PASSWORD \
+  -V pass42
+@end example
+
+
+
+@c Document EBICS here once supported.
+
+Note that additional instances can be specified using different tokens
+in the section name instead of @code{default}.
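+
+For example, assuming a (hypothetical) additional instance @code{myshop}
+has been configured under @code{[INSTANCE-myshop]}, you could let it use
+this account with:
+
+@example
+$ taler-config -s ACCOUNT-bank -o HONOR_myshop -V YES
+$ taler-config -s ACCOUNT-bank -o ACTIVE_myshop -V YES
+@end example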
+
+@end table
+
+@node Sample backend configuration
+@section Sample backend configuration
+
+@cindex configuration
+The following is an example for a complete backend configuration:
+
+@smallexample
+[TALER]
+CURRENCY = KUDOS
+
+[MERCHANT]
+SERVE = TCP
+PORT = 8888
+DATABASE = postgres
+
+[MERCHANTDB-postgres]
+CONFIG = postgres:///donations
+
+[INSTANCE-default]
+KEYFILE = $DATADIR/key.priv
+NAME = "Kudos Inc."
+
+[ACCOUNT-bank]
+URL = payto://x-taler-bank/bank.demo.taler.net/4
+WIRE_RESPONSE = $DATADIR/bank.json
+PLUGIN = taler_bank
+HONOR_default = YES
+ACTIVE_default = YES
+TALER_BANK_AUTH_METHOD = basic
+USERNAME = my_user
+PASSWORD = 1234pass
+
+[EXCHANGE-trusted]
+URL = https://exchange.demo.taler.net/
+MASTER_KEY = CQQZ9DY3MZ1ARMN5K1VKDETS04Y2QCKMMCFHZSWJWWVN82BTTH00
+CURRENCY = KUDOS
+
+@end smallexample
+
+
+Given the above configuration, the backend will use a database named
+@code{donations} within Postgres.
+
+The backend will deposit the coins it receives to the exchange at
+@url{https://exchange.demo.taler.net/}, which has the master key @*
+"CQQZ9DY3MZ1ARMN5K1VKDETS04Y2QCKMMCFHZSWJWWVN82BTTH00".
+
+Please note that @code{doc/config.sh} will walk you through all
+configuration steps, showing how to invoke @code{taler-config}
+for each of them.
+
+@node Launching the backend
+@section Launching the backend
+
+@cindex backend
+@cindex taler-merchant-httpd
+Assuming you have configured everything correctly, you can launch the
+merchant backend using:
+
+@example
+$ taler-merchant-httpd
+@end example
+
+When launched for the first time, this command will print a message
+about generating your private key. If everything worked as expected,
+the command
+
+@example
+$ curl http://localhost:8888/
+@end example
+
+should return the message
+
+@smallexample
+Hello, I'm a merchant's Taler backend. This HTTP server is not for humans.
+@end smallexample
+
+Please note that your backend is right now likely globally
+reachable.  Production systems should be configured to bind
+to a UNIX domain socket or properly restrict access to the
+port.
+
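+If you configured the backend to listen on a UNIX domain socket
+instead, recent versions of @code{curl} can reach it directly; the
+socket path below is just an illustration:
+
+@example
+$ curl --unix-socket /var/run/taler/merchant.http \
+  http://localhost/
+@end example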
+
+@node Testing
+@chapter Testing
+
+The tool @code{taler-merchant-generate-payments} can be used to test
+the merchant backend installation.  It performs all the steps of a payment
+programmatically, relying on the backend you give it as input.
+Note that this tool gets installed along with all the
+merchant backend's binaries.
+
+This tool is configured by a configuration file, which must have the following
+layout:
+
+@example
+[PAYMENTS-GENERATOR]
+
+# The exchange used during the test: make sure the merchant backend
+# being tested accepts this exchange.
+# If the sysadmin wants, she can also install a local exchange
+# and test against it.
+EXCHANGE = https://exchange.demo.taler.net/
+
+# This value must indicate some URL where the backend
+# to be tested is listening; it doesn't have to be the
+# "official" one, though.
+MERCHANT = http://localbackend/
+
+# This value is used when the tool tries to withdraw coins,
+# and must match the bank used by the exchange. If the test is
+# done against the exchange at https://exchange.demo.taler.net/,
+# then this value can be "https://bank.demo.taler.net/".
+BANK = https://bank.demo.taler.net/
+
+# The merchant instance in charge of serving the payment.
+# Make sure this instance has a bank account at the same bank
+# indicated by the 'bank' option above.
+INSTANCE = default
+
+# The currency used during the test. Must match the one used
+# by merchant backend and exchange.
+CURRENCY = KUDOS
+@end example
+@c FIXME: the last option should be removed and [taler]/CURRENCY used instead!
+
+Run the test in the following way:
+
+@example
+$ taler-merchant-generate-payments [-c config] [-e EURL] [-m MURL]
+@end example
+
+The argument @code{config} given to @code{-c} points to the configuration
+file and is optional -- @code{~/.config/taler.conf} will be checked by
+default.
+By default, the tool forks two processes: one for the merchant backend,
+and one for the exchange.
+The option @code{-e} (@code{-m}) avoids any exchange (merchant backend)
+fork, and just runs the generator against the exchange (merchant backend)
+running at @code{EURL} (@code{MURL}).
+
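+For example, to run the generator against an already running exchange
+and merchant backend (the URLs below are taken from the sample
+configuration above and must be adjusted to your setup), you could invoke:
+
+@example
+$ taler-merchant-generate-payments -c ~/.config/taler.conf \
+    -e https://exchange.demo.taler.net/ \
+    -m http://localbackend/
+@end example
+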
+Please NOTE that the generator contains @emph{hardcoded} values, such as the
+deposit fees of the coins it uses.
+In order to work against the exchange being used, those values MUST match the
+ones used by that exchange.
+
+The following example shows how the generator ``sets'' a deposit fee
+of EUR:0.01 for the 5 EUR coin.
+
+@example
+// from <merchant_repository>/src/sample/generate_payments.c
+@{ .oc = OC_PAY,
+  .label = "deposit-simple",
+  .expected_response_code = MHD_HTTP_OK,
+  .details.pay.contract_ref = "create-proposal-1",
+  .details.pay.coin_ref = "withdraw-coin-1",
+  .details.pay.amount_with_fee = concat_amount (currency, "5"),
+  .details.pay.amount_without_fee = concat_amount (currency, "4.99") @},
+@end example
+
+The logic calculates the deposit fee according to the subtraction:
+@code{amount_with_fee - amount_without_fee}.
+
+The following example shows a 5 EUR coin configuration (needed by the
+exchange being used) which is compatible with the hardcoded example above.
+
+@example
+[COIN_eur_5]
+value = EUR:5
+duration_overlap = 5 minutes
+duration_withdraw = 7 days
+duration_spend = 2 years
+duration_legal = 3 years
+fee_withdraw = EUR:0.00
+fee_deposit = EUR:0.01 # important bit
+fee_refresh = EUR:0.00
+fee_refund = EUR:0.00
+rsa_keysize = 1024
+@end example
+
+If the command terminates with no errors, then the merchant backend
+is correctly installed.
+
+After this operation is done, the merchant database will have some dummy
+data in it, so it may be convenient to clean all the tables; for this
+purpose, issue the following command:
+
+@example
+$ taler-merchant-dbinit -r
+@end example
+
+
+@node Advanced topics
+@chapter Advanced topics
+
+@menu
+* Configuration format::    Configuration file format
+* Using taler-config::      Introduction to the taler-config tool
+* Merchant key management:: Managing the merchant's cryptographic keys
+* SEPA configuration::      Configuring a SEPA bank account
+* Tipping visitors::        Giving money to Web site visitors with Taler
+* Generate payments::       Generate fake payments for testing purposes
+@end menu
+
+@include configuration-format.texi
+@include taler-config.texi
+
+
+@node Merchant key management
+@section Merchant key management
+@cindex merchant key
+@cindex KEYFILE
+
+The option ``KEYFILE'' in the section ``INSTANCE-default''
+specifies the path to the instance's private key.  You do not need to
+create a key manually; the backend will generate it automatically if
+it is missing.  While generally unnecessary, it is possible to display
+the corresponding public key using the @code{gnunet-ecc} command-line
+tool:
+
+@example
+$ gnunet-ecc -p                                  \
+  $(taler-config -f -s INSTANCE-default \
+                 -o KEYFILE)
+@end example
+
+@c Add more on how to add that key to X.509 CSRs once we can do that.
+
+@node SEPA configuration
+@section Using the SEPA wire transfer method
+@cindex SEPA
+@cindex EBICS
+
+The following is a sample configuration for the SEPA wire transfer
+method.@footnote{Supporting SEPA is still
+work in progress; the backend will accept this configuration, but the
+exchange will not work with SEPA today.}
+
+To configure the EBICS backend for SEPA payments in EUR,
+the following configuration
+options need to be set:
+
+@example
+$ taler-config -s TALER -o CURRENCY -V EUR
+$ taler-config -s ACCOUNT-e -o PLUGIN -V ebics
+$ taler-config -s ACCOUNT-e -o URL \
+ -V payto://sepa/XY00111122223333444455556666
+$ taler-config -s ACCOUNT-e -o WIRE_RESPONSE \
+ -V '$@{DATADIR@}/b.json'
+@end example
+
+Please note that you will also have to configure an exchange and/or
+auditors that support SEPA.  However, we cannot explain how to do this
+yet as such entities do not yet exist.  Once such entities do exist,
+we expect future versions of the Taler backend to ship with
+pre-configured exchanges and auditors for common denominations.
+
+
+@node Tipping visitors
+@section Tipping visitors
+@cindex tipping
+
+Taler can also be used to tip Web site visitors.  For example, you may
+be running an online survey, and you want to reward those people that have
+dutifully completed the survey.  If they have installed a Taler wallet,
+you can provide them with a tip for their deeds.  This section describes
+how to setup the Taler merchant backend for tipping.
+
+There are four basic steps that must happen to tip a visitor.
+
+@menu
+* Configure a reserve and exchange for tipping::
+* Fund the reserve::
+* Authorize a tip::
+* Picking up of the tip::
+@end menu
+
+@node Configure a reserve and exchange for tipping
+@subsection Configure a reserve and exchange for tipping
+@cindex gnunet-ecc
+@cindex reserve key
+
+@c TODO: We should probably create a tool that automates the
+@c configuration process and simply outputs the wire transfer
+@c subject of the reserve.
+
+To tip users, you first need to create a reserve.  A reserve is a pool
+of money held in escrow at the Taler exchange.  This is the source of
+the funds for the tips.  Tipping will fail (resulting in disappointed
+visitors) if you do not have enough funds in your reserve!
+
+First, we configure the backend.  You need to enable tipping for each
+instance separately, or you can use an instance only for tipping.  To
+configure the ``default'' instance for tipping, use the following
+configuration:
+
+@example
+[INSTANCE-default]
+# this is NOT the tip.priv
+KEYFILE = signing_key.priv
+# replace the URL with the URL of the exchange you will use
+TIP_EXCHANGE = https://exchange:443/
+# here put the path to the file created with "gnunet-ecc -g1 tip.priv"
+TIP_RESERVE_PRIV_FILENAME = tip.priv
+@end example
+
+Note that the KEYFILE option should have already been present for
+the instance. It has nothing to do with the ``tip.priv'' file we
+create below, and you should probably use a different file here.
+
+Instead of manually editing the configuration, you could also run:
+
+@example
+$ taler-config -s INSTANCE-default \
+    -o TIP_RESERVE_PRIV_FILENAME \
+    -V tip.priv
+$ taler-config -s INSTANCE-default \
+    -o TIP_EXCHANGE \
+    -V https://exchange:443/
+@end example
+
+
+Next, to create the @code{TIP_RESERVE_PRIV_FILENAME} file, use:
+
+@example
+$ gnunet-ecc -g 1   \
+  $(taler-config -f -s INSTANCE-default \
+      -o TIP_RESERVE_PRIV_FILENAME)
+@end example
+
+This will create a file with the private key that will be used to
+identify the reserve.  You need to do this once for each instance that
+is configured to tip.
+
+
+Now you can (re)start the backend with the new configuration.
+
+@node Fund the reserve
+@subsection Fund the reserve
+@cindex reserve
+@cindex close
+
+To fund the reserve, you must first extract the public key
+from ``tip.priv'':
+
+@example
+$ gnunet-ecc --print-public-key \
+  $(taler-config -f -s INSTANCE-default \
+      -o TIP_RESERVE_PRIV_FILENAME)
+@end example
+
+In our example, the output for the public key is:
+
+@example
+QPE24X8PBX3BZ6E7GQ5VAVHV32FWTTCADR0TRQ183MSSJD2CHNEG
+@end example
+
+You now need to make a wire transfer to the exchange's bank account
+using the public key as the wire transfer subject.  The exchange's
+bank account details can be found in JSON format at
+``https://exchange:443/wire/METHOD'' where METHOD is the respective
+wire method (e.g. ``sepa'').  Depending on the exchange's operator,
+you may also be able to find the bank details in a human-readable
+format on the main page of the exchange.
+
+Make your wire transfer and (optionally) check at
+``https://exchange:443/reserve/status?reserve_pub=QPE24X...''
+whether your transfer has arrived at the exchange.
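+
+For example, a quick (illustrative) way to perform this check from the
+command line, using the public key shown above, is:
+
+@example
+$ curl "https://exchange:443/reserve/status?reserve_pub=QPE24X8PBX3BZ6E7GQ5VAVHV32FWTTCADR0TRQ183MSSJD2CHNEG"
+@end example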
+@c FIXME: we should create a nicer tool to do this check!
+
+Once the funds have arrived, you can start to use the reserve
+for tipping.
+
+Note that an exchange will typically close a reserve after four weeks,
+wiring all remaining funds back to the sender's account.  Thus, you
+should plan to wire funds corresponding to a campaign of about two
+weeks to the exchange initially. If your campaign runs longer, you
+should wire further funds to the reserve every other week to prevent
+it from expiring.
+
+@node Authorize a tip
+@subsection Authorize a tip
+
+When your frontend has reached the point where a client is supposed
+to receive a tip, it needs to first authorize the tip. For this,
+the frontend must use the ``/tip-authorize'' API of the backend.
+To authorize a tip, the frontend has to provide the following information
+in the body of the POST request:
+
+@itemize @bullet
+@item The amount of the tip
+
+@item The justification (only used internally for the back-office)
+
+@item The URL where the wallet should navigate next after the tip was processed
+
+@item The tip-pickup URL (see next section)
+@end itemize
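+
+A request body could thus look roughly like the following sketch.  The
+field names shown here are illustrative only; the authoritative names
+are defined by the merchant backend's API reference:
+
+@example
+@{
+  "amount": "KUDOS:0.5",
+  "justification": "survey completed",
+  "next_url": "https://example.com/survey-thanks.html",
+  "pickup_url": "https://example.com/tip-pickup"
+@}
+@end example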
+
+In response to this request, the backend will return a tip token, an
+expiration time and the exchange URL.
+The expiration time will indicate how long the tip is valid (when the
+reserve expires).  The tip token is an opaque string that contains all
+the information needed by the wallet to process the tip.  The
+frontend must send this tip token to the browser in a
+special ``402 Payment Required'' response inside
+the @code{X-Taler-Tip} header.
+
+The frontend should handle errors returned by the backend, such
+as misconfigured instances or a lack of remaining funds for tipping.
+
+@node Picking up of the tip
+@subsection Picking up of the tip
+
+The wallet will POST a JSON object to the shop's ``/tip-pickup'' handler.
+The frontend must then forward this request to the backend.  The response
+generated by the backend can then be forwarded directly to the wallet.
+
+@node Generate payments
+@section Generate payments
+@cindex testing database
+
+The merchant codebase offers the @code{taler-merchant-benchmark} tool
+to populate the database with fake payments.  This tool is in charge of
+starting merchant, exchange, and bank processes, and of providing them all
+the input needed to accomplish payments.  Note that each component will use its
+own configuration (as it would in production).
+
+The tool takes all of the values it needs from the command line, with
+some of them being mandatory.  Among those, we have:
+
+@itemize
+@item @code{--currency=K} Use currency @emph{K}, for example to craft coins to withdraw.
+@item @code{--bank-url=URL} Assume that the bank is serving under the base URL @emph{URL}.
+This option is only actually used by the tool to check whether the bank was launched correctly.
+@item @code{--merchant-url=URL} Reach the merchant through @emph{URL}, for downloading
+contracts and sending payments.
+@end itemize
+
+The tool then comes with two operation modes: @emph{ordinary}
+and @emph{corner}.  The first just executes normal payments,
+meaning that it uses the default instance and makes sure that
+all payments get aggregated.  The second makes it possible to
+leave some payments unaggregated, and also to use merchant
+instances other than the default (which is, nonetheless, the one
+the tool uses by default).
+
+Note: the ability to control the aggregation policy is useful
+for testing the back office facility.
+
+Each subcommand is also equipped with the canonical @code{--help}
+option, which you can use to explore all the possibilities.
+For example:
+
+@smallexample
+$ taler-merchant-benchmark corner --help
+@end smallexample
+
+will show all the options offered by the @emph{corner} mode.
+Among the most interesting, there are:
+
+@itemize
+@item @code{--two-coins=TC} This option instructs the tool to perform @emph{TC}
+many payments that use two coins, because normally only one coin is spent per payment.
+@item @code{--unaggregated-number=UN} This option instructs the tool to perform @emph{UN}
+(one coin) payments that will be left unaggregated.
+@item @code{--alt-instance=AI} This option instructs the tool to perform payments
+using the merchant instance @emph{AI} (instead of the @emph{default} instance).
+@end itemize
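+
+Putting these together, a corner-mode run could look like the following
+(the URLs and numbers are placeholders to be adapted to your setup):
+
+@smallexample
+$ taler-merchant-benchmark corner \
+    --currency=KUDOS \
+    --bank-url=http://localbank/ \
+    --merchant-url=http://localbackend/ \
+    --two-coins=5 \
+    --unaggregated-number=2
+@end smallexample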
+
+As for the @code{ordinary} subcommand, it is worth explaining the
+following options:
+
+@itemize
+@item @code{--payments-number=PN} Instructs the tool to perform @emph{PN} payments.
+@item @code{--tracks-number=TN} Instructs the tool to perform @emph{TN} tracking operations.
+Note that the @b{total} number of operations will be two times @emph{TN}, since ``one''
+tracking operation accounts for @code{/track/transaction} and @code{/track/transfer}.
+This command should only be used to check whether the operations complete without
+problems, as no actual measurement of performance is provided (despite the ``benchmark''
+word used in the tool's name).
+@end itemize
+
+@c **********************************************************
+@c *******************  Appendices  *************************
+@c **********************************************************
+
+@node GNU-LGPL
+@unnumbered GNU-LGPL
+@cindex license
+@cindex LGPL
+@include lgpl.texi
+
+@node GNU Affero GPL
+@unnumbered GNU Affero GPL
+@cindex license
+@cindex Affero GPL
+@include agpl.texi
+
+@node GNU-FDL
+@unnumbered GNU-FDL
+@cindex license
+@cindex GNU Free Documentation License
+@include fdl-1.3.texi
+
+@node Concept Index
+@unnumbered Concept Index
+
+@printindex cp
+
+@bye
diff --git a/doc/mdate-sh b/doc/mdate-sh
new file mode 100755
index 0000000..8c7a590
--- /dev/null
+++ b/doc/mdate-sh
@@ -0,0 +1,228 @@
+#!/bin/sh
+# Get modification time of a file or directory and pretty-print it.
+
+scriptversion=2018-03-07.03; # UTC
+
+# Copyright (C) 1995-2018 Free Software Foundation, Inc.
+# written by Ulrich Drepper <address@hidden>, June 1995
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2, or (at your option)
+# any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program that contains a
+# configuration script generated by Autoconf, you may include it under
+# the same distribution terms that you use for the rest of that program.
+
+# This file is maintained in Automake, please report
+# bugs to <address@hidden> or send patches to
+# <address@hidden>.
+
+if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then
+  emulate sh
+  NULLCMD=:
+  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
+  # is contrary to our usage.  Disable this feature.
+  alias -g '${1+"$@"}'='"$@"'
+  setopt NO_GLOB_SUBST
+fi
+
+case $1 in
+  '')
+     echo "$0: No file.  Try '$0 --help' for more information." 1>&2
+     exit 1;
+     ;;
+  -h | --h*)
+    cat <<\EOF
+Usage: mdate-sh [--help] [--version] FILE
+
+Pretty-print the modification day of FILE, in the format:
+1 January 1970
+
+Report bugs to <address@hidden>.
+EOF
+    exit $?
+    ;;
+  -v | --v*)
+    echo "mdate-sh $scriptversion"
+    exit $?
+    ;;
+esac
+
+error ()
+{
+  echo "$0: $1" >&2
+  exit 1
+}
+
+
+# Prevent date giving response in another language.
+LANG=C
+export LANG
+LC_ALL=C
+export LC_ALL
+LC_TIME=C
+export LC_TIME
+
+# Use UTC to get reproducible result.
+TZ=UTC0
+export TZ
+
+# GNU ls changes its time format in response to the TIME_STYLE
+# variable.  Since we cannot assume 'unset' works, revert this
+# variable to its documented default.
+if test "${TIME_STYLE+set}" = set; then
+  TIME_STYLE=posix-long-iso
+  export TIME_STYLE
+fi
+
+save_arg1=$1
+
+# Find out how to get the extended ls output of a file or directory.
+if ls -L /dev/null 1>/dev/null 2>&1; then
+  ls_command='ls -L -l -d'
+else
+  ls_command='ls -l -d'
+fi
+# Avoid user/group names that might have spaces, when possible.
+if ls -n /dev/null 1>/dev/null 2>&1; then
+  ls_command="$ls_command -n"
+fi
+
+# A 'ls -l' line looks as follows on OS/2.
+#  drwxrwx---        0 Aug 11  2001 foo
+# This differs from Unix, which adds ownership information.
+#  drwxrwx---   2 root  root      4096 Aug 11  2001 foo
+#
+# To find the date, we split the line on spaces and iterate on words
+# until we find a month.  This cannot work with files whose owner is a
+# user named "Jan", or "Feb", etc.  However, it's unlikely that '/'
+# will be owned by a user whose name is a month.  So we first look at
+# the extended ls output of the root directory to decide how many
+# words should be skipped to get the date.
+
+# On HPUX /bin/sh, "set" interprets "-rw-r--r--" as options, so the "x" below.
+set x`$ls_command /`
+
+# Find which argument is the month.
+month=
+command=
+until test $month
+do
+  test $# -gt 0 || error "failed parsing '$ls_command /' output"
+  shift
+  # Add another shift to the command.
+  command="$command shift;"
+  case $1 in
+    Jan) month=January; nummonth=1;;
+    Feb) month=February; nummonth=2;;
+    Mar) month=March; nummonth=3;;
+    Apr) month=April; nummonth=4;;
+    May) month=May; nummonth=5;;
+    Jun) month=June; nummonth=6;;
+    Jul) month=July; nummonth=7;;
+    Aug) month=August; nummonth=8;;
+    Sep) month=September; nummonth=9;;
+    Oct) month=October; nummonth=10;;
+    Nov) month=November; nummonth=11;;
+    Dec) month=December; nummonth=12;;
+  esac
+done
+
+test -n "$month" || error "failed parsing '$ls_command /' output"
+
+# Get the extended ls output of the file or directory.
+set dummy x`eval "$ls_command \"\\\$save_arg1\""`
+
+# Remove all preceding arguments
+eval $command
+
+# Because of the dummy argument above, month is in $2.
+#
+# On a POSIX system, we should have
+#
+# $# = 5
+# $1 = file size
+# $2 = month
+# $3 = day
+# $4 = year or time
+# $5 = filename
+#
+# On Darwin 7.7.0 and 7.6.0, we have
+#
+# $# = 4
+# $1 = day
+# $2 = month
+# $3 = year or time
+# $4 = filename
+
+# Get the month.
+case $2 in
+  Jan) month=January; nummonth=1;;
+  Feb) month=February; nummonth=2;;
+  Mar) month=March; nummonth=3;;
+  Apr) month=April; nummonth=4;;
+  May) month=May; nummonth=5;;
+  Jun) month=June; nummonth=6;;
+  Jul) month=July; nummonth=7;;
+  Aug) month=August; nummonth=8;;
+  Sep) month=September; nummonth=9;;
+  Oct) month=October; nummonth=10;;
+  Nov) month=November; nummonth=11;;
+  Dec) month=December; nummonth=12;;
+esac
+
+case $3 in
+  ???*) day=$1;;
+  *) day=$3; shift;;
+esac
+
+# Here we have to deal with the problem that the ls output gives either
+# the time of day or the year.
+case $3 in
+  *:*) set `date`; eval year=\$$#
+       case $2 in
+        Jan) nummonthtod=1;;
+        Feb) nummonthtod=2;;
+        Mar) nummonthtod=3;;
+        Apr) nummonthtod=4;;
+        May) nummonthtod=5;;
+        Jun) nummonthtod=6;;
+        Jul) nummonthtod=7;;
+        Aug) nummonthtod=8;;
+        Sep) nummonthtod=9;;
+        Oct) nummonthtod=10;;
+        Nov) nummonthtod=11;;
+        Dec) nummonthtod=12;;
+       esac
+       # For the first six month of the year the time notation can also
+       # be used for files modified in the last year.
+       if (expr $nummonth \> $nummonthtod) > /dev/null;
+       then
+        year=`expr $year - 1`
+       fi;;
+  *) year=$3;;
+esac
+
+# The result.
+echo $day $month $year
+
+# Local Variables:
+# mode: shell-script
+# sh-indentation: 2
+# eval: (add-hook 'before-save-hook 'time-stamp)
+# time-stamp-start: "scriptversion="
+# time-stamp-format: "%:y-%02m-%02d.%02H"
+# time-stamp-time-zone: "UTC0"
+# time-stamp-end: "; # UTC"
+# End:
diff --git a/doc/merchant-api.content.texi b/doc/merchant-api.content.texi
new file mode 100644
index 0000000..4ab09bd
--- /dev/null
+++ b/doc/merchant-api.content.texi
@@ -0,0 +1,1068 @@
+@c *****************************************
+@c This file is supposed to be included from
+@c the language-specific tutorial.
+@c *****************************************
+
+@c Define a new index for options.
+@defcodeindex op
+@c Combine everything into one index (arbitrarily chosen to be the
+@c concept index).
+@syncodeindex op cp
+@c %**end of header
+
+@copying
+This document is a tutorial for the GNU Taler Merchant API (version @value{VERSION}, @value{UPDATED}).
+
+Copyright @copyright{} 2018 Taler Systems SA
+
+@quotation
+Permission is granted to copy, distribute and/or modify this document
+under the terms of the GNU Free Documentation License, Version 1.3 or
+any later version published by the Free Software Foundation; with no
+Invariant Sections, with no Front-Cover Texts, and with no Back-Cover
+Texts.  A copy of the license is included in the section entitled
+``GNU Free Documentation License''.
+@end quotation
+@end copying
+@c If your tutorial is published on paper by the FSF, it should include
+@c The standard FSF Front-Cover and Back-Cover Texts, as given in
+@c maintain.texi.
+@c
+@c Titlepage
+@c
+@titlepage
+@title The GNU Taler Merchant API tutorial
+@subtitle Version @value{VERSION}
+@subtitle @value{UPDATED}
+@author Christian Grothoff (@email{christian@@grothoff.org})
+@author Marcello Stanisci (@email{marcello.stanisci@@inria.fr})
+@author Florian Dold (@email{florian.dold@@inria.fr})
+@page
+@vskip 0pt plus 1filll
+@insertcopying
+@end titlepage
+
+@summarycontents
+@contents
+
+@ifnottex
+@node Top
+@top The GNU Taler Merchant API Tutorial (Version for @value{LANGNAME})
+@insertcopying
+@end ifnottex
+
+
+@menu
+* Introduction::                                  What this tutorial is about
+* Accepting a Simple Payment::                    How to accept simple payments
+* Giving Refunds::                                How to give refunds to customers
+* Giving Customers Tips::                         How to reward customers with tips
+* Advanced topics::                               Detailed solutions to specific issues
+
+
+Appendices
+
+* GNU-LGPL::                     The GNU Lesser General Public License says how you
+                                 can use the code of libtalermerchant.so in your own projects.
+* GNU-FDL::                      The GNU Free Documentation License says how you
+                                 can copy and share the documentation of GNU Taler.
+
+Indices
+
+* Concept Index::               Index of concepts and programs.
+@end menu
+
+
+@node Introduction
+@chapter Introduction
+
+@section About GNU Taler
+
+GNU Taler is an open protocol for an electronic payment system with a free
+software reference implementation.  GNU Taler offers secure, fast and easy
+payment processing using well understood cryptographic techniques.  GNU Taler
+allows customers to remain anonymous, while ensuring that merchants can be held
+accountable by governments.  Hence, GNU Taler is compatible with
+anti-money-laundering (AML) and know-your-customer (KYC) regulation, as well as
+data protection regulation (such as GDPR).
+
+
+@section About this tutorial
+
+This tutorial addresses how to process payments using the GNU Taler merchant
+Backend. This chapter explains some basic concepts.  In the second chapter, you
+will learn how to do basic payments.
+
+@clear GOT_LANG
+@ifset LANG_PYTHON
+@set GOT_LANG 1
+This version of the tutorial has examples for Python3.
+It uses the requests library for HTTP requests.
+@end ifset
+@ifset LANG_CURL
+@set GOT_LANG 1
+This version of the tutorial has examples for the
+command line with cURL.
+@end ifset
+@ifset LANG_PHP
+@set GOT_LANG 1
+This version of the tutorial has examples for PHP,
+using libcurl.
+@end ifset
+@c
+Versions for other languages/environments are available as well.
+
+@cindex examples
+@cindex git
+If you want to look at some simple, running examples, check out these:
+@itemize
+@item
+The @url{https://git.taler.net/blog.git/tree/talerblog/blog/blog.py, essay merchant} that
+sells single chapters of a book.
+@item
+The @url{https://git.taler.net/donations.git/tree/talerdonations/donations/donations.py, donation page} that
+accepts donations for software projects and gives donation receipts.
+@item
+The @url{https://git.taler.net/survey.git/tree/talersurvey/survey/survey.py,survey} that
+gives users who answer a question a small reward.
+@end itemize
+
+@section Architecture overview
+
+The Taler software stack for a merchant consists of the following
+main components:
+
+@itemize
+@cindex frontend
+@item A frontend which interacts with the customer's browser. The
+  frontend enables the customer to build a shopping cart and place
+  an order.  Upon payment, it triggers the respective business logic
+  to satisfy the order.  This component is not included with Taler,
+  but rather assumed to exist at the merchant. This tutorial
+  describes how to develop a Taler frontend.
+@cindex backend
+@item A Taler-specific payment backend which makes it easy for the
+  frontend to process financial transactions with Taler.  For this
+  tutorial, you will use a public sandbox backend.  For production
+  use, you must either set up your own backend or ask another person
+  to do so for you.
+@end itemize
+
+The following image illustrates the various interactions of these
+key components:
+
+@image{arch-api, 3in}
+
+The backend provides the cryptographic protocol support, stores Taler-specific
+financial information and communicates with the GNU Taler exchange over the
+Internet.  The frontend accesses the backend via a RESTful API.  As a result,
+the frontend never has to directly communicate with the exchange, and also does
+not deal with sensitive data.  In particular, the merchant's signing keys and
+bank account information are encapsulated within the Taler backend.
+
+Some functionality of the backend (the ``public interface'') is also exposed to the
+customer's browser directly.  In the HTTP API, all public endpoints are prefixed with @code{/public/}.
+
+@section Public Sandbox Backend and Authentication
+@cindex sandbox
+@cindex authorization
+
+How the frontend authenticates to the Taler backend depends on the configuration. @xref{Top,,, manual, Taler Merchant Operating Manual}.
+
+The public sandbox backend @url{https://backend.demo.taler.net/} uses an API key
+in the @code{Authorization} header.  The value of this header must be
+@code{ApiKey sandbox} for the public sandbox backend.
+
+@clear GOT_LANG
+@ifset LANG_CURL
+@set GOT_LANG 1
+@example
+curl -i 'https://backend.demo.taler.net/' \
+  --header "Authorization: ApiKey sandbox"
+# HTTP/1.1 200 OK
+# [...]
+#
+# Hello, I'm a merchant's Taler backend. This HTTP server is not for humans.
+@end example
+@end ifset
+@ifset LANG_PYTHON
+@set GOT_LANG 1
+@example
+@verbatim
+>>> import requests
+>>> requests.get("https://backend.demo.taler.net",
+...              headers={"Authorization": "ApiKey sandbox"})
+<Response [200]>
+@end verbatim
+@end example
+@end ifset
+@ifset LANG_PHP
+@set GOT_LANG 1
+@example
+@verbatim
+php > $c = curl_init("https://backend.demo.taler.net/");
+php >  $options = array(CURLOPT_RETURNTRANSFER => true,
+php (                   CURLOPT_CUSTOMREQUEST => "GET",
+php (                   CURLOPT_HTTPHEADER => array("Authorization: ApiKey sandbox"));
+php > curl_setopt_array ($c, $options);
+php > $r = curl_exec ($c);
+php > echo curl_getinfo ($c, CURLINFO_HTTP_CODE);
+200
+php > echo $r;
+Hello, I'm a merchant's Taler backend. This HTTP server is not for humans.
+@end verbatim
+@end example
+@end ifset
+@ifclear GOT_LANG
+@example
+(example not available for this language)
+@end example
+@end ifclear
+
+If an HTTP status code other than 200 is returned, something went wrong. You
+should figure out what the problem is before continuing with this tutorial.
+
+The sandbox backend @url{https://backend.demo.taler.net/} uses @code{KUDOS} as
+an imaginary currency.  Coins denominated in @code{KUDOS} can be withdrawn
+from @url{https://bank.demo.taler.net/}.
+
+@section Merchant Instances
+@cindex instance
+
+The same Taler merchant backend server can be used by multiple separate
+merchants that are separate business entities.  Each of these separate business
+entities is called a @emph{merchant instance}, and is identified by an
+alphanumeric @emph{instance id}.  If the instance is omitted, the instance id
+@code{default} is assumed.
+
+The following merchant instances are configured on @url{https://backend.demo.taler.net/}:
+@itemize
+@item @code{GNUnet} (The GNUnet project)
+@item @code{FSF} (The Free Software Foundation)
+@item @code{Tor} (The Tor Project)
+@item @code{default} (Kudos Inc.)
+@end itemize
+
+Note that these are fictional merchants used for our demonstrators and
+not affiliated with or officially approved by the respective projects.
+
+
+@node Accepting a Simple Payment
+@chapter Accepting a Simple Payment
+
+@section Creating an Order for a Payment
+@cindex order
+
+Payments in Taler revolve around an @emph{order}, which is a machine-readable
+description of the business transaction for which the payment is to be made.
+Before accepting a Taler payment as a merchant
+you must create such an order.
+
+This is done by posting a JSON object to the backend's @code{/order} API endpoint.  At least the
+following fields must be given:
+
+@itemize
+@item @var{amount}: The amount to be paid, as a string in the format
+@code{CURRENCY:DECIMAL_VALUE}, for example @code{EUR:10} for 10 Euros or
+@code{KUDOS:1.5} for 1.5 KUDOS.
+
+@item @var{summary}:  A human-readable summary for what the payment is about.
+The summary should be short enough to fit into titles, though no
+hard limit is enforced.
+
+@item @var{fulfillment_url}:  A URL that will be displayed once the payment is
+completed.  For digital goods, this should be a page that displays the product
+that was purchased.  On successful payment, the wallet automatically appends
+the @code{order_id} as a query parameter, as well as the @code{session_sig} for
+session-bound payments (discussed later).
+@end itemize
+
+Orders can have many more fields, see @ref{The Taler Order Format}.
+
+After successfully @code{POST}ing to @code{/order}, an @code{order_id} will be
+returned.  Together with the merchant @code{instance}, the order id uniquely
+identifies the order within a merchant backend.
+
+@clear GOT_LANG
+@ifset LANG_CURL
+@set GOT_LANG 1
+@example
+@verbatim
+ORDER='
+{"order": {
+  "amount": "KUDOS:10",
+  "summary": "Donation",
+  "fulfillment_url": "https://example.com/thanks.html"}}
+'
+
+curl -i -X POST 'https://backend.demo.taler.net/order' \
+     --header "Authorization: ApiKey sandbox" -d "$ORDER"
+# HTTP/1.1 200 OK
+# [...]
+#
+# {
+#   "order_id": "2018.058.21.46.06-024C85K189H8P"
+# }
+@end verbatim
+@end example
+@end ifset
+@ifset LANG_PYTHON
+@set GOT_LANG 1
+@example
+@verbatim
+>>> import requests
+>>> order = dict(order=dict(amount="KUDOS:10",
+...                         summary="Donation",
+...                         fulfillment_url="https://example.com/thanks.html"))
+>>> order_resp = requests.post("https://backend.demo.taler.net/order", json=order,
+...               headers={"Authorization": "ApiKey sandbox"})
+<Response [200]>
+@end verbatim
+@end example
+@end ifset
+@ifset LANG_PHP
+@set GOT_LANG 1
+@example
+@verbatim
+php > $c = curl_init("https://backend.demo.taler.net/order");
+php > $json = array("order"=>
+php (   array("amount"=>"KUDOS:1",
+php (         "fulfillment_url"=>"https://example.com/thanks.html",
+php (         "summary"=>"nice product"));
+php > $options = array(CURLOPT_RETURNTRANSFER=>true,
+php (                  CURLOPT_CUSTOMREQUEST=>"POST",
+php (                  CURLOPT_POSTFIELDS=>json_encode($json),
+php (                  CURLOPT_HTTPHEADER=>array("Authorization: ApiKey sandbox"));
+php > curl_setopt_array($c, $options);
+php > $r = curl_exec($c);
+php > echo curl_getinfo($c, CURLINFO_HTTP_CODE);
+200
+php > echo $r;
+{
+  "order_id": "2018.072.12.48.51-014DKDKBMHPDP"
+}
+@end verbatim
+@end example
+@end ifset
+@ifclear GOT_LANG
+@example
+(example not available for this language)
+@end example
+@end ifclear
+
+The backend will fill in some details missing in the order, such as the address
+of the merchant instance.  The full details are called the @emph{contract
+terms}.
+@cindex contract
+@cindex terms
+
+@section Checking Payment Status and Prompting for Payment
+The status of a payment can be checked with the @code{/check-payment} endpoint.  If the payment
+is yet to be completed by the customer, @code{/check-payment} will give the frontend a URL (the @var{payment_redirect_url})
+that will trigger the customer's wallet to execute the payment.
+
+Note that the only way to obtain the @var{payment_redirect_url} is to check the status of the payment,
+even if you know that the user did not pay yet.
+
+@clear GOT_LANG
+@ifset LANG_CURL
+@set GOT_LANG 1
+@example
+@verbatim
+ORDER_ID="2018.058.21.46.06-024C85K189H8P"
+curl -i "https://backend.demo.taler.net/check-payment?order_id=$ORDER_ID" \
+  --header "Authorization: ApiKey sandbox"
+# HTTP/1.1 200 OK
+# [...]
+#
+# {
+#   "payment_redirect_url":
+#      "https://backend.demo.taler.net/public/trigger-pay?[...]",
+#   "paid": false
+# }
+@end verbatim
+@end example
+@end ifset
+@ifset LANG_PYTHON
+@set GOT_LANG 1
+@example
+@verbatim
+>>> import requests
+>>> r = requests.get("https://backend.demo.taler.net/check-payment",
+...                  params=dict(order_id=order_resp.json()["order_id"]),
+...                  headers={"Authorization": "ApiKey sandbox"})
+>>> print(r.json())
+@end verbatim
+@end example
+@end ifset
+@ifset LANG_PHP
+@set GOT_LANG 1
+@example
+@verbatim
+php > $ORDER_ID = "2018.072.12.48.51-014DKDKBMHPDP";
+php > $c = curl_init("https://backend.demo.taler.net/check-payment?order_id=$ORDER_ID");
+php > $options = array(CURLOPT_RETURNTRANSFER=>true,
+php (                   CURLOPT_CUSTOMREQUEST=>"GET",
+php (                   CURLOPT_HTTPHEADER=>array(
+php (                     "Authorization: ApiKey sandbox"));
+php > curl_setopt_array($c, $options);
+php > $r = curl_exec($c);
+php > echo $r;
+@end verbatim
+@end example
+@end ifset
+@ifclear GOT_LANG
+@example
+(example not available for this language)
+@end example
+@end ifclear
+
+If the @var{paid} field in the response is @code{true}, the other
+fields in the response will be different. Once the payment was
+completed by the user, the response will contain the following fields:
+
+@itemize
+@item @var{paid}: Set to @var{true}.
+@item @var{contract_terms}:  The full contract terms of the order.
+@item @var{refunded}: @code{true} if a (possibly partial) refund was granted for this purchase.
+@item @var{refunded_amount}:  Amount that was refunded.
+@item @var{last_session_id}:  Last session ID used by the customer's wallet. @xref{Session-Bound Payments}.
+@end itemize
+
+Once the frontend has confirmed that the payment was successful, it
+usually needs to trigger the business logic for the merchant to
+fulfill the merchant's obligations under the contract.
+
+
+@node Giving Refunds
+@chapter Giving Refunds
+@cindex refunds
+
+A refund in GNU Taler is a way to ``undo'' a payment.  It needs to be
+authorized by the merchant.  Refunds can be for any fraction of the
+original amount paid, but they cannot exceed the original payment.
+Refunds are
+time-limited and can only happen while the exchange holds funds for a
+particular payment in escrow.  The time during which a refund is possible
+can be controlled by setting the @code{refund_deadline} in an order.  The default
+value for this refund deadline is specified in the configuration of the
+merchant's backend.
+
+The frontend can instruct the merchant backend to authorize a refund
+by @code{POST}ing to the @code{/refund} endpoint.
+
+The refund request JSON object has the following fields:
+@itemize
+@item @var{order_id}: Identifies for which order a customer should be refunded.
+@c NOTE: the merchant does NOT default to instance "default".
+@item @var{instance}: Merchant instance to use.
+@item @var{refund}:  Amount to be refunded.  If a previous refund was
+authorized for the same order, the new amount must be higher, otherwise
+the operation has no effect. The value indicates the
+total amount to be refunded, @emph{not} an increase in the refund.
+@item @var{reason}:  Human-readable justification for the refund.  The reason
+is only used by the Back Office and is not exposed to the customer.
+@end itemize
+
+If the request is successful (indicated by HTTP status code 200), the response
+includes a @code{refund_redirect_url}.  The frontend must redirect the
+customer's browser to that URL to allow the refund to be processed by the
+wallet.
+
+This code snippet illustrates giving a refund:
+@clear GOT_LANG
+@ifset LANG_CURL
+@set GOT_LANG 1
+@example
+@verbatim
+REFUND_REQ='
+{"order_id": "2018.058.21.46.06-024C85K189H8P",
+ "refund": "KUDOS:10",
+ "instance": "default",
+ "reason": "Customer did not like the product"}
+'
+
+curl -i -X POST 'https://backend.demo.taler.net/refund' \
+     --header "Authorization: ApiKey sandbox" -d "$REFUND_REQ"
+# HTTP/1.1 200 OK
+# [...]
+#
+# {
+#   [...]
+#   "refund_redirect_url": "[...]"
+# }
+@end verbatim
+@end example
+@end ifset
+@ifset LANG_PYTHON
+@set GOT_LANG 1
+@example
+@verbatim
+>>> import requests
+>>> refund_req = dict(order_id="2018.058.21.46.06-024C85K189H8P",
+...                   refund="KUDOS:10",
+...                   instance="default",
+...                   reason="Customer did not like the product")
+>>> requests.post("https://backend.demo.taler.net/refund", json=refund_req,
+...              headers={"Authorization": "ApiKey sandbox"})
+<Response [200]>
+@end verbatim
+@end example
+@end ifset
+
+@ifset LANG_PHP
+@set GOT_LANG 1
+@example
+@verbatim
+php > $REFUND_REQ = array("order_id"=>$ORDER_ID,
+php (                     "refund"=>"KUDOS:0.5",
+php (                     "instance"=>"default",
+php (                     "reason"=>"Customer did not like product");
+php > $options = array(CURLOPT_RETURNTRANSFER=>true,
+php (                  CURLOPT_CUSTOMREQUEST=>"POST",
+php (                  CURLOPT_HTTPHEADER=>array(
+php (                    "Authorization: ApiKey sandbox"),
+php (                  CURLOPT_POSTFIELDS=>json_encode($REFUND_REQ));
+php > $c = curl_init("https://backend.demo.taler.net/refund");
+php > curl_setopt_array($c, $options);
+php > $r = curl_exec($c);
+php > echo $r;
+php > echo curl_getinfo($c, CURLINFO_HTTP_CODE);
+200 # Make sure you paid first!
+@end verbatim
+@end example
+@end ifset
+@ifclear GOT_LANG
+@example
+(example not available for this language)
+@end example
+@end ifclear
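+
+Whatever language the frontend is written in, it must then extract the
+@code{refund_redirect_url} from the JSON response and send the
+customer's browser there.  Sticking with Python for illustration (a
+minimal sketch; @code{refund_req} is the refund request object shown
+above):
+
+@example
+@verbatim
+>>> import requests
+>>> r = requests.post("https://backend.demo.taler.net/refund", json=refund_req,
+...                   headers={"Authorization": "ApiKey sandbox"})
+>>> refund_redirect_url = r.json()["refund_redirect_url"]
+>>> # The frontend now answers the customer's request with an HTTP
+>>> # redirect (e.g. "302 Found" with a Location header) to this URL.
+>>> print(refund_redirect_url)
+@end verbatim
+@end example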
+
+@node Giving Customers Tips
+@chapter Giving Customers Tips
+@cindex tips
+
+@c NOTE: Terminology should not be merchant/customer here, as
+@c the relationship is completely different. So I use
+@c ``site'' and ``visitor'', as that is right now the proper
+@c context. We may want to use more payment-ish terminology
+@c in the future, but ``donor'' and ``grantee'' sound excessive
+@c in the context of ``tips''.
+
+GNU Taler allows Web sites to grant small amounts directly to the
+visitor.  The idea is that some sites may want to incentivize actions
+such as filling out a survey or trying a new feature.  It is important
+to note that tips are not enforceable for the visitor, as there is no
+contract. It is simply a voluntary gesture of appreciation from the site
+to its visitor.  However, once a tip has been granted, the visitor
+obtains full control over the funds provided by the site.
+
+The ``merchant'' backend of the site must be properly configured for
+tipping, and sufficient funds must be made available for tipping.
+@xref{Top,,, manual, Taler Merchant Operating Manual}.
+
+To check if tipping is configured properly and if there are
+sufficient funds available for tipping, query the @code{/tip-query} endpoint:
+
+@clear GOT_LANG
+@ifset LANG_CURL
+@set GOT_LANG 1
+@example
+@verbatim
+curl -i 'https://backend.demo.taler.net/tip-query?instance=default' \
+     --header "Authorization: ApiKey sandbox"
+# HTTP/1.1 200 OK
+# [...]
+#
+# {
+#  [...]
+#  "amount_available": "KUDOS:153.47",
+#  "amount_authorized": "KUDOS:10"
+# }
+@end verbatim
+@end example
+@end ifset
+@ifset LANG_PYTHON
+@set GOT_LANG 1
+@example
+@verbatim
+>>> import requests
+>>> requests.get("https://backend.demo.taler.net/tip-query?instance=default",
+...              headers={"Authorization": "ApiKey sandbox"})
+<Response [200]>
+@end verbatim
+@end example
+@end ifset
+@ifset LANG_PHP
+@set GOT_LANG 1
+@example
+@verbatim
+php > $c = curl_init("https://backend.demo.taler.net/tip-query?instance=default");
+php > $options = array(CURLOPT_RETURNTRANSFER=>true,
+php (                   CURLOPT_CUSTOMREQUEST=>"GET",
+php (                   CURLOPT_HTTPHEADER=>array(
+php (                     "Authorization: ApiKey sandbox"));
+php > curl_setopt_array($c, $options);
+php > $r = curl_exec($c);
+php > echo curl_getinfo($c, CURLINFO_HTTP_CODE);
+200
+@end verbatim
+@end example
+@end ifset
+@ifclear GOT_LANG
+@example
+(example not available for this language)
+@end example
+@end ifclear
+
+@cindex authorize tip
+To authorize a tip, @code{POST} to @code{/tip-authorize}.  The following
+fields are recognized in the JSON
+request object:
+
+@itemize
+@item @var{amount}: Amount that should be given to the visitor as a tip.
+@item @var{instance}: Merchant instance that grants the tip (each instance may
+have its own independent tipping funds configured).
+@item @var{justification}: Description of why the tip was granted.
+Human-readable text not exposed to the customer, but used by the Back Office.
+@item @var{next_url}: The URL that the user's browser should be redirected to
+by the wallet, once the tip has been processed.
+@end itemize
+
+The response from the backend contains a @code{tip_redirect_url}.  The
+customer's browser must be
+redirected to this URL for the wallet to pick up the tip.
+@cindex pick up tip
+
+This code snippet illustrates giving a tip:
+@clear GOT_LANG
+@ifset LANG_CURL
+@set GOT_LANG 1
+@example
+@verbatim
+TIP_REQ='
+{"amount": "KUDOS:0.5",
+ "instance": "default",
+ "justification": "User filled out survey",
+ "next_url": "https://merchant.com/thanks.html"}
+'
+
+curl -i -X POST 'https://backend.demo.taler.net/tip-authorize' \
+     --header "Authorization: ApiKey sandbox" -d "$TIP_REQ"
+# HTTP/1.1 200 OK
+# [...]
+#
+# {
+#   [...]
+#   "tip_redirect_url": "[...]"
+# }
+@end verbatim
+@end example
+@end ifset
+@ifset LANG_PYTHON
+@set GOT_LANG 1
+@example
+@verbatim
+>>> import requests
+>>> tip_req = dict(amount="KUDOS:0.5",
+...                instance="default",
+...                justification="User filled out survey",
+...                next_url="https://merchant.com/thanks.html")
+>>> requests.post("https://backend.demo.taler.net/tip-authorize", json=tip_req,
+...              headers={"Authorization": "ApiKey sandbox"})
+<Response [200]>
+@end verbatim
+@end example
+@end ifset
+
+@ifset LANG_PHP
+@set GOT_LANG 1
+@example
+@verbatim
+php > $TIP_REQ = array(
+php (   "amount"=>"KUDOS:1",
+php (   "instance"=>"default",
+php (   "justification"=>"surveying",
+php (   "next_url"=>"https://example.com/survey-thanks.html";);
+php > $options = array(CURLOPT_RETURNTRANSFER=>true,
+php (                  CURLOPT_CUSTOMREQUEST=>"POST",
+php (                  CURLOPT_POSTFIELDS=>json_encode($TIP_REQ),
+php (                  CURLOPT_HTTPHEADER=>array(
+php (                    "Authorization: ApiKey sandbox"));
+php > $c = curl_init("https://backend.demo.taler.net/tip-authorize");
+php > curl_setopt_array($c, $options);
+php > $r = curl_exec($c);
+php > echo curl_getinfo($c, CURLINFO_HTTP_CODE); 
+200
+@end verbatim
+@end example
+@end ifset
+
+@ifclear GOT_LANG
+@example
+(example not available for this language)
+@end example
+@end ifclear
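+
+As with refunds, the frontend then extracts the @code{tip_redirect_url}
+from the JSON response and sends the visitor's browser there; once the
+wallet has picked up the tip, it redirects the browser to the
+@code{next_url} specified above.  A minimal Python sketch
+(@code{tip_req} is the tip request object shown above):
+
+@example
+@verbatim
+>>> import requests
+>>> r = requests.post("https://backend.demo.taler.net/tip-authorize",
+...                   json=tip_req,
+...                   headers={"Authorization": "ApiKey sandbox"})
+>>> tip_redirect_url = r.json()["tip_redirect_url"]
+>>> # Answer the visitor's request with an HTTP redirect to this URL.
+>>> print(tip_redirect_url)
+@end verbatim
+@end example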
+
+
+@node Advanced topics
+@chapter Advanced topics
+
+@menu
+* Detecting the Presence of the Taler Wallet::  Detecting the Presence of the Taler Wallet
+* Integration with the Back Office::            Integration with the Back Office
+* Session-Bound Payments::                      Session-bound payments for digital goods
+* Product Identification::                      Product Identification
+* The Taler Order Format::                      The Taler Order Format
+@end menu
+
+@node Detecting the Presence of the Taler Wallet
+@section Detecting the Presence of the Taler Wallet
+@cindex wallet
+
+Taler offers ways to detect whether a user has the wallet installed in
+their browser. This allows Web sites to adapt accordingly.  Note that
+not all platforms can do presence detection reliably.  Some platforms
+might have a Taler wallet installed as a separate App instead of using
+a Web extension.  In these cases, presence detection will fail. Thus,
+sites may want to allow users to request Taler payments even if a
+wallet could not be detected, especially for visitors using mobile devices.
+
+@subsection Presence detection without JavaScript
+Presence detection without JavaScript is based on CSS classes.  You can hide or
+show elements selectively depending on whether the wallet is detected or not.
+
+In order to work correctly, a special fallback stylesheet must be included that
+will be used when the wallet is not present.  The stylesheet can be put into
+any file, but must be included via a @code{link} tag with the @code{id}
+attribute set to @code{taler-presence-stylesheet}.  If a wallet is present, it
+will ``hijack'' this stylesheet to change how elements with certain
+classes are rendered.
+
+The following CSS classes can be used:
+@table @code
+@item taler-installed-hide
+A CSS rule will set the @code{display} property for this class to @code{none}
+once the Taler wallet is installed and enabled.
+If the wallet is not installed, @code{display} will be @code{inherit}.
+
+@item taler-installed-show
+A CSS rule will set the @code{display} property for this class to
+@code{inherit} once the Taler wallet is installed and enabled.
+If the wallet is not installed, @code{display} will be @code{none}.
+
+@end table
+
+The following is a complete example:
+
+@smallexample
+<!DOCTYPE html>
+<html data-taler-nojs="true">
+  <head>
+    <title>Tutorial</title>
+    <link rel="stylesheet"
+          type="text/css"
+          href="/web-common/taler-fallback.css"
+          id="taler-presence-stylesheet" />
+  </head>
+  <body>
+    <p class="taler-installed-hide">
+      No wallet found.
+    </p>
+    <p class="taler-installed-show">
+      Wallet found!
+    </p>
+  </body>
+</html>
+@end smallexample
+
+The @code{taler-fallback.css} file is part of Taler's @emph{web-common}
+repository, available at
+@url{https://git.taler.net/web-common.git/tree/taler-fallback.css}.
+You may have to adjust the @code{href} attribute in the HTML code above
+to point to the correct location of the @code{taler-fallback.css} file
+on your Web site.
+
+@subsection Detection with JavaScript
+
+The following functions are defined in the @code{taler} namespace of the
+@code{taler-wallet-lib} helper library, available at
+@url{https://git.taler.net/web-common.git/tree/taler-wallet-lib.js}.
+
+@table @code
+@item onPresent(callback: () => void)
+Adds a callback to be called when support for Taler payments is detected.
+
+@item onAbsent(callback: () => void)
+Adds a callback to be called when support for Taler payments is disabled.
+
+@end table
+
+Note that the registered callbacks may be called more than once. This may
+happen if a user disables or enables the wallet in the browser's extension
+settings while a shop's frontend page is open.
+
+@c FIXME: include full example of Web site including taler-wallet-lib.js
+@c and using JS detection actions.  (alert()?)
+
+@node Integration with the Back Office
+@section Integration with the Back Office
+
+Taler ships a Back Office application as a stand-alone Web application.
+The Back Office has its own documentation at
+@url{https://docs.taler.net/backoffice/html/manual.html}.
+
+Developers wishing to tightly integrate back office support for
+Taler-based payments into an existing back office application should
+focus on the wire transfer tracking and transaction history sections
+of the Taler Backend API specification at
+@url{https://docs.taler.net/api/api-merchant.html}.
+
+@node Session-Bound Payments
+@section Session-Bound Payments
+@cindex session
+
+Sometimes checking if an order has been paid for is not enough. For
+example, when selling access to online media, the publisher may want
+to be paid for exactly the same product by each customer.  Taler
+supports this model by allowing the merchant to check whether the
+``payment receipt'' is available on the user's current device.  This
+prevents users from easily sharing media access by transmitting a link
+to the fulfillment page.  Of course sophisticated users could share
+payment receipts as well, but this is not as easy as sharing a link,
+and in this case they are more likely to just share the media
+directly.
+
+To use this feature, the merchant must first assign the user's current browser
+an ephemeral @code{session_id}, usually via a session cookie.  When executing
+or re-playing a payment, the wallet will receive an additional signature
+(@code{session_sig}).  This signature certifies that the wallet
+showed a payment receipt for the respective order in the current session.
+@cindex cookie
+
+Session-bound payments are triggered by passing the @code{session_id} parameter
+to the @code{/check-payment} endpoint.  The wallet will then redirect to the
+fulfillment page, but include an additional @code{session_sig} parameter.  The
+frontend can query @code{/check-payment} with both the @code{session_id} and
+the @code{session_sig} to verify that the signature is correct.
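+
+A hypothetical Python fulfillment-page handler could combine these
+steps roughly as follows.  This is only a sketch: @code{order_id},
+@code{current_session_id} and @code{session_sig} stand in for values
+the shop obtains from its own session cookie and request parameters.
+
+@example
+@verbatim
+>>> import requests
+>>> params = dict(order_id=order_id, session_id=current_session_id)
+>>> if session_sig is not None:
+...     # The wallet redirected back with a session_sig; pass it along
+...     # so the backend can verify it for this session.
+...     params["session_sig"] = session_sig
+...
+>>> r = requests.get("https://backend.demo.taler.net/check-payment",
+...                  params=params,
+...                  headers={"Authorization": "ApiKey sandbox"})
+>>> r.json()  # inspect "paid" and the redirect information for this session
+@end verbatim
+@end example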
+
+The last session ID that was successfully used to prove that the payment
+receipt is in the user's wallet is also available as @code{last_session_id} in
+the response to @code{/check-payment}.
+@c FIXME: used for what?
+
+@node Product Identification
+@section Product Identification
+@cindex resource url
+
+In some situations the user may have paid for some digital good, but the
+frontend
+does not know the exact order ID, and thus cannot instruct the wallet to reveal
+the existing payment receipt.  This is common for simple shops without a login
+system.  In this case, the user would be prompted for payment again, even
+though they already purchased the product.
+
+To allow the wallet to instead find the existing payment receipt, the
+shop must use a unique fulfillment URL for each product.  Then, the
+frontend must provide an additional @code{resource_url} parameter
+to @code{/check-payment}.  It should identify this unique fulfillment
+URL for the product.  The wallet will then check whether it has paid
+for a contract with the same @code{resource_url} before, and if so
+replay the previous payment.
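+
+As a sketch in Python, the only difference from the earlier
+@code{/check-payment} calls is the additional parameter; the
+@code{order_id} variable and the fulfillment URL shown here are
+hypothetical:
+
+@example
+@verbatim
+>>> import requests
+>>> r = requests.get("https://backend.demo.taler.net/check-payment",
+...                  params=dict(order_id=order_id,
+...                              resource_url="https://shop.example.com/article/42"),
+...                  headers={"Authorization": "ApiKey sandbox"})
+>>> # If the wallet has already paid for a contract with this resource_url,
+>>> # it will replay that payment instead of prompting the user again.
+@end verbatim
+@end example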
+@c FIXME: design question (!): why do we not simply set a flag
+@c (``unique fulfillment url'')
+@c instead of passing the fulfillment URL a *second* time to the backend?
+@c (and having to worry about it being the same as in the order on /order)?
+
+
+@c Section describing the format of Taler contracts/proposals in detail
+
+@node The Taler Order Format
+@section The Taler Order Format
+@cindex contract
+@cindex terms
+@cindex order
+
+A Taler order can specify many details about the payment.
+This section describes each of the fields in depth.
+
+Financial amounts are always specified as a string in the format
+@code{"CURRENCY:DECIMAL_VALUE"}, for example @code{"KUDOS:1.5"}.
+
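+For orientation, here is a minimal, hypothetical order as a frontend
+might submit it when creating an order; the identifiers and URLs are
+made up, and real orders will often carry more of the fields described
+below.
+
+@example
+@verbatim
+{
+  "order_id": "2019.177-EXAMPLE",
+  "amount": "KUDOS:10",
+  "summary": "One year of example.com membership",
+  "fulfillment_url": "https://shop.example.com/membership/welcome",
+  "products": [
+    {
+      "description": "Membership (1 year)",
+      "quantity": "1",
+      "price": "KUDOS:10"
+    }
+  ]
+}
+@end verbatim
+@end example
+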
+@table @var
+@item amount
+@cindex amount
+Specifies the total amount to be paid to the merchant by the customer.
+
+@item max_fee
+@cindex fees
+@cindex maximum deposit fee
+This is the maximum total amount of deposit fees that the merchant is
+willing to pay.  If the deposit fees for the coins exceed this amount,
+the customer has to cover the excess as part of the payment total.  The fee is
+specified in the same amount format as @var{amount}.
+
+
+@item max_wire_fee
+@cindex fees
+@cindex maximum wire fee
+Maximum wire fee accepted by the merchant (customer share to be
+divided by the @var{wire_fee_amortization} factor, and further reduced
+if deposit fees are below @var{max_fee}).  Default if missing is zero.
+
+
+@item wire_fee_amortization
+@cindex fees
+@cindex maximum fee amortization
+Over how many customer transactions does the merchant expect to
+amortize wire fees on average?  If the exchange's wire fee is
+above @var{max_wire_fee}, the difference is divided by this number
+to compute the expected customer's contribution to the wire fee.
+The customer's contribution may further be reduced by the difference
+between @var{max_fee} and the sum of the actual deposit fees.
+Optional, default value if missing is 1.  0 and negative values are
+invalid and also interpreted as 1.
+
+@item pay_url
+@cindex pay_url
+Which URL accepts payments. This is the URL where the wallet will POST
+coins.
+
+@item fulfillment_url
+@cindex fulfillment URL
+Which URL should the wallet go to for obtaining the fulfillment,
+for example the HTML or PDF of an article that was bought, or an
+order tracking system for shipments, or a simple human-readable
+Web page indicating the status of the contract.
+
+@item order_id
+@cindex order ID
+Alphanumeric identifier, freely definable by the merchant.
+Used by the merchant to uniquely identify the transaction.
+
+@item summary
+@cindex summary
+Short, human-readable summary of the contract. To be used when
+displaying the contract in just one line, for example in the
+transaction history of the customer.
+
+@item timestamp
+Time at which the offer was generated.
+@c FIXME: describe time format in detail here
+
+@item pay_deadline
+@cindex payment deadline
+Timestamp of the time by which the merchant wants the exchange
+to definitively wire the money due from this contract.  Once
+this deadline expires, the exchange will aggregate all
+deposits where the contracts are past the @var{refund_deadline}
+and execute one large wire payment for them.  Amounts will be
+rounded down to the wire transfer unit; if the total amount is
+still below the wire transfer unit, it will not be disbursed.
+
+@item refund_deadline
+@cindex refund deadline
+Timestamp until which the merchant is willing (and able) to give refunds
+for the contract using Taler.  Note that the Taler exchange will hold
+the payment in escrow at least until this deadline.  Until this time,
+the merchant will be able to sign a message to trigger a refund to the
+customer.  After this time, it will no longer be possible to refund
+the customer.  Must be smaller than the @var{pay_deadline}.
+
+@item products
+@cindex product description
+Array of products that are being sold to the customer.  Each
+entry contains a tuple with the following values:
+
+@table @var
+@item description
+Description of the product.
+@item quantity
+Quantity of the items to be shipped. May specify a unit (@code{1 kg})
+or just the count.
+@item price
+Price for @var{quantity} units of this product shipped to the
+given @var{delivery_location}. Note that usually the sum of all
+of the prices should add up to the total amount of the contract,
+but it may be different due to discounts or because individual
+prices are unavailable.
+@item product_id
+Unique ID of the product in the merchant's catalog.  Can generally
+be chosen freely as it only has meaning for the merchant, but
+should be a number in the range @math{[0,2^{51})}.
+@item taxes
+Map of applicable taxes to be paid by the merchant.  The label is the
+name of the tax, e.g. @var{VAT}, @var{sales tax} or @var{income tax},
+and the value is the applicable tax amount.  Note that arbitrary
+labels are permitted, as long as they are used to identify the
+applicable tax regime.  Details may be specified by the regulator.
+This is used to declare to the customer which taxes the merchant
+intends to pay, and can be used by the customer as a receipt.
+@c FIXME: a receipt not including the item's price?
+The information is also likely to be used by tax audits of the merchant.
+@item delivery_date
+Time by which the product is to be delivered to the
+@var{delivery_location}.
+@item delivery_location
+This should give a label in the @var{locations} map, specifying
+where the item is to be delivered.
+@end table
+Values can be omitted if they are not applicable. For example, if a
+purchase is about a bundle of products that have no individual prices
+or product IDs, the @var{product_id} or @var{price} may not be
+specified in the contract.  Similarly, for virtual products delivered
+directly via the fulfillment URI, there is no delivery location.
+
+@item merchant
+@table @var
+@item address
+This should give a label in the @var{locations} map, specifying
+where the merchant is located.
+@item name
+This should give a human-readable name for the merchant's business.
+@item jurisdiction
+This should give a label in the @var{locations} map, specifying
+the jurisdiction under which this contract is to be arbitrated.
+@end table
+
+@item locations
+@cindex location
+Associative map of locations used in the contract. Labels for
+locations in this map can be freely chosen and used whenever
+a location is required in other parts of the contract.  This way,
+if the same location is required many times (such as the business
+address of the customer or the merchant), it only needs to be
+listed (and transmitted) once, and can otherwise be referred to
+via the label.  A non-exhaustive list of location attributes
+is the following:
+@table @var
+@item country
+Name of the country for delivery, as found on a postal package, e.g.
+``France''.
+@item state
+Name of the state for delivery, as found on a postal package, e.g. ``NY''.
+@item region
+Name of the region for delivery, as found on a postal package.
+@item province
+Name of the province for delivery, as found on a postal package.
+@item city
+Name of the city for delivery, as found on a postal package.
+@item ZIP code
+ZIP code for delivery, as found on a postal package.
+@item street
+Street name for delivery, as found on a postal package.
+@item street number
+Street number (number of the house) for delivery, as found on a postal package.
+@item name
+Receiver name for delivery, either business or person name.
+
+@end table
+
+Note that locations are not required to specify all of these fields,
+and they are also allowed to have additional fields.  Contract renderers
+must render at least the fields listed above, and should render fields
+that they do not understand as a key-value list.
+
+@end table
+
+
+@c **********************************************************
+@c *******************  Appendices  *************************
+@c **********************************************************
+
+@node GNU-LGPL
+@unnumbered GNU-LGPL
+@cindex license
+@cindex LGPL
+@include lgpl.texi
+
+@node GNU-FDL
+@unnumbered GNU-FDL
+@cindex license
+@cindex GNU Free Documentation License
+@include fdl-1.3.texi
+
+@node Concept Index
+@unnumbered Concept Index
+
+@printindex cp
diff --git a/doc/stamp-1 b/doc/stamp-1
new file mode 100644
index 0000000..8ec35dd
--- /dev/null
+++ b/doc/stamp-1
@@ -0,0 +1,4 @@
+@set UPDATED 20 June 2018
+@set UPDATED-MONTH June 2018
+@set EDITION 0.5.0
+@set VERSION 0.5.0
diff --git a/doc/stamp-2 b/doc/stamp-2
new file mode 100644
index 0000000..8ec35dd
--- /dev/null
+++ b/doc/stamp-2
@@ -0,0 +1,4 @@
+@set UPDATED 20 June 2018
+@set UPDATED-MONTH June 2018
+@set EDITION 0.5.0
+@set VERSION 0.5.0
diff --git a/doc/stamp-3 b/doc/stamp-3
new file mode 100644
index 0000000..8ec35dd
--- /dev/null
+++ b/doc/stamp-3
@@ -0,0 +1,4 @@
+@set UPDATED 20 June 2018
+@set UPDATED-MONTH June 2018
+@set EDITION 0.5.0
+@set VERSION 0.5.0
diff --git a/doc/stamp-vti b/doc/stamp-vti
new file mode 100644
index 0000000..21031dd
--- /dev/null
+++ b/doc/stamp-vti
@@ -0,0 +1,4 @@
+@set UPDATED 18 April 2019
+@set UPDATED-MONTH April 2019
+@set EDITION 0.5.0
+@set VERSION 0.5.0
diff --git a/doc/syntax.texi b/doc/syntax.texi
new file mode 100644
index 0000000..8aca39d
--- /dev/null
+++ b/doc/syntax.texi
@@ -0,0 +1,44 @@
+@c Syntax highlighting for texinfo's HTML output
+
+@html
+<script src="highlight.pack.js"></script>
+<script>
+var hls = [];
+var syntaxAuto = true;
+addEventListener("DOMContentLoaded", function() {
+  // Highlight blocks with fixed language
+  for (let x of hls) {
+    let next = x[0].nextElementSibling;
+    console.log("next", next);
+    let blocks = next.querySelectorAll("pre.example");
+    for (let i = 0; i < blocks.length; i++) {
+      blocks[i].classList.add("language-" + x[1]);
+      hljs.highlightBlock(blocks[i]);
+    }
+  } 
+  // auto-detect other blocks if not disabled
+  if (syntaxAuto) {
+    let blocks = document.querySelectorAll("pre.example");
+    for (let i = 0; i < blocks.length; i++) {
+      hljs.highlightBlock(blocks[i]);
+    }
+  }
+});
+</script>
+@end html
+
+@macro setsyntax{lang}
+@html
+<script>
+hls.push([document.currentScript, "\lang\"]);
+</script>
+@end html
+@end macro
+
+@macro setsyntaxnoauto{}
+@html
+<script>
+syntaxAuto = false;
+</script>
+@end html
+@end macro
diff --git a/doc/taler-config.texi b/doc/taler-config.texi
new file mode 100644
index 0000000..efca5a2
--- /dev/null
+++ b/doc/taler-config.texi
@@ -0,0 +1,47 @@
+@c This file is used both in the exchange and merchant
+@c manuals. Edits should be propagated to both Gits!
+
+@node Using taler-config
+@section Using taler-config
+@cindex taler-config
+
+The tool @code{taler-config} can be used to
+extract or manipulate configuration values; however, the configuration
+files use the well-known INI file format and can also be edited by hand.
+
+Run
+@example
+$ taler-config -s $SECTION
+@end example
+to list all of the configuration values in section @code{$SECTION}.
+
+Run
+@example
+$ taler-config -s $section -o $option
+@end example
+to extract the respective configuration value for option @code{$option}
+in section @code{$section}.
+
+Finally, to change a setting, run
+@example
+$ taler-config -s $section -o $option -V $value
+@end example
+to set the respective configuration value to @code{$value}. Note that you have
+to manually restart the Taler backend after you change the configuration to
+make the new configuration take effect.
+
+Some default options will use $-variables, such as @code{$DATADIR}
+within their value.  To expand the @code{$DATADIR} or other $-variables
+in the configuration, pass the @code{-f} option to
+@code{taler-config}.  For example, compare:
+@example
+$ taler-config -s ACCOUNT-bank \
+               -o WIRE_RESPONSE
+$ taler-config -f -s ACCOUNT-bank \
+               -o WIRE_RESPONSE
+@end example
+
+While the configuration file is typically located at
+@code{$HOME/.config/taler.conf}, an alternative location can be
+specified to @code{taler-merchant-httpd} and @code{taler-config} using
+the @code{-c} option.
diff --git a/doc/texinfo.tex b/doc/texinfo.tex
new file mode 100644
index 0000000..ac5c1d9
--- /dev/null
+++ b/doc/texinfo.tex
@@ -0,0 +1,11727 @@
+% texinfo.tex -- TeX macros to handle Texinfo files.
+% 
+% Load plain if necessary, i.e., if running under initex.
+\expandafter\ifx\csname fmtname\endcsname\relax\input plain\fi
+%
+\def\texinfoversion{2018-02-12.17}
+%
+% Copyright 1985, 1986, 1988, 1990, 1991, 1992, 1993, 1994, 1995,
+% 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
+% 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018
+% Free Software Foundation, Inc.
+%
+% This texinfo.tex file is free software: you can redistribute it and/or
+% modify it under the terms of the GNU General Public License as
+% published by the Free Software Foundation, either version 3 of the
+% License, or (at your option) any later version.
+%
+% This texinfo.tex file is distributed in the hope that it will be
+% useful, but WITHOUT ANY WARRANTY; without even the implied warranty
+% of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+% General Public License for more details.
+%
+% You should have received a copy of the GNU General Public License
+% along with this program.  If not, see <https://www.gnu.org/licenses/>.
+%
+% As a special exception, when this file is read by TeX when processing
+% a Texinfo source document, you may use the result without
+% restriction. This Exception is an additional permission under section 7
+% of the GNU General Public License, version 3 ("GPLv3").
+%
+% Please try the latest version of texinfo.tex before submitting bug
+% reports; you can get the latest version from:
+%   https://ftp.gnu.org/gnu/texinfo/ (the Texinfo release area), or
+%   https://ftpmirror.gnu.org/texinfo/ (same, via a mirror), or
+%   https://www.gnu.org/software/texinfo/ (the Texinfo home page)
+% The texinfo.tex in any given distribution could well be out
+% of date, so if that's what you're using, please check.
+%
+% Send bug reports to address@hidden.  Please include including a
+% complete document in each bug report with which we can reproduce the
+% problem.  Patches are, of course, greatly appreciated.
+%
+% To process a Texinfo manual with TeX, it's most reliable to use the
+% texi2dvi shell script that comes with the distribution.  For a simple
+% manual foo.texi, however, you can get away with this:
+%   tex foo.texi
+%   texindex foo.??
+%   tex foo.texi
+%   tex foo.texi
+%   dvips foo.dvi -o  # or whatever; this makes foo.ps.
+% The extra TeX runs get the cross-reference information correct.
+% Sometimes one run after texindex suffices, and sometimes you need more
+% than two; texi2dvi does it as many times as necessary.
+%
+% It is possible to adapt texinfo.tex for other languages, to some
+% extent.  You can get the existing language-specific files from the
+% full Texinfo distribution.
+%
+% The GNU Texinfo home page is https://www.gnu.org/software/texinfo.
+
+
+\message{Loading texinfo [version \texinfoversion]:}
+
+% If in a .fmt file, print the version number
+% and turn on active characters that we couldn't do earlier because
+% they might have appeared in the input file name.
+\everyjob{\message{[Texinfo version \texinfoversion]}%
+  \catcode`+=\active \catcode`\_=\active}
+
+% LaTeX's \typeout.  This ensures that the messages it is used for
+% are identical in format to the corresponding ones from latex/pdflatex.
+\def\typeout{\immediate\write17}%
+
+\chardef\other=12
+
+% We never want plain's \outer definition of \+ in Texinfo.
+% For @tex, we can use \tabalign.
+\let\+ = \relax
+
+% Save some plain tex macros whose names we will redefine.
+\let\ptexb=\b
+\let\ptexbullet=\bullet
+\let\ptexc=\c
+\let\ptexcomma=\,
+\let\ptexdot=\.
+\let\ptexdots=\dots
+\let\ptexend=\end
+\let\ptexequiv=\equiv
+\let\ptexexclam=\!
+\let\ptexfootnote=\footnote
+\let\ptexgtr=>
+\let\ptexhat=^
+\let\ptexi=\i
+\let\ptexindent=\indent
+\let\ptexinsert=\insert
+\let\ptexlbrace=\{
+\let\ptexless=<
+\let\ptexnewwrite\newwrite
+\let\ptexnoindent=\noindent
+\let\ptexplus=+
+\let\ptexraggedright=\raggedright
+\let\ptexrbrace=\}
+\let\ptexslash=\/
+\let\ptexsp=\sp
+\let\ptexstar=\*
+\let\ptexsup=\sup
+\let\ptext=\t
+\let\ptextop=\top
+{\catcode`\'=\active \global\let\ptexquoteright'}% active in plain's math mode
+
+% If this character appears in an error message or help string, it
+% starts a new line in the output.
+\newlinechar = `^^J
+
+% Use TeX 3.0's \inputlineno to get the line number, for better error
+% messages, but if we're using an old version of TeX, don't do anything.
+%
+\ifx\inputlineno\thisisundefined
+  \let\linenumber = \empty % Pre-3.0.
+\else
+  \def\linenumber{l.\the\inputlineno:\space}
+\fi
+
+% Set up fixed words for English if not already set.
+\ifx\putwordAppendix\undefined  \gdef\putwordAppendix{Appendix}\fi
+\ifx\putwordChapter\undefined   \gdef\putwordChapter{Chapter}\fi
+\ifx\putworderror\undefined     \gdef\putworderror{error}\fi
+\ifx\putwordfile\undefined      \gdef\putwordfile{file}\fi
+\ifx\putwordin\undefined        \gdef\putwordin{in}\fi
+\ifx\putwordIndexIsEmpty\undefined       \gdef\putwordIndexIsEmpty{(Index is 
empty)}\fi
+\ifx\putwordIndexNonexistent\undefined   \gdef\putwordIndexNonexistent{(Index 
is nonexistent)}\fi
+\ifx\putwordInfo\undefined      \gdef\putwordInfo{Info}\fi
+\ifx\putwordInstanceVariableof\undefined 
\gdef\putwordInstanceVariableof{Instance Variable of}\fi
+\ifx\putwordMethodon\undefined  \gdef\putwordMethodon{Method on}\fi
+\ifx\putwordNoTitle\undefined   \gdef\putwordNoTitle{No Title}\fi
+\ifx\putwordof\undefined        \gdef\putwordof{of}\fi
+\ifx\putwordon\undefined        \gdef\putwordon{on}\fi
+\ifx\putwordpage\undefined      \gdef\putwordpage{page}\fi
+\ifx\putwordsection\undefined   \gdef\putwordsection{section}\fi
+\ifx\putwordSection\undefined   \gdef\putwordSection{Section}\fi
+\ifx\putwordsee\undefined       \gdef\putwordsee{see}\fi
+\ifx\putwordSee\undefined       \gdef\putwordSee{See}\fi
+\ifx\putwordShortTOC\undefined  \gdef\putwordShortTOC{Short Contents}\fi
+\ifx\putwordTOC\undefined       \gdef\putwordTOC{Table of Contents}\fi
+%
+\ifx\putwordMJan\undefined \gdef\putwordMJan{January}\fi
+\ifx\putwordMFeb\undefined \gdef\putwordMFeb{February}\fi
+\ifx\putwordMMar\undefined \gdef\putwordMMar{March}\fi
+\ifx\putwordMApr\undefined \gdef\putwordMApr{April}\fi
+\ifx\putwordMMay\undefined \gdef\putwordMMay{May}\fi
+\ifx\putwordMJun\undefined \gdef\putwordMJun{June}\fi
+\ifx\putwordMJul\undefined \gdef\putwordMJul{July}\fi
+\ifx\putwordMAug\undefined \gdef\putwordMAug{August}\fi
+\ifx\putwordMSep\undefined \gdef\putwordMSep{September}\fi
+\ifx\putwordMOct\undefined \gdef\putwordMOct{October}\fi
+\ifx\putwordMNov\undefined \gdef\putwordMNov{November}\fi
+\ifx\putwordMDec\undefined \gdef\putwordMDec{December}\fi
+%
+\ifx\putwordDefmac\undefined    \gdef\putwordDefmac{Macro}\fi
+\ifx\putwordDefspec\undefined   \gdef\putwordDefspec{Special Form}\fi
+\ifx\putwordDefvar\undefined    \gdef\putwordDefvar{Variable}\fi
+\ifx\putwordDefopt\undefined    \gdef\putwordDefopt{User Option}\fi
+\ifx\putwordDeffunc\undefined   \gdef\putwordDeffunc{Function}\fi
+
+% Give the space character the catcode for a space.
+\def\spaceisspace{\catcode`\ =10\relax}
+
+% Likewise for ^^M, the end of line character.
+\def\endlineisspace{\catcode13=10\relax}
+
+\chardef\dashChar  = `\-
+\chardef\slashChar = `\/
+\chardef\underChar = `\_
+
+% Ignore a token.
+%
+\def\gobble#1{}
+
+% The following is used inside several \edef's.
+\def\makecsname#1{\expandafter\noexpand\csname#1\endcsname}
+
+% Hyphenation fixes.
+\hyphenation{
+  Flor-i-da Ghost-script Ghost-view Mac-OS Post-Script
+  auto-ma-ti-cal-ly ap-pen-dix bit-map bit-maps
+  data-base data-bases eshell fall-ing half-way long-est man-u-script
+  man-u-scripts mini-buf-fer mini-buf-fers over-view par-a-digm
+  par-a-digms rath-er rec-tan-gu-lar ro-bot-ics se-vere-ly set-up spa-ces
+  spell-ing spell-ings
+  stand-alone strong-est time-stamp time-stamps which-ever white-space
+  wide-spread wrap-around
+}
+
+% Sometimes it is convenient to have everything in the transcript file
+% and nothing on the terminal.  We don't just call \tracingall here,
+% since that produces some useless output on the terminal.  We also make
+% some effort to order the tracing commands to reduce output in the log
+% file; cf. trace.sty in LaTeX.
+%
+\def\gloggingall{\begingroup \globaldefs = 1 \loggingall \endgroup}%
+\def\loggingall{%
+  \tracingstats2
+  \tracingpages1
+  \tracinglostchars2  % 2 gives us more in etex
+  \tracingparagraphs1
+  \tracingoutput1
+  \tracingmacros2
+  \tracingrestores1
+  \showboxbreadth\maxdimen \showboxdepth\maxdimen
+  \ifx\eTeXversion\thisisundefined\else % etex gives us more logging
+    \tracingscantokens1
+    \tracingifs1
+    \tracinggroups1
+    \tracingnesting2
+    \tracingassigns1
+  \fi
+  \tracingcommands3  % 3 gives us more in etex
+  \errorcontextlines16
+}%
+
+% @errormsg{MSG}.  Do the index-like expansions on MSG, but if things
+% aren't perfect, it's not the end of the world, being an error message,
+% after all.
+% 
+\def\errormsg{\begingroup \indexnofonts \doerrormsg}
+\def\doerrormsg#1{\errmessage{#1}}
+
+% add check for \lastpenalty to plain's definitions.  If the last thing
+% we did was a \nobreak, we don't want to insert more space.
+%
+\def\smallbreak{\ifnum\lastpenalty<10000\par\ifdim\lastskip<\smallskipamount
+  \removelastskip\penalty-50\smallskip\fi\fi}
+\def\medbreak{\ifnum\lastpenalty<10000\par\ifdim\lastskip<\medskipamount
+  \removelastskip\penalty-100\medskip\fi\fi}
+\def\bigbreak{\ifnum\lastpenalty<10000\par\ifdim\lastskip<\bigskipamount
+  \removelastskip\penalty-200\bigskip\fi\fi}
+
+% Output routine
+%
+
+% For a final copy, take out the rectangles
+% that mark overfull boxes (in case you have decided
+% that the text looks ok even though it passes the margin).
+%
+\def\finalout{\overfullrule=0pt }
+
+% Do @cropmarks to get crop marks.
+%
+\newif\ifcropmarks
+\let\cropmarks = \cropmarkstrue
+%
+% Dimensions to add cropmarks at corners.
+% Added by P. A. MacKay, 12 Nov. 1986
+%
+\newdimen\outerhsize \newdimen\outervsize % set by the paper size routines
+\newdimen\cornerlong  \cornerlong=1pc
+\newdimen\cornerthick \cornerthick=.3pt
+\newdimen\topandbottommargin \topandbottommargin=.75in
+
+% Output a mark which sets \thischapter, \thissection and \thiscolor.
+% We dump everything together because we only have one kind of mark.
+% This works because we only use \botmark / \topmark, not \firstmark.
+%
+% A mark contains a subexpression of the \ifcase ... \fi construct.
+% \get*marks macros below extract the needed part using \ifcase.
+%
+% Another complication is to let the user choose whether \thischapter
+% (\thissection) refers to the chapter (section) in effect at the top
+% of a page, or that at the bottom of a page.
+
+% \domark is called twice inside \chapmacro, to add one
+% mark before the section break, and one after.
+%   In the second call \prevchapterdefs is the same as \lastchapterdefs,
+% and \prevsectiondefs is the same as \lastsectiondefs.
+%   Then if the page is not broken at the mark, some of the previous
+% section appears on the page, and we can get the name of this section
+% from \firstmark for @everyheadingmarks top.
+%   @everyheadingmarks bottom uses \botmark.
+%
+% See page 260 of The TeXbook.
+\def\domark{%
+  \toks0=\expandafter{\lastchapterdefs}%
+  \toks2=\expandafter{\lastsectiondefs}%
+  \toks4=\expandafter{\prevchapterdefs}%
+  \toks6=\expandafter{\prevsectiondefs}%
+  \toks8=\expandafter{\lastcolordefs}%
+  \mark{%
+                   \the\toks0 \the\toks2  % 0: marks for @everyheadingmarks top
+      \noexpand\or \the\toks4 \the\toks6  % 1: for @everyheadingmarks bottom
+    \noexpand\else \the\toks8             % 2: color marks
+  }%
+}
+
+% \gettopheadingmarks, \getbottomheadingmarks,
+% \getcolormarks - extract needed part of mark.
+%
+% \topmark doesn't work for the very first chapter (after the title
+% page or the contents), so we use \firstmark there -- this gets us
+% the mark with the chapter defs, unless the user sneaks in, e.g.,
+% @setcolor (or @url, or @link, etc.) between @contents and the very
+% first @chapter.
+\def\gettopheadingmarks{%
+  \ifcase0\topmark\fi
+  \ifx\thischapter\empty \ifcase0\firstmark\fi \fi
+}
+\def\getbottomheadingmarks{\ifcase1\botmark\fi}
+\def\getcolormarks{\ifcase2\topmark\fi}
+
+% Avoid "undefined control sequence" errors.
+\def\lastchapterdefs{}
+\def\lastsectiondefs{}
+\def\lastsection{}
+\def\prevchapterdefs{}
+\def\prevsectiondefs{}
+\def\lastcolordefs{}
+
+% Margin to add to right of even pages, to left of odd pages.
+\newdimen\bindingoffset
+\newdimen\normaloffset
+\newdimen\txipagewidth \newdimen\txipageheight
+
+% Main output routine.
+%
+\chardef\PAGE = 255
+\output = {\onepageout{\pagecontents\PAGE}}
+
+\newbox\headlinebox
+\newbox\footlinebox
+
+% \onepageout takes a vbox as an argument.
+% \shipout a vbox for a single page, adding an optional header, footer,
+% cropmarks, and footnote.  This also causes index entries for this page
+% to be written to the auxiliary files.
+%
+\def\onepageout#1{%
+  \ifcropmarks \hoffset=0pt \else \hoffset=\normaloffset \fi
+  %
+  \ifodd\pageno  \advance\hoffset by \bindingoffset
+  \else \advance\hoffset by -\bindingoffset\fi
+  %
+  % Common context changes for both heading and footing.
+  % Do this outside of the \shipout so @code etc. will be expanded in
+  % the headline as they should be, not taken literally (outputting ''code).
+  \def\commmonheadfootline{\let\hsize=\txipagewidth \texinfochars}
+  %
+  % Retrieve the information for the headings from the marks in the page,
+  % and call Plain TeX's \makeheadline and \makefootline, which use the
+  % values in \headline and \footline.
+  %
+  % This is used to check if we are on the first page of a chapter.
+  \ifcase1\topmark\fi
+  \let\prevchaptername\thischaptername
+  \ifcase0\firstmark\fi
+  \let\curchaptername\thischaptername
+  %
+  \ifodd\pageno \getoddheadingmarks \else \getevenheadingmarks \fi
+  \ifodd\pageno \getoddfootingmarks \else \getevenfootingmarks \fi
+  %
+  \ifx\curchaptername\prevchaptername
+    \let\thischapterheading\thischapter
+  \else
+    % \thischapterheading is the same as \thischapter except it is blank
+    % for the first page of a chapter.  This is to prevent the chapter name 
+    % being shown twice.
+    \def\thischapterheading{}%
+  \fi
+  %
+  \global\setbox\headlinebox = \vbox{\commmonheadfootline \makeheadline}%
+  \global\setbox\footlinebox = \vbox{\commmonheadfootline \makefootline}%
+  %
+  {%
+    % Set context for writing to auxiliary files like index files.
+    % Have to do this stuff outside the \shipout because we want it to
+    % take effect in \write's, yet the group defined by the \vbox ends
+    % before the \shipout runs.
+    %
+    \indexdummies         % don't expand commands in the output.
+    \normalturnoffactive  % \ in index entries must not stay \, e.g., if
+               % the page break happens to be in the middle of an example.
+               % We don't want .vr (or whatever) entries like this:
+               % \entry{{\indexbackslash }acronym}{32}{\code {\acronym}}
+               % "\acronym" won't work when it's read back in;
+               % it needs to be
+               % {\code {{\backslashcurfont }acronym}
+    \shipout\vbox{%
+      % Do this early so pdf references go to the beginning of the page.
+      \ifpdfmakepagedest \pdfdest name{\the\pageno} xyz\fi
+      %
+      \ifcropmarks \vbox to \outervsize\bgroup
+        \hsize = \outerhsize
+        \vskip-\topandbottommargin
+        \vtop to0pt{%
+          \line{\ewtop\hfil\ewtop}%
+          \nointerlineskip
+          \line{%
+            \vbox{\moveleft\cornerthick\nstop}%
+            \hfill
+            \vbox{\moveright\cornerthick\nstop}%
+          }%
+          \vss}%
+        \vskip\topandbottommargin
+        \line\bgroup
+          \hfil % center the page within the outer (page) hsize.
+          \ifodd\pageno\hskip\bindingoffset\fi
+          \vbox\bgroup
+      \fi
+      %
+      \unvbox\headlinebox
+      \pagebody{#1}%
+      \ifdim\ht\footlinebox > 0pt
+        % Only leave this space if the footline is nonempty.
+        % (We lessened \vsize for it in \oddfootingyyy.)
+        % The \baselineskip=24pt in plain's \makefootline has no effect.
+        \vskip 24pt
+        \unvbox\footlinebox
+      \fi
+      %
+      \ifcropmarks
+          \egroup % end of \vbox\bgroup
+        \hfil\egroup % end of (centering) \line\bgroup
+        \vskip\topandbottommargin plus1fill minus1fill
+        \boxmaxdepth = \cornerthick
+        \vbox to0pt{\vss
+          \line{%
+            \vbox{\moveleft\cornerthick\nsbot}%
+            \hfill
+            \vbox{\moveright\cornerthick\nsbot}%
+          }%
+          \nointerlineskip
+          \line{\ewbot\hfil\ewbot}%
+        }%
+      \egroup % \vbox from first cropmarks clause
+      \fi
+    }% end of \shipout\vbox
+  }% end of group with \indexdummies
+  \advancepageno
+  \ifnum\outputpenalty>-20000 \else\dosupereject\fi
+}
+
+\newinsert\margin \dimen\margin=\maxdimen
+
+% Main part of page, including any footnotes
+\def\pagebody#1{\vbox to\txipageheight{\boxmaxdepth=\maxdepth #1}}
+{\catcode`\@ =11
+\gdef\pagecontents#1{\ifvoid\topins\else\unvbox\topins\fi
+% marginal hacks, address@hidden (Juha Takala)
+\ifvoid\margin\else % marginal info is present
+  \rlap{\kern\hsize\vbox to\z@{\kern1pt\box\margin \vss}}\fi
+\dimen@=\dp#1\relax \unvbox#1\relax
+\ifvoid\footins\else\vskip\skip\footins\footnoterule \unvbox\footins\fi
+\ifr@ggedbottom \kern-\dimen@ \vfil \fi}
+}
+
+% Here are the rules for the cropmarks.  Note that they are
+% offset so that the space between them is truly \outerhsize or \outervsize
+% (P. A. MacKay, 12 November, 1986)
+%
+\def\ewtop{\vrule height\cornerthick depth0pt width\cornerlong}
+\def\nstop{\vbox
+  {\hrule height\cornerthick depth\cornerlong width\cornerthick}}
+\def\ewbot{\vrule height0pt depth\cornerthick width\cornerlong}
+\def\nsbot{\vbox
+  {\hrule height\cornerlong depth\cornerthick width\cornerthick}}
+
+
+% Argument parsing
+
+% Parse an argument, then pass it to #1.  The argument is the rest of
+% the input line (except we remove a trailing comment).  #1 should be a
+% macro which expects an ordinary undelimited TeX argument.
+% For example, \def\foo{\parsearg\fooxxx}.
+%
+\def\parsearg{\parseargusing{}}
+\def\parseargusing#1#2{%
+  \def\argtorun{#2}%
+  \begingroup
+    \obeylines
+    \spaceisspace
+    #1%
+    \parseargline\empty% Insert the \empty token, see \finishparsearg below.
+}
+
+{\obeylines %
+  \gdef\parseargline#1^^M{%
+    \endgroup % End of the group started in \parsearg.
+    \argremovecomment #1\comment\ArgTerm%
+  }%
+}
+
+% First remove any @comment, then any @c comment.  Also remove a @texinfoc
+% comment (see \scanmacro for details).  Pass the result on to \argcheckspaces.
+\def\argremovecomment#1\comment#2\ArgTerm{\argremovec #1\c\ArgTerm}
+\def\argremovec#1\c#2\ArgTerm{\argremovetexinfoc #1\texinfoc\ArgTerm}
+\def\argremovetexinfoc#1\texinfoc#2\ArgTerm{\argcheckspaces#1\^^M\ArgTerm}
+
+% Each occurrence of `\^^M' or `<space>\^^M' is replaced by a single space.
+%
+% \argremovec might leave us with trailing space, e.g.,
+%    @end itemize  @c foo
+% This space token undergoes the same procedure and is eventually removed
+% by \finishparsearg.
+%
+\def\argcheckspaces#1\^^M{\argcheckspacesX#1\^^M \^^M}
+\def\argcheckspacesX#1 \^^M{\argcheckspacesY#1\^^M}
+\def\argcheckspacesY#1\^^M#2\^^M#3\ArgTerm{%
+  \def\temp{#3}%
+  \ifx\temp\empty
+    % Do not use \next, perhaps the caller of \parsearg uses it; reuse \temp:
+    \let\temp\finishparsearg
+  \else
+    \let\temp\argcheckspaces
+  \fi
+  % Put the space token in:
+  \temp#1 #3\ArgTerm
+}
+
+% If a _delimited_ argument is enclosed in braces, they get stripped; so
+% to get _exactly_ the rest of the line, we had to prevent such situation.
+% We prepended an \empty token at the very beginning and we expand it now,
+% just before passing the control to \argtorun.
+% (Similarly, we have to think about #3 of \argcheckspacesY above: it is
+% either the null string, or it ends with \^^M---thus there is no danger
+% that a pair of braces would be stripped.
+%
+% But first, we have to remove the trailing space token.
+%
+\def\finishparsearg#1 \ArgTerm{\expandafter\argtorun\expandafter{#1}}
+
+
+% \parseargdef - define a command taking an argument on the line
+%
+% \parseargdef\foo{...}
+%      is roughly equivalent to
+% \def\foo{\parsearg\Xfoo}
+% \def\Xfoo#1{...}
+\def\parseargdef#1{%
+  \expandafter \doparseargdef \csname\string#1\endcsname #1%
+}
+\def\doparseargdef#1#2{%
+  \def#2{\parsearg#1}%
+  \def#1##1%
+}
+
+% Several utility definitions with active space:
+{
+  \obeyspaces
+  \gdef\obeyedspace{ }
+
+  % Make each space character in the input produce a normal interword
+  % space in the output.  Don't allow a line break at this space, as this
+  % is used only in environments like @example, where each line of input
+  % should produce a line of output anyway.
+  %
+  \gdef\sepspaces{\obeyspaces\let =\tie}
+
+  % If an index command is used in an @example environment, any spaces
+  % therein should become regular spaces in the raw index file, not the
+  % expansion of \tie (\leavevmode \penalty \@M \ ).
+  \gdef\unsepspaces{\let =\space}
+}
+
+
+\def\flushcr{\ifx\par\lisppar \def\next##1{}\else \let\next=\relax \fi \next}
+
+% Define the framework for environments in texinfo.tex.  It's used like this:
+%
+%   \envdef\foo{...}
+%   \def\Efoo{...}
+%
+% It's the responsibility of \envdef to insert \begingroup before the
+% actual body; @end closes the group after calling \Efoo.  \envdef also
+% defines \thisenv, so the current environment is known; @end checks
+% whether the environment name matches.  The \checkenv macro can also be
+% used to check whether the current environment is the one expected.
+%
+% Non-false conditionals (@iftex, @ifset) don't fit into this, so they
+% are not treated as environments; they don't open a group.  (The
+% implementation of @end takes care not to call \endgroup in this
+% special case.)
+
+
+% At run-time, environments start with this:
+\def\startenvironment#1{\begingroup\def\thisenv{#1}}
+% initialize
+\let\thisenv\empty
+
+% ... but they get defined via ``\envdef\foo{...}'':
+\long\def\envdef#1#2{\def#1{\startenvironment#1#2}}
+\def\envparseargdef#1#2{\parseargdef#1{\startenvironment#1#2}}
+
+% Check whether we're in the right environment:
+\def\checkenv#1{%
+  \def\temp{#1}%
+  \ifx\thisenv\temp
+  \else
+    \badenverr
+  \fi
+}
+
+% Environment mismatch, #1 expected:
+\def\badenverr{%
+  \errhelp = \EMsimple
+  \errmessage{This command can appear only \inenvironment\temp,
+    not \inenvironment\thisenv}%
+}
+\def\inenvironment#1{%
+  \ifx#1\empty
+    outside of any environment%
+  \else
+    in environment \expandafter\string#1%
+  \fi
+}
+
+% @end foo executes the definition of \Efoo.
+% But first, it executes a specialized version of \checkenv
+%
+\parseargdef\end{%
+  \if 1\csname iscond.#1\endcsname
+  \else
+    % The general wording of \badenverr may not be ideal.
+    \expandafter\checkenv\csname#1\endcsname
+    \csname E#1\endcsname
+    \endgroup
+  \fi
+}
+
+\newhelp\EMsimple{Press RETURN to continue.}
+
+
+% Be sure we're in horizontal mode when doing a tie, since we make space
+% equivalent to this in @example-like environments. Otherwise, a space
+% at the beginning of a line will start with \penalty -- and
+% since \penalty is valid in vertical mode, we'd end up putting the
+% penalty on the vertical list instead of in the new paragraph.
+{\catcode`@ = 11
+ % Avoid using \@M directly, because that causes trouble
+ % if the definition is written into an index file.
+ \global\let\tiepenalty = \@M
+ \gdef\tie{\leavevmode\penalty\tiepenalty\ }
+}
+
+% @: forces normal size whitespace following.
+\def\:{\spacefactor=1000 }
+
+% @* forces a line break.
+\def\*{\unskip\hfil\break\hbox{}\ignorespaces}
+
+% @/ allows a line break.
+\let\/=\allowbreak
+
+% @. is an end-of-sentence period.
+\def\.{.\spacefactor=\endofsentencespacefactor\space}
+
+% @! is an end-of-sentence bang.
+\def\!{!\spacefactor=\endofsentencespacefactor\space}
+
+% @? is an end-of-sentence query.
+\def\?{?\spacefactor=\endofsentencespacefactor\space}
+
+% @frenchspacing on|off  says whether to put extra space after punctuation.
+%
+\def\onword{on}
+\def\offword{off}
+%
+\parseargdef\frenchspacing{%
+  \def\temp{#1}%
+  \ifx\temp\onword \plainfrenchspacing
+  \else\ifx\temp\offword \plainnonfrenchspacing
+  \else
+    \errhelp = \EMsimple
+    \errmessage{Unknown @frenchspacing option `\temp', must be on|off}%
+  \fi\fi
+}
+
+% @w prevents a word break.  Without the \leavevmode, @w at the
+% beginning of a paragraph, when TeX is still in vertical mode, would
+% produce a whole line of output instead of starting the paragraph.
+\def\w#1{\leavevmode\hbox{#1}}
+
+% @group ... @end group forces ... to be all on one page, by enclosing
+% it in a TeX vbox.  We use \vtop instead of \vbox to construct the box
+% to keep its height that of a normal line.  According to the rules for
+% \topskip (p.114 of the TeXbook), the glue inserted is
+% max (\topskip - \ht (first item), 0).  If that height is large,
+% therefore, no glue is inserted, and the space between the headline and
+% the text is small, which looks bad.
+%
+% Another complication is that the group might be very large.  This can
+% cause the glue on the previous page to be unduly stretched, because it
+% does not have much material.  In this case, it's better to add an
+% explicit \vfill so that the extra space is at the bottom.  The
+% threshold for doing this is if the group is more than \vfilllimit
+% percent of a page (\vfilllimit can be changed inside of @tex).
+%
+\newbox\groupbox
+\def\vfilllimit{0.7}
+%
+\envdef\group{%
+  \ifnum\catcode`\^^M=\active \else
+    \errhelp = \groupinvalidhelp
+    \errmessage{@group invalid in context where filling is enabled}%
+  \fi
+  \startsavinginserts
+  %
+  \setbox\groupbox = \vtop\bgroup
+    % Do @comment since we are called inside an environment such as
+    % @example, where each end-of-line in the input causes an
+    % end-of-line in the output.  We don't want the end-of-line after
+    % the `@group' to put extra space in the output.  Since @group
+    % should appear on a line by itself (according to the Texinfo
+    % manual), we don't worry about eating any user text.
+    \comment
+}
+%
+% The \vtop produces a box with normal height and large depth; thus, TeX puts
+% \baselineskip glue before it, and (when the next line of text is done)
+% \lineskip glue after it.  Thus, space below is not quite equal to space
+% above.  But it's pretty close.
+\def\Egroup{%
+    % To get correct interline space between the last line of the group
+    % and the first line afterwards, we have to propagate \prevdepth.
+    \endgraf % Not \par, as it may have been set to \lisppar.
+    \global\dimen1 = \prevdepth
+  \egroup           % End the \vtop.
+  \addgroupbox
+  \prevdepth = \dimen1
+  \checkinserts
+}
+
+\def\addgroupbox{
+  % \dimen0 is the vertical size of the group's box.
+  \dimen0 = \ht\groupbox  \advance\dimen0 by \dp\groupbox
+  % \dimen2 is how much space is left on the page (more or less).
+  \dimen2 = \txipageheight   \advance\dimen2 by -\pagetotal
+  % if the group doesn't fit on the current page, and it's a big big
+  % group, force a page break.
+  \ifdim \dimen0 > \dimen2
+    \ifdim \pagetotal < \vfilllimit\txipageheight
+      \page
+    \fi
+  \fi
+  \box\groupbox
+}
+
+%
+% TeX puts in an \escapechar (i.e., `@') at the beginning of the help
+% message, so this ends up printing `@group can only ...'.
+%
+\newhelp\groupinvalidhelp{%
+group can only be used in environments such as @example,^^J%
+where each line of input produces a line of output.}
+
+% @need space-in-mils
+% forces a page break if there is not space-in-mils remaining.
+
+\newdimen\mil  \mil=0.001in
+
+\parseargdef\need{%
+  % Ensure vertical mode, so we don't make a big box in the middle of a
+  % paragraph.
+  \par
+  %
+  % If the @need value is less than one line space, it's useless.
+  \dimen0 = #1\mil
+  \dimen2 = \ht\strutbox
+  \advance\dimen2 by \dp\strutbox
+  \ifdim\dimen0 > \dimen2
+    %
+    % Do a \strut just to make the height of this box be normal, so the
+    % normal leading is inserted relative to the preceding line.
+    % And a page break here is fine.
+    \vtop to #1\mil{\strut\vfil}%
+    %
+    % TeX does not even consider page breaks if a penalty added to the
+    % main vertical list is 10000 or more.  But in order to see if the
+    % empty box we just added fits on the page, we must make it consider
+    % page breaks.  On the other hand, we don't want to actually break the
+    % page after the empty box.  So we use a penalty of 9999.
+    %
+    % There is an extremely small chance that TeX will actually break the
+    % page at this \penalty, if there are no other feasible breakpoints in
+    % sight.  (If the user is using lots of big @group commands, which
+    % almost-but-not-quite fill up a page, TeX will have a hard time doing
+    % good page breaking, for example.)  However, I could not construct an
+    % example where a page broke at this \penalty; if it happens in a real
+    % document, then we can reconsider our strategy.
+    \penalty9999
+    %
+    % Back up by the size of the box, whether we did a page break or not.
+    \kern -#1\mil
+    %
+    % Do not allow a page break right after this kern.
+    \nobreak
+  \fi
+}
+
+% @br   forces paragraph break (and is undocumented).
+
+\let\br = \par
+
+% @page forces the start of a new page.
+%
+\def\page{\par\vfill\supereject}
+
+% @exdent text....
+% outputs text on separate line in roman font, starting at standard page margin
+
+% This records the amount of indent in the innermost environment.
+% That's how much \exdent should take out.
+\newskip\exdentamount
+
+% This defn is used inside fill environments such as @defun.
+\parseargdef\exdent{\hfil\break\hbox{\kern -\exdentamount{\rm#1}}\hfil\break}
+
+% This defn is used inside nofill environments such as @example.
+\parseargdef\nofillexdent{{\advance \leftskip by -\exdentamount
+  \leftline{\hskip\leftskip{\rm#1}}}}
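+%
+% For example, inside @example, a line such as
+%   @exdent This line is not indented.
+% is set in roman without the environment's usual indentation
+% (\exdentamount is removed from the left margin for that line).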
+
+% @inmargin{WHICH}{TEXT} puts TEXT in the WHICH margin next to the current
+% paragraph.  For more general purposes, use the \margin insertion
+% class.  WHICH is `l' or `r'.  Not documented, written for gawk manual.
+%
+\newskip\inmarginspacing \inmarginspacing=1cm
+\def\strutdepth{\dp\strutbox}
+%
+\def\doinmargin#1#2{\strut\vadjust{%
+  \nobreak
+  \kern-\strutdepth
+  \vtop to \strutdepth{%
+    \baselineskip=\strutdepth
+    \vss
+    % if you have multiple lines of stuff to put here, you'll need to
+    % make the vbox yourself of the appropriate size.
+    \ifx#1l%
+      \llap{\ignorespaces #2\hskip\inmarginspacing}%
+    \else
+      \rlap{\hskip\hsize \hskip\inmarginspacing \ignorespaces #2}%
+    \fi
+    \null
+  }%
+}}
+\def\inleftmargin{\doinmargin l}
+\def\inrightmargin{\doinmargin r}
+%
+% @inmargin{TEXT [, RIGHT-TEXT]}
+% (if RIGHT-TEXT is given, use TEXT for left page, RIGHT-TEXT for right;
+% else use TEXT for both).
+%
+\def\inmargin#1{\parseinmargin #1,,\finish}
+\def\parseinmargin#1,#2,#3\finish{% not perfect, but better than nothing.
+  \setbox0 = \hbox{\ignorespaces #2}%
+  \ifdim\wd0 > 0pt
+    \def\lefttext{#1}%  have both texts
+    \def\righttext{#2}%
+  \else
+    \def\lefttext{#1}%  have only one text
+    \def\righttext{#1}%
+  \fi
+  %
+  \ifodd\pageno
+    \def\temp{\inrightmargin\righttext}% odd page -> outside is right margin
+  \else
+    \def\temp{\inleftmargin\lefttext}%
+  \fi
+  \temp
+}
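+%
+% For example,
+%   @inmargin{NOTE}
+% puts NOTE in the outside margin next to the current paragraph, while
+%   @inmargin{left text, right text}
+% uses the first argument on left-hand (even) pages and the second on
+% right-hand (odd) pages.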
+
+% @include FILE -- \input text of FILE.
+%
+\def\include{\parseargusing\filenamecatcodes\includezzz}
+\def\includezzz#1{%
+  \pushthisfilestack
+  \def\thisfile{#1}%
+  {%
+    \makevalueexpandable  % we want to expand any @value in FILE.
+    \turnoffactive        % and allow special characters in the expansion
+    \indexnofonts         % Allow `@@' and other weird things in file names.
+    \wlog{texinfo.tex: doing @include of #1^^J}%
+    \edef\temp{\noexpand\input #1 }%
+    %
+    % This trickery is to read FILE outside of a group, in case it makes
+    % definitions, etc.
+    \expandafter
+  }\temp
+  \popthisfilestack
+}
+\def\filenamecatcodes{%
+  \catcode`\\=\other
+  \catcode`~=\other
+  \catcode`^=\other
+  \catcode`_=\other
+  \catcode`|=\other
+  \catcode`<=\other
+  \catcode`>=\other
+  \catcode`+=\other
+  \catcode`-=\other
+  \catcode`\`=\other
+  \catcode`\'=\other
+}
+
+\def\pushthisfilestack{%
+  \expandafter\pushthisfilestackX\popthisfilestack\StackTerm
+}
+\def\pushthisfilestackX{%
+  \expandafter\pushthisfilestackY\thisfile\StackTerm
+}
+\def\pushthisfilestackY #1\StackTerm #2\StackTerm {%
+  \gdef\popthisfilestack{\gdef\thisfile{#1}\gdef\popthisfilestack{#2}}%
+}
+
+\def\popthisfilestack{\errthisfilestackempty}
+\def\errthisfilestackempty{\errmessage{Internal error:
+  the stack of filenames is empty.}}
+%
+\def\thisfile{}
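+%
+% For example,
+%   @include chapter1.texi
+% reads chapter1.texi at this point; @value constructs in the file name
+% are expanded, and the catcode changes above allow most special
+% characters in the name.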
+
+% @center line
+% outputs that line, centered.
+%
+\parseargdef\center{%
+  \ifhmode
+    \let\centersub\centerH
+  \else
+    \let\centersub\centerV
+  \fi
+  \centersub{\hfil \ignorespaces#1\unskip \hfil}%
+  \let\centersub\relax % don't let the definition persist, just in case
+}
+\def\centerH#1{{%
+  \hfil\break
+  \advance\hsize by -\leftskip
+  \advance\hsize by -\rightskip
+  \line{#1}%
+  \break
+}}
+%
+\newcount\centerpenalty
+\def\centerV#1{%
+  % The idea here is the same as in \startdefun, \cartouche, etc.: if
+  % @center is the first thing after a section heading, we need to wipe
+  % out the negative parskip inserted by \sectionheading, but still
+  % prevent a page break here.
+  \centerpenalty = \lastpenalty
+  \ifnum\centerpenalty>10000 \vskip\parskip \fi
+  \ifnum\centerpenalty>9999 \penalty\centerpenalty \fi
+  \line{\kern\leftskip #1\kern\rightskip}%
+}
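+%
+% For example,
+%   @center A Centered Line of Text
+% typesets that single line centered between the current margins.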
+
+% @sp n   outputs n lines of vertical space
+%
+\parseargdef\sp{\vskip #1\baselineskip}
+
+% @comment ...line which is ignored...
+% @c is the same as @comment
+% @ignore ... @end ignore  is another way to write a comment
+
+
+\def\c{\begingroup \catcode`\^^M=\active%
+\catcode`\@=\other \catcode`\{=\other \catcode`\}=\other%
+\cxxx}
+{\catcode`\^^M=\active \gdef\cxxx#1^^M{\endgroup}}
+%
+\let\comment\c
+
+% @paragraphindent NCHARS
+% We'll use ems for NCHARS, close enough.
+% NCHARS can also be the word `asis' or `none'.
+% We cannot feasibly implement @paragraphindent asis, though.
+%
+\def\asisword{asis} % no translation, these are keywords
+\def\noneword{none}
+%
+\parseargdef\paragraphindent{%
+  \def\temp{#1}%
+  \ifx\temp\asisword
+  \else
+    \ifx\temp\noneword
+      \defaultparindent = 0pt
+    \else
+      \defaultparindent = #1em
+    \fi
+  \fi
+  \parindent = \defaultparindent
+}
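+%
+% For example,
+%   @paragraphindent 2
+% sets the default paragraph indentation to 2em,
+%   @paragraphindent none
+% sets it to zero, and `asis' leaves \defaultparindent unchanged.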
+
+% @exampleindent NCHARS
+% We'll use ems for NCHARS like @paragraphindent.
+% It seems @exampleindent asis isn't necessary, but
+% I preserve it to make it similar to @paragraphindent.
+\parseargdef\exampleindent{%
+  \def\temp{#1}%
+  \ifx\temp\asisword
+  \else
+    \ifx\temp\noneword
+      \lispnarrowing = 0pt
+    \else
+      \lispnarrowing = #1em
+    \fi
+  \fi
+}
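+%
+% For example,
+%   @exampleindent 4
+% makes @example and friends indent by 4em (\lispnarrowing), and
+%   @exampleindent none
+% removes that indentation entirely.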
+
+% @firstparagraphindent WORD
+% If WORD is `none', then suppress indentation of the first paragraph
+% after a section heading.  If WORD is `insert', then do indent at such
+% paragraphs.
+%
+% The paragraph indentation is suppressed or not by calling
+% \suppressfirstparagraphindent, which the sectioning commands do.
+% We switch the definition of this back and forth according to WORD.
+% By default, we suppress indentation.
+%
+\def\suppressfirstparagraphindent{\dosuppressfirstparagraphindent}
+\def\insertword{insert}
+%
+\parseargdef\firstparagraphindent{%
+  \def\temp{#1}%
+  \ifx\temp\noneword
+    \let\suppressfirstparagraphindent = \dosuppressfirstparagraphindent
+  \else\ifx\temp\insertword
+    \let\suppressfirstparagraphindent = \relax
+  \else
+    \errhelp = \EMsimple
+    \errmessage{Unknown @firstparagraphindent option `\temp'}%
+  \fi\fi
+}
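+%
+% For example,
+%   @firstparagraphindent insert
+% indents the first paragraph after a section heading as usual, while
+%   @firstparagraphindent none
+% (the default) suppresses that indentation.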
+
+% Here is how we actually suppress indentation.  Redefine \everypar to
+% \kern backwards by \parindent, and then reset itself to empty.
+%
+% We also make \indent itself not actually do anything until the next
+% paragraph.
+%
+\gdef\dosuppressfirstparagraphindent{%
+  \gdef\indent  {\restorefirstparagraphindent \indent}%
+  \gdef\noindent{\restorefirstparagraphindent \noindent}%
+  \global\everypar = {\kern -\parindent \restorefirstparagraphindent}%
+}
+%
+\gdef\restorefirstparagraphindent{%
+  \global\let\indent = \ptexindent
+  \global\let\noindent = \ptexnoindent
+  \global\everypar = {}%
+}
+
+
+% @refill is a no-op.
+\let\refill=\relax
+
+% @setfilename INFO-FILENAME - ignored
+\let\setfilename=\comment
+
+% @bye.
+\outer\def\bye{\pagealignmacro\tracingstats=1\ptexend}
+
+
+\message{pdf,}
+% adobe `portable' document format
+\newcount\tempnum
+\newcount\lnkcount
+\newtoks\filename
+\newcount\filenamelength
+\newcount\pgn
+\newtoks\toksA
+\newtoks\toksB
+\newtoks\toksC
+\newtoks\toksD
+\newbox\boxA
+\newbox\boxB
+\newcount\countA
+\newif\ifpdf
+\newif\ifpdfmakepagedest
+
+%
+% For LuaTeX
+%
+
+\newif\iftxiuseunicodedestname
+\txiuseunicodedestnamefalse % For pdfTeX etc.
+
+\ifx\luatexversion\thisisundefined
+\else
+  % Use Unicode destination names
+  \txiuseunicodedestnametrue
+  % Escape PDF strings with converting UTF-16 from UTF-8
+  \begingroup
+    \catcode`\%=12
+    \directlua{
+      function UTF16oct(str)
+        tex.sprint(string.char(0x5c) .. '376' .. string.char(0x5c) .. '377')
+        for c in string.utfvalues(str) do
+          if c < 0x10000 then
+            tex.sprint(
+              string.format(string.char(0x5c) .. string.char(0x25) .. '03o' ..
+                            string.char(0x5c) .. string.char(0x25) .. '03o',
+                            (c / 256), (c % 256)))
+          else
+            c = c - 0x10000
+            local c_hi = c / 1024 + 0xd800
+            local c_lo = c % 1024 + 0xdc00
+            tex.sprint(
+              string.format(string.char(0x5c) .. string.char(0x25) .. '03o' ..
+                            string.char(0x5c) .. string.char(0x25) .. '03o' ..
+                            string.char(0x5c) .. string.char(0x25) .. '03o' ..
+                            string.char(0x5c) .. string.char(0x25) .. '03o',
+                            (c_hi / 256), (c_hi % 256),
+                            (c_lo / 256), (c_lo % 256)))
+          end
+        end
+      end
+    }
+  \endgroup
+  \def\pdfescapestrutfsixteen#1{\directlua{UTF16oct('\luaescapestring{#1}')}}
+  % Escape PDF strings without converting
+  \begingroup
+    \directlua{
+      function PDFescstr(str)
+        for c in string.bytes(str) do
+          if c <= 0x20 or c >= 0x80 or c == 0x28 or c == 0x29 or c == 0x5c then
+            tex.sprint(
+              string.format(string.char(0x5c) .. string.char(0x25) .. '03o',
+                            c))
+          else
+            tex.sprint(string.char(c))
+          end
+        end
+      end
+    }
+  \endgroup
+  \def\pdfescapestring#1{\directlua{PDFescstr('\luaescapestring{#1}')}}
+  \ifnum\luatexversion>84
+    % For LuaTeX >= 0.85
+    \def\pdfdest{\pdfextension dest}
+    \let\pdfoutput\outputmode
+    \def\pdfliteral{\pdfextension literal}
+    \def\pdfcatalog{\pdfextension catalog}
+    \def\pdftexversion{\numexpr\pdffeedback version\relax}
+    \let\pdfximage\saveimageresource
+    \let\pdfrefximage\useimageresource
+    \let\pdflastximage\lastsavedimageresourceindex
+    \def\pdfendlink{\pdfextension endlink\relax}
+    \def\pdfoutline{\pdfextension outline}
+    \def\pdfstartlink{\pdfextension startlink}
+    \def\pdffontattr{\pdfextension fontattr}
+    \def\pdfobj{\pdfextension obj}
+    \def\pdflastobj{\numexpr\pdffeedback lastobj\relax}
+    \let\pdfpagewidth\pagewidth
+    \let\pdfpageheight\pageheight
+    \edef\pdfhorigin{\pdfvariable horigin}
+    \edef\pdfvorigin{\pdfvariable vorigin}
+  \fi
+\fi
+
+% when pdftex is run in dvi mode, \pdfoutput is defined (so \pdfoutput=1
+% can be set).  So we test for \relax and 0 as well as being undefined.
+\ifx\pdfoutput\thisisundefined
+\else
+  \ifx\pdfoutput\relax
+  \else
+    \ifcase\pdfoutput
+    \else
+      \pdftrue
+    \fi
+  \fi
+\fi
+
+% PDF uses PostScript string constants for the names of xref targets,
+% for display in the outlines, and in other places.  Thus, we have to
+% double any backslashes.  Otherwise, a name like "\node" will be
+% interpreted as a newline (\n), followed by o, d, e.  Not good.
+% 
+% See http://www.ntg.nl/pipermail/ntg-pdftex/2004-July/000654.html and
+% related messages.  The final outcome is that it is up to the TeX user
+% to double the backslashes and otherwise make the string valid, so
+% that's what we do.  pdftex 1.30.0 (ca.2005) introduced a primitive to
+% do this reliably, so we use it.
+
+% #1 is a control sequence in which to do the replacements,
+% which we \xdef.
+\def\txiescapepdf#1{%
+  \ifx\pdfescapestring\thisisundefined
+    % No primitive available; should we give a warning or log?
+    % Many times it won't matter.
+    \xdef#1{#1}%
+  \else
+    % The expandable \pdfescapestring primitive escapes parentheses,
+    % backslashes, and other special chars.
+    \xdef#1{\pdfescapestring{#1}}%
+  \fi
+}
+\def\txiescapepdfutfsixteen#1{%
+  \ifx\pdfescapestrutfsixteen\thisisundefined
+    % No UTF-16 converting macro available.
+    \txiescapepdf{#1}%
+  \else
+    \xdef#1{\pdfescapestrutfsixteen{#1}}%
+  \fi
+}
+
+\newhelp\nopdfimagehelp{Texinfo supports .png, .jpg, .jpeg, and .pdf images
+with PDF output, and none of those formats could be found.  (.eps cannot
+be supported due to the design of the PDF format; use regular TeX (DVI
+output) for that.)}
+
+\ifpdf
+  %
+  % Color manipulation macros using ideas from pdfcolor.tex,
+  % except using rgb instead of cmyk; the latter is said to render as a
+  % very dark gray on-screen and a very dark halftone in print, instead
+  % of actual black. The dark red here is dark enough to print on paper as
+  % nearly black, but still distinguishable for online viewing.  We use
+  % black by default, though.
+  \def\rgbDarkRed{0.50 0.09 0.12}
+  \def\rgbBlack{0 0 0}
+  %
+  % rg sets the color for filling (usual text, etc.);
+  % RG sets the color for stroking (thin rules, e.g., normal _'s).
+  \def\pdfsetcolor#1{\pdfliteral{#1 rg  #1 RG}}
+  %
+  % Set color, and create a mark which defines \thiscolor accordingly,
+  % so that \makeheadline knows which color to restore.
+  \def\setcolor#1{%
+    \xdef\lastcolordefs{\gdef\noexpand\thiscolor{#1}}%
+    \domark
+    \pdfsetcolor{#1}%
+  }
+  %
+  \def\maincolor{\rgbBlack}
+  \pdfsetcolor{\maincolor}
+  \edef\thiscolor{\maincolor}
+  \def\lastcolordefs{}
+  %
+  \def\makefootline{%
+    \baselineskip24pt
+    \line{\pdfsetcolor{\maincolor}\the\footline}%
+  }
+  %
+  \def\makeheadline{%
+    \vbox to 0pt{%
+      \vskip-22.5pt
+      \line{%
+        \vbox to8.5pt{}%
+        % Extract \thiscolor definition from the marks.
+        \getcolormarks
+        % Typeset the headline with \maincolor, then restore the color.
+        \pdfsetcolor{\maincolor}\the\headline\pdfsetcolor{\thiscolor}%
+      }%
+      \vss
+    }%
+    \nointerlineskip
+  }
+  %
+  %
+  \pdfcatalog{/PageMode /UseOutlines}
+  %
+  % #1 is image name, #2 width (might be empty/whitespace), #3 height (ditto).
+  \def\dopdfimage#1#2#3{%
+    \def\pdfimagewidth{#2}\setbox0 = \hbox{\ignorespaces #2}%
+    \def\pdfimageheight{#3}\setbox2 = \hbox{\ignorespaces #3}%
+    %
+    % pdftex (and the PDF format) support .pdf, .png, .jpg (among
+    % others).  Let's try in that order, PDF first since if
+    % someone has a scalable image, presumably better to use that than a
+    % bitmap.
+    \let\pdfimgext=\empty
+    \begingroup
+      \openin 1 #1.pdf \ifeof 1
+        \openin 1 #1.PDF \ifeof 1
+          \openin 1 #1.png \ifeof 1
+            \openin 1 #1.jpg \ifeof 1
+              \openin 1 #1.jpeg \ifeof 1
+                \openin 1 #1.JPG \ifeof 1
+                  \errhelp = \nopdfimagehelp
+                  \errmessage{Could not find image file #1 for pdf}%
+                \else \gdef\pdfimgext{JPG}%
+                \fi
+              \else \gdef\pdfimgext{jpeg}%
+              \fi
+            \else \gdef\pdfimgext{jpg}%
+            \fi
+          \else \gdef\pdfimgext{png}%
+          \fi
+        \else \gdef\pdfimgext{PDF}%
+        \fi
+      \else \gdef\pdfimgext{pdf}%
+      \fi
+      \closein 1
+    \endgroup
+    %
+    % without \immediate, ancient pdftex seg faults when the same image is
+    % included twice.  (Version 3.14159-pre-1.0-unofficial-20010704.)
+    \ifnum\pdftexversion < 14
+      \immediate\pdfimage
+    \else
+      \immediate\pdfximage
+    \fi
+      \ifdim \wd0 >0pt width \pdfimagewidth \fi
+      \ifdim \wd2 >0pt height \pdfimageheight \fi
+      \ifnum\pdftexversion<13
+         #1.\pdfimgext
+       \else
+         {#1.\pdfimgext}%
+       \fi
+    \ifnum\pdftexversion < 14 \else
+      \pdfrefximage \pdflastximage
+    \fi}
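+  %
+  % For example, with
+  %   @image{figure1,3in}
+  % the code above looks for figure1.pdf, figure1.PDF, figure1.png,
+  % figure1.jpg, figure1.jpeg and figure1.JPG, in that order, and
+  % includes the first one it finds at a width of 3in.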
+  %
+  \def\setpdfdestname#1{{%
+    % We have to set dummies so commands such as @code, and characters
+    % such as \, aren't expanded when present in a section title.
+    \indexnofonts
+    \makevalueexpandable
+    \turnoffactive
+    \iftxiuseunicodedestname
+      \ifx \declaredencoding \latone
+        % Pass through Latin-1 characters.
+        % LuaTeX with byte wise I/O converts Latin-1 characters to Unicode.
+      \else
+        \ifx \declaredencoding \utfeight
+          % Pass through Unicode characters.
+        \else
+          % Use ASCII approximations in destination names.
+          \passthroughcharsfalse
+        \fi
+      \fi
+    \else
+      % Use ASCII approximations in destination names.
+      \passthroughcharsfalse
+    \fi
+    \def\pdfdestname{#1}%
+    \txiescapepdf\pdfdestname
+  }}
+  %
+  \def\setpdfoutlinetext#1{{%
+    \indexnofonts
+    \makevalueexpandable
+    \turnoffactive
+    \ifx \declaredencoding \latone
+      % The PDF format can use an extended form of Latin-1 in bookmark
+      % strings.  See Appendix D of the PDF Reference, Sixth Edition, for
+      % the "PDFDocEncoding".
+      \passthroughcharstrue
+      % Pass through Latin-1 characters.
+      %   LuaTeX: Convert to Unicode
+      %   pdfTeX: Use Latin-1 as PDFDocEncoding
+      \def\pdfoutlinetext{#1}%
+    \else
+      \ifx \declaredencoding \utfeight
+        \ifx\luatexversion\thisisundefined
+          % For pdfTeX  with UTF-8.
+          % TODO: the PDF format can use UTF-16 in bookmark strings,
+          % but the code for this isn't done yet.
+          % Use ASCII approximations.
+          \passthroughcharsfalse
+          \def\pdfoutlinetext{#1}%
+        \else
+          % For LuaTeX with UTF-8.
+          % Pass through Unicode characters for title texts.
+          \passthroughcharstrue
+          \def\pdfoutlinetext{#1}%
+        \fi
+      \else
+        % For non-Latin-1 or non-UTF-8 encodings.
+        % Use ASCII approximations.
+        \passthroughcharsfalse
+        \def\pdfoutlinetext{#1}%
+      \fi
+    \fi
+    % LuaTeX: Convert to UTF-16
+    % pdfTeX: Use Latin-1 as PDFDocEncoding
+    \txiescapepdfutfsixteen\pdfoutlinetext
+  }}
+  %
+  \def\pdfmkdest#1{%
+    \setpdfdestname{#1}%
+    \safewhatsit{\pdfdest name{\pdfdestname} xyz}%
+  }
+  %
+  % used to mark target names; must be expandable.
+  \def\pdfmkpgn#1{#1}
+  %
+  % by default, use black for everything.
+  \def\urlcolor{\rgbBlack}
+  \def\linkcolor{\rgbBlack}
+  \def\endlink{\setcolor{\maincolor}\pdfendlink}
+  %
+  % Adding outlines to PDF; macros for calculating structure of outlines
+  % come from Petr Olsak
+  \def\expnumber#1{\expandafter\ifx\csname#1\endcsname\relax 0%
+    \else \csname#1\endcsname \fi}
+  \def\advancenumber#1{\tempnum=\expnumber{#1}\relax
+    \advance\tempnum by 1
+    \expandafter\xdef\csname#1\endcsname{\the\tempnum}}
+  %
+  % #1 is the section text, which is what will be displayed in the
+  % outline by the pdf viewer.  #2 is the pdf expression for the number
+  % of subentries (or empty, for subsubsections).  #3 is the node text,
+  % which might be empty if this toc entry had no corresponding node.
+  % #4 is the page number
+  %
+  \def\dopdfoutline#1#2#3#4{%
+    % Generate a link to the node text if that exists; else, use the
+    % page number.  We could generate a destination for the section
+    % text in the case where a section has no node, but it doesn't
+    % seem worth the trouble, since most documents are normally structured.
+    \setpdfoutlinetext{#1}
+    \setpdfdestname{#3}
+    \ifx\pdfdestname\empty
+      \def\pdfdestname{#4}%
+    \fi
+    %
+    \pdfoutline goto name{\pdfmkpgn{\pdfdestname}}#2{\pdfoutlinetext}%
+  }
+  %
+  \def\pdfmakeoutlines{%
+    \begingroup
+      % Read toc silently, to get counts of subentries for \pdfoutline.
+      \def\partentry##1##2##3##4{}% ignore parts in the outlines
+      \def\numchapentry##1##2##3##4{%
+       \def\thischapnum{##2}%
+       \def\thissecnum{0}%
+       \def\thissubsecnum{0}%
+      }%
+      \def\numsecentry##1##2##3##4{%
+       \advancenumber{chap\thischapnum}%
+       \def\thissecnum{##2}%
+       \def\thissubsecnum{0}%
+      }%
+      \def\numsubsecentry##1##2##3##4{%
+       \advancenumber{sec\thissecnum}%
+       \def\thissubsecnum{##2}%
+      }%
+      \def\numsubsubsecentry##1##2##3##4{%
+       \advancenumber{subsec\thissubsecnum}%
+      }%
+      \def\thischapnum{0}%
+      \def\thissecnum{0}%
+      \def\thissubsecnum{0}%
+      %
+      % use \def rather than \let here because we redefine \chapentry et
+      % al. a second time, below.
+      \def\appentry{\numchapentry}%
+      \def\appsecentry{\numsecentry}%
+      \def\appsubsecentry{\numsubsecentry}%
+      \def\appsubsubsecentry{\numsubsubsecentry}%
+      \def\unnchapentry{\numchapentry}%
+      \def\unnsecentry{\numsecentry}%
+      \def\unnsubsecentry{\numsubsecentry}%
+      \def\unnsubsubsecentry{\numsubsubsecentry}%
+      \readdatafile{toc}%
+      %
+      % Read toc second time, this time actually producing the outlines.
+      % The `-' means take the \expnumber as the absolute number of
+      % subentries, which we calculated on our first read of the .toc above.
+      %
+      % We use the node names as the destinations.
+      \def\numchapentry##1##2##3##4{%
+        \dopdfoutline{##1}{count-\expnumber{chap##2}}{##3}{##4}}%
+      \def\numsecentry##1##2##3##4{%
+        \dopdfoutline{##1}{count-\expnumber{sec##2}}{##3}{##4}}%
+      \def\numsubsecentry##1##2##3##4{%
+        \dopdfoutline{##1}{count-\expnumber{subsec##2}}{##3}{##4}}%
+      \def\numsubsubsecentry##1##2##3##4{% count is always zero
+        \dopdfoutline{##1}{}{##3}{##4}}%
+      %
+      % PDF outlines are displayed using system fonts, instead of
+      % document fonts.  Therefore we cannot use special characters,
+      % since the encoding is unknown.  For example, the eogonek from
+      % Latin 2 (0xea) gets translated to a | character.  Info from
+      % Staszek Wawrykiewicz, 19 Jan 2004 04:09:24 +0100.
+      %
+      % To do this right, we would have to translate 8-bit characters to
+      % their "best" equivalent, based on the @documentencoding.  Too
+      % much work for too little return.  Just use the ASCII equivalents
+      % we use for the index sort strings.
+      % 
+      \indexnofonts
+      \setupdatafile
+      % We can have normal brace characters in the PDF outlines, unlike
+      % Texinfo index files.  So set that up.
+      \def\{{\lbracecharliteral}%
+      \def\}{\rbracecharliteral}%
+      \catcode`\\=\active \otherbackslash
+      \input \tocreadfilename
+    \endgroup
+  }
+  {\catcode`[=1 \catcode`]=2
+   \catcode`{=\other \catcode`}=\other
+   \gdef\lbracecharliteral[{]%
+   \gdef\rbracecharliteral[}]%
+  ]
+  %
+  \def\skipspaces#1{\def\PP{#1}\def\D{|}%
+    \ifx\PP\D\let\nextsp\relax
+    \else\let\nextsp\skipspaces
+      \addtokens{\filename}{\PP}%
+      \advance\filenamelength by 1
+    \fi
+    \nextsp}
+  \def\getfilename#1{%
+    \filenamelength=0
+    % If we don't expand the argument now, \skipspaces will get
+    % snagged on things like "@value{foo}".
+    \edef\temp{#1}%
+    \expandafter\skipspaces\temp|\relax
+  }
+  \ifnum\pdftexversion < 14
+    \let \startlink \pdfannotlink
+  \else
+    \let \startlink \pdfstartlink
+  \fi
+  % make a live url in pdf output.
+  \def\pdfurl#1{%
+    \begingroup
+      % it seems we really need yet another set of dummies; have not
+      % tried to figure out what each command should do in the context
+      % of @url.  For now, just make @/ a no-op; that's the only one
+      % people have actually reported a problem with.
+      %
+      \normalturnoffactive
+      \def\@{@}%
+      \let\/=\empty
+      \makevalueexpandable
+      % do we want to go so far as to use \indexnofonts instead of just
+      % special-casing \var here?
+      \def\var##1{##1}%
+      %
+      \leavevmode\setcolor{\urlcolor}%
+      \startlink attr{/Border [0 0 0]}%
+        user{/Subtype /Link /A << /S /URI /URI (#1) >>}%
+    \endgroup}
+  \def\pdfgettoks#1.{\setbox\boxA=\hbox{\toksA={#1.}\toksB={}\maketoks}}
+  \def\addtokens#1#2{\edef\addtoks{\noexpand#1={\the#1#2}}\addtoks}
+  \def\adn#1{\addtokens{\toksC}{#1}\global\countA=1\let\next=\maketoks}
+  \def\poptoks#1#2|ENDTOKS|{\let\first=#1\toksD={#1}\toksA={#2}}
+  \def\maketoks{%
+    \expandafter\poptoks\the\toksA|ENDTOKS|\relax
+    \ifx\first0\adn0
+    \else\ifx\first1\adn1 \else\ifx\first2\adn2 \else\ifx\first3\adn3
+    \else\ifx\first4\adn4 \else\ifx\first5\adn5 \else\ifx\first6\adn6
+    \else\ifx\first7\adn7 \else\ifx\first8\adn8 \else\ifx\first9\adn9
+    \else
+      \ifnum0=\countA\else\makelink\fi
+      \ifx\first.\let\next=\done\else
+        \let\next=\maketoks
+        \addtokens{\toksB}{\the\toksD}
+        \ifx\first,\addtokens{\toksB}{\space}\fi
+      \fi
+    \fi\fi\fi\fi\fi\fi\fi\fi\fi\fi
+    \next}
+  \def\makelink{\addtokens{\toksB}%
+    {\noexpand\pdflink{\the\toksC}}\toksC={}\global\countA=0}
+  \def\pdflink#1{%
+    \startlink attr{/Border [0 0 0]} goto name{\pdfmkpgn{#1}}
+    \setcolor{\linkcolor}#1\endlink}
+  \def\done{\edef\st{\global\noexpand\toksA={\the\toksB}}\st}
+\else
+  % non-pdf mode
+  \let\pdfmkdest = \gobble
+  \let\pdfurl = \gobble
+  \let\endlink = \relax
+  \let\setcolor = \gobble
+  \let\pdfsetcolor = \gobble
+  \let\pdfmakeoutlines = \relax
+\fi  % \ifx\pdfoutput
+
+%
+% For XeTeX
+%
+\ifx\XeTeXrevision\thisisundefined
+\else
+  %
+  % XeTeX version check
+  %
+  \ifnum\strcmp{\the\XeTeXversion\XeTeXrevision}{0.99996}>-1
+    % TeX Live 2016 contains XeTeX 0.99996 and xdvipdfmx 20160307.
+    % It can use the `dvipdfmx:config' special (from TeX Live SVN r40941).
+    % For avoiding PDF destination name replacement, we use this special
+    % instead of xdvipdfmx's command line option `-C 0x0010'.
+    \special{dvipdfmx:config C 0x0010}
+    % XeTeX 0.99995+ comes with xdvipdfmx 20160307+.
+    % It can handle Unicode destination names for PDF.
+    \txiuseunicodedestnametrue
+  \else
+    % XeTeX < 0.99996 (TeX Live < 2016) cannot use the
+    % `dvipdfmx:config' special.
+    % So for avoiding PDF destination name replacement,
+    % xdvipdfmx's command line option `-C 0x0010' is necessary.
+    %
+    % XeTeX < 0.99995 cannot handle Unicode destination names for PDF
+    % because xdvipdfmx 20150315 has a UTF-16 conversion issue.
+    % It is fixed by xdvipdfmx 20160106 (TeX Live SVN r39753).
+    \txiuseunicodedestnamefalse
+  \fi
+  %
+  % Color support
+  %
+  \def\rgbDarkRed{0.50 0.09 0.12}
+  \def\rgbBlack{0 0 0}
+  %
+  \def\pdfsetcolor#1{\special{pdf:scolor [#1]}}
+  %
+  % Set color, and create a mark which defines \thiscolor accordingly,
+  % so that \makeheadline knows which color to restore.
+  \def\setcolor#1{%
+    \xdef\lastcolordefs{\gdef\noexpand\thiscolor{#1}}%
+    \domark
+    \pdfsetcolor{#1}%
+  }
+  %
+  \def\maincolor{\rgbBlack}
+  \pdfsetcolor{\maincolor}
+  \edef\thiscolor{\maincolor}
+  \def\lastcolordefs{}
+  %
+  \def\makefootline{%
+    \baselineskip24pt
+    \line{\pdfsetcolor{\maincolor}\the\footline}%
+  }
+  %
+  \def\makeheadline{%
+    \vbox to 0pt{%
+      \vskip-22.5pt
+      \line{%
+        \vbox to8.5pt{}%
+        % Extract \thiscolor definition from the marks.
+        \getcolormarks
+        % Typeset the headline with \maincolor, then restore the color.
+        \pdfsetcolor{\maincolor}\the\headline\pdfsetcolor{\thiscolor}%
+      }%
+      \vss
+    }%
+    \nointerlineskip
+  }
+  %
+  % PDF outline support
+  %
+  % Emulate pdfTeX primitive
+  \def\pdfdest name#1 xyz{%
+    \special{pdf:dest (#1) [@thispage /XYZ @xpos @ypos null]}%
+  }
+  %
+  \def\setpdfdestname#1{{%
+    % We have to set dummies so commands such as @code, and characters
+    % such as \, aren't expanded when present in a section title.
+    \indexnofonts
+    \makevalueexpandable
+    \turnoffactive
+    \iftxiuseunicodedestname
+      % Pass through Unicode characters.
+    \else
+      % Use ASCII approximations in destination names.
+      \passthroughcharsfalse
+    \fi
+    \def\pdfdestname{#1}%
+    \txiescapepdf\pdfdestname
+  }}
+  %
+  \def\setpdfoutlinetext#1{{%
+    \turnoffactive
+    % Always use Unicode characters in title texts.
+    \def\pdfoutlinetext{#1}%
+    % For XeTeX, xdvipdfmx converts to UTF-16.
+    % So we do not convert.
+    \txiescapepdf\pdfoutlinetext
+  }}
+  %
+  \def\pdfmkdest#1{%
+    \setpdfdestname{#1}%
+    \safewhatsit{\pdfdest name{\pdfdestname} xyz}%
+  }
+  %
+  % by default, use black for everything.
+  \def\urlcolor{\rgbBlack}
+  \def\linkcolor{\rgbBlack}
+  \def\endlink{\setcolor{\maincolor}\pdfendlink}
+  %
+  \def\dopdfoutline#1#2#3#4{%
+    \setpdfoutlinetext{#1}
+    \setpdfdestname{#3}
+    \ifx\pdfdestname\empty
+      \def\pdfdestname{#4}%
+    \fi
+    %
+    \special{pdf:out [-] #2 << /Title (\pdfoutlinetext) /A
+      << /S /GoTo /D (\pdfdestname) >> >> }%
+  }
+  %
+  \def\pdfmakeoutlines{%
+    \begingroup
+      %
+      % For XeTeX, counts of subentries are not necessary.
+      % Therefore, we read toc only once.
+      %
+      % We use node names as destinations.
+      \def\partentry##1##2##3##4{}% ignore parts in the outlines
+      \def\numchapentry##1##2##3##4{%
+        \dopdfoutline{##1}{1}{##3}{##4}}%
+      \def\numsecentry##1##2##3##4{%
+        \dopdfoutline{##1}{2}{##3}{##4}}%
+      \def\numsubsecentry##1##2##3##4{%
+        \dopdfoutline{##1}{3}{##3}{##4}}%
+      \def\numsubsubsecentry##1##2##3##4{%
+        \dopdfoutline{##1}{4}{##3}{##4}}%
+      %
+      \let\appentry\numchapentry%
+      \let\appsecentry\numsecentry%
+      \let\appsubsecentry\numsubsecentry%
+      \let\appsubsubsecentry\numsubsubsecentry%
+      \let\unnchapentry\numchapentry%
+      \let\unnsecentry\numsecentry%
+      \let\unnsubsecentry\numsubsecentry%
+      \let\unnsubsubsecentry\numsubsubsecentry%
+      %
+      % For XeTeX, xdvipdfmx converts strings to UTF-16.
+      % Therefore, the encoding and the language may not be considered.
+      %
+      \indexnofonts
+      \setupdatafile
+      % We can have normal brace characters in the PDF outlines, unlike
+      % Texinfo index files.  So set that up.
+      \def\{{\lbracecharliteral}%
+      \def\}{\rbracecharliteral}%
+      \catcode`\\=\active \otherbackslash
+      \input \tocreadfilename
+    \endgroup
+  }
+  {\catcode`[=1 \catcode`]=2
+   \catcode`{=\other \catcode`}=\other
+   \gdef\lbracecharliteral[{]%
+   \gdef\rbracecharliteral[}]%
+  ]
+
+  \special{pdf:docview << /PageMode /UseOutlines >> }
+  % ``\special{pdf:tounicode ...}'' is not necessary
+  % because xdvipdfmx converts strings from UTF-8 to UTF-16 without it.
+  % However, due to a UTF-16 conversion issue of xdvipdfmx 20150315,
+  % ``\special{pdf:dest ...}'' cannot handle non-ASCII strings.
+  % It is fixed by xdvipdfmx 20160106 (TeX Live SVN r39753).
+%
+  \def\skipspaces#1{\def\PP{#1}\def\D{|}%
+    \ifx\PP\D\let\nextsp\relax
+    \else\let\nextsp\skipspaces
+      \addtokens{\filename}{\PP}%
+      \advance\filenamelength by 1
+    \fi
+    \nextsp}
+  \def\getfilename#1{%
+    \filenamelength=0
+    % If we don't expand the argument now, \skipspaces will get
+    % snagged on things like "@value{foo}".
+    \edef\temp{#1}%
+    \expandafter\skipspaces\temp|\relax
+  }
+  % make a live url in pdf output.
+  \def\pdfurl#1{%
+    \begingroup
+      % it seems we really need yet another set of dummies; have not
+      % tried to figure out what each command should do in the context
+      % of @url.  For now, just make @/ a no-op; that's the only one
+      % people have actually reported a problem with.
+      %
+      \normalturnoffactive
+      \def\@{@}%
+      \let\/=\empty
+      \makevalueexpandable
+      % do we want to go so far as to use \indexnofonts instead of just
+      % special-casing \var here?
+      \def\var##1{##1}%
+      %
+      \leavevmode\setcolor{\urlcolor}%
+      \special{pdf:bann << /Border [0 0 0]
+        /Subtype /Link /A << /S /URI /URI (#1) >> >>}%
+    \endgroup}
+  \def\endlink{\setcolor{\maincolor}\special{pdf:eann}}
+  \def\pdfgettoks#1.{\setbox\boxA=\hbox{\toksA={#1.}\toksB={}\maketoks}}
+  \def\addtokens#1#2{\edef\addtoks{\noexpand#1={\the#1#2}}\addtoks}
+  \def\adn#1{\addtokens{\toksC}{#1}\global\countA=1\let\next=\maketoks}
+  \def\poptoks#1#2|ENDTOKS|{\let\first=#1\toksD={#1}\toksA={#2}}
+  \def\maketoks{%
+    \expandafter\poptoks\the\toksA|ENDTOKS|\relax
+    \ifx\first0\adn0
+    \else\ifx\first1\adn1 \else\ifx\first2\adn2 \else\ifx\first3\adn3
+    \else\ifx\first4\adn4 \else\ifx\first5\adn5 \else\ifx\first6\adn6
+    \else\ifx\first7\adn7 \else\ifx\first8\adn8 \else\ifx\first9\adn9
+    \else
+      \ifnum0=\countA\else\makelink\fi
+      \ifx\first.\let\next=\done\else
+        \let\next=\maketoks
+        \addtokens{\toksB}{\the\toksD}
+        \ifx\first,\addtokens{\toksB}{\space}\fi
+      \fi
+    \fi\fi\fi\fi\fi\fi\fi\fi\fi\fi
+    \next}
+  \def\makelink{\addtokens{\toksB}%
+    {\noexpand\pdflink{\the\toksC}}\toksC={}\global\countA=0}
+  \def\pdflink#1{%
+    \special{pdf:bann << /Border [0 0 0]
+      /Type /Annot /Subtype /Link /A << /S /GoTo /D (#1) >> >>}%
+    \setcolor{\linkcolor}#1\endlink}
+  \def\done{\edef\st{\global\noexpand\toksA={\the\toksB}}\st}
+%
+  %
+  % @image support
+  %
+  % #1 is image name, #2 width (might be empty/whitespace), #3 height (ditto).
+  \def\doxeteximage#1#2#3{%
+    \def\xeteximagewidth{#2}\setbox0 = \hbox{\ignorespaces #2}%
+    \def\xeteximageheight{#3}\setbox2 = \hbox{\ignorespaces #3}%
+    %
+    % XeTeX (and the PDF format) supports .pdf, .png, .jpg (among
+    % others).  Let's try in that order, PDF first since if
+    % someone has a scalable image, presumably better to use that than a
+    % bitmap.
+    \let\xeteximgext=\empty
+    \begingroup
+      \openin 1 #1.pdf \ifeof 1
+        \openin 1 #1.PDF \ifeof 1
+          \openin 1 #1.png \ifeof 1
+            \openin 1 #1.jpg \ifeof 1
+              \openin 1 #1.jpeg \ifeof 1
+                \openin 1 #1.JPG \ifeof 1
+                  \errmessage{Could not find image file #1 for XeTeX}%
+                \else \gdef\xeteximgext{JPG}%
+                \fi
+              \else \gdef\xeteximgext{jpeg}%
+              \fi
+            \else \gdef\xeteximgext{jpg}%
+            \fi
+          \else \gdef\xeteximgext{png}%
+          \fi
+        \else \gdef\xeteximgext{PDF}%
+        \fi
+      \else \gdef\xeteximgext{pdf}%
+      \fi
+      \closein 1
+    \endgroup
+    %
+    \def\xetexpdfext{pdf}%
+    \ifx\xeteximgext\xetexpdfext
+      \XeTeXpdffile "#1".\xeteximgext ""
+    \else
+      \def\xetexpdfext{PDF}%
+      \ifx\xeteximgext\xetexpdfext
+        \XeTeXpdffile "#1".\xeteximgext ""
+      \else
+        \XeTeXpicfile "#1".\xeteximgext ""
+      \fi
+    \fi
+    \ifdim \wd0 >0pt width \xeteximagewidth \fi
+    \ifdim \wd2 >0pt height \xeteximageheight \fi \relax
+  }
+\fi
+
+
+%
+\message{fonts,}
+
+% Set the baselineskip to #1, and the lineskip and strut size
+% correspondingly.  There is no deep meaning behind these magic numbers
+% used as factors; they just match (closely enough) what Knuth defined.
+%
+\def\lineskipfactor{.08333}
+\def\strutheightpercent{.70833}
+\def\strutdepthpercent {.29167}
+%
+% can get a sort of poor man's double spacing by redefining this.
+\def\baselinefactor{1}
+%
+\newdimen\textleading
+\def\setleading#1{%
+  \dimen0 = #1\relax
+  \normalbaselineskip = \baselinefactor\dimen0
+  \normallineskip = \lineskipfactor\normalbaselineskip
+  \normalbaselines
+  \setbox\strutbox =\hbox{%
+    \vrule width0pt height\strutheightpercent\baselineskip
+                    depth \strutdepthpercent \baselineskip
+  }%
+}
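+%
+% For example, with the default 11pt text size (\textleading = 13.2pt)
+% this gives roughly \baselineskip = 13.2pt, \lineskip = 1.1pt, and a
+% strut of about 9.35pt height and 3.85pt depth.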
+
+% PDF CMaps.  See also LaTeX's t1.cmap.
+%
+% do nothing with this by default.
+\expandafter\let\csname cmapOT1\endcsname\gobble
+\expandafter\let\csname cmapOT1IT\endcsname\gobble
+\expandafter\let\csname cmapOT1TT\endcsname\gobble
+
+% if we are producing pdf, and we have \pdffontattr, then define cmaps.
+% (\pdffontattr was introduced many years ago, but people still run
+% older pdftex's; it's easy to conditionalize, so we do.)
+\ifpdf \ifx\pdffontattr\thisisundefined \else
+  \begingroup
+    \catcode`\^^M=\active \def^^M{^^J}% Output line endings as the ^^J char.
+    \catcode`\%=12 \immediate\pdfobj stream {%!PS-Adobe-3.0 Resource-CMap
+%%DocumentNeededResources: ProcSet (CIDInit)
+%%IncludeResource: ProcSet (CIDInit)
+%%BeginResource: CMap (TeX-OT1-0)
+%%Title: (TeX-OT1-0 TeX OT1 0)
+%%Version: 1.000
+%%EndComments
+/CIDInit /ProcSet findresource begin
+12 dict begin
+begincmap
+/CIDSystemInfo
+<< /Registry (TeX)
+/Ordering (OT1)
+/Supplement 0
+>> def
+/CMapName /TeX-OT1-0 def
+/CMapType 2 def
+1 begincodespacerange
+<00> <7F>
+endcodespacerange
+8 beginbfrange
+<00> <01> <0393>
+<09> <0A> <03A8>
+<23> <26> <0023>
+<28> <3B> <0028>
+<3F> <5B> <003F>
+<5D> <5E> <005D>
+<61> <7A> <0061>
+<7B> <7C> <2013>
+endbfrange
+40 beginbfchar
+<02> <0398>
+<03> <039B>
+<04> <039E>
+<05> <03A0>
+<06> <03A3>
+<07> <03D2>
+<08> <03A6>
+<0B> <00660066>
+<0C> <00660069>
+<0D> <0066006C>
+<0E> <006600660069>
+<0F> <00660066006C>
+<10> <0131>
+<11> <0237>
+<12> <0060>
+<13> <00B4>
+<14> <02C7>
+<15> <02D8>
+<16> <00AF>
+<17> <02DA>
+<18> <00B8>
+<19> <00DF>
+<1A> <00E6>
+<1B> <0153>
+<1C> <00F8>
+<1D> <00C6>
+<1E> <0152>
+<1F> <00D8>
+<21> <0021>
+<22> <201D>
+<27> <2019>
+<3C> <00A1>
+<3D> <003D>
+<3E> <00BF>
+<5C> <201C>
+<5F> <02D9>
+<60> <2018>
+<7D> <02DD>
+<7E> <007E>
+<7F> <00A8>
+endbfchar
+endcmap
+CMapName currentdict /CMap defineresource pop
+end
+end
+%%EndResource
+%%EOF
+    }\endgroup
+  \expandafter\edef\csname cmapOT1\endcsname#1{%
+    \pdffontattr#1{/ToUnicode \the\pdflastobj\space 0 R}%
+  }%
+%
+% \cmapOT1IT
+  \begingroup
+    \catcode`\^^M=\active \def^^M{^^J}% Output line endings as the ^^J char.
+    \catcode`\%=12 \immediate\pdfobj stream {%!PS-Adobe-3.0 Resource-CMap
+%%DocumentNeededResources: ProcSet (CIDInit)
+%%IncludeResource: ProcSet (CIDInit)
+%%BeginResource: CMap (TeX-OT1IT-0)
+%%Title: (TeX-OT1IT-0 TeX OT1IT 0)
+%%Version: 1.000
+%%EndComments
+/CIDInit /ProcSet findresource begin
+12 dict begin
+begincmap
+/CIDSystemInfo
+<< /Registry (TeX)
+/Ordering (OT1IT)
+/Supplement 0
+>> def
+/CMapName /TeX-OT1IT-0 def
+/CMapType 2 def
+1 begincodespacerange
+<00> <7F>
+endcodespacerange
+8 beginbfrange
+<00> <01> <0393>
+<09> <0A> <03A8>
+<25> <26> <0025>
+<28> <3B> <0028>
+<3F> <5B> <003F>
+<5D> <5E> <005D>
+<61> <7A> <0061>
+<7B> <7C> <2013>
+endbfrange
+42 beginbfchar
+<02> <0398>
+<03> <039B>
+<04> <039E>
+<05> <03A0>
+<06> <03A3>
+<07> <03D2>
+<08> <03A6>
+<0B> <00660066>
+<0C> <00660069>
+<0D> <0066006C>
+<0E> <006600660069>
+<0F> <00660066006C>
+<10> <0131>
+<11> <0237>
+<12> <0060>
+<13> <00B4>
+<14> <02C7>
+<15> <02D8>
+<16> <00AF>
+<17> <02DA>
+<18> <00B8>
+<19> <00DF>
+<1A> <00E6>
+<1B> <0153>
+<1C> <00F8>
+<1D> <00C6>
+<1E> <0152>
+<1F> <00D8>
+<21> <0021>
+<22> <201D>
+<23> <0023>
+<24> <00A3>
+<27> <2019>
+<3C> <00A1>
+<3D> <003D>
+<3E> <00BF>
+<5C> <201C>
+<5F> <02D9>
+<60> <2018>
+<7D> <02DD>
+<7E> <007E>
+<7F> <00A8>
+endbfchar
+endcmap
+CMapName currentdict /CMap defineresource pop
+end
+end
+%%EndResource
+%%EOF
+    }\endgroup
+  \expandafter\edef\csname cmapOT1IT\endcsname#1{%
+    \pdffontattr#1{/ToUnicode \the\pdflastobj\space 0 R}%
+  }%
+%
+% \cmapOT1TT
+  \begingroup
+    \catcode`\^^M=\active \def^^M{^^J}% Output line endings as the ^^J char.
+    \catcode`\%=12 \immediate\pdfobj stream {%!PS-Adobe-3.0 Resource-CMap
+%%DocumentNeededResources: ProcSet (CIDInit)
+%%IncludeResource: ProcSet (CIDInit)
+%%BeginResource: CMap (TeX-OT1TT-0)
+%%Title: (TeX-OT1TT-0 TeX OT1TT 0)
+%%Version: 1.000
+%%EndComments
+/CIDInit /ProcSet findresource begin
+12 dict begin
+begincmap
+/CIDSystemInfo
+<< /Registry (TeX)
+/Ordering (OT1TT)
+/Supplement 0
+>> def
+/CMapName /TeX-OT1TT-0 def
+/CMapType 2 def
+1 begincodespacerange
+<00> <7F>
+endcodespacerange
+5 beginbfrange
+<00> <01> <0393>
+<09> <0A> <03A8>
+<21> <26> <0021>
+<28> <5F> <0028>
+<61> <7E> <0061>
+endbfrange
+32 beginbfchar
+<02> <0398>
+<03> <039B>
+<04> <039E>
+<05> <03A0>
+<06> <03A3>
+<07> <03D2>
+<08> <03A6>
+<0B> <2191>
+<0C> <2193>
+<0D> <0027>
+<0E> <00A1>
+<0F> <00BF>
+<10> <0131>
+<11> <0237>
+<12> <0060>
+<13> <00B4>
+<14> <02C7>
+<15> <02D8>
+<16> <00AF>
+<17> <02DA>
+<18> <00B8>
+<19> <00DF>
+<1A> <00E6>
+<1B> <0153>
+<1C> <00F8>
+<1D> <00C6>
+<1E> <0152>
+<1F> <00D8>
+<20> <2423>
+<27> <2019>
+<60> <2018>
+<7F> <00A8>
+endbfchar
+endcmap
+CMapName currentdict /CMap defineresource pop
+end
+end
+%%EndResource
+%%EOF
+    }\endgroup
+  \expandafter\edef\csname cmapOT1TT\endcsname#1{%
+    \pdffontattr#1{/ToUnicode \the\pdflastobj\space 0 R}%
+  }%
+\fi\fi
+
+
+% Set the font macro #1 to the font named \fontprefix#2.
+% #3 is the font's design size, #4 is a scale factor, #5 is the CMap
+% encoding (only OT1, OT1IT and OT1TT are allowed, or empty to omit).
+% Example:
+% #1 = \textrm
+% #2 = \rmshape
+% #3 = 10
+% #4 = \mainmagstep
+% #5 = OT1
+%
+\def\setfont#1#2#3#4#5{%
+  \font#1=\fontprefix#2#3 scaled #4
+  \csname cmap#5\endcsname#1%
+}
+% This is what gets called when #5 of \setfont is empty.
+\let\cmap\gobble
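+% With the default \fontprefix of `cm', the example above amounts to
+%   \font\textrm = cmr10 scaled \mainmagstep
+% followed by \cmapOT1\textrm to attach the OT1 ToUnicode CMap (or do
+% nothing, if no CMaps are defined).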
+%
+% (end of cmaps)
+
+% Use cm as the default font prefix.
+% To specify the font prefix, you must define \fontprefix
+% before you read in texinfo.tex.
+\ifx\fontprefix\thisisundefined
+\def\fontprefix{cm}
+\fi
+% Support font families that don't use the same naming scheme as CM.
+\def\rmshape{r}
+\def\rmbshape{bx}               % where the normal face is bold
+\def\bfshape{b}
+\def\bxshape{bx}
+\def\ttshape{tt}
+\def\ttbshape{tt}
+\def\ttslshape{sltt}
+\def\itshape{ti}
+\def\itbshape{bxti}
+\def\slshape{sl}
+\def\slbshape{bxsl}
+\def\sfshape{ss}
+\def\sfbshape{ss}
+\def\scshape{csc}
+\def\scbshape{csc}
+
+% Definitions for a main text size of 11pt.  (The default in Texinfo.)
+%
+\def\definetextfontsizexi{%
+% Text fonts (11.2pt, magstep1).
+\def\textnominalsize{11pt}
+\edef\mainmagstep{\magstephalf}
+\setfont\textrm\rmshape{10}{\mainmagstep}{OT1}
+\setfont\texttt\ttshape{10}{\mainmagstep}{OT1TT}
+\setfont\textbf\bfshape{10}{\mainmagstep}{OT1}
+\setfont\textit\itshape{10}{\mainmagstep}{OT1IT}
+\setfont\textsl\slshape{10}{\mainmagstep}{OT1}
+\setfont\textsf\sfshape{10}{\mainmagstep}{OT1}
+\setfont\textsc\scshape{10}{\mainmagstep}{OT1}
+\setfont\textttsl\ttslshape{10}{\mainmagstep}{OT1TT}
+\font\texti=cmmi10 scaled \mainmagstep
+\font\textsy=cmsy10 scaled \mainmagstep
+\def\textecsize{1095}
+
+% A few fonts for @defun names and args.
+\setfont\defbf\bfshape{10}{\magstep1}{OT1}
+\setfont\deftt\ttshape{10}{\magstep1}{OT1TT}
+\setfont\defsl\slshape{10}{\magstep1}{OT1TT}
+\setfont\defttsl\ttslshape{10}{\magstep1}{OT1TT}
+\def\df{\let\ttfont=\deftt \let\bffont = \defbf
+\let\ttslfont=\defttsl \let\slfont=\defsl \bf}
+
+% Fonts for indices, footnotes, small examples (9pt).
+\def\smallnominalsize{9pt}
+\setfont\smallrm\rmshape{9}{1000}{OT1}
+\setfont\smalltt\ttshape{9}{1000}{OT1TT}
+\setfont\smallbf\bfshape{10}{900}{OT1}
+\setfont\smallit\itshape{9}{1000}{OT1IT}
+\setfont\smallsl\slshape{9}{1000}{OT1}
+\setfont\smallsf\sfshape{9}{1000}{OT1}
+\setfont\smallsc\scshape{10}{900}{OT1}
+\setfont\smallttsl\ttslshape{10}{900}{OT1TT}
+\font\smalli=cmmi9
+\font\smallsy=cmsy9
+\def\smallecsize{0900}
+
+% Fonts for small examples (8pt).
+\def\smallernominalsize{8pt}
+\setfont\smallerrm\rmshape{8}{1000}{OT1}
+\setfont\smallertt\ttshape{8}{1000}{OT1TT}
+\setfont\smallerbf\bfshape{10}{800}{OT1}
+\setfont\smallerit\itshape{8}{1000}{OT1IT}
+\setfont\smallersl\slshape{8}{1000}{OT1}
+\setfont\smallersf\sfshape{8}{1000}{OT1}
+\setfont\smallersc\scshape{10}{800}{OT1}
+\setfont\smallerttsl\ttslshape{10}{800}{OT1TT}
+\font\smalleri=cmmi8
+\font\smallersy=cmsy8
+\def\smallerecsize{0800}
+
+% Fonts for math mode superscripts (7pt).
+\def\sevennominalsize{7pt}
+\setfont\sevenrm\rmshape{7}{1000}{OT1}
+\setfont\seventt\ttshape{10}{700}{OT1TT}
+\setfont\sevenbf\bfshape{10}{700}{OT1}
+\setfont\sevenit\itshape{7}{1000}{OT1IT}
+\setfont\sevensl\slshape{10}{700}{OT1}
+\setfont\sevensf\sfshape{10}{700}{OT1}
+\setfont\sevensc\scshape{10}{700}{OT1}
+\setfont\seventtsl\ttslshape{10}{700}{OT1TT}
+\font\seveni=cmmi7
+\font\sevensy=cmsy7
+\def\sevenecsize{0700}
+
+% Fonts for title page (20.4pt):
+\def\titlenominalsize{20pt}
+\setfont\titlerm\rmbshape{12}{\magstep3}{OT1}
+\setfont\titleit\itbshape{10}{\magstep4}{OT1IT}
+\setfont\titlesl\slbshape{10}{\magstep4}{OT1}
+\setfont\titlett\ttbshape{12}{\magstep3}{OT1TT}
+\setfont\titlettsl\ttslshape{10}{\magstep4}{OT1TT}
+\setfont\titlesf\sfbshape{17}{\magstep1}{OT1}
+\let\titlebf=\titlerm
+\setfont\titlesc\scbshape{10}{\magstep4}{OT1}
+\font\titlei=cmmi12 scaled \magstep3
+\font\titlesy=cmsy10 scaled \magstep4
+\def\titleecsize{2074}
+
+% Chapter (and unnumbered) fonts (17.28pt).
+\def\chapnominalsize{17pt}
+\setfont\chaprm\rmbshape{12}{\magstep2}{OT1}
+\setfont\chapit\itbshape{10}{\magstep3}{OT1IT}
+\setfont\chapsl\slbshape{10}{\magstep3}{OT1}
+\setfont\chaptt\ttbshape{12}{\magstep2}{OT1TT}
+\setfont\chapttsl\ttslshape{10}{\magstep3}{OT1TT}
+\setfont\chapsf\sfbshape{17}{1000}{OT1}
+\let\chapbf=\chaprm
+\setfont\chapsc\scbshape{10}{\magstep3}{OT1}
+\font\chapi=cmmi12 scaled \magstep2
+\font\chapsy=cmsy10 scaled \magstep3
+\def\chapecsize{1728}
+
+% Section fonts (14.4pt).
+\def\secnominalsize{14pt}
+\setfont\secrm\rmbshape{12}{\magstep1}{OT1}
+\setfont\secrmnotbold\rmshape{12}{\magstep1}{OT1}
+\setfont\secit\itbshape{10}{\magstep2}{OT1IT}
+\setfont\secsl\slbshape{10}{\magstep2}{OT1}
+\setfont\sectt\ttbshape{12}{\magstep1}{OT1TT}
+\setfont\secttsl\ttslshape{10}{\magstep2}{OT1TT}
+\setfont\secsf\sfbshape{12}{\magstep1}{OT1}
+\let\secbf\secrm
+\setfont\secsc\scbshape{10}{\magstep2}{OT1}
+\font\seci=cmmi12 scaled \magstep1
+\font\secsy=cmsy10 scaled \magstep2
+\def\sececsize{1440}
+
+% Subsection fonts (13.15pt).
+\def\ssecnominalsize{13pt}
+\setfont\ssecrm\rmbshape{12}{\magstephalf}{OT1}
+\setfont\ssecit\itbshape{10}{1315}{OT1IT}
+\setfont\ssecsl\slbshape{10}{1315}{OT1}
+\setfont\ssectt\ttbshape{12}{\magstephalf}{OT1TT}
+\setfont\ssecttsl\ttslshape{10}{1315}{OT1TT}
+\setfont\ssecsf\sfbshape{12}{\magstephalf}{OT1}
+\let\ssecbf\ssecrm
+\setfont\ssecsc\scbshape{10}{1315}{OT1}
+\font\sseci=cmmi12 scaled \magstephalf
+\font\ssecsy=cmsy10 scaled 1315
+\def\ssececsize{1200}
+
+% Reduced fonts for @acronym in text (10pt).
+\def\reducednominalsize{10pt}
+\setfont\reducedrm\rmshape{10}{1000}{OT1}
+\setfont\reducedtt\ttshape{10}{1000}{OT1TT}
+\setfont\reducedbf\bfshape{10}{1000}{OT1}
+\setfont\reducedit\itshape{10}{1000}{OT1IT}
+\setfont\reducedsl\slshape{10}{1000}{OT1}
+\setfont\reducedsf\sfshape{10}{1000}{OT1}
+\setfont\reducedsc\scshape{10}{1000}{OT1}
+\setfont\reducedttsl\ttslshape{10}{1000}{OT1TT}
+\font\reducedi=cmmi10
+\font\reducedsy=cmsy10
+\def\reducedecsize{1000}
+
+\textleading = 13.2pt % line spacing for 11pt CM
+\textfonts            % reset the current fonts
+\rm
+} % end of 11pt text font size definitions, \definetextfontsizexi
+
+
+% Definitions to make the main text be 10pt Computer Modern, with
+% section, chapter, etc., sizes following suit.  This is for the GNU
+% Press printing of the Emacs 22 manual.  Maybe other manuals in the
+% future.  Used with @smallbook, which sets the leading to 12pt.
+%
+\def\definetextfontsizex{%
+% Text fonts (10pt).
+\def\textnominalsize{10pt}
+\edef\mainmagstep{1000}
+\setfont\textrm\rmshape{10}{\mainmagstep}{OT1}
+\setfont\texttt\ttshape{10}{\mainmagstep}{OT1TT}
+\setfont\textbf\bfshape{10}{\mainmagstep}{OT1}
+\setfont\textit\itshape{10}{\mainmagstep}{OT1IT}
+\setfont\textsl\slshape{10}{\mainmagstep}{OT1}
+\setfont\textsf\sfshape{10}{\mainmagstep}{OT1}
+\setfont\textsc\scshape{10}{\mainmagstep}{OT1}
+\setfont\textttsl\ttslshape{10}{\mainmagstep}{OT1TT}
+\font\texti=cmmi10 scaled \mainmagstep
+\font\textsy=cmsy10 scaled \mainmagstep
+\def\textecsize{1000}
+
+% A few fonts for @defun names and args.
+\setfont\defbf\bfshape{10}{\magstephalf}{OT1}
+\setfont\deftt\ttshape{10}{\magstephalf}{OT1TT}
+\setfont\defsl\slshape{10}{\magstephalf}{OT1TT}
+\setfont\defttsl\ttslshape{10}{\magstephalf}{OT1TT}
+\def\df{\let\ttfont=\deftt \let\bffont = \defbf
+\let\slfont=\defsl \let\ttslfont=\defttsl \bf}
+
+% Fonts for indices, footnotes, small examples (9pt).
+\def\smallnominalsize{9pt}
+\setfont\smallrm\rmshape{9}{1000}{OT1}
+\setfont\smalltt\ttshape{9}{1000}{OT1TT}
+\setfont\smallbf\bfshape{10}{900}{OT1}
+\setfont\smallit\itshape{9}{1000}{OT1IT}
+\setfont\smallsl\slshape{9}{1000}{OT1}
+\setfont\smallsf\sfshape{9}{1000}{OT1}
+\setfont\smallsc\scshape{10}{900}{OT1}
+\setfont\smallttsl\ttslshape{10}{900}{OT1TT}
+\font\smalli=cmmi9
+\font\smallsy=cmsy9
+\def\smallecsize{0900}
+
+% Fonts for small examples (8pt).
+\def\smallernominalsize{8pt}
+\setfont\smallerrm\rmshape{8}{1000}{OT1}
+\setfont\smallertt\ttshape{8}{1000}{OT1TT}
+\setfont\smallerbf\bfshape{10}{800}{OT1}
+\setfont\smallerit\itshape{8}{1000}{OT1IT}
+\setfont\smallersl\slshape{8}{1000}{OT1}
+\setfont\smallersf\sfshape{8}{1000}{OT1}
+\setfont\smallersc\scshape{10}{800}{OT1}
+\setfont\smallerttsl\ttslshape{10}{800}{OT1TT}
+\font\smalleri=cmmi8
+\font\smallersy=cmsy8
+\def\smallerecsize{0800}
+
+% Fonts for math mode superscripts (7pt).
+\def\sevennominalsize{7pt}
+\setfont\sevenrm\rmshape{7}{1000}{OT1}
+\setfont\seventt\ttshape{10}{700}{OT1TT}
+\setfont\sevenbf\bfshape{10}{700}{OT1}
+\setfont\sevenit\itshape{7}{1000}{OT1IT}
+\setfont\sevensl\slshape{10}{700}{OT1}
+\setfont\sevensf\sfshape{10}{700}{OT1}
+\setfont\sevensc\scshape{10}{700}{OT1}
+\setfont\seventtsl\ttslshape{10}{700}{OT1TT}
+\font\seveni=cmmi7
+\font\sevensy=cmsy7
+\def\sevenecsize{0700}
+
+% Fonts for title page (20.4pt):
+\def\titlenominalsize{20pt}
+\setfont\titlerm\rmbshape{12}{\magstep3}{OT1}
+\setfont\titleit\itbshape{10}{\magstep4}{OT1IT}
+\setfont\titlesl\slbshape{10}{\magstep4}{OT1}
+\setfont\titlett\ttbshape{12}{\magstep3}{OT1TT}
+\setfont\titlettsl\ttslshape{10}{\magstep4}{OT1TT}
+\setfont\titlesf\sfbshape{17}{\magstep1}{OT1}
+\let\titlebf=\titlerm
+\setfont\titlesc\scbshape{10}{\magstep4}{OT1}
+\font\titlei=cmmi12 scaled \magstep3
+\font\titlesy=cmsy10 scaled \magstep4
+\def\titleecsize{2074}
+
+% Chapter fonts (14.4pt).
+\def\chapnominalsize{14pt}
+\setfont\chaprm\rmbshape{12}{\magstep1}{OT1}
+\setfont\chapit\itbshape{10}{\magstep2}{OT1IT}
+\setfont\chapsl\slbshape{10}{\magstep2}{OT1}
+\setfont\chaptt\ttbshape{12}{\magstep1}{OT1TT}
+\setfont\chapttsl\ttslshape{10}{\magstep2}{OT1TT}
+\setfont\chapsf\sfbshape{12}{\magstep1}{OT1}
+\let\chapbf\chaprm
+\setfont\chapsc\scbshape{10}{\magstep2}{OT1}
+\font\chapi=cmmi12 scaled \magstep1
+\font\chapsy=cmsy10 scaled \magstep2
+\def\chapecsize{1440}
+
+% Section fonts (12pt).
+\def\secnominalsize{12pt}
+\setfont\secrm\rmbshape{12}{1000}{OT1}
+\setfont\secit\itbshape{10}{\magstep1}{OT1IT}
+\setfont\secsl\slbshape{10}{\magstep1}{OT1}
+\setfont\sectt\ttbshape{12}{1000}{OT1TT}
+\setfont\secttsl\ttslshape{10}{\magstep1}{OT1TT}
+\setfont\secsf\sfbshape{12}{1000}{OT1}
+\let\secbf\secrm
+\setfont\secsc\scbshape{10}{\magstep1}{OT1}
+\font\seci=cmmi12
+\font\secsy=cmsy10 scaled \magstep1
+\def\sececsize{1200}
+
+% Subsection fonts (10pt).
+\def\ssecnominalsize{10pt}
+\setfont\ssecrm\rmbshape{10}{1000}{OT1}
+\setfont\ssecit\itbshape{10}{1000}{OT1IT}
+\setfont\ssecsl\slbshape{10}{1000}{OT1}
+\setfont\ssectt\ttbshape{10}{1000}{OT1TT}
+\setfont\ssecttsl\ttslshape{10}{1000}{OT1TT}
+\setfont\ssecsf\sfbshape{10}{1000}{OT1}
+\let\ssecbf\ssecrm
+\setfont\ssecsc\scbshape{10}{1000}{OT1}
+\font\sseci=cmmi10
+\font\ssecsy=cmsy10
+\def\ssececsize{1000}
+
+% Reduced fonts for @acronym in text (9pt).
+\def\reducednominalsize{9pt}
+\setfont\reducedrm\rmshape{9}{1000}{OT1}
+\setfont\reducedtt\ttshape{9}{1000}{OT1TT}
+\setfont\reducedbf\bfshape{10}{900}{OT1}
+\setfont\reducedit\itshape{9}{1000}{OT1IT}
+\setfont\reducedsl\slshape{9}{1000}{OT1}
+\setfont\reducedsf\sfshape{9}{1000}{OT1}
+\setfont\reducedsc\scshape{10}{900}{OT1}
+\setfont\reducedttsl\ttslshape{10}{900}{OT1TT}
+\font\reducedi=cmmi9
+\font\reducedsy=cmsy9
+\def\reducedecsize{0900}
+
+\divide\parskip by 2  % reduce space between paragraphs
+\textleading = 12pt   % line spacing for 10pt CM
+\textfonts            % reset the current fonts
+\rm
+} % end of 10pt text font size definitions, \definetextfontsizex
+
+% Fonts for short table of contents.
+\setfont\shortcontrm\rmshape{12}{1000}{OT1}
+\setfont\shortcontbf\bfshape{10}{\magstep1}{OT1}  % no cmb12
+\setfont\shortcontsl\slshape{12}{1000}{OT1}
+\setfont\shortconttt\ttshape{12}{1000}{OT1TT}
+
+
+% We provide the user-level command
+%   @fonttextsize 10
+% (or 11) to redefine the text font size.  pt is assumed.
+%
+\def\xiword{11}
+\def\xword{10}
+\def\xwordpt{10pt}
+%
+\parseargdef\fonttextsize{%
+  \def\textsizearg{#1}%
+  %\wlog{doing @fonttextsize \textsizearg}%
+  %
+  % Set \globaldefs so that documents can use this inside @tex, since
+  % makeinfo 4.8 does not support it, but we need it nonetheless.
+  %
+ \begingroup \globaldefs=1
+  \ifx\textsizearg\xword \definetextfontsizex
+  \else \ifx\textsizearg\xiword \definetextfontsizexi
+  \else
+    \errhelp=\EMsimple
+    \errmessage{@fonttextsize only supports `10' or `11', not `\textsizearg'}
+  \fi\fi
+ \endgroup
+}
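+%
+% For example,
+%   @fonttextsize 10
+% switches the main text from the default 11pt to 10pt; only `10' and
+% `11' are accepted.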
+
+%
+% Change the current font style to #1, remembering it in \curfontstyle.
+% For now, we do not accumulate font styles: @b{@i{foo}} prints foo in
+% italics, not bold italics.
+%
+\def\setfontstyle#1{%
+  \def\curfontstyle{#1}% not as a control sequence, because we are \edef'd.
+  \csname #1font\endcsname  % change the current font
+}
+
+\def\rm{\fam=0 \setfontstyle{rm}}
+\def\it{\fam=\itfam \setfontstyle{it}}
+\def\sl{\fam=\slfam \setfontstyle{sl}}
+\def\bf{\fam=\bffam \setfontstyle{bf}}\def\bfstylename{bf}
+\def\tt{\fam=\ttfam \setfontstyle{tt}}
+
+% Texinfo sort of supports the sans serif font style, which plain TeX does not.
+% So we set up a \sf.
+\newfam\sffam
+\def\sf{\fam=\sffam \setfontstyle{sf}}
+
+% We don't need math for this font style.
+\def\ttsl{\setfontstyle{ttsl}}
+
+
+% In order for the font changes to affect most math symbols and letters,
+% we have to define the \textfont of the standard families.
+% We don't bother to reset \scriptscriptfont; awaiting user need.
+%
+\def\resetmathfonts{%
+  \textfont0=\rmfont \textfont1=\ifont \textfont2=\syfont
+  \textfont\itfam=\itfont \textfont\slfam=\slfont \textfont\bffam=\bffont
+  \textfont\ttfam=\ttfont \textfont\sffam=\sffont
+  %
+  % Fonts for superscript.  Note that the 7pt fonts are used regardless
+  % of the current font size.
+  \scriptfont0=\sevenrm \scriptfont1=\seveni \scriptfont2=\sevensy
+  \scriptfont\itfam=\sevenit \scriptfont\slfam=\sevensl
+  \scriptfont\bffam=\sevenbf \scriptfont\ttfam=\seventt
+  \scriptfont\sffam=\sevensf
+}
+
+%
+
+% The font-changing commands (all called \...fonts) redefine the meanings
+% of \STYLEfont, instead of just \STYLE.  We do this because \STYLE needs
+% to also set the current \fam for math mode.  Our \STYLE (e.g., \rm)
+% commands hardwire \STYLEfont to set the current font.
+%
+% The fonts used for \ifont are for "math italics"  (\itfont is for italics
+% in regular text).  \syfont is also used in math mode only.
+%
+% Each font-changing command also sets the names \lsize (one size lower)
+% and \lllsize (three sizes lower).  These relative commands are used
+% in, e.g., the LaTeX logo and acronyms.
+%
+% This all needs generalizing, badly.
+%
+
+\def\assignfonts#1{%
+  \expandafter\let\expandafter\rmfont\csname #1rm\endcsname
+  \expandafter\let\expandafter\itfont\csname #1it\endcsname
+  \expandafter\let\expandafter\slfont\csname #1sl\endcsname
+  \expandafter\let\expandafter\bffont\csname #1bf\endcsname
+  \expandafter\let\expandafter\ttfont\csname #1tt\endcsname
+  \expandafter\let\expandafter\smallcaps\csname #1sc\endcsname
+  \expandafter\let\expandafter\sffont  \csname #1sf\endcsname
+  \expandafter\let\expandafter\ifont   \csname #1i\endcsname
+  \expandafter\let\expandafter\syfont  \csname #1sy\endcsname
+  \expandafter\let\expandafter\ttslfont\csname #1ttsl\endcsname
+}
+
+\newif\ifrmisbold
+
+% Select smaller font size with the current style.  Used to change font size
+% in, e.g., the LaTeX logo and acronyms.  If we are using bold fonts for
+% normal roman text, also use bold fonts for roman text in the smaller size.
+\def\switchtolllsize{%
+   \expandafter\assignfonts\expandafter{\lllsize}%
+   \ifrmisbold
+     \let\rmfont\bffont
+   \fi
+   \csname\curfontstyle\endcsname
+}%
+
+\def\switchtolsize{%
+   \expandafter\assignfonts\expandafter{\lsize}%
+   \ifrmisbold
+     \let\rmfont\bffont
+   \fi
+   \csname\curfontstyle\endcsname
+}%
+
+\def\definefontsetatsize#1#2#3#4#5{%
+\expandafter\def\csname #1fonts\endcsname{%
+  \def\curfontsize{#1}%
+  \def\lsize{#2}\def\lllsize{#3}%
+  \csname rmisbold#5\endcsname
+  \assignfonts{#1}%
+  \resetmathfonts
+  \setleading{#4}%
+}}
+
+\definefontsetatsize{text}   {reduced}{smaller}{\textleading}{false}
+\definefontsetatsize{title}  {chap}   {subsec} {27pt}  {true}
+\definefontsetatsize{chap}   {sec}    {text}   {19pt}  {true}
+\definefontsetatsize{sec}    {subsec} {reduced}{17pt}  {true}
+\definefontsetatsize{ssec}   {text}   {small}  {15pt}  {true}
+\definefontsetatsize{reduced}{small}  {smaller}{10.5pt}{false}
+\definefontsetatsize{small}  {smaller}{smaller}{10.5pt}{false}
+\definefontsetatsize{smaller}{smaller}{smaller}{9.5pt} {false}
+
+\def\titlefont#1{{\titlefonts\rm #1}}
+\let\subsecfonts = \ssecfonts
+\let\subsubsecfonts = \ssecfonts
+
+% Define these just so they can be easily changed for other fonts.
+\def\angleleft{$\langle$}
+\def\angleright{$\rangle$}
+
+% Set the fonts to use with the @small... environments.
+\let\smallexamplefonts = \smallfonts
+
+% About \smallexamplefonts.  If we use \smallfonts (9pt), @smallexample
+% can fit this many characters:
+%   8.5x11=86   smallbook=72  a4=90  a5=69
+% If we use \scriptfonts (8pt), then we can fit this many characters:
+%   8.5x11=90+  smallbook=80  a4=90+  a5=77
+% For me, subjectively, the few extra characters that fit aren't worth
+% the additional smallness of 8pt.  So I'm making the default 9pt.
+%
+% By the way, for comparison, here's what fits with @example (10pt):
+%   8.5x11=71  smallbook=60  a4=75  a5=58
+% --karl, 24jan03.
+
+% Set up the default fonts, so we can use them for creating boxes.
+%
+\definetextfontsizexi
+
+
+\message{markup,}
+
+% Check if we are currently using a typewriter font.  Since all the
+% Computer Modern typewriter fonts have zero interword stretch (and
+% shrink), and it is reasonable to expect all typewriter fonts to have
+% this property, we can check that font parameter.
+%
+\def\ifmonospace{\ifdim\fontdimen3\font=0pt }
+
+% Markup style infrastructure.  \defmarkupstylesetup\INITMACRO will
+% define and register \INITMACRO to be called on markup style changes.
+% \INITMACRO can check \currentmarkupstyle for the innermost
+% style.
+
+\let\currentmarkupstyle\empty
+
+\def\setupmarkupstyle#1{%
+  \def\currentmarkupstyle{#1}%
+  \markupstylesetup
+}
+
+\let\markupstylesetup\empty
+
+\def\defmarkupstylesetup#1{%
+  \expandafter\def\expandafter\markupstylesetup
+    \expandafter{\markupstylesetup #1}%
+  \def#1%
+}
+
+% Markup style setup for left and right quotes.
+\defmarkupstylesetup\markupsetuplq{%
+  \expandafter\let\expandafter \temp
+    \csname markupsetuplq\currentmarkupstyle\endcsname
+  \ifx\temp\relax \markupsetuplqdefault \else \temp \fi
+}
+
+\defmarkupstylesetup\markupsetuprq{%
+  \expandafter\let\expandafter \temp
+    \csname markupsetuprq\currentmarkupstyle\endcsname
+  \ifx\temp\relax \markupsetuprqdefault \else \temp \fi
+}
+
+{
+\catcode`\'=\active
+\catcode`\`=\active
+
+\gdef\markupsetuplqdefault{\let`\lq}
+\gdef\markupsetuprqdefault{\let'\rq}
+
+\gdef\markupsetcodequoteleft{\let`\codequoteleft}
+\gdef\markupsetcodequoteright{\let'\codequoteright}
+}
+
+\let\markupsetuplqcode \markupsetcodequoteleft
+\let\markupsetuprqcode \markupsetcodequoteright
+%
+\let\markupsetuplqexample \markupsetcodequoteleft
+\let\markupsetuprqexample \markupsetcodequoteright
+%
+\let\markupsetuplqkbd     \markupsetcodequoteleft
+\let\markupsetuprqkbd     \markupsetcodequoteright
+%
+\let\markupsetuplqsamp \markupsetcodequoteleft
+\let\markupsetuprqsamp \markupsetcodequoteright
+%
+\let\markupsetuplqverb \markupsetcodequoteleft
+\let\markupsetuprqverb \markupsetcodequoteright
+%
+\let\markupsetuplqverbatim \markupsetcodequoteleft
+\let\markupsetuprqverbatim \markupsetcodequoteright
+
+% Allow an option to not use regular directed right quote/apostrophe
+% (char 0x27), but instead the undirected quote from cmtt (char 0x0d).
+% The undirected quote is ugly, so don't make it the default, but it
+% works for pasting with more pdf viewers (at least evince), the
+% lilypond developers report.  xpdf does work with the regular 0x27.
+%
+\def\codequoteright{%
+  \ifmonospace
+    \expandafter\ifx\csname SETtxicodequoteundirected\endcsname\relax
+      \expandafter\ifx\csname SETcodequoteundirected\endcsname\relax
+        '%
+      \else \char'15 \fi
+    \else \char'15 \fi
+   \else
+     '%
+   \fi
+}
+%
+% and a similar option for the left quote char vs. a grave accent.
+% Modern fonts display ASCII 0x60 as a grave accent, so some people like
+% the code environments to do likewise.
+%
+\def\codequoteleft{%
+  \ifmonospace
+    \expandafter\ifx\csname SETtxicodequotebacktick\endcsname\relax
+      \expandafter\ifx\csname SETcodequotebacktick\endcsname\relax
+        % [Knuth] pp. 380,381,391
+        % \relax disables Spanish ligatures ?` and !` of \tt font.
+        \relax`%
+      \else \char'22 \fi
+    \else \char'22 \fi
+   \else
+     \relax`%
+   \fi
+}
+
+% Commands to set the quote options.
+% 
+\parseargdef\codequoteundirected{%
+  \def\temp{#1}%
+  \ifx\temp\onword
+    \expandafter\let\csname SETtxicodequoteundirected\endcsname
+      = t%
+  \else\ifx\temp\offword
+    \expandafter\let\csname SETtxicodequoteundirected\endcsname
+      = \relax
+  \else
+    \errhelp = \EMsimple
+    \errmessage{Unknown @codequoteundirected value `\temp', must be on|off}%
+  \fi\fi
+}
+%
+\parseargdef\codequotebacktick{%
+  \def\temp{#1}%
+  \ifx\temp\onword
+    \expandafter\let\csname SETtxicodequotebacktick\endcsname
+      = t%
+  \else\ifx\temp\offword
+    \expandafter\let\csname SETtxicodequotebacktick\endcsname
+      = \relax
+  \else
+    \errhelp = \EMsimple
+    \errmessage{Unknown @codequotebacktick value `\temp', must be on|off}%
+  \fi\fi
+}
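+% For reference, a sketch of the corresponding Texinfo input (illustrative
+% only, not part of this file; the command shown is made up):
+%   @codequoteundirected on
+%   @codequotebacktick on
+%   @code{echo `hello'}   @c left quote prints as a grave, right as undirected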
+
+% [Knuth] pp. 380,381,391, disable Spanish ligatures ?` and !` of \tt font.
+\def\noligaturesquoteleft{\relax\lq}
+
+% Count depth in font-changes, for error checks
+\newcount\fontdepth \fontdepth=0
+
+% Font commands.
+
+% #1 is the font command (\sl or \it), #2 is the text to slant.
+% If we are in a monospaced environment, however, 1) always use \ttsl,
+% and 2) do not add an italic correction.
+\def\dosmartslant#1#2{%
+  \ifusingtt 
+    {{\ttsl #2}\let\next=\relax}%
+    {\def\next{{#1#2}\futurelet\next\smartitaliccorrection}}%
+  \next
+}
+\def\smartslanted{\dosmartslant\sl}
+\def\smartitalic{\dosmartslant\it}
+
+% Output an italic correction unless \next (presumed to be the following
+% character) is such as not to need one.
+\def\smartitaliccorrection{%
+  \ifx\next,%
+  \else\ifx\next-%
+  \else\ifx\next.%
+  \else\ifx\next\.%
+  \else\ifx\next\comma%
+  \else\ptexslash
+  \fi\fi\fi\fi\fi
+  \aftersmartic
+}
+
+% Unconditionally use \ttsl, and no italic correction.  @var is set to
+% this for defuns.
+\def\ttslanted#1{{\ttsl #1}}
+
+% @cite is like \smartslanted except unconditionally use \sl.  We never want
+% ttsl for book titles, do we?
+\def\cite#1{{\sl #1}\futurelet\next\smartitaliccorrection}
+
+\def\aftersmartic{}
+\def\var#1{%
+  \let\saveaftersmartic = \aftersmartic
+  \def\aftersmartic{\null\let\aftersmartic=\saveaftersmartic}%
+  \smartslanted{#1}%
+}
+
+\let\i=\smartitalic
+\let\slanted=\smartslanted
+\let\dfn=\smartslanted
+\let\emph=\smartitalic
+
+% Explicit font changes: @r, @sc, undocumented @ii.
+\def\r#1{{\rm #1}}              % roman font
+\def\sc#1{{\smallcaps#1}}       % smallcaps font
+\def\ii#1{{\it #1}}             % italic font
+
+% @b, explicit bold.  Also @strong.
+\def\b#1{{\bf #1}}
+\let\strong=\b
+
+% @sansserif, explicit sans.
+\def\sansserif#1{{\sf #1}}
+
+% We can't just use \exhyphenpenalty, because that only has effect at
+% the end of a paragraph.  Restore normal hyphenation at the end of the
+% group within which \nohyphenation is presumably called.
+%
+\def\nohyphenation{\hyphenchar\font = -1  \aftergroup\restorehyphenation}
+\def\restorehyphenation{\hyphenchar\font = `- }
+
+% Set sfcode to normal for the chars that usually have another value.
+% Can't use plain's \frenchspacing because it uses the `\x notation, and
+% sometimes \x has an active definition that messes things up.
+%
+\catcode`@=11
+  \def\plainfrenchspacing{%
+    \sfcode`\.=\@m \sfcode`\?=\@m \sfcode`\!=\@m
+    \sfcode`\:=\@m \sfcode`\;=\@m \sfcode`\,=\@m
+    \def\endofsentencespacefactor{1000}% for @. and friends
+  }
+  \def\plainnonfrenchspacing{%
+    \sfcode`\.3000\sfcode`\?3000\sfcode`\!3000
+    \sfcode`\:2000\sfcode`\;1500\sfcode`\,1250
+    \def\endofsentencespacefactor{3000}% for @. and friends
+  }
+\catcode`@=\other
+\def\endofsentencespacefactor{3000}% default
+
+% @t, explicit typewriter.
+\def\t#1{%
+  {\tt \rawbackslash \plainfrenchspacing #1}%
+  \null
+}
+
+% @samp.
+\def\samp#1{{\setupmarkupstyle{samp}\lq\tclose{#1}\rq\null}}
+
+% @indicateurl is \samp, that is, with quotes.
+\let\indicateurl=\samp
+
+% @code (and similar) prints in typewriter, but with spaces the same
+% size as normal in the surrounding text, without hyphenation, etc.
+% This is a subroutine for that.
+\def\tclose#1{%
+  {%
+    % Change normal interword space to be same as for the current font.
+    \spaceskip = \fontdimen2\font
+    %
+    % Switch to typewriter.
+    \tt
+    %
+    % But `\ ' produces the large typewriter interword space.
+    \def\ {{\spaceskip = 0pt{} }}%
+    %
+    % Turn off hyphenation.
+    \nohyphenation
+    %
+    \rawbackslash
+    \plainfrenchspacing
+    #1%
+  }%
+  \null % reset spacefactor to 1000
+}
+
+% We *must* turn on hyphenation at `-' and `_' in @code.
+% (But see \codedashfinish below.)
+% Otherwise, it is too hard to avoid overfull hboxes
+% in the Emacs manual, the Library manual, etc.
+%
+% Unfortunately, TeX uses one parameter (\hyphenchar) to control
+% both hyphenation at - and hyphenation within words.
+% We must therefore turn them both off (\tclose does that)
+% and arrange explicitly to hyphenate at a dash. -- rms.
+{
+  \catcode`\-=\active \catcode`\_=\active
+  \catcode`\'=\active \catcode`\`=\active
+  \global\let'=\rq \global\let`=\lq  % default definitions
+  %
+  \global\def\code{\begingroup
+    \setupmarkupstyle{code}%
+    % The following should really be moved into \setupmarkupstyle handlers.
+    \catcode\dashChar=\active  \catcode\underChar=\active
+    \ifallowcodebreaks
+     \let-\codedash
+     \let_\codeunder
+    \else
+     \let-\normaldash
+     \let_\realunder
+    \fi
+    % Given -foo (with a single dash), we do not want to allow a break
+    % after the hyphen.
+    \global\let\codedashprev=\codedash
+    %
+    \codex
+  }
+  %
+  \gdef\codedash{\futurelet\next\codedashfinish}
+  \gdef\codedashfinish{%
+    \normaldash % always output the dash character itself.
+    % 
+    % Now, output a discretionary to allow a line break, unless
+    % (a) the next character is a -, or
+    % (b) the preceding character is a -.
+    % E.g., given --posix, we do not want to allow a break after either -.
+    % Given --foo-bar, we do want to allow a break between the - and the b.
+    \ifx\next\codedash \else
+      \ifx\codedashprev\codedash 
+      \else \discretionary{}{}{}\fi
+    \fi
+    % we need the space after the = for the case when \next itself is a
+    % space token; it would get swallowed otherwise.  As in @code{- a}.
+    \global\let\codedashprev= \next
+  }
+}
+\def\normaldash{-}
+%
+\def\codex #1{\tclose{#1}\endgroup}
+
+\def\codeunder{%
+  % this is all so @math{@code{var_name}+1} can work.  In math mode, _
+  % is "active" (mathcode"8000) and \normalunderscore (or \char95, etc.)
+  % will therefore expand the active definition of _, which is us
+  % (inside @code that is), therefore an endless loop.
+  \ifusingtt{\ifmmode
+               \mathchar"075F % class 0=ordinary, family 7=ttfam, pos 0x5F=_.
+             \else\normalunderscore \fi
+             \discretionary{}{}{}}%
+            {\_}%
+}
+
+% An additional complication: the above will allow breaks after, e.g.,
+% each of the four underscores in __typeof__.  This is bad.
+% @allowcodebreaks provides a document-level way to turn breaking at -
+% and _ on and off.
+%
+\newif\ifallowcodebreaks  \allowcodebreakstrue
+
+\def\keywordtrue{true}
+\def\keywordfalse{false}
+
+\parseargdef\allowcodebreaks{%
+  \def\txiarg{#1}%
+  \ifx\txiarg\keywordtrue
+    \allowcodebreakstrue
+  \else\ifx\txiarg\keywordfalse
+    \allowcodebreaksfalse
+  \else
+    \errhelp = \EMsimple
+    \errmessage{Unknown @allowcodebreaks option `\txiarg', must be true|false}%
+  \fi\fi
+}
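+% For reference, a sketch of typical use in a Texinfo source (illustrative
+% only, not part of this file):
+%   @allowcodebreaks false
+%   ... @code{__typeof__} and @code{--posix} are never broken at _ or - ...
+%   @allowcodebreaks true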
+
+% For @command, @env, @file, @option quotes seem unnecessary,
+% so use \code rather than \samp.
+\let\command=\code
+\let\env=\code
+\let\file=\code
+\let\option=\code
+
+% @uref (abbreviation for `urlref') aka @url takes an optional
+% (comma-separated) second argument specifying the text to display and
+% an optional third arg as text to display instead of (rather than in
+% addition to) the url itself.  First (mandatory) arg is the url.
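+% For reference, sketches of the three calling forms (illustrative only,
+% not part of this file; the URL is made up):
+%   @uref{https://www.example.org/}                  just the url
+%   @uref{https://www.example.org/, example site}    text, url in parentheses
+%   @uref{https://www.example.org/,, example.org}    third arg shown instead of url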
+
+% TeX-only option to allow changing PDF output to show only the second
+% arg (if given), and not the url (which is then just the link target).
+\newif\ifurefurlonlylink
+
+% The main macro is \urefbreak, which allows breaking at expected
+% places within the url.  (There used to be another version, which
+% didn't support automatic breaking.)
+\def\urefbreak{\begingroup \urefcatcodes \dourefbreak}
+\let\uref=\urefbreak
+%
+\def\dourefbreak#1{\urefbreakfinish #1,,,\finish}
+\def\urefbreakfinish#1,#2,#3,#4\finish{% doesn't work in @example
+  \unsepspaces
+  \pdfurl{#1}%
+  \setbox0 = \hbox{\ignorespaces #3}%
+  \ifdim\wd0 > 0pt
+    \unhbox0 % third arg given, show only that
+  \else
+    \setbox0 = \hbox{\ignorespaces #2}% look for second arg
+    \ifdim\wd0 > 0pt
+      \ifpdf
+        % For pdfTeX and LuaTeX
+        \ifurefurlonlylink
+          % PDF plus option to not display url, show just arg
+          \unhbox0             
+        \else
+          % PDF, normally display both arg and url for consistency,
+          % visibility, if the pdf is eventually used to print, etc.
+          \unhbox0\ (\urefcode{#1})%
+        \fi
+      \else
+        \ifx\XeTeXrevision\thisisundefined
+          \unhbox0\ (\urefcode{#1})% DVI, always show arg and url
+        \else
+          % For XeTeX
+          \ifurefurlonlylink
+            % PDF plus option to not display url, show just arg
+            \unhbox0             
+          \else
+            % PDF, normally display both arg and url for consistency,
+            % visibility, if the pdf is eventually used to print, etc.
+            \unhbox0\ (\urefcode{#1})%
+          \fi
+        \fi
+      \fi
+    \else
+      \urefcode{#1}% only url given, so show it
+    \fi
+  \fi
+  \endlink
+\endgroup}
+
+% Allow line breaks around only a few characters.
+\def\urefcatcodes{%
+  \catcode`\&=\active \catcode`\.=\active
+  \catcode`\#=\active \catcode`\?=\active
+  \catcode`\/=\active
+}
+{
+  \urefcatcodes
+  %
+  \global\def\urefcode{\begingroup
+    \setupmarkupstyle{code}%
+    \urefcatcodes
+    \let&\urefcodeamp
+    \let.\urefcodedot
+    \let#\urefcodehash
+    \let?\urefcodequest
+    \let/\urefcodeslash
+    \codex
+  }
+  %
+  % By default, they are just regular characters.
+  \global\def&{\normalamp}
+  \global\def.{\normaldot}
+  \global\def#{\normalhash}
+  \global\def?{\normalquest}
+  \global\def/{\normalslash}
+}
+
+% We put a little stretch before and after the breakable chars, to help
+% line breaking of long URLs.  The unequal skips make the result look
+% better in cmtt at least, especially for dots.
+\def\urefprestretchamount{.13em}
+\def\urefpoststretchamount{.1em}
+\def\urefprestretch{\urefprebreak \hskip0pt plus\urefprestretchamount\relax}
+\def\urefpoststretch{\urefpostbreak \hskip0pt plus\urefpoststretchamount\relax}
+%
+\def\urefcodeamp{\urefprestretch \&\urefpoststretch}
+\def\urefcodedot{\urefprestretch .\urefpoststretch}
+\def\urefcodehash{\urefprestretch \#\urefpoststretch}
+\def\urefcodequest{\urefprestretch ?\urefpoststretch}
+\def\urefcodeslash{\futurelet\next\urefcodeslashfinish}
+{
+  \catcode`\/=\active
+  \global\def\urefcodeslashfinish{%
+    \urefprestretch \slashChar
+    % Allow line break only after the final / in a sequence of
+    % slashes, to avoid line break between the slashes in http://.
+    \ifx\next/\else \urefpoststretch \fi
+  }
+}
+
+% One more complication: by default we'll break after the special
+% characters, but some people like to break before the special chars, so
+% allow that.  Also allow no breaking at all, for manual control.
+% 
+\parseargdef\urefbreakstyle{%
+  \def\txiarg{#1}%
+  \ifx\txiarg\wordnone
+    \def\urefprebreak{\nobreak}\def\urefpostbreak{\nobreak}
+  \else\ifx\txiarg\wordbefore
+    \def\urefprebreak{\allowbreak}\def\urefpostbreak{\nobreak}
+  \else\ifx\txiarg\wordafter
+    \def\urefprebreak{\nobreak}\def\urefpostbreak{\allowbreak}
+  \else
+    \errhelp = \EMsimple
+    \errmessage{Unknown @urefbreakstyle setting `\txiarg'}%
+  \fi\fi\fi
+}
+\def\wordafter{after}
+\def\wordbefore{before}
+\def\wordnone{none}
+
+\urefbreakstyle after
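+% For example, a document preferring breaks before the special characters
+% would say (illustrative only, not part of this file):
+%   @urefbreakstyle before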
+
+% @url synonym for @uref, since that's how everyone uses it.
+%
+\let\url=\uref
+
+% rms does not like angle brackets --karl, 17may97.
+% So now @email is just like @uref, unless we are pdf.
+%
+%\def\email#1{\angleleft{\tt #1}\angleright}
+\ifpdf
+  \def\email#1{\doemail#1,,\finish}
+  \def\doemail#1,#2,#3\finish{\begingroup
+    \unsepspaces
+    \pdfurl{mailto:#1}%
+    \setbox0 = \hbox{\ignorespaces #2}%
+    \ifdim\wd0>0pt\unhbox0\else\code{#1}\fi
+    \endlink
+  \endgroup}
+\else
+  \ifx\XeTeXrevision\thisisundefined
+    \let\email=\uref
+  \else
+    \def\email#1{\doemail#1,,\finish}
+    \def\doemail#1,#2,#3\finish{\begingroup
+      \unsepspaces
+      \pdfurl{mailto:#1}%
+      \setbox0 = \hbox{\ignorespaces #2}%
+      \ifdim\wd0>0pt\unhbox0\else\code{#1}\fi
+      \endlink
+    \endgroup}
+  \fi
+\fi
+
+% @kbdinputstyle -- arg is `distinct' (@kbd uses slanted tty font always),
+%   `example' (@kbd uses ttsl only inside of @example and friends),
+%   or `code' (@kbd uses normal tty font always).
+\parseargdef\kbdinputstyle{%
+  \def\txiarg{#1}%
+  \ifx\txiarg\worddistinct
+    \gdef\kbdexamplefont{\ttsl}\gdef\kbdfont{\ttsl}%
+  \else\ifx\txiarg\wordexample
+    \gdef\kbdexamplefont{\ttsl}\gdef\kbdfont{\tt}%
+  \else\ifx\txiarg\wordcode
+    \gdef\kbdexamplefont{\tt}\gdef\kbdfont{\tt}%
+  \else
+    \errhelp = \EMsimple
+    \errmessage{Unknown @kbdinputstyle setting `\txiarg'}%
+  \fi\fi\fi
+}
+\def\worddistinct{distinct}
+\def\wordexample{example}
+\def\wordcode{code}
+
+% Default is `distinct'.
+\kbdinputstyle distinct
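+% For example, a manual wanting the slanted tty font for @kbd only inside
+% @example and friends would say (illustrative only, not part of this file):
+%   @kbdinputstyle example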
+
+% @kbd is like @code, except that if the argument is just one @key command,
+% then @kbd has no effect.
+\def\kbd#1{{\def\look{#1}\expandafter\kbdsub\look??\par}}
+
+\def\xkey{\key}
+\def\kbdsub#1#2#3\par{%
+  \def\one{#1}\def\three{#3}\def\threex{??}%
+  \ifx\one\xkey\ifx\threex\three \key{#2}%
+  \else{\tclose{\kbdfont\setupmarkupstyle{kbd}\look}}\fi
+  \else{\tclose{\kbdfont\setupmarkupstyle{kbd}\look}}\fi
+}
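+% For reference, sketches of typical input (illustrative only, not part of
+% this file):
+%   @kbd{C-x C-s}        typeset in \kbdfont (per @kbdinputstyle)
+%   @kbd{@key{RET}}      a lone @key argument; equivalent to @key{RET}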
+
+% definition of @key that produces a lozenge.  Doesn't adjust to text size.
+%\setfont\keyrm\rmshape{8}{1000}{OT1}
+%\font\keysy=cmsy9
+%\def\key#1{{\keyrm\textfont2=\keysy \leavevmode\hbox{%
+%  \raise0.4pt\hbox{\angleleft}\kern-.08em\vtop{%
+%    \vbox{\hrule\kern-0.4pt
+%     \hbox{\raise0.4pt\hbox{\vphantom{\angleleft}}#1}}%
+%    \kern-0.4pt\hrule}%
+%  \kern-.06em\raise0.4pt\hbox{\angleright}}}}
+
+% definition of @key with no lozenge.  If the current font is already
+% monospace, don't change it; that way, we respect @kbdinputstyle.  But
+% if it isn't monospace, then use \tt.
+%
+\def\key#1{{\setupmarkupstyle{key}%
+  \nohyphenation
+  \ifmonospace\else\tt\fi
+  #1}\null}
+
+% @clicksequence{File @click{} Open ...}
+\def\clicksequence#1{\begingroup #1\endgroup}
+
+% @clickstyle @arrow   (by default)
+\parseargdef\clickstyle{\def\click{#1}}
+\def\click{\arrow}
+
+% Typeset a dimension, e.g., `in' or `pt'.  The only reason for the
+% argument is to make the input look right: @dmn{pt} instead of @dmn{}pt.
+%
+\def\dmn#1{\thinspace #1}
+
+% @acronym for "FBI", "NATO", and the like.
+% We print this one point size smaller, since it's intended for
+% all-uppercase.
+%
+\def\acronym#1{\doacronym #1,,\finish}
+\def\doacronym#1,#2,#3\finish{%
+  {\switchtolsize #1}%
+  \def\temp{#2}%
+  \ifx\temp\empty \else
+    \space ({\unsepspaces \ignorespaces \temp \unskip})%
+  \fi
+  \null % reset \spacefactor=1000
+}
+
+% @abbr for "Comput. J." and the like.
+% No font change, but don't do end-of-sentence spacing.
+%
+\def\abbr#1{\doabbr #1,,\finish}
+\def\doabbr#1,#2,#3\finish{%
+  {\plainfrenchspacing #1}%
+  \def\temp{#2}%
+  \ifx\temp\empty \else
+    \space ({\unsepspaces \ignorespaces \temp \unskip})%
+  \fi
+  \null % reset \spacefactor=1000
+}
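+% For reference, sketches of typical input (illustrative only, not part of
+% this file):
+%   @acronym{NATO}
+%   @acronym{GNU, GNU's Not Unix}          expansion printed in parentheses
+%   @abbr{Comput. J., Computer Journal}    same optional second argument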
+
+% @asis just yields its argument.  Used with @table, for example.
+%
+\def\asis#1{#1}
+
+% @math outputs its argument in math mode.
+%
+% One complication: _ usually means subscripts, but it could also mean
+% an actual _ character, as in @math{@var{some_variable} + 1}.  So make
+% _ active, and distinguish by seeing if the current family is \slfam,
+% which is what @var uses.
+{
+  \catcode`\_ = \active
+  \gdef\mathunderscore{%
+    \catcode`\_=\active
+    \def_{\ifnum\fam=\slfam \_\else\sb\fi}%
+  }
+}
+% Another complication: we want \\ (and @\) to output a math (or tt) \.
+% FYI, plain.tex uses \\ as a temporary control sequence (for no
+% particular reason), but this is not advertised and we don't care.
+%
+% The \mathchar is class=0=ordinary, family=7=ttfam, position=5C=\.
+\def\mathbackslash{\ifnum\fam=\ttfam \mathchar"075C \else\backslash \fi}
+%
+\def\math{%
+  \ifmmode\else % only go into math if not in math mode already
+    \tex
+    \mathunderscore
+    \let\\ = \mathbackslash
+    \mathactive
+    % make the texinfo accent commands work in math mode
+    \let\"=\ddot
+    \let\'=\acute
+    \let\==\bar
+    \let\^=\hat
+    \let\`=\grave
+    \let\u=\breve
+    \let\v=\check
+    \let\~=\tilde
+    \let\dotaccent=\dot
+    % have to provide another name for sup operator
+    \let\mathopsup=\sup
+  $\expandafter\finishmath\fi
+}
+\def\finishmath#1{#1$\endgroup}  % Close the group opened by \tex.
+
+% Some active characters (such as <) are spaced differently in math.
+% We have to reset their definitions in case the @math was an argument
+% to a command which sets the catcodes (such as @item or @section).
+%
+{
+  \catcode`^ = \active
+  \catcode`< = \active
+  \catcode`> = \active
+  \catcode`+ = \active
+  \catcode`' = \active
+  \gdef\mathactive{%
+    \let^ = \ptexhat
+    \let< = \ptexless
+    \let> = \ptexgtr
+    \let+ = \ptexplus
+    \let' = \ptexquoteright
+  }
+}
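+% For reference, sketches of typical input (illustrative only, not part of
+% this file):
+%   @math{x_1 + x_2}                  _ produces subscripts
+%   @math{@var{some_variable} + 1}    inside @var, _ is a literal underscore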
+
+% for @sub and @sup, if in math mode, just do a normal sub/superscript.
+% If in text, use math to place as sub/superscript, but switch
+% into text mode, with smaller fonts.  This is a different font than the
+% one used for real math sub/superscripts (8pt vs. 7pt), but let's not
+% fix it (significant additions to font machinery) until someone notices.
+%
+\def\sub{\ifmmode \expandafter\sb \else \expandafter\finishsub\fi}
+\def\finishsub#1{$\sb{\hbox{\switchtolllsize #1}}$}%
+%
+\def\sup{\ifmmode \expandafter\ptexsp \else \expandafter\finishsup\fi}
+\def\finishsup#1{$\ptexsp{\hbox{\switchtolllsize #1}}$}%
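+% For reference, sketches of typical input (illustrative only, not part of
+% this file):
+%   H@sub{2}O          subscript, set in text fonts at \lllsize
+%   m@sup{3}           superscript likewise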
+
+% @inlinefmt{FMTNAME,PROCESSED-TEXT} and @inlineraw{FMTNAME,RAW-TEXT}.
+% Ignore unless FMTNAME == tex; then it is like @iftex and @tex,
+% except specified as a normal braced arg, so no newlines to worry about.
+% 
+\def\outfmtnametex{tex}
+%
+\long\def\inlinefmt#1{\doinlinefmt #1,\finish}
+\long\def\doinlinefmt#1,#2,\finish{%
+  \def\inlinefmtname{#1}%
+  \ifx\inlinefmtname\outfmtnametex \ignorespaces #2\fi
+}
+% 
+% @inlinefmtifelse{FMTNAME,THEN-TEXT,ELSE-TEXT} expands THEN-TEXT if
+% FMTNAME is tex, else ELSE-TEXT.
+\long\def\inlinefmtifelse#1{\doinlinefmtifelse #1,,,\finish}
+\long\def\doinlinefmtifelse#1,#2,#3,#4,\finish{%
+  \def\inlinefmtname{#1}%
+  \ifx\inlinefmtname\outfmtnametex \ignorespaces #2\else \ignorespaces #3\fi
+}
+%
+% For raw, must switch into @tex before parsing the argument, to avoid
+% setting catcodes prematurely.  Doing it this way means that, for
+% example, @inlineraw{html, foo{bar} gets a parse error instead of being
+% ignored.  But this isn't important because if people want a literal
+% *right* brace they would have to use a command anyway, so they may as
+% well use a command to get a left brace too.  We could re-use the
+% delimiter character idea from \verb, but it seems like overkill.
+% 
+\long\def\inlineraw{\tex \doinlineraw}
+\long\def\doinlineraw#1{\doinlinerawtwo #1,\finish}
+\def\doinlinerawtwo#1,#2,\finish{%
+  \def\inlinerawname{#1}%
+  \ifx\inlinerawname\outfmtnametex \ignorespaces #2\fi
+  \endgroup % close group opened by \tex.
+}
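+% For reference, sketches of typical input (illustrative only, not part of
+% this file):
+%   @inlinefmt{tex, expanded only in the @TeX{} output}
+%   @inlinefmtifelse{tex, text for TeX, text for other formats}
+%   @inlineraw{tex, \noindent}    raw TeX, parsed inside @tex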
+
+% @inlineifset{VAR, TEXT} expands TEXT if VAR is @set.
+%
+\long\def\inlineifset#1{\doinlineifset #1,\finish}
+\long\def\doinlineifset#1,#2,\finish{%
+  \def\inlinevarname{#1}%
+  \expandafter\ifx\csname SET\inlinevarname\endcsname\relax
+  \else\ignorespaces#2\fi
+}
+
+% @inlineifclear{VAR, TEXT} expands TEXT if VAR is not @set.
+%
+\long\def\inlineifclear#1{\doinlineifclear #1,\finish}
+\long\def\doinlineifclear#1,#2,\finish{%
+  \def\inlinevarname{#1}%
+  \expandafter\ifx\csname SET\inlinevarname\endcsname\relax \ignorespaces#2\fi
+}
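+% For reference, a sketch of typical input (illustrative only, not part of
+% this file; the flag name is made up):
+%   @set DRAFT
+%   @inlineifset{DRAFT, [draft copy]}
+%   @inlineifclear{DRAFT, [release copy]}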
+
+
+\message{glyphs,}
+% and logos.
+
+% @@ prints an @, as does @atchar{}.
+\def\@{\char64 }
+\let\atchar=\@
+
+% @{ @} @lbracechar{} @rbracechar{} all generate brace characters.
+\def\lbracechar{{\ifmonospace\char123\else\ensuremath\lbrace\fi}}
+\def\rbracechar{{\ifmonospace\char125\else\ensuremath\rbrace\fi}}
+\let\{=\lbracechar
+\let\}=\rbracechar
+
+% @comma{} to avoid , parsing problems.
+\let\comma = ,
+
+% Accents: @, @dotaccent @ringaccent @ubaraccent @udotaccent
+% Others are defined by plain TeX: @` @' @" @^ @~ @= @u @v @H.
+\let\, = \ptexc
+\let\dotaccent = \ptexdot
+\def\ringaccent#1{{\accent23 #1}}
+\let\tieaccent = \ptext
+\let\ubaraccent = \ptexb
+\let\udotaccent = \d
+
+% Other special characters: @questiondown @exclamdown @ordf @ordm
+% Plain TeX defines: @AA @AE @O @OE @L (plus lowercase versions) @ss.
+\def\questiondown{?`}
+\def\exclamdown{!`}
+\def\ordf{\leavevmode\raise1ex\hbox{\switchtolllsize \underbar{a}}}
+\def\ordm{\leavevmode\raise1ex\hbox{\switchtolllsize \underbar{o}}}
+
+% Dotless i and dotless j, used for accents.
+\def\imacro{i}
+\def\jmacro{j}
+\def\dotless#1{%
+  \def\temp{#1}%
+  \ifx\temp\imacro \ifmmode\imath \else\ptexi \fi
+  \else\ifx\temp\jmacro \ifmmode\jmath \else\j \fi
+  \else \errmessage{@dotless can be used only with i or j}%
+  \fi\fi
+}
+
+% The \TeX{} logo, as in plain, but resetting the spacing so that a
+% period following counts as ending a sentence.  (Idea found in latex.)
+%
+\edef\TeX{\TeX \spacefactor=1000 }
+
+% @LaTeX{} logo.  Not quite the same results as the definition in
+% latex.ltx, since we use a different font for the raised A; it's most
+% convenient for us to use an explicitly smaller font, rather than using
+% the \scriptstyle font (since we don't reset \scriptstyle and
+% \scriptscriptstyle).
+%
+\def\LaTeX{%
+  L\kern-.36em
+  {\setbox0=\hbox{T}%
+   \vbox to \ht0{\hbox{%
+     \ifx\textnominalsize\xwordpt
+       % for 10pt running text, lllsize (8pt) is too small for the A in LaTeX.
+       % Revert to plain's \scriptsize, which is 7pt.
+       \count255=\the\fam $\fam\count255 \scriptstyle A$%
+     \else
+       % For 11pt, we can use our lllsize.
+       \switchtolllsize A%
+     \fi
+     }%
+     \vss
+  }}%
+  \kern-.15em
+  \TeX
+}
+
+% Some math mode symbols.  Define \ensuremath to switch into math mode
+% unless we are already there.  Expansion tricks may not be needed here,
+% but safer, and can't hurt.
+\def\ensuremath{\ifmmode \expandafter\asis \else\expandafter\ensuredmath \fi}
+\def\ensuredmath#1{$\relax#1$}
+%
+\def\bullet{\ensuremath\ptexbullet}
+\def\geq{\ensuremath\ge}
+\def\leq{\ensuremath\le}
+\def\minus{\ensuremath-}
+
+% @dots{} outputs an ellipsis using the current font.
+% We do .5em per period so that it has the same spacing in the cm
+% typewriter fonts as three actual period characters; on the other hand,
+% in other typewriter fonts three periods are wider than 1.5em.  So do
+% whichever is larger.
+%
+\def\dots{%
+  \leavevmode
+  \setbox0=\hbox{...}% get width of three periods
+  \ifdim\wd0 > 1.5em
+    \dimen0 = \wd0
+  \else
+    \dimen0 = 1.5em
+  \fi
+  \hbox to \dimen0{%
+    \hskip 0pt plus.25fil
+    .\hskip 0pt plus1fil
+    .\hskip 0pt plus1fil
+    .\hskip 0pt plus.5fil
+  }%
+}
+
+% @enddots{} is an end-of-sentence ellipsis.
+%
+\def\enddots{%
+  \dots
+  \spacefactor=\endofsentencespacefactor
+}
+
+% @point{}, @result{}, @expansion{}, @print{}, @equiv{}.
+%
+% Since these characters are used in examples, they should be an even number of
+% \tt widths. Each \tt character is 1en, so two makes it 1em.
+%
+\def\point{$\star$}
+\def\arrow{\leavevmode\raise.05ex\hbox to 1em{\hfil$\rightarrow$\hfil}}
+\def\result{\leavevmode\raise.05ex\hbox to 1em{\hfil$\Rightarrow$\hfil}}
+\def\expansion{\leavevmode\hbox to 1em{\hfil$\mapsto$\hfil}}
+\def\print{\leavevmode\lower.1ex\hbox to 1em{\hfil$\dashv$\hfil}}
+\def\equiv{\leavevmode\hbox to 1em{\hfil$\ptexequiv$\hfil}}
+
+% The @error{} command.
+% Adapted from the TeXbook's \boxit.
+%
+\newbox\errorbox
+%
+{\ttfont \global\dimen0 = 3em}% Width of the box.
+\dimen2 = .55pt % Thickness of rules
+% The text. (`r' is open on the right, `e' somewhat less so on the left.)
+\setbox0 = \hbox{\kern-.75pt \reducedsf \putworderror\kern-1.5pt}
+%
+\setbox\errorbox=\hbox to \dimen0{\hfil
+   \hsize = \dimen0 \advance\hsize by -5.8pt % Space to left+right.
+   \advance\hsize by -2\dimen2 % Rules.
+   \vbox{%
+      \hrule height\dimen2
+      \hbox{\vrule width\dimen2 \kern3pt          % Space to left of text.
+         \vtop{\kern2.4pt \box0 \kern2.4pt}% Space above/below.
+         \kern3pt\vrule width\dimen2}% Space to right.
+      \hrule height\dimen2}
+    \hfil}
+%
+\def\error{\leavevmode\lower.7ex\copy\errorbox}
+
+% @pounds{} is a sterling sign, which Knuth put in the CM italic font.
+%
+\def\pounds{{\it\$}}
+
+% @euro{} comes from a separate font, depending on the current style.
+% We use the free feym* fonts from the eurosym package by Henrik
+% Theiling, which support regular, slanted, bold and bold slanted (and
+% "outlined" (blackboard board, sort of) versions, which we don't need).
+% It is available from http://www.ctan.org/tex-archive/fonts/eurosym.
+%
+% Although only regular is the truly official Euro symbol, we ignore
+% that.  The Euro is designed to be slightly taller than the regular
+% font height.
+%
+% feymr - regular
+% feymo - slanted
+% feybr - bold
+% feybo - bold slanted
+%
+% There is no good (free) typewriter version, to my knowledge.
+% A feymr10 euro is ~7.3pt wide, while a normal cmtt10 char is ~5.25pt wide.
+% Hmm.
+%
+% Also doesn't work in math.  Do we need to do math with euro symbols?
+% Hope not.
+%
+%
+\def\euro{{\eurofont e}}
+\def\eurofont{%
+  % We set the font at each command, rather than predefining it in
+  % \textfonts and the other font-switching commands, so that
+  % installations which never need the symbol don't have to have the
+  % font installed.
+  %
+  % There is only one designed size (nominal 10pt), so we always scale
+  % that to the current nominal size.
+  %
+  % By the way, simply using "at 1em" works for cmr10 and the like, but
+  % does not work for cmbx10 and other extended/shrunken fonts.
+  %
+  \def\eurosize{\csname\curfontsize nominalsize\endcsname}%
+  %
+  \ifx\curfontstyle\bfstylename
+    % bold:
+    \font\thiseurofont = \ifusingit{feybo10}{feybr10} at \eurosize
+  \else
+    % regular:
+    \font\thiseurofont = \ifusingit{feymo10}{feymr10} at \eurosize
+  \fi
+  \thiseurofont
+}
+
+% Glyphs from the EC fonts.  We don't use \let for the aliases, because
+% sometimes we redefine the original macro, and the alias should reflect
+% the redefinition.
+%
+% Use LaTeX names for the Icelandic letters.
+\def\DH{{\ecfont \char"D0}} % Eth
+\def\dh{{\ecfont \char"F0}} % eth
+\def\TH{{\ecfont \char"DE}} % Thorn
+\def\th{{\ecfont \char"FE}} % thorn
+%
+\def\guillemetleft{{\ecfont \char"13}}
+\def\guillemotleft{\guillemetleft}
+\def\guillemetright{{\ecfont \char"14}}
+\def\guillemotright{\guillemetright}
+\def\guilsinglleft{{\ecfont \char"0E}}
+\def\guilsinglright{{\ecfont \char"0F}}
+\def\quotedblbase{{\ecfont \char"12}}
+\def\quotesinglbase{{\ecfont \char"0D}}
+%
+% This positioning is not perfect (see the ogonek LaTeX package), but
+% we have the precomposed glyphs for the most common cases.  We put the
+% tests to use those glyphs in the single \ogonek macro so we have fewer
+% dummy definitions to worry about for index entries, etc.
+%
+% ogonek is also used with other letters in Lithuanian (IOU), but using
+% the precomposed glyphs for those is not so easy since they aren't in
+% the same EC font.
+\def\ogonek#1{{%
+  \def\temp{#1}%
+  \ifx\temp\macrocharA\Aogonek
+  \else\ifx\temp\macrochara\aogonek
+  \else\ifx\temp\macrocharE\Eogonek
+  \else\ifx\temp\macrochare\eogonek
+  \else
+    \ecfont \setbox0=\hbox{#1}%
+    \ifdim\ht0=1ex\accent"0C #1%
+    \else\ooalign{\unhbox0\crcr\hidewidth\char"0C \hidewidth}%
+    \fi
+  \fi\fi\fi\fi
+  }%
+}
+\def\Aogonek{{\ecfont \char"81}}\def\macrocharA{A}
+\def\aogonek{{\ecfont \char"A1}}\def\macrochara{a}
+\def\Eogonek{{\ecfont \char"86}}\def\macrocharE{E}
+\def\eogonek{{\ecfont \char"A6}}\def\macrochare{e}
+%
+% Use the European Computer Modern fonts (cm-super in outline format)
+% for non-CM glyphs.  That is ec* for regular text and tc* for the text
+% companion symbols (LaTeX TS1 encoding).  Both are part of the ec
+% package and follow the same conventions.
+% 
+\def\ecfont{\etcfont{e}}
+\def\tcfont{\etcfont{t}}
+%
+\def\etcfont#1{%
+  % We can't distinguish serif/sans and italic/slanted, but this
+  % is used for crude hacks anyway (like adding French and German
+  % quotes to documents typeset with CM, where we lose kerning), so
+  % hopefully nobody will notice/care.
+  \edef\ecsize{\csname\curfontsize ecsize\endcsname}%
+  \edef\nominalsize{\csname\curfontsize nominalsize\endcsname}%
+  \ifmonospace
+    % typewriter:
+    \font\thisecfont = #1ctt\ecsize \space at \nominalsize
+  \else
+    \ifx\curfontstyle\bfstylename
+      % bold:
+      \font\thisecfont = #1cb\ifusingit{i}{x}\ecsize \space at \nominalsize
+    \else
+      % regular:
+      \font\thisecfont = #1c\ifusingit{ti}{rm}\ecsize \space at \nominalsize
+    \fi
+  \fi
+  \thisecfont
+}
+
+% @registeredsymbol - R in a circle.  The font for the R should really
+% be smaller yet, but lllsize is the best we can do for now.
+% Adapted from the plain.tex definition of \copyright.
+%
+\def\registeredsymbol{%
+  $^{{\ooalign{\hfil\raise.07ex\hbox{\switchtolllsize R}%
+               \hfil\crcr\Orb}}%
+    }$%
+}
+
+% @textdegree - the normal degrees sign.
+%
+\def\textdegree{$^\circ$}
+
+% Laurent Siebenmann reports \Orb undefined with:
+%  Textures 1.7.7 (preloaded format=plain 93.10.14)  (68K)  16 APR 2004 02:38
+% so we'll define it if necessary.
+%
+\ifx\Orb\thisisundefined
+\def\Orb{\mathhexbox20D}
+\fi
+
+% Quotes.
+\chardef\quotedblleft="5C
+\chardef\quotedblright=`\"
+\chardef\quoteleft=`\`
+\chardef\quoteright=`\'
+
+
+\message{page headings,}
+
+\newskip\titlepagetopglue \titlepagetopglue = 1.5in
+\newskip\titlepagebottomglue \titlepagebottomglue = 2pc
+
+% First the title page.  Must do @settitle before @titlepage.
+\newif\ifseenauthor
+\newif\iffinishedtitlepage
+
+% @setcontentsaftertitlepage used to do an implicit @contents or
+% @shortcontents after @end titlepage, but it is now obsolete.
+\def\setcontentsaftertitlepage{%
+  \errmessage{@setcontentsaftertitlepage has been removed as a Texinfo
+              command; move your @contents command if you want the contents
+              after the title page.}}%
+\def\setshortcontentsaftertitlepage{%
+  \errmessage{@setshortcontentsaftertitlepage has been removed as a Texinfo
+              command; move your @shortcontents and @contents commands if you 
+              want the contents after the title page.}}%
+
+\parseargdef\shorttitlepage{%
+  \begingroup \hbox{}\vskip 1.5in \chaprm \centerline{#1}%
+  \endgroup\page\hbox{}\page}
+
+\envdef\titlepage{%
+  % Open one extra group, as we want to close it in the middle of \Etitlepage.
+  \begingroup
+    \parindent=0pt \textfonts
+    % Leave some space at the very top of the page.
+    \vglue\titlepagetopglue
+    % No rule at page bottom unless we print one at the top with @title.
+    \finishedtitlepagetrue
+    %
+    % Most title ``pages'' are actually two pages long, with space
+    % at the top of the second.  We don't want the ragged left on the second.
+    \let\oldpage = \page
+    \def\page{%
+      \iffinishedtitlepage\else
+        \finishtitlepage
+      \fi
+      \let\page = \oldpage
+      \page
+      \null
+    }%
+}
+
+\def\Etitlepage{%
+    \iffinishedtitlepage\else
+       \finishtitlepage
+    \fi
+    % It is important to do the page break before ending the group,
+    % because the headline and footline are only empty inside the group.
+    % If we use the new definition of \page, we always get a blank page
+    % after the title page, which we certainly don't want.
+    \oldpage
+  \endgroup
+  %
+  % Need this before the \...aftertitlepage checks so that if they are
+  % in effect the toc pages will come out with page numbers.
+  \HEADINGSon
+}
+
+\def\finishtitlepage{%
+  \vskip4pt \hrule height 2pt width \hsize
+  \vskip\titlepagebottomglue
+  \finishedtitlepagetrue
+}
+
+% Settings used for typesetting titles: no hyphenation, no indentation,
+% don't worry much about spacing, ragged right.  This should be used
+% inside a \vbox, and fonts need to be set appropriately first. \par should
+% be specified before the end of the \vbox, since a vbox is a group.
+% 
+\def\raggedtitlesettings{%
+  \rm
+  \hyphenpenalty=10000
+  \parindent=0pt
+  \tolerance=5000
+  \ptexraggedright
+}
+
+% Macros to be used within @titlepage:
+
+\let\subtitlerm=\rmfont
+\def\subtitlefont{\subtitlerm \normalbaselineskip = 13pt \normalbaselines}
+
+\parseargdef\title{%
+  \checkenv\titlepage
+  \vbox{\titlefonts \raggedtitlesettings #1\par}%
+  % print a rule at the page bottom also.
+  \finishedtitlepagefalse
+  \vskip4pt \hrule height 4pt width \hsize \vskip4pt
+}
+
+\parseargdef\subtitle{%
+  \checkenv\titlepage
+  {\subtitlefont \rightline{#1}}%
+}
+
+% @author should come last, but may come many times.
+% It can also be used inside @quotation.
+%
+\parseargdef\author{%
+  \def\temp{\quotation}%
+  \ifx\thisenv\temp
+    \def\quotationauthor{#1}% printed in \Equotation.
+  \else
+    \checkenv\titlepage
+    \ifseenauthor\else \vskip 0pt plus 1filll \seenauthortrue \fi
+    {\secfonts\rm \leftline{#1}}%
+  \fi
+}
+
+
+% Set up page headings and footings.
+
+\let\thispage=\folio
+
+\newtoks\evenheadline    % headline on even pages
+\newtoks\oddheadline     % headline on odd pages
+\newtoks\evenfootline    % footline on even pages
+\newtoks\oddfootline     % footline on odd pages
+
+% Now make \makeheadline and \makefootline in Plain TeX use those variables
+\headline={{\textfonts\rm \ifodd\pageno \the\oddheadline
+                            \else \the\evenheadline \fi}}
+\footline={{\textfonts\rm \ifodd\pageno \the\oddfootline
+                            \else \the\evenfootline \fi}\HEADINGShook}
+\let\HEADINGShook=\relax
+
+% Commands to set those variables.
+% For example, this is what  @headings on  does
+% @evenheading @thistitle|@thispage|@thischapter
+% @oddheading @thischapter|@thispage|@thistitle
+% @evenfooting @thisfile||
+% @oddfooting ||@thisfile
+
+
+\def\evenheading{\parsearg\evenheadingxxx}
+\def\evenheadingxxx #1{\evenheadingyyy #1\|\|\|\|\finish}
+\def\evenheadingyyy #1\|#2\|#3\|#4\finish{%
+\global\evenheadline={\rlap{\centerline{#2}}\line{#1\hfil#3}}}
+
+\def\oddheading{\parsearg\oddheadingxxx}
+\def\oddheadingxxx #1{\oddheadingyyy #1\|\|\|\|\finish}
+\def\oddheadingyyy #1\|#2\|#3\|#4\finish{%
+\global\oddheadline={\rlap{\centerline{#2}}\line{#1\hfil#3}}}
+
+\parseargdef\everyheading{\oddheadingxxx{#1}\evenheadingxxx{#1}}%
+
+\def\evenfooting{\parsearg\evenfootingxxx}
+\def\evenfootingxxx #1{\evenfootingyyy #1\|\|\|\|\finish}
+\def\evenfootingyyy #1\|#2\|#3\|#4\finish{%
+\global\evenfootline={\rlap{\centerline{#2}}\line{#1\hfil#3}}}
+
+\def\oddfooting{\parsearg\oddfootingxxx}
+\def\oddfootingxxx #1{\oddfootingyyy #1\|\|\|\|\finish}
+\def\oddfootingyyy #1\|#2\|#3\|#4\finish{%
+  \global\oddfootline = {\rlap{\centerline{#2}}\line{#1\hfil#3}}%
+  %
+  % Leave some space for the footline.  Hopefully ok to assume
+  % @evenfooting will not be used by itself.
+  \global\advance\txipageheight by -12pt
+  \global\advance\vsize by -12pt
+}
+
+\parseargdef\everyfooting{\oddfootingxxx{#1}\evenfootingxxx{#1}}
+
+% @evenheadingmarks top     \thischapter <- chapter at the top of a page
+% @evenheadingmarks bottom  \thischapter <- chapter at the bottom of a page
+%
+% The same set of arguments for:
+%
+% @oddheadingmarks
+% @evenfootingmarks
+% @oddfootingmarks
+% @everyheadingmarks
+% @everyfootingmarks
+
+% These define \getoddheadingmarks, \getevenheadingmarks,
+% \getoddfootingmarks, and \getevenfootingmarks, each to one of
+% \gettopheadingmarks, \getbottomheadingmarks.
+%
+\def\evenheadingmarks{\headingmarks{even}{heading}}
+\def\oddheadingmarks{\headingmarks{odd}{heading}}
+\def\evenfootingmarks{\headingmarks{even}{footing}}
+\def\oddfootingmarks{\headingmarks{odd}{footing}}
+\parseargdef\everyheadingmarks{\headingmarks{even}{heading}{#1}
+                          \headingmarks{odd}{heading}{#1} }
+\parseargdef\everyfootingmarks{\headingmarks{even}{footing}{#1}
+                          \headingmarks{odd}{footing}{#1} }
+% #1 = even/odd, #2 = heading/footing, #3 = top/bottom.
+\def\headingmarks#1#2#3 {%
+  \expandafter\let\expandafter\temp \csname get#3headingmarks\endcsname
+  \global\expandafter\let\csname get#1#2marks\endcsname \temp
+}
+
+\everyheadingmarks bottom
+\everyfootingmarks bottom
+
+% @headings double      turns headings on for double-sided printing.
+% @headings single      turns headings on for single-sided printing.
+% @headings off         turns them off.
+% @headings on          same as @headings double, retained for compatibility.
+% @headings after       turns on double-sided headings after this page.
+% @headings doubleafter turns on double-sided headings after this page.
+% @headings singleafter turns on single-sided headings after this page.
+% By default, they are off at the start of a document,
+% and turned `on' after @end titlepage.
+
+\parseargdef\headings{\csname HEADINGS#1\endcsname}
+
+\def\headingsoff{% non-global headings elimination
+  \evenheadline={\hfil}\evenfootline={\hfil}%
+   \oddheadline={\hfil}\oddfootline={\hfil}%
+}
+
+\def\HEADINGSoff{{\globaldefs=1 \headingsoff}} % global setting
+\HEADINGSoff  % it's the default
+
+% When we turn headings on, set the page number to 1.
+% For double-sided printing, put current file name in lower left corner,
+% chapter name on inside top of right hand pages, document
+% title on inside top of left hand pages, and page numbers on outside top
+% edge of all pages.
+\def\HEADINGSdouble{%
+\global\pageno=1
+\global\evenfootline={\hfil}
+\global\oddfootline={\hfil}
+\global\evenheadline={\line{\folio\hfil\thistitle}}
+\global\oddheadline={\line{\thischapterheading\hfil\folio}}
+\global\let\contentsalignmacro = \chapoddpage
+}
+\let\contentsalignmacro = \chappager
+
+% For single-sided printing, chapter title goes across top left of page,
+% page number on top right.
+\def\HEADINGSsingle{%
+\global\pageno=1
+\global\evenfootline={\hfil}
+\global\oddfootline={\hfil}
+\global\evenheadline={\line{\thischapterheading\hfil\folio}}
+\global\oddheadline={\line{\thischapterheading\hfil\folio}}
+\global\let\contentsalignmacro = \chappager
+}
+\def\HEADINGSon{\HEADINGSdouble}
+
+\def\HEADINGSafter{\let\HEADINGShook=\HEADINGSdoublex}
+\let\HEADINGSdoubleafter=\HEADINGSafter
+\def\HEADINGSdoublex{%
+\global\evenfootline={\hfil}
+\global\oddfootline={\hfil}
+\global\evenheadline={\line{\folio\hfil\thistitle}}
+\global\oddheadline={\line{\thischapterheading\hfil\folio}}
+\global\let\contentsalignmacro = \chapoddpage
+}
+
+\def\HEADINGSsingleafter{\let\HEADINGShook=\HEADINGSsinglex}
+\def\HEADINGSsinglex{%
+\global\evenfootline={\hfil}
+\global\oddfootline={\hfil}
+\global\evenheadline={\line{\thischapterheading\hfil\folio}}
+\global\oddheadline={\line{\thischapterheading\hfil\folio}}
+\global\let\contentsalignmacro = \chappager
+}
+
+% Subroutines used in generating headings
+% This produces Day Month Year style of output.
+% Only define if not already defined, in case a txi-??.tex file has set
+% up a different format (e.g., txi-cs.tex does this).
+\ifx\today\thisisundefined
+\def\today{%
+  \number\day\space
+  \ifcase\month
+  \or\putwordMJan\or\putwordMFeb\or\putwordMMar\or\putwordMApr
+  \or\putwordMMay\or\putwordMJun\or\putwordMJul\or\putwordMAug
+  \or\putwordMSep\or\putwordMOct\or\putwordMNov\or\putwordMDec
+  \fi
+  \space\number\year}
+\fi
+
+% @settitle line...  specifies the title of the document, for headings.
+% It generates no output of its own.
+\def\thistitle{\putwordNoTitle}
+\def\settitle{\parsearg{\gdef\thistitle}}
+
+
+\message{tables,}
+% Tables -- @table, @ftable, @vtable, @item(x).
+
+% default indentation of table text
+\newdimen\tableindent \tableindent=.8in
+% default indentation of @itemize and @enumerate text
+\newdimen\itemindent  \itemindent=.3in
+% margin between end of table item and start of table text.
+\newdimen\itemmargin  \itemmargin=.1in
+
+% used internally for \itemindent minus \itemmargin
+\newdimen\itemmax
+
+% Note @table, @ftable, and @vtable define @item, @itemx, etc., with
+% these defs.
+% They also define \itemindex
+% to index the item name in whatever manner is desired (perhaps none).
+
+\newif\ifitemxneedsnegativevskip
+
+\def\itemxpar{\par\ifitemxneedsnegativevskip\nobreak\vskip-\parskip\nobreak\fi}
+
+\def\internalBitem{\smallbreak \parsearg\itemzzz}
+\def\internalBitemx{\itemxpar \parsearg\itemzzz}
+
+\def\itemzzz #1{\begingroup %
+  \advance\hsize by -\rightskip
+  \advance\hsize by -\tableindent
+  \setbox0=\hbox{\itemindicate{#1}}%
+  \itemindex{#1}%
+  \nobreak % This prevents a break before @itemx.
+  %
+  % If the item text does not fit in the space we have, put it on a line
+  % by itself, and do not allow a page break either before or after that
+  % line.  We do not start a paragraph here because then if the next
+  % command is, e.g., @kindex, the whatsit would get put into the
+  % horizontal list on a line by itself, resulting in extra blank space.
+  \ifdim \wd0>\itemmax
+    %
+    % Make this a paragraph so we get the \parskip glue and wrapping,
+    % but leave it ragged-right.
+    \begingroup
+      \advance\leftskip by-\tableindent
+      \advance\hsize by\tableindent
+      \advance\rightskip by0pt plus1fil\relax
+      \leavevmode\unhbox0\par
+    \endgroup
+    %
+    % We're going to be starting a paragraph, but we don't want the
+    % \parskip glue -- logically it's part of the @item we just started.
+    \nobreak \vskip-\parskip
+    %
+    % Stop a page break at the \parskip glue coming up.  However, if
+    % what follows is an environment such as @example, there will be no
+    % \parskip glue; then the negative vskip we just inserted would
+    % cause the example and the item to crash together.  So we use this
+    % bizarre value of 10001 as a signal to \aboveenvbreak to insert
+    % \parskip glue after all.  Section titles are handled this way also.
+    %
+    \penalty 10001
+    \endgroup
+    \itemxneedsnegativevskipfalse
+  \else
+    % The item text fits into the space.  Start a paragraph, so that the
+    % following text (if any) will end up on the same line.
+    \noindent
+    % Do this with kerns and \unhbox so that if there is a footnote in
+    % the item text, it can migrate to the main vertical list and
+    % eventually be printed.
+    \nobreak\kern-\tableindent
+    \dimen0 = \itemmax  \advance\dimen0 by \itemmargin \advance\dimen0 by -\wd0
+    \unhbox0
+    \nobreak\kern\dimen0
+    \endgroup
+    \itemxneedsnegativevskiptrue
+  \fi
+}
+
+\def\item{\errmessage{@item while not in a list environment}}
+\def\itemx{\errmessage{@itemx while not in a list environment}}
+
+% @table, @ftable, @vtable.
+\envdef\table{%
+  \let\itemindex\gobble
+  \tablecheck{table}%
+}
+\envdef\ftable{%
+  \def\itemindex ##1{\doind {fn}{\code{##1}}}%
+  \tablecheck{ftable}%
+}
+\envdef\vtable{%
+  \def\itemindex ##1{\doind {vr}{\code{##1}}}%
+  \tablecheck{vtable}%
+}
+\def\tablecheck#1{%
+  \ifnum \the\catcode`\^^M=\active
+    \endgroup
+    \errmessage{This command won't work in this context; perhaps the problem is
+      that we are \inenvironment\thisenv}%
+    \def\next{\doignore{#1}}%
+  \else
+    \let\next\tablex
+  \fi
+  \next
+}
+\def\tablex#1{%
+  \def\itemindicate{#1}%
+  \parsearg\tabley
+}
+\def\tabley#1{%
+  {%
+    \makevalueexpandable
+    \edef\temp{\noexpand\tablez #1\space\space\space}%
+    \expandafter
+  }\temp \endtablez
+}
+\def\tablez #1 #2 #3 #4\endtablez{%
+  \aboveenvbreak
+  \ifnum 0#1>0 \advance \leftskip by #1\mil \fi
+  \ifnum 0#2>0 \tableindent=#2\mil \fi
+  \ifnum 0#3>0 \advance \rightskip by #3\mil \fi
+  \itemmax=\tableindent
+  \advance \itemmax by -\itemmargin
+  \advance \leftskip by \tableindent
+  \exdentamount=\tableindent
+  \parindent = 0pt
+  \parskip = \smallskipamount
+  \ifdim \parskip=0pt \parskip=2pt \fi
+  \let\item = \internalBitem
+  \let\itemx = \internalBitemx
+}
+\def\Etable{\endgraf\afterenvbreak}
+\let\Eftable\Etable
+\let\Evtable\Etable
+\let\Eitemize\Etable
+\let\Eenumerate\Etable
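+% For reference, a sketch of typical @table input (illustrative only, not
+% part of this file; the option names are made up):
+%   @table @code
+%   @item --verbose
+%   @itemx -v
+%   Print more progress messages.
+%   @end table
+% @ftable and @vtable are used the same way, but also index each item.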
+
+% This is the counter used by @enumerate, which is really @itemize
+
+\newcount \itemno
+
+\envdef\itemize{\parsearg\doitemize}
+
+\def\doitemize#1{%
+  \aboveenvbreak
+  \itemmax=\itemindent
+  \advance\itemmax by -\itemmargin
+  \advance\leftskip by \itemindent
+  \exdentamount=\itemindent
+  \parindent=0pt
+  \parskip=\smallskipamount
+  \ifdim\parskip=0pt \parskip=2pt \fi
+  %
+  % Try typesetting the item mark so that if the document erroneously says
+  % something like @itemize @samp (intending @table), there's an error
+  % right away at the @itemize.  It's not the best error message in the
+  % world, but it's better than leaving it to the @item.  This means if
+  % the user wants an empty mark, they have to say @w{} not just @w.
+  \def\itemcontents{#1}%
+  \setbox0 = \hbox{\itemcontents}%
+  %
+  % @itemize with no arg is equivalent to @itemize @bullet.
+  \ifx\itemcontents\empty\def\itemcontents{\bullet}\fi
+  %
+  \let\item=\itemizeitem
+}
+
+% Definition of @item while inside @itemize and @enumerate.
+%
+\def\itemizeitem{%
+  \advance\itemno by 1  % for enumerations
+  {\let\par=\endgraf \smallbreak}% reasonable place to break
+  {%
+   % If the document has an @itemize directly after a section title, a
+   % \nobreak will be last on the list, and \sectionheading will have
+   % done a \vskip-\parskip.  In that case, we don't want to zero
+   % parskip, or the item text will crash with the heading.  On the
+   % other hand, when there is normal text preceding the item (as there
+   % usually is), we do want to zero parskip, or there would be too much
+   % space.  In that case, we won't have a \nobreak before.  At least
+   % that's the theory.
+   \ifnum\lastpenalty<10000 \parskip=0in \fi
+   \noindent
+   \hbox to 0pt{\hss \itemcontents \kern\itemmargin}%
+   %
+   \ifinner\else
+     \vadjust{\penalty 1200}% not good to break after first line of item.
+   \fi
+   % We can be in inner vertical mode in a footnote, although an
+   % @itemize looks awful there.
+  }%
+  \flushcr
+}
+
+% \splitoff TOKENS\endmark defines \first to be the first token in
+% TOKENS, and \rest to be the remainder.
+%
+\def\splitoff#1#2\endmark{\def\first{#1}\def\rest{#2}}%
+
+% Allow an optional argument of an uppercase letter, lowercase letter,
+% or number, to specify the first label in the enumerated list.  No
+% argument is the same as `1'.
+%
+\envparseargdef\enumerate{\enumeratey #1  \endenumeratey}
+\def\enumeratey #1 #2\endenumeratey{%
+  % If we were given no argument, pretend we were given `1'.
+  \def\thearg{#1}%
+  \ifx\thearg\empty \def\thearg{1}\fi
+  %
+  % Detect if the argument is a single token.  If so, it might be a
+  % letter.  Otherwise, the only valid thing it can be is a number.
+  % (We will always have one token, because of the test we just made.
+  % This is a good thing, since \splitoff doesn't work given nothing at
+  % all -- the first parameter is undelimited.)
+  \expandafter\splitoff\thearg\endmark
+  \ifx\rest\empty
+    % Only one token in the argument.  It could still be anything.
+    % A ``lowercase letter'' is one whose \lccode is nonzero.
+    % An ``uppercase letter'' is one whose \lccode is both nonzero, and
+    %   not equal to itself.
+    % Otherwise, we assume it's a number.
+    %
+    % We need the \relax at the end of the \ifnum lines to stop TeX from
+    % continuing to look for a <number>.
+    %
+    \ifnum\lccode\expandafter`\thearg=0\relax
+      \numericenumerate % a number (we hope)
+    \else
+      % It's a letter.
+      \ifnum\lccode\expandafter`\thearg=\expandafter`\thearg\relax
+        \lowercaseenumerate % lowercase letter
+      \else
+        \uppercaseenumerate % uppercase letter
+      \fi
+    \fi
+  \else
+    % Multiple tokens in the argument.  We hope it's a number.
+    \numericenumerate
+  \fi
+}
+
+% An @enumerate whose labels are integers.  The starting integer is
+% given in \thearg.
+%
+\def\numericenumerate{%
+  \itemno = \thearg
+  \startenumeration{\the\itemno}%
+}
+
+% The starting (lowercase) letter is in \thearg.
+\def\lowercaseenumerate{%
+  \itemno = \expandafter`\thearg
+  \startenumeration{%
+    % Be sure we're not beyond the end of the alphabet.
+    \ifnum\itemno=0
+      \errmessage{No more lowercase letters in @enumerate; get a bigger
+                  alphabet}%
+    \fi
+    \char\lccode\itemno
+  }%
+}
+
+% The starting (uppercase) letter is in \thearg.
+\def\uppercaseenumerate{%
+  \itemno = \expandafter`\thearg
+  \startenumeration{%
+    % Be sure we're not beyond the end of the alphabet.
+    \ifnum\itemno=0
+      \errmessage{No more uppercase letters in @enumerate; get a bigger
+                  alphabet}
+    \fi
+    \char\uccode\itemno
+  }%
+}
+
+% Call \doitemize, adding a period to the argument.  Also subtract one
+% from the initial value in \itemno, since @item increments \itemno.
+%
+\def\startenumeration#1{%
+  \advance\itemno by -1
+  \doitemize{#1.}\flushcr
+}
+
+% @alphaenumerate and @capsenumerate are abbreviations for giving an arg
+% to @enumerate.
+%
+\def\alphaenumerate{\enumerate{a}}
+\def\capsenumerate{\enumerate{A}}
+\def\Ealphaenumerate{\Eenumerate}
+\def\Ecapsenumerate{\Eenumerate}
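+% For reference, sketches of typical input (illustrative only, not part of
+% this file):
+%   @enumerate          labels 1, 2, 3, ...
+%   @enumerate 3        labels 3, 4, 5, ...
+%   @enumerate a        labels a, b, c, ...
+%   @enumerate A        labels A, B, C, ...
+% each followed by @item lines and a closing @end enumerate.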
+
+
+% @multitable macros
+% Amy Hendrickson, 8/18/94, 3/6/96
+%
+% @multitable ... @end multitable will make as many columns as desired.
+% Contents of each column will wrap at width given in preamble.  Width
+% can be specified either with sample text given in a template line,
+% or as a fraction of \hsize, the current width of text on page.
+
+% Table can continue over pages but will only break between lines.
+
+% To make preamble:
+%
+% Either define widths of columns in terms of percent of \hsize:
+%   @multitable @columnfractions .25 .3 .45
+%   @item ...
+%
+%   Numbers following @columnfractions are the fraction of the total
+%   current hsize to be used for each column (e.g., .25 means 25%). You
+%   may use as many columns as desired.
+
+
+% Or use a template:
+%   @multitable {Column 1 template} {Column 2 template} {Column 3 template}
+%   @item ...
+%   using the widest term desired in each column.
+
+% Each new table line starts with @item, each subsequent new column
+% starts with @tab.  Empty columns may be produced by supplying @tab's
+% with nothing between them, as many times as empty columns are needed;
+% i.e., @tab@tab@tab will produce two empty columns.
+
+% @item, @tab do not need to be on their own lines, but it will not hurt
+% if they are.
+
+% Sample multitable:
+
+%   @multitable {Column 1 template} {Column 2 template} {Column 3 template}
+%   @item first col stuff @tab second col stuff @tab third col
+%   @item
+%   first col stuff
+%   @tab
+%   second col stuff
+%   @tab
+%   third col
+%   @item first col stuff @tab second col stuff
+%   @tab Many paragraphs of text may be used in any column.
+%
+%         They will wrap at the width determined by the template.
+%   @item@tab@tab This will be in third column.
+%   @end multitable
+
+% Default dimensions may be reset by user.
+% @multitableparskip is vertical space between paragraphs in table.
+% @multitableparindent is paragraph indent in table.
+% @multitablecolmargin is horizontal space to be left between columns.
+% @multitablelinespace is space to leave between table items, baseline
+%                                                            to baseline.
+%   0pt means it depends on current normal line spacing.
+%
+\newskip\multitableparskip
+\newskip\multitableparindent
+\newdimen\multitablecolspace
+\newskip\multitablelinespace
+\multitableparskip=0pt
+\multitableparindent=6pt
+\multitablecolspace=12pt
+\multitablelinespace=0pt
+
+% Macros used to set up halign preamble:
+%
+\let\endsetuptable\relax
+\def\xendsetuptable{\endsetuptable}
+\let\columnfractions\relax
+\def\xcolumnfractions{\columnfractions}
+\newif\ifsetpercent
+
+% #1 is the @columnfraction, usually a decimal number like .5, but might
+% be just 1.  We just use it, whatever it is.
+%
+\def\pickupwholefraction#1 {%
+  \global\advance\colcount by 1
+  \expandafter\xdef\csname col\the\colcount\endcsname{#1\hsize}%
+  \setuptable
+}
+
+\newcount\colcount
+\def\setuptable#1{%
+  \def\firstarg{#1}%
+  \ifx\firstarg\xendsetuptable
+    \let\go = \relax
+  \else
+    \ifx\firstarg\xcolumnfractions
+      \global\setpercenttrue
+    \else
+      \ifsetpercent
+         \let\go\pickupwholefraction
+      \else
+         \global\advance\colcount by 1
+         \setbox0=\hbox{#1\unskip\space}% Add a normal word space as a
+                   % separator; typically that is always in the input, anyway.
+         \expandafter\xdef\csname col\the\colcount\endcsname{\the\wd0}%
+      \fi
+    \fi
+    \ifx\go\pickupwholefraction
+      % Put the argument back for the \pickupwholefraction call, so
+      % we'll always have a period there to be parsed.
+      \def\go{\pickupwholefraction#1}%
+    \else
+      \let\go = \setuptable
+    \fi%
+  \fi
+  \go
+}
+
+% multitable-only commands.
+% 
+% @headitem starts a heading row, which we typeset in bold.  Assignments
+% have to be global since we are inside the implicit group of an
+% alignment entry.  \everycr below resets \everytab so we don't have to
+% undo it ourselves.
+\def\headitemfont{\b}% for people to use in the template row; not changeable
+\def\headitem{%
+  \checkenv\multitable
+  \crcr
+  \gdef\headitemcrhook{\nobreak}% attempt to avoid page break after headings
+  \global\everytab={\bf}% can't use \headitemfont since the parsing differs
+  \the\everytab % for the first item
+}%
+%
+% default for tables with no headings.
+\let\headitemcrhook=\relax
+%
+% A \tab used to include \hskip1sp.  But then the space in a template
+% line is not enough.  That is bad.  So let's go back to just `&' until
+% we again encounter the problem the 1sp was intended to solve.
+%                                      --karl, address@hidden, 20apr99.
+\def\tab{\checkenv\multitable &\the\everytab}%
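+% A short usage sketch combining @headitem and @tab (standard Texinfo
+% syntax; the column text is illustrative):
+%   @multitable @columnfractions .3 .7
+%   @headitem Name @tab Description
+%   @item foo @tab first entry
+%   @item bar @tab second entry
+%   @end multitable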
+
+% @multitable ... @end multitable definitions:
+%
+\newtoks\everytab  % insert after every tab.
+%
+\envdef\multitable{%
+  \vskip\parskip
+  \startsavinginserts
+  %
+  % @item within a multitable starts a normal row.
+  % We use \def instead of \let so that if one of the multitable entries
+  % contains an @itemize, we don't choke on the \item (seen as \crcr aka
+  % \endtemplate) expanding \doitemize.
+  \def\item{\crcr}%
+  %
+  \tolerance=9500
+  \hbadness=9500
+  \setmultitablespacing
+  \parskip=\multitableparskip
+  \parindent=\multitableparindent
+  \overfullrule=0pt
+  \global\colcount=0
+  %
+  \everycr = {%
+    \noalign{%
+      \global\everytab={}% Reset from possible headitem.
+      \global\colcount=0 % Reset the column counter.
+      %
+      % Check for saved footnotes, etc.:
+      \checkinserts
+      %
+      % Perhaps a \nobreak, then reset:
+      \headitemcrhook
+      \global\let\headitemcrhook=\relax
+    }%
+  }%
+  %
+  \parsearg\domultitable
+}
+\def\domultitable#1{%
+  % To parse everything between @multitable and @item:
+  \setuptable#1 \endsetuptable
+  %
+  % This preamble sets up a generic column definition, which will
+  % be used as many times as user calls for columns.
+  % \vtop will set a single line and will also let text wrap and
+  % continue for many paragraphs if desired.
+  \halign\bgroup &%
+    \global\advance\colcount by 1
+    \multistrut
+    \vtop{%
+      % Use the current \colcount to find the correct column width:
+      \hsize=\expandafter\csname col\the\colcount\endcsname
+      %
+      % In order to keep entries from bumping into each other
+      % we will add a \leftskip of \multitablecolspace to all columns after
+      % the first one.
+      %
+      % If a template has been used, we will add \multitablecolspace
+      % to the width of each template entry.
+      %
+      % If the user has set preamble in terms of percent of \hsize we will
+      % use that dimension as the width of the column, and the \leftskip
+      % will keep entries from bumping into each other.  Table will start at
+      % left margin and final column will justify at right margin.
+      %
+      % Make sure we don't inherit \rightskip from the outer environment.
+      \rightskip=0pt
+      \ifnum\colcount=1
+       % The first column will be indented with the surrounding text.
+       \advance\hsize by\leftskip
+      \else
+       \ifsetpercent \else
+         % If user has not set preamble in terms of percent of \hsize
+         % we will advance \hsize by \multitablecolspace.
+         \advance\hsize by \multitablecolspace
+       \fi
+       % In either case we will make \leftskip=\multitablecolspace:
+      \leftskip=\multitablecolspace
+      \fi
+      % Ignoring space at the beginning and end avoids an occasional spurious
+      % blank line, when TeX decides to break the line at the space before the
+      % box from the multistrut, so the strut ends up on a line by itself.
+      % For example:
+      % @multitable @columnfractions .11 .89
+      % @item @code{#}
+      % @tab Legal holiday which is valid in major parts of the whole country.
+      % Is automatically provided with highlighting sequences respectively
+      % marking characters.
+      \noindent\ignorespaces##\unskip\multistrut
+    }\cr
+}
+\def\Emultitable{%
+  \crcr
+  \egroup % end the \halign
+  \global\setpercentfalse
+}
+
+\def\setmultitablespacing{%
+  \def\multistrut{\strut}% just use the standard line spacing
+  %
+  % Compute \multitablelinespace (if not defined by user) for use in
+% \multitableparskip calculation.  We used to define \multistrut based on
+  % this, but (ironically) that caused the spacing to be off.
+  % See bug-texinfo report from Werner Lemberg, 31 Oct 2004 12:52:20 +0100.
+\ifdim\multitablelinespace=0pt
+\setbox0=\vbox{X}\global\multitablelinespace=\the\baselineskip
+\global\advance\multitablelinespace by-\ht0
+\fi
+% Test to see if parskip is larger than space between lines of
+% table. If not, do nothing.
+%        If so, set to same dimension as multitablelinespace.
+\ifdim\multitableparskip>\multitablelinespace
+\global\multitableparskip=\multitablelinespace
+\global\advance\multitableparskip-7pt % to keep parskip somewhat smaller
+                                      % than skip between lines in the table.
+\fi%
+\ifdim\multitableparskip=0pt
+\global\multitableparskip=\multitablelinespace
+\global\advance\multitableparskip-7pt % to keep parskip somewhat smaller
+                                      % than skip between lines in the table.
+\fi}
+
+
+\message{conditionals,}
+
+% @iftex, @ifnotdocbook, @ifnothtml, @ifnotinfo, @ifnotplaintext,
+% @ifnotxml always succeed.  They currently do nothing; we don't
+% attempt to check whether the conditionals are properly nested.  But we
+% have to remember that they are conditionals, so that @end doesn't
+% attempt to close an environment group.
+%
+\def\makecond#1{%
+  \expandafter\let\csname #1\endcsname = \relax
+  \expandafter\let\csname iscond.#1\endcsname = 1
+}
+\makecond{iftex}
+\makecond{ifnotdocbook}
+\makecond{ifnothtml}
+\makecond{ifnotinfo}
+\makecond{ifnotplaintext}
+\makecond{ifnotxml}
+
+% Ignore @ignore, @ifhtml, @ifinfo, and the like.
+%
+\def\direntry{\doignore{direntry}}
+\def\documentdescription{\doignore{documentdescription}}
+\def\docbook{\doignore{docbook}}
+\def\html{\doignore{html}}
+\def\ifdocbook{\doignore{ifdocbook}}
+\def\ifhtml{\doignore{ifhtml}}
+\def\ifinfo{\doignore{ifinfo}}
+\def\ifnottex{\doignore{ifnottex}}
+\def\ifplaintext{\doignore{ifplaintext}}
+\def\ifxml{\doignore{ifxml}}
+\def\ignore{\doignore{ignore}}
+\def\menu{\doignore{menu}}
+\def\xml{\doignore{xml}}
+
+% Ignore text until a line `@end #1', keeping track of nested conditionals.
+%
+% A count to remember the depth of nesting.
+\newcount\doignorecount
+
+\def\doignore#1{\begingroup
+  % Scan in ``verbatim'' mode:
+  \obeylines
+  \catcode`\@ = \other
+  \catcode`\{ = \other
+  \catcode`\} = \other
+  %
+  % Make sure that spaces turn into tokens that match what \doignoretext wants.
+  \spaceisspace
+  %
+  % Count number of #1's that we've seen.
+  \doignorecount = 0
+  %
+  % Swallow text until we reach the matching `@end #1'.
+  \dodoignore{#1}%
+}
+
+{ \catcode`_=11 % We want to use \_STOP_ which cannot appear in texinfo source.
+  \obeylines %
+  %
+  \gdef\dodoignore#1{%
+    % #1 contains the command name as a string, e.g., `ifinfo'.
+    %
+    % Define a command to find the next `@end #1'.
+    \long\def\doignoretext##1^^M@end #1{%
+      \doignoretextyyy##1^^M@#1\_STOP_}%
+    %
+    % And this command to find another #1 command, at the beginning of a
+    % line.  (Otherwise, we would consider a line `@c @ifset', for
+    % example, to count as an @ifset for nesting.)
+    \long\def\doignoretextyyy##1^^M@#1##2\_STOP_{\doignoreyyy{##2}\_STOP_}%
+    %
+    % And now expand that command.
+    \doignoretext ^^M%
+  }%
+}
+
+\def\doignoreyyy#1{%
+  \def\temp{#1}%
+  \ifx\temp\empty                      % Nothing found.
+    \let\next\doignoretextzzz
+  \else                                        % Found a nested condition, ...
+    \advance\doignorecount by 1
+    \let\next\doignoretextyyy          % ..., look for another.
+    % If we're here, #1 ends with ^^M\ifinfo (for example).
+  \fi
+  \next #1% the token \_STOP_ is present just after this macro.
+}
+
+% We have to swallow the remaining "\_STOP_".
+%
+\def\doignoretextzzz#1{%
+  \ifnum\doignorecount = 0     % We have just found the outermost @end.
+    \let\next\enddoignore
+  \else                                % Still inside a nested condition.
+    \advance\doignorecount by -1
+    \let\next\doignoretext      % Look for the next @end.
+  \fi
+  \next
+}
+
+% Finish off ignored text.
+{ \obeylines%
+  % Ignore anything after the last `@end #1'; this matters in verbatim
+  % environments, where otherwise the newline after an ignored conditional
+  % would result in a blank line in the output.
+  \gdef\enddoignore#1^^M{\endgroup\ignorespaces}%
+}
+
+
+% @set VAR sets the variable VAR to an empty value.
+% @set VAR REST-OF-LINE sets VAR to the value REST-OF-LINE.
+%
+% Since we want to separate VAR from REST-OF-LINE (which might be
+% empty), we can't just use \parsearg; we have to insert a space of our
+% own to delimit the rest of the line, and then take it out again if we
+% didn't need it.
+% We rely on the fact that \parsearg sets \catcode`\ =10.
+%
+\parseargdef\set{\setyyy#1 \endsetyyy}
+\def\setyyy#1 #2\endsetyyy{%
+  {%
+    \makevalueexpandable
+    \def\temp{#2}%
+    \edef\next{\gdef\makecsname{SET#1}}%
+    \ifx\temp\empty
+      \next{}%
+    \else
+      \setzzz#2\endsetzzz
+    \fi
+  }%
+}
+% Remove the trailing space that \set inserted.
+\def\setzzz#1 \endsetzzz{\next{#1}}
+
+% @clear VAR clears (i.e., unsets) the variable VAR.
+%
+\parseargdef\clear{%
+  {%
+    \makevalueexpandable
+    \global\expandafter\let\csname SET#1\endcsname=\relax
+  }%
+}
+
+% @value{foo} gets the text saved in variable foo.
+\def\value{\begingroup\makevalueexpandable\valuexxx}
+\def\valuexxx#1{\expandablevalue{#1}\endgroup}
+{
+  \catcode`\-=\active \catcode`\_=\active
+  %
+  \gdef\makevalueexpandable{%
+    \let\value = \expandablevalue
+    % We don't want these characters active, ...
+    \catcode`\-=\other \catcode`\_=\other
+    % ..., but we might end up with active ones in the argument if
+    % we're called from @code, as @code{@value{foo-bar_}}, though.
+    % So \let them to their normal equivalents.
+    \let-\normaldash \let_\normalunderscore
+  }
+}
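+% A brief usage sketch (standard Texinfo; the variable name and value are
+% illustrative):
+%   @set VERSION 1.0
+%   This manual documents version @value{VERSION}.
+%   @clear VERSION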
+
+% We have this subroutine so that we can handle at least some @value's
+% properly in indexes (we call \makevalueexpandable in \indexdummies).
+% The command has to be fully expandable (if the variable is set), since
+% the result winds up in the index file.  This means that if the
+% variable's value contains other Texinfo commands, it's almost certain
+% it will fail (although perhaps we could fix that with sufficient work
+% to do a one-level expansion on the result, instead of complete).
+% 
+% Unfortunately, this has the consequence that when _ is in the *value*
+% of an @set, it does not print properly in the roman fonts (get the cmr
+% dot accent at position 126 instead).  No fix comes to mind, and it's
+% been this way since 2003 or earlier, so just ignore it.
+% 
+\def\expandablevalue#1{%
+  \expandafter\ifx\csname SET#1\endcsname\relax
+    {[No value for ``#1'']}%
+    \message{Variable `#1', used in @value, is not set.}%
+  \else
+    \csname SET#1\endcsname
+  \fi
+}
+
+% Like \expandablevalue, but completely expandable (the \message in the
+% definition above operates at the execution level of TeX).  Used when
+% writing to auxiliary files, due to the expansion that \write does.
+% If flag is undefined, pass through an unexpanded @value command: maybe it 
+% will be set by the time it is read back in.
+%
+% NB flag names containing - or _ may not work here.
+\def\dummyvalue#1{%
+  \expandafter\ifx\csname SET#1\endcsname\relax
+    \noexpand\value{#1}%
+  \else
+    \csname SET#1\endcsname
+  \fi
+}
+
+% Used for @value's in index entries to form the sort key: expand the @value
+% if possible, otherwise sort late.
+\def\indexnofontsvalue#1{%
+  \expandafter\ifx\csname SET#1\endcsname\relax
+    ZZZZZZZ
+  \else
+    \csname SET#1\endcsname
+  \fi
+}
+
+% @ifset VAR ... @end ifset reads the `...' iff VAR has been defined
+% with @set.
+% 
+% To get the special treatment we need for `@end ifset,' we call
+% \makecond and then redefine.
+%
+\makecond{ifset}
+\def\ifset{\parsearg{\doifset{\let\next=\ifsetfail}}}
+\def\doifset#1#2{%
+  {%
+    \makevalueexpandable
+    \let\next=\empty
+    \expandafter\ifx\csname SET#2\endcsname\relax
+      #1% If not set, redefine \next.
+    \fi
+    \expandafter
+  }\next
+}
+\def\ifsetfail{\doignore{ifset}}
+
+% @ifclear VAR ... @end executes the `...' iff VAR has never been
+% defined with @set, or has been undefined with @clear.
+%
+% The `\else' inside the `\doifset' parameter is a trick to reuse the
+% above code: if the variable is not set, do nothing, if it is set,
+% then redefine \next to \ifclearfail.
+%
+\makecond{ifclear}
+\def\ifclear{\parsearg{\doifset{\else \let\next=\ifclearfail}}}
+\def\ifclearfail{\doignore{ifclear}}
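+% Usage sketch for the two conditionals (standard Texinfo; the flag name is
+% illustrative):
+%   @set DRAFT
+%   @ifset DRAFT
+%   This text appears only while DRAFT is set.
+%   @end ifset
+%   @ifclear DRAFT
+%   This text appears only when DRAFT is clear.
+%   @end ifclear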
+
+% @ifcommandisdefined CMD ... @end executes the `...' if CMD (written
+% without the @) is in fact defined.  We can only feasibly check at the
+% TeX level, so something like `mathcode' is going to be considered
+% defined even though it is not a Texinfo command.
+% 
+\makecond{ifcommanddefined}
+\def\ifcommanddefined{\parsearg{\doifcmddefined{\let\next=\ifcmddefinedfail}}}
+%
+\def\doifcmddefined#1#2{{%
+    \makevalueexpandable
+    \let\next=\empty
+    \expandafter\ifx\csname #2\endcsname\relax
+      #1% If not defined, \let\next as above.
+    \fi
+    \expandafter
+  }\next
+}
+\def\ifcmddefinedfail{\doignore{ifcommanddefined}}
+
+% @ifcommandnotdefined CMD ... is handled similarly to @ifclear above.
+\makecond{ifcommandnotdefined}
+\def\ifcommandnotdefined{%
+  \parsearg{\doifcmddefined{\else \let\next=\ifcmdnotdefinedfail}}}
+\def\ifcmdnotdefinedfail{\doignore{ifcommandnotdefined}}
+
+% Set the `txicommandconditionals' variable, so documents have a way to
+% test if the @ifcommand...defined conditionals are available.
+\set txicommandconditionals
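+% Usage sketch (the command name being tested is illustrative); documents
+% can guard the test with the variable set just above:
+%   @ifset txicommandconditionals
+%   @ifcommanddefined indent
+%   Text used only when the processor knows the command.
+%   @end ifcommanddefined
+%   @end ifset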
+
+% @dircategory CATEGORY  -- specify a category of the dir file
+% which this file should belong to.  Ignore this in TeX.
+\let\dircategory=\comment
+
+% @definfoenclose.
+\let\definfoenclose=\comment
+
+
+\message{indexing,}
+% Index generation facilities
+
+% Define \newwrite to be identical to plain tex's \newwrite
+% except not \outer, so it can be used within macros and \if's.
+\edef\newwrite{\makecsname{ptexnewwrite}}
+
+% \newindex {foo} defines an index named foo.
+% It automatically defines \fooindex such that
+% \fooindex ...rest of line... puts an entry in the index foo.
+% It also defines \fooindfile to be the number of the output channel for
+% the file that accumulates this index.  The file's extension is foo.
+% The name of an index should be no more than 2 characters long
+% for the sake of vms.
+%
+\def\newindex#1{%
+  \expandafter\chardef\csname#1indfile\endcsname=0
+  \expandafter\xdef\csname#1index\endcsname{%     % Define @#1index
+    \noexpand\doindex{#1}}
+}
+
+% @defindex foo  ==  \newindex{foo}
+%
+\def\defindex{\parsearg\newindex}
+
+% Define @defcodeindex, like @defindex except put all entries in @code.
+%
+\def\defcodeindex{\parsearg\newcodeindex}
+%
+\def\newcodeindex#1{%
+  \expandafter\chardef\csname#1indfile\endcsname=0
+  \expandafter\xdef\csname#1index\endcsname{%
+    \noexpand\docodeindex{#1}}%
+}
+
+% The default indices:
+\newindex{cp}%      concepts,
+\newcodeindex{fn}%  functions,
+\newcodeindex{vr}%  variables,
+\newcodeindex{tp}%  types,
+\newcodeindex{ky}%  keys
+\newcodeindex{pg}%  and programs.
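+% Usage sketch for defining and filling a new index (the index names `au'
+% and `op' and the entries are illustrative):
+%   @defindex au
+%   @auindex Lovelace, Ada
+%   @defcodeindex op
+%   @opindex --verbose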
+
+
+% @synindex foo bar    makes index foo feed into index bar.
+% Do this instead of @defindex foo if you don't want it as a separate index.
+%
+% @syncodeindex foo bar   similar, but put all entries made for index foo
+% inside @code.
+%
+\def\synindex#1 #2 {\dosynindex\doindex{#1}{#2}}
+\def\syncodeindex#1 #2 {\dosynindex\docodeindex{#1}{#2}}
+
+% #1 is \doindex or \docodeindex, #2 the index getting redefined (foo),
+% #3 the target index (bar).
+\def\dosynindex#1#2#3{%
+  \requireopenindexfile{#3}%
+  % redefine \fooindfile:
+  \expandafter\let\expandafter\temp\expandafter=\csname#3indfile\endcsname
+  \expandafter\let\csname#2indfile\endcsname=\temp
+  % redefine \fooindex:
+  \expandafter\xdef\csname#2index\endcsname{\noexpand#1{#3}}%
+}
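+% Usage sketch: feed all function-index entries into the concept index,
+% with the entries set in @code as described above:
+%   @syncodeindex fn cp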
+
+% Define \doindex, the driver for all index macros.
+% Argument #1 is generated by the calling \fooindex macro,
+% and it is the two-letter name of the index.
+
+\def\doindex#1{\edef\indexname{#1}\parsearg\doindexxxx}
+\def\doindexxxx #1{\doind{\indexname}{#1}}
+
+% like the previous two, but they put @code around the argument.
+\def\docodeindex#1{\edef\indexname{#1}\parsearg\docodeindexxxx}
+\def\docodeindexxxx #1{\doind{\indexname}{\code{#1}}}
+
+
+% Used when writing an index entry out to an index file to prevent
+% expansion of Texinfo commands that can appear in an index entry.
+%
+\def\indexdummies{%
+  \escapechar = `\\     % use backslash in output files.
+  \definedummyletter\@%
+  \definedummyletter\ %
+  %
+  % For texindex which always views { and } as separators.
+  \def\{{\lbracechar{}}%
+  \def\}{\rbracechar{}}%
+  %
+  % Do the redefinitions.
+  \definedummies
+}
+
+% Used for the aux and toc files, where @ is the escape character.
+%
+\def\atdummies{%
+  \definedummyletter\@%
+  \definedummyletter\ %
+  \definedummyletter\{%
+  \definedummyletter\}%
+  %
+  % Do the redefinitions.
+  \definedummies
+  \otherbackslash
+}
+
+% \definedummyword defines \#1 as \string\#1\space, thus effectively
+% preventing its expansion.  This is used only for control words,
+% not control letters, because the \space would be incorrect for
+% control characters, but is needed to separate the control word
+% from whatever follows.
+%
+% These can be used both for control words that take an argument and
+% those that do not.  If it is followed by {arg} in the input, then
+% that will dutifully get written to the index (or wherever).
+%
+% For control letters, we have \definedummyletter, which omits the
+% space.
+%
+\def\definedummyword  #1{\def#1{\string#1\space}}%
+\def\definedummyletter#1{\def#1{\string#1}}%
+\let\definedummyaccent\definedummyletter
+
+% Called from \indexdummies and \atdummies, to effectively prevent
+% the expansion of commands.
+%
+\def\definedummies{%
+  %
+  \let\commondummyword\definedummyword
+  \let\commondummyletter\definedummyletter
+  \let\commondummyaccent\definedummyaccent
+  \commondummiesnofonts
+  %
+  \definedummyletter\_%
+  \definedummyletter\-%
+  %
+  % Non-English letters.
+  \definedummyword\AA
+  \definedummyword\AE
+  \definedummyword\DH
+  \definedummyword\L
+  \definedummyword\O
+  \definedummyword\OE
+  \definedummyword\TH
+  \definedummyword\aa
+  \definedummyword\ae
+  \definedummyword\dh
+  \definedummyword\exclamdown
+  \definedummyword\l
+  \definedummyword\o
+  \definedummyword\oe
+  \definedummyword\ordf
+  \definedummyword\ordm
+  \definedummyword\questiondown
+  \definedummyword\ss
+  \definedummyword\th
+  %
+  % Although these internal commands shouldn't show up, sometimes they do.
+  \definedummyword\bf
+  \definedummyword\gtr
+  \definedummyword\hat
+  \definedummyword\less
+  \definedummyword\sf
+  \definedummyword\sl
+  \definedummyword\tclose
+  \definedummyword\tt
+  %
+  \definedummyword\LaTeX
+  \definedummyword\TeX
+  %
+  % Assorted special characters.
+  \definedummyword\atchar
+  \definedummyword\arrow
+  \definedummyword\bullet
+  \definedummyword\comma
+  \definedummyword\copyright
+  \definedummyword\registeredsymbol
+  \definedummyword\dots
+  \definedummyword\enddots
+  \definedummyword\entrybreak
+  \definedummyword\equiv
+  \definedummyword\error
+  \definedummyword\euro
+  \definedummyword\expansion
+  \definedummyword\geq
+  \definedummyword\guillemetleft
+  \definedummyword\guillemetright
+  \definedummyword\guilsinglleft
+  \definedummyword\guilsinglright
+  \definedummyword\lbracechar
+  \definedummyword\leq
+  \definedummyword\mathopsup
+  \definedummyword\minus
+  \definedummyword\ogonek
+  \definedummyword\pounds
+  \definedummyword\point
+  \definedummyword\print
+  \definedummyword\quotedblbase
+  \definedummyword\quotedblleft
+  \definedummyword\quotedblright
+  \definedummyword\quoteleft
+  \definedummyword\quoteright
+  \definedummyword\quotesinglbase
+  \definedummyword\rbracechar
+  \definedummyword\result
+  \definedummyword\sub
+  \definedummyword\sup
+  \definedummyword\textdegree
+  %
+  % We want to disable all macros so that they are not expanded by \write.
+  \macrolist
+  \let\value\dummyvalue
+  %
+  \normalturnoffactive
+}
+
+% \commondummiesnofonts: common to \definedummies and \indexnofonts.
+% Define \commondummyletter, \commondummyaccent and \commondummyword before
+% using.  Used for accents, font commands, and various control letters.
+%
+\def\commondummiesnofonts{%
+  % Control letters and accents.
+  \commondummyletter\!%
+  \commondummyaccent\"%
+  \commondummyaccent\'%
+  \commondummyletter\*%
+  \commondummyaccent\,%
+  \commondummyletter\.%
+  \commondummyletter\/%
+  \commondummyletter\:%
+  \commondummyaccent\=%
+  \commondummyletter\?%
+  \commondummyaccent\^%
+  \commondummyaccent\`%
+  \commondummyaccent\~%
+  \commondummyword\u
+  \commondummyword\v
+  \commondummyword\H
+  \commondummyword\dotaccent
+  \commondummyword\ogonek
+  \commondummyword\ringaccent
+  \commondummyword\tieaccent
+  \commondummyword\ubaraccent
+  \commondummyword\udotaccent
+  \commondummyword\dotless
+  %
+  % Texinfo font commands.
+  \commondummyword\b
+  \commondummyword\i
+  \commondummyword\r
+  \commondummyword\sansserif
+  \commondummyword\sc
+  \commondummyword\slanted
+  \commondummyword\t
+  %
+  % Commands that take arguments.
+  \commondummyword\abbr
+  \commondummyword\acronym
+  \commondummyword\anchor
+  \commondummyword\cite
+  \commondummyword\code
+  \commondummyword\command
+  \commondummyword\dfn
+  \commondummyword\dmn
+  \commondummyword\email
+  \commondummyword\emph
+  \commondummyword\env
+  \commondummyword\file
+  \commondummyword\image
+  \commondummyword\indicateurl
+  \commondummyword\inforef
+  \commondummyword\kbd
+  \commondummyword\key
+  \commondummyword\math
+  \commondummyword\option
+  \commondummyword\pxref
+  \commondummyword\ref
+  \commondummyword\samp
+  \commondummyword\strong
+  \commondummyword\tie
+  \commondummyword\U
+  \commondummyword\uref
+  \commondummyword\url
+  \commondummyword\var
+  \commondummyword\verb
+  \commondummyword\w
+  \commondummyword\xref
+}
+
+% For testing: output @{ and @} in index sort strings as \{ and \}.
+\newif\ifusebracesinindexes
+
+\let\indexlbrace\relax
+\let\indexrbrace\relax
+
+{\catcode`\@=0
+\catcode`\\=13
+  @gdef@backslashdisappear{@def\{}}
+}
+
+{
+\catcode`\<=13
+\catcode`\-=13
+\catcode`\`=13
+  \gdef\indexnonalnumdisappear{%
+    \expandafter\ifx\csname SETtxiindexlquoteignore\endcsname\relax\else
+      % @set txiindexlquoteignore makes us ignore left quotes in the sort term.
+      % (Introduced for FSFS 2nd ed.)
+      \let`=\empty
+    \fi
+    %
+    \expandafter\ifx\csname SETtxiindexbackslashignore\endcsname\relax\else
+      \backslashdisappear
+    \fi
+    %
+    \expandafter\ifx\csname SETtxiindexhyphenignore\endcsname\relax\else
+      \def-{}%
+    \fi
+    \expandafter\ifx\csname SETtxiindexlessthanignore\endcsname\relax\else
+      \def<{}%
+    \fi
+    \expandafter\ifx\csname SETtxiindexatsignignore\endcsname\relax\else
+      \def\@{}%
+    \fi
+  }
+
+  \gdef\indexnonalnumreappear{%
+    \useindexbackslash
+    \let-\normaldash
+    \let<\normalless
+    \def\@{@}%
+  }
+}
+
+
+% \indexnofonts is used when outputting the strings to sort the index
+% by, and when constructing control sequence names.  It eliminates all
+% control sequences and just writes whatever the best ASCII sort string
+% would be for a given command (usually its argument).
+%
+\def\indexnofonts{%
+  % Accent commands should become @asis.
+  \def\commondummyaccent##1{\let##1\asis}%
+  % We can just ignore other control letters.
+  \def\commondummyletter##1{\let##1\empty}%
+  % All control words become @asis by default; overrides below.
+  \let\commondummyword\commondummyaccent
+  \commondummiesnofonts
+  %
+  % Don't no-op \tt, since it isn't a user-level command
+  % and is used in the definitions of the active chars like <, >, |, etc.
+  % Likewise with the other plain tex font commands.
+  %\let\tt=\asis
+  %
+  \def\ { }%
+  \def\@{@}%
+  \def\_{\normalunderscore}%
+  \def\-{}% @- shouldn't affect sorting
+  %
+  \uccode`\1=`\{ \uppercase{\def\{{1}}%
+  \uccode`\1=`\} \uppercase{\def\}{1}}%
+  \let\lbracechar\{%
+  \let\rbracechar\}%
+  %
+  % Non-English letters.
+  \def\AA{AA}%
+  \def\AE{AE}%
+  \def\DH{DZZ}%
+  \def\L{L}%
+  \def\OE{OE}%
+  \def\O{O}%
+  \def\TH{TH}%
+  \def\aa{aa}%
+  \def\ae{ae}%
+  \def\dh{dzz}%
+  \def\exclamdown{!}%
+  \def\l{l}%
+  \def\oe{oe}%
+  \def\ordf{a}%
+  \def\ordm{o}%
+  \def\o{o}%
+  \def\questiondown{?}%
+  \def\ss{ss}%
+  \def\th{th}%
+  %
+  \def\LaTeX{LaTeX}%
+  \def\TeX{TeX}%
+  %
+  % Assorted special characters.  \defglyph gives the control sequence a
+  % definition that removes the {} that follows its use.
+  \defglyph\atchar{@}%
+  \defglyph\arrow{->}%
+  \defglyph\bullet{bullet}%
+  \defglyph\comma{,}%
+  \defglyph\copyright{copyright}%
+  \defglyph\dots{...}%
+  \defglyph\enddots{...}%
+  \defglyph\equiv{==}%
+  \defglyph\error{error}%
+  \defglyph\euro{euro}%
+  \defglyph\expansion{==>}%
+  \defglyph\geq{>=}%
+  \defglyph\guillemetleft{<<}%
+  \defglyph\guillemetright{>>}%
+  \defglyph\guilsinglleft{<}%
+  \defglyph\guilsinglright{>}%
+  \defglyph\leq{<=}%
+  \defglyph\lbracechar{\{}%
+  \defglyph\minus{-}%
+  \defglyph\point{.}%
+  \defglyph\pounds{pounds}%
+  \defglyph\print{-|}%
+  \defglyph\quotedblbase{"}%
+  \defglyph\quotedblleft{"}%
+  \defglyph\quotedblright{"}%
+  \defglyph\quoteleft{`}%
+  \defglyph\quoteright{'}%
+  \defglyph\quotesinglbase{,}%
+  \defglyph\rbracechar{\}}%
+  \defglyph\registeredsymbol{R}%
+  \defglyph\result{=>}%
+  \defglyph\textdegree{o}%
+  %
+  % We need to get rid of all macros, leaving only the arguments (if present).
+  % Of course this is not nearly correct, but it is the best we can do for now.
+  % makeinfo does not expand macros in the argument to @deffn, which ends up
+  % writing an index entry, and texindex isn't prepared for an index sort entry
+  % that starts with \.
+  %
+  % Since macro invocations are followed by braces, we can just redefine them
+  % to take a single TeX argument.  The case of a macro invocation that
+  % goes to end-of-line is not handled.
+  %
+  \macrolist
+  \let\value\indexnofontsvalue
+}
+\def\defglyph#1#2{\def#1##1{#2}} % see above
+
+
+
+
+\let\SETmarginindex=\relax % put index entries in margin (undocumented)?
+
+% Most index entries go through here, but \dosubind is the general case.
+% #1 is the index name, #2 is the entry text.
+\def\doind#1#2{\dosubind{#1}{#2}{}}
+
+% There is also \dosubind {index}{topic}{subtopic}
+% which makes an entry in a two-level index such as the operation index.
+% TODO: Two-level index?  Operation index?
+
+% Workhorse for all indexes.
+% #1 is name of index, #2 is stuff to put there, #3 is subentry --
+% empty if called from \doind, as we usually are (the main exception
+% is with most defuns, which call us directly).
+%
+\def\dosubind#1#2#3{%
+  \iflinks
+  {%
+    \requireopenindexfile{#1}%
+    % Store the main index entry text (including the third arg).
+    \toks0 = {#2}%
+    % If third arg is present, precede it with a space.
+    \def\thirdarg{#3}%
+    \ifx\thirdarg\empty \else
+      \toks0 = \expandafter{\the\toks0 \space #3}%
+    \fi
+    %
+    \edef\writeto{\csname#1indfile\endcsname}%
+    %
+    \safewhatsit\dosubindwrite
+  }%
+  \fi
+}
+
+% Check if an index file has been opened, and if not, open it.
+\def\requireopenindexfile#1{%
+\ifnum\csname #1indfile\endcsname=0
+  \expandafter\newwrite \csname#1indfile\endcsname
+  \edef\suffix{#1}%
+  % A .fls suffix would conflict with the file extension for the output
+  % of -recorder, so use .f1s instead.
+  \ifx\suffix\indexisfl\def\suffix{f1}\fi
+  % Open the file
+  \immediate\openout\csname#1indfile\endcsname \jobname.\suffix
+  % Using \immediate in the \openout above prevents an object from entering
+  % the current box, which could confound checks such as those in
+  % \safewhatsit for preceding skips.
+  \typeout{Writing index file \jobname.\suffix}%
+\fi}
+\def\indexisfl{fl}
+
+% Output \ as {\indexbackslash}, because \ is an escape character in
+% the index files.
+\let\indexbackslash=\relax
+{\catcode`\@=0 \catcode`\\=\active
+  @gdef@useindexbackslash{@def\{{@indexbackslash}}}
+}
+
+% Definition for writing index entry text.
+\def\sortas#1{\ignorespaces}%
+
+% Definition for writing index entry sort key.  Should occur at
+% the beginning of the index entry, like
+%     @cindex @sortas{september} \september
+% The \ignorespaces takes care of following space, but there's no way
+% to remove space before it.
+{
+\catcode`\-=13
+\gdef\indexwritesortas{%
+  \begingroup
+  \indexnonalnumreappear
+  \indexwritesortasxxx}
+\gdef\indexwritesortasxxx#1{%
+  \xdef\indexsortkey{#1}\endgroup}
+}
+
+
+% Write the entry in \toks0 to the index file.
+%
+\def\dosubindwrite{%
+  % Put the index entry in the margin if desired.
+  \ifx\SETmarginindex\relax\else
+    \insert\margin{\hbox{\vrule height8pt depth3pt width0pt \the\toks0}}%
+  \fi
+  %
+  % Remember, we are within a group.
+  \indexdummies % Must do this here, since \bf, etc expand at this stage
+  \useindexbackslash % \indexbackslash isn't defined now so it will be output 
+                     % as is; and it will print as backslash.
+  % The braces around \indexbrace are recognized by texindex.
+  %
+  % Get the string to sort by, by processing the index entry with all
+  % font commands turned off.
+  {\indexnofonts
+   \def\lbracechar{{\indexlbrace}}%
+   \def\rbracechar{{\indexrbrace}}%
+   \let\{=\lbracechar
+   \let\}=\rbracechar
+   \indexnonalnumdisappear
+   \xdef\indexsortkey{}%
+   \let\sortas=\indexwritesortas
+   \edef\temp{\the\toks0}%
+   \setbox\dummybox = \hbox{\temp}% Make sure to execute any \sortas
+   \ifx\indexsortkey\empty
+     \xdef\indexsortkey{\temp}%
+     \ifx\indexsortkey\empty\xdef\indexsortkey{ }\fi
+   \fi
+  }%
+  %
+  % Set up the complete index entry, with both the sort key and
+  % the original text, including any font commands.  We write
+  % three arguments to \entry to the .?? file (four in the
+  % subentry case), texindex reduces to two when writing the .??s
+  % sorted result.
+  \edef\temp{%
+    \write\writeto{%
+      \string\entry{\indexsortkey}{\noexpand\folio}{\the\toks0}}%
+  }%
+  \temp
+}
+\newbox\dummybox % used above
+
+% Take care of unwanted page breaks/skips around a whatsit:
+%
+% If a skip is the last thing on the list now, preserve it
+% by backing up by \lastskip, doing the \write, then inserting
+% the skip again.  Otherwise, the whatsit generated by the
+% \write or \pdfdest will make \lastskip zero.  The result is that
+% sequences like this:
+% @end defun
+% @tindex whatever
+% @defun ...
+% will have extra space inserted, because the \medbreak in the
+% start of the @defun won't see the skip inserted by the @end of
+% the previous defun.
+%
+% But don't do any of this if we're not in vertical mode.  We
+% don't want to do a \vskip and prematurely end a paragraph.
+%
+% Avoid page breaks due to these extra skips, too.
+%
+% But wait, there is a catch there:
+% We'll have to check whether \lastskip is zero skip.  \ifdim is not
+% sufficient for this purpose, as it ignores stretch and shrink parts
+% of the skip.  The only way seems to be to check the textual
+% representation of the skip.
+%
+% The following is almost like \def\zeroskipmacro{0.0pt} except that
+% the ``p'' and ``t'' characters have catcode \other, not 11 (letter).
+%
+\edef\zeroskipmacro{\expandafter\the\csname z@skip\endcsname}
+%
+\newskip\whatsitskip
+\newcount\whatsitpenalty
+%
+% ..., ready, GO:
+%
+\def\safewhatsit#1{\ifhmode
+  #1%
+ \else
+  % \lastskip and \lastpenalty cannot both be nonzero simultaneously.
+  \whatsitskip = \lastskip
+  \edef\lastskipmacro{\the\lastskip}%
+  \whatsitpenalty = \lastpenalty
+  %
+  % If \lastskip is nonzero, that means the last item was a
+  % skip.  And since a skip is discardable, that means this
+  % -\whatsitskip glue we're inserting is preceded by a
+  % non-discardable item, therefore it is not a potential
+  % breakpoint, therefore no \nobreak needed.
+  \ifx\lastskipmacro\zeroskipmacro
+  \else
+    \vskip-\whatsitskip
+  \fi
+  %
+  #1%
+  %
+  \ifx\lastskipmacro\zeroskipmacro
+    % If \lastskip was zero, perhaps the last item was a penalty, and
+    % perhaps it was >=10000, e.g., a \nobreak.  In that case, we want
+    % to re-insert the same penalty (values >10000 are used for various
+    % signals); since we just inserted a non-discardable item, any
+    % following glue (such as a \parskip) would be a breakpoint.  For example:
+    %   @deffn deffn-whatever
+    %   @vindex index-whatever
+    %   Description.
+    % would allow a break between the index-whatever whatsit
+    % and the "Description." paragraph.
+    \ifnum\whatsitpenalty>9999 \penalty\whatsitpenalty \fi
+  \else
+    % On the other hand, if we had a nonzero \lastskip,
+    % this make-up glue would be preceded by a non-discardable item
+    % (the whatsit from the \write), so we must insert a \nobreak.
+    \nobreak\vskip\whatsitskip
+  \fi
+\fi}
+
+% The index entry written in the file actually looks like
+%  \entry {sortstring}{page}{topic}
+% or
+%  \entry {sortstring}{page}{topic}{subtopic}
+% The texindex program reads in these files and writes files
+% containing these kinds of lines:
+%  \initial {c}
+%     before the first topic whose initial is c
+%  \entry {topic}{pagelist}
+%     for a topic that is used without subtopics
+%  \primary {topic}
+%     for the beginning of a topic that is used with subtopics
+%  \secondary {subtopic}{pagelist}
+%     for each subtopic.
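+% For instance (page number and entry text illustrative), an entry written as
+%   \entry {recovery}{42}{recovery}
+% could come back from texindex as
+%   \initial {r}
+%   \entry {recovery}{42}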
+
+% Define the user-accessible indexing commands
+% @findex, @vindex, @kindex, @cindex.
+
+\def\findex {\fnindex}
+\def\kindex {\kyindex}
+\def\cindex {\cpindex}
+\def\vindex {\vrindex}
+\def\tindex {\tpindex}
+\def\pindex {\pgindex}
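+% Usage sketch (entry text is illustrative); each command files its entry
+% into the corresponding two-letter index defined above:
+%   @cindex secret sharing
+%   @findex main
+%   @vindex PATH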
+
+\def\cindexsub {\begingroup\obeylines\cindexsub}
+{\obeylines %
+\gdef\cindexsub "#1" #2^^M{\endgroup %
+\dosubind{cp}{#2}{#1}}}
+
+% Define the macros used in formatting output of the sorted index material.
+
+% @printindex causes a particular index (the ??s file) to get printed.
+% It does not print any chapter heading (usually an @unnumbered).
+%
+\parseargdef\printindex{\begingroup
+  \dobreak \chapheadingskip{10000}%
+  %
+  \smallfonts \rm
+  \tolerance = 9500
+  \plainfrenchspacing
+  \everypar = {}% don't want the \kern -\parindent from indentation suppression.
+  %
+  % See if the index file exists and is nonempty.
+  % Change catcode of @ here so that if the index file contains
+  % \initial {@}
+  % as its first line, TeX doesn't complain about mismatched braces
+  % (because it thinks @} is a control sequence).
+  \catcode`\@ = 12
+  % See comment in \requireopenindexfile.
+  \def\indexname{#1}\ifx\indexname\indexisfl\def\indexname{f1}\fi
+  \openin 1 \jobname.\indexname s
+  \ifeof 1
+    % \enddoublecolumns gets confused if there is no text in the index,
+    % and it loses the chapter title and the aux file entries for the
+    % index.  The easiest way to prevent this problem is to make sure
+    % there is some text.
+    \putwordIndexNonexistent
+    \typeout{No file \jobname.\indexname s.}%
+  \else
+    \catcode`\\ = 0
+    %
+    % If the index file exists but is empty, then \openin leaves \ifeof
+    % false.  We have to make TeX try to read something from the file, so
+    % it can discover if there is anything in it.
+    \read 1 to \thisline
+    \ifeof 1
+      \putwordIndexIsEmpty
+    \else
+      % Index files are almost Texinfo source, but we use \ as the escape
+      % character.  It would be better to use @, but that's too big a change
+      % to make right now.
+      \def\indexbackslash{\ttbackslash}%
+      \let\indexlbrace\{   % Likewise, set these sequences for braces
+      \let\indexrbrace\}   % used in the sort key.
+      \begindoublecolumns
+      \let\dotheinsertentrybox\dotheinsertentryboxwithpenalty
+      %
+      % Read input from the index file line by line.
+      \loopdo
+        \ifeof1 \else
+          \read 1 to \nextline
+        \fi
+        %
+        \indexinputprocessing
+        \thisline
+        %
+        \ifeof1\else
+        \let\thisline\nextline
+      \repeat
+      %%
+      \enddoublecolumns
+    \fi
+  \fi
+  \closein 1
+\endgroup}
+\def\loopdo#1\repeat{\def\body{#1}\loopdoxxx}
+\def\loopdoxxx{\let\next=\relax\body\let\next=\loopdoxxx\fi\next}
+
+\def\indexinputprocessing{%
+  \ifeof1
+    \let\firsttoken\relax
+  \else
+    \edef\act{\gdef\noexpand\firsttoken{\getfirsttoken\nextline}}%
+    \act
+  \fi
+}
+\def\getfirsttoken#1{\expandafter\getfirsttokenx#1\endfirsttoken}
+\long\def\getfirsttokenx#1#2\endfirsttoken{\noexpand#1}
+
+
+% These macros are used by the sorted index file itself.
+% Change them to control the appearance of the index.
+
+{\catcode`\/=13 \catcode`\-=13 \catcode`\^=13 \catcode`\~=13 \catcode`\_=13
+\catcode`\|=13 \catcode`\<=13 \catcode`\>=13 \catcode`\+=13 \catcode`\"=13
+\catcode`\$=3
+\gdef\initialglyphs{%
+  % Some changes for non-alphabetic characters.  Using the glyphs from the
+  % math fonts looks more consistent than the typewriter font used elsewhere
+  % for these characters.
+  \def\indexbackslash{\math{\backslash}}%
+  \let\\=\indexbackslash
+  %
+  % Can't get bold backslash so don't use bold forward slash
+  \catcode`\/=13
+  \def/{{\secrmnotbold \normalslash}}%
+  \def-{{\normaldash\normaldash}}% en dash `--'
+  \def^{{\chapbf \normalcaret}}%
+  \def~{{\chapbf \normaltilde}}%
+  \def\_{%
+     \leavevmode \kern.07em \vbox{\hrule width.3em height.1ex}\kern .07em }%
+  \def|{$\vert$}%
+  \def<{$\less$}%
+  \def>{$\gtr$}%
+  \def+{$\normalplus$}%
+}}
+
+\def\initial{%
+  \bgroup
+  \initialglyphs
+  \initialx
+}
+
+\def\initialx#1{%
+  % Remove any glue we may have, we'll be inserting our own.
+  \removelastskip
+  %
+  % We like breaks before the index initials, so insert a bonus.
+  % The glue before the bonus allows a little bit of space at the
+  % bottom of a column to reduce an increase in inter-line spacing.
+  \nobreak
+  \vskip 0pt plus 5\baselineskip
+  \penalty -300 
+  \vskip 0pt plus -5\baselineskip
+  %
+  % Typeset the initial.  Making this add up to a whole number of
+  % baselineskips increases the chance of the dots lining up from column
+  % to column.  It still won't often be perfect, because of the stretch
+  % we need before each entry, but it's better.
+  %
+  % No shrink because it confuses \balancecolumns.
+  \vskip 1.67\baselineskip plus 1\baselineskip
+  \leftline{\secfonts \kern-0.05em \secbf #1}%
+  % \secfonts is inside the argument of \leftline so that the change of
+  % \baselineskip will not affect any glue inserted before the vbox that
+  % \leftline creates.
+  % Do our best not to break after the initial.
+  \nobreak
+  \vskip .33\baselineskip plus .1\baselineskip
+  \egroup % \initialglyphs
+}
+
+\newdimen\entryrightmargin
+\entryrightmargin=0pt
+
+% \entry typesets a paragraph consisting of the text (#1), dot leaders, and
+% then page number (#2) flushed to the right margin.  It is used for index
+% and table of contents entries.  The paragraph is indented by \leftskip.
+%
+\def\entry{%
+  \begingroup
+    %
+    % For pdfTeX and XeTeX.
+    % The redefinition of \domark stops marks being added in \pdflink to 
+    % preserve coloured links across page boundaries.  Otherwise the marks
+    % would get in the way of \lastbox in \insertentrybox.
+    \let\domark\relax
+    %
+    % Start a new paragraph if necessary, so our assignments below can't
+    % affect previous text.
+    \par
+    %
+    % No extra space above this paragraph.
+    \parskip = 0in
+    %
+    % When reading the text of entry, convert explicit line breaks
+    % from @* into spaces.  The user might give these in long section
+    % titles, for instance.
+    \def\*{\unskip\space\ignorespaces}%
+    \def\entrybreak{\hfil\break}% An undocumented command
+    %
+    % Swallow the left brace of the text (first parameter):
+    \afterassignment\doentry
+    \let\temp =
+}
+\def\entrybreak{\unskip\space\ignorespaces}%
+\def\doentry{%
+    % Save the text of the entry
+    \global\setbox\boxA=\hbox\bgroup
+    \bgroup % Instead of the swallowed brace.
+      \noindent
+      \aftergroup\finishentry
+      % And now comes the text of the entry.
+      % Not absorbing as a macro argument reduces the chance of problems
+      % with catcodes occurring.
+}
+{\catcode`\@=11
+\gdef\finishentry#1{%
+    \egroup % end box A
+    \dimen@ = \wd\boxA % Length of text of entry
+    \global\setbox\boxA=\hbox\bgroup\unhbox\boxA
+    % #1 is the page number.
+    %
+    % Get the width of the page numbers, and only use
+    % leaders if they are present.
+    \global\setbox\boxB = \hbox{#1}%
+    \ifdim\wd\boxB = 0pt
+      \null\nobreak\hfill\ %
+    \else
+      %
+      \null\nobreak\indexdotfill % Have leaders before the page number.
+      %
+      \ifpdf
+        \pdfgettoks#1.%
+        \hskip\skip\thinshrinkable\the\toksA
+      \else
+        \ifx\XeTeXrevision\thisisundefined
+          \hskip\skip\thinshrinkable #1%
+        \else
+          \pdfgettoks#1.%
+          \hskip\skip\thinshrinkable\the\toksA
+        \fi
+      \fi
+    \fi
+    \egroup % end \boxA
+    \ifdim\wd\boxB = 0pt
+      \global\setbox\entrybox=\vbox{\unhbox\boxA}%
+    \else
+    \global\setbox\entrybox=\vbox\bgroup
+      % We want the text of the entries to be aligned to the left, and the
+      % page numbers to be aligned to the right.
+      %
+      \parindent = 0pt
+      \advance\leftskip by 0pt plus 1fil
+      \advance\leftskip by 0pt plus -1fill
+      \rightskip = 0pt plus -1fil
+      \advance\rightskip by 0pt plus 1fill
+      % Cause last line, which could consist of page numbers on their own
+      % if the list of page numbers is long, to be aligned to the right.
+      \parfillskip=0pt plus -1fill
+      %
+      \advance\rightskip by \entryrightmargin
+      % Determine how far we can stretch into the margin.
+      % This allows, e.g., "Appendix H  GNU Free Documentation License" to
+      % fit on one line in @letterpaper format.
+      \ifdim\entryrightmargin>2.1em
+        \dimen@i=2.1em
+      \else
+        \dimen@i=0em
+      \fi
+      \advance \parfillskip by 0pt minus 1\dimen@i
+      %
+      \dimen@ii = \hsize
+      \advance\dimen@ii by -1\leftskip
+      \advance\dimen@ii by -1\entryrightmargin
+      \advance\dimen@ii by 1\dimen@i
+      \ifdim\wd\boxA > \dimen@ii % If the entry doesn't fit in one line
+      \ifdim\dimen@ > 0.8\dimen@ii   % due to long index text
+        % Try to split the text roughly evenly.  \dimen@ will be the length of 
+        % the first line.
+        \dimen@ = 0.7\dimen@
+        \dimen@ii = \hsize
+        \ifnum\dimen@>\dimen@ii
+          % If the entry is too long (for example, if it needs more than
+          % two lines), use all the space in the first line.
+          \dimen@ = \dimen@ii
+        \fi
+        \advance\leftskip by 0pt plus 1fill % ragged right
+        \advance \dimen@ by 1\rightskip
+        \parshape = 2 0pt \dimen@ 0em \dimen@ii
+        % Ideally we'd add a finite glue at the end of the first line only,
+        % instead of using \parshape with explicit line lengths, but TeX
+        % doesn't seem to provide a way to do such a thing.
+        %
+        % Indent all lines but the first one.
+        \advance\leftskip by 1em
+        \advance\parindent by -1em
+      \fi\fi
+      \indent % start paragraph
+      \unhbox\boxA
+      %
+      % Do not prefer a separate line ending with a hyphen to fewer lines.
+      \finalhyphendemerits = 0
+      %
+      % Word spacing - no stretch
+      \spaceskip=\fontdimen2\font minus \fontdimen4\font
+      %
+      \linepenalty=1000  % Discourage line breaks.
+      \hyphenpenalty=5000  % Discourage hyphenation.
+      %
+      \par % format the paragraph
+    \egroup % The \vbox
+    \fi
+  \endgroup
+  \dotheinsertentrybox
+}}
+
+\newskip\thinshrinkable
+\skip\thinshrinkable=.15em minus .15em
+
+\newbox\entrybox
+\def\insertentrybox{%
+  \ourunvbox\entrybox
+}
+
+% default definition
+\let\dotheinsertentrybox\insertentrybox
+
+% Use \lastbox to take apart vbox box by box, and add each sub-box
+% to the current vertical list.
+\def\ourunvbox#1{%
+\bgroup % for local binding of \delayedbox
+  % Remove the last box from box #1
+  \global\setbox#1=\vbox{%
+    \unvbox#1%
+    \unskip % remove any glue
+    \unpenalty
+    \global\setbox\interbox=\lastbox
+  }%
+  \setbox\delayedbox=\box\interbox
+  \ifdim\ht#1=0pt\else
+    \ourunvbox#1 % Repeat on what's left of the box
+    \nobreak
+  \fi
+  \box\delayedbox
+\egroup
+}
+\newbox\delayedbox
+\newbox\interbox
+
+% Used from \printindex.  \firsttoken should be the first token
+% after the \entry.  If it's not another \entry, we are at the last
+% line of a group of index entries, so insert a penalty to discourage
+% widowed index entries.
+\def\dotheinsertentryboxwithpenalty{%
+  \ifx\firsttoken\isentry
+  \else
+    \penalty 9000
+  \fi
+  \insertentrybox
+}
+\def\isentry{\entry}%
+
+% Like plain.tex's \dotfill, except uses up at least 1 em.
+% The filll stretch here overpowers both the fil and fill stretch to push
+% the page number to the right.
+\def\indexdotfill{\cleaders
+  \hbox{$\mathsurround=0pt \mkern1.5mu.\mkern1.5mu$}\hskip 1em plus 1filll}
+
+
+\def\primary #1{\line{#1\hfil}}
+
+\newskip\secondaryindent \secondaryindent=0.5cm
+\def\secondary#1#2{{%
+  \parfillskip=0in
+  \parskip=0in
+  \hangindent=1in
+  \hangafter=1
+  \noindent\hskip\secondaryindent\hbox{#1}\indexdotfill
+  \ifpdf
+    \pdfgettoks#2.\ \the\toksA % The page number ends the paragraph.
+  \else
+    \ifx\XeTeXrevision\thisisundefined
+      #2
+    \else
+      \pdfgettoks#2.\ \the\toksA % The page number ends the paragraph.
+    \fi
+  \fi
+  \par
+}}
+
+% Define two-column mode, which we use to typeset indexes.
+% Adapted from the TeXbook, page 416, which is to say,
+% the manmac.tex format used to print the TeXbook itself.
+\catcode`\@=11  % private names
+
+\newbox\partialpage
+\newdimen\doublecolumnhsize
+
+% Use inside an output routine to save \topmark and \firstmark
+\def\savemarks{%
+  \global\savedtopmark=\expandafter{\topmark }%
+  \global\savedfirstmark=\expandafter{\firstmark }%
+}
+\newtoks\savedtopmark
+\newtoks\savedfirstmark
+
+% Set \topmark and \firstmark for next time \output runs.
+% Can't be run from within \output (because any material
+% added while an output routine is active, including
+% penalties, is saved until after it finishes).  The page so far
+% should be empty, otherwise what's on it will be thrown away.
+\def\restoremarks{%
+  \mark{\the\savedtopmark}%
+  \bgroup\output = {%
+    \setbox\dummybox=\box\PAGE
+  }abc\eject\egroup
+  % "abc" because output routine doesn't fire for a completely empty page.
+  \mark{\the\savedfirstmark}%
+}
+
+\def\begindoublecolumns{\begingroup % ended by \enddoublecolumns
+  % If not much space left on page, start a new page.
+  \ifdim\pagetotal>0.8\vsize\vfill\eject\fi
+  %
+  % Grab any single-column material above us.
+  \output = {%
+    %
+    % Here is a possibility not foreseen in manmac: if we accumulate a
+    % whole lot of material, we might end up calling this \output
+    % routine twice in a row (see the doublecol-lose test, which is
+    % essentially a couple of indexes with @setchapternewpage off).  In
+    % that case we just ship out what is in \partialpage with the normal
+    % output routine.  Generally, \partialpage will be empty when this
+    % runs and this will be a no-op.  See the indexspread.tex test case.
+    \ifvoid\partialpage \else
+      \onepageout{\pagecontents\partialpage}%
+    \fi
+    %
+    \global\setbox\partialpage = \vbox{%
+      % Unvbox the main output page.
+      \unvbox\PAGE
+      \kern-\topskip \kern\baselineskip
+    }%
+    \savemarks
+  }%
+  \eject % run that output routine to set \partialpage
+  \restoremarks
+  %
+  % We recover the two marks that the last output routine saved in order
+  % to propagate the information in marks added around a chapter heading,
+  % which could otherwise be lost by the time the final page is output.
+  %
+  %
+  % Use the double-column output routine for subsequent pages.
+  \output = {\doublecolumnout}%
+  %
+  % Change the page size parameters.  We could do this once outside this
+  % routine, in each of @smallbook, @afourpaper, and the default 8.5x11
+  % format, but then we repeat the same computation.  Repeating a couple
+  % of assignments once per index is clearly meaningless for the
+  % execution time, so we may as well do it in one place.
+  %
+  % First we halve the line length, less a little for the gutter between
+  % the columns.  We compute the gutter based on the line length, so it
+  % changes automatically with the paper format.  The magic constant
+  % below is chosen so that the gutter has the same value (well, +-<1pt)
+  % as it did when we hard-coded it.
+  %
+  % We put the result in a separate register, \doublecolumnhsize, so we
+  % can restore it in \pagesofar, after \hsize itself has (potentially)
+  % been clobbered.
+  %
+  \doublecolumnhsize = \hsize
+    \advance\doublecolumnhsize by -.04154\hsize
+    \divide\doublecolumnhsize by 2
+  \hsize = \doublecolumnhsize
+  %
+  % Double the \vsize as well.
+  \advance\vsize by -\ht\partialpage
+  \vsize = 2\vsize
+  %
+  % For the benefit of balancing columns
+  \advance\baselineskip by 0pt plus 0.5pt
+}
+
+% The double-column output routine for all double-column pages except
+% the last, which is done by \balancecolumns.
+%
+\def\doublecolumnout{%
+  %
+  \splittopskip=\topskip \splitmaxdepth=\maxdepth
+  % Get the available space for the double columns -- the normal
+  % (undoubled) page height minus any material left over from the
+  % previous page.
+  \dimen@ = \vsize
+  \divide\dimen@ by 2
+  %
+  % box0 will be the left-hand column, box2 the right.
+  \setbox0=\vsplit\PAGE to\dimen@ \setbox2=\vsplit\PAGE to\dimen@
+  \global\advance\vsize by 2\ht\partialpage
+  \onepageout\pagesofar
+  \unvbox\PAGE
+  \penalty\outputpenalty
+}
+%
+% Re-output the contents of the output page -- any previous material,
+% followed by the two boxes we just split, in box0 and box2.
+\def\pagesofar{%
+  \unvbox\partialpage
+  %
+  \hsize = \doublecolumnhsize
+  \wd0=\hsize \wd2=\hsize
+  \hbox to\txipagewidth{\box0\hfil\box2}%
+}
+
+
+% Finished with double columns.
+\def\enddoublecolumns{%
+  % The following penalty ensures that the page builder is exercised
+  % _before_ we change the output routine.  This is necessary in the
+  % following situation:
+  %
+  % The last section of the index consists only of a single entry.
+  % Before this section, \pagetotal is less than \pagegoal, so no
+  % break occurs before the last section starts.  However, the last
+  % section, consisting of \initial and the single \entry, does not
+  % fit on the page and has to be broken off.  Without the following
+  % penalty the page builder will not be exercised until \eject
+  % below, and by that time we'll already have changed the output
+  % routine to the \balancecolumns version, so the next-to-last
+  % double-column page will be processed with \balancecolumns, which
+  % is wrong:  The two columns will go to the main vertical list, with
+  % the broken-off section in the recent contributions.  As soon as
+  % the output routine finishes, TeX starts reconsidering the page
+  % break.  The two columns and the broken-off section both fit on the
+  % page, because the two columns now take up only half of the page
+  % goal.  When TeX sees \eject from below which follows the final
+  % section, it invokes the new output routine that we've set after
+  % \balancecolumns below; \onepageout will try to fit the two columns
+  % and the final section into the vbox of \txipageheight (see
+  % \pagebody), causing an overfull box.
+  %
+  % Note that glue won't work here, because glue does not exercise the
+  % page builder, unlike penalties (see The TeXbook, pp. 280-281).
+  \penalty0
+  %
+  \output = {%
+    % Split the last of the double-column material.
+    \savemarks
+    \balancecolumns
+  }%
+  \eject % call the \output just set
+  \ifdim\pagetotal=0pt
+    % Having called \balancecolumns once, we do not
+    % want to call it again.  Therefore, reset \output to its normal
+    % definition right away.
+    \global\output = {\onepageout{\pagecontents\PAGE}}%
+    %
+    \endgroup % started in \begindoublecolumns
+    \restoremarks
+    % Leave the double-column material on the current page, no automatic
+    % page break.
+    \box\balancedcolumns
+    %
+    % \pagegoal was set to the doubled \vsize above, since we restarted
+    % the current page.  We're now back to normal single-column
+    % typesetting, so reset \pagegoal to the normal \vsize.
+    \global\vsize = \txipageheight %
+    \pagegoal = \txipageheight %
+  \else
+    % We had some left-over material.  This might happen when \doublecolumnout
+    % is called in \balancecolumns.  Try again.
+    \expandafter\enddoublecolumns
+  \fi
+}
+\newbox\balancedcolumns
+\setbox\balancedcolumns=\vbox{shouldnt see this}%
+%
+% Only called for the last of the double column material.  \doublecolumnout 
+% does the others.
+\def\balancecolumns{%
+  \setbox0 = \vbox{\unvbox\PAGE}% like \box255 but more efficient, see p.120.
+  \dimen@ = \ht0
+  \advance\dimen@ by \topskip
+  \advance\dimen@ by-\baselineskip
+  \ifdim\dimen@<5\baselineskip
+    % Don't split a short final column in two.
+    \setbox2=\vbox{}%
+    \global\setbox\balancedcolumns=\vbox{\pagesofar}%
+  \else
+    \divide\dimen@ by 2 % target to split to
+    \dimen@ii = \dimen@
+    \splittopskip = \topskip
+    % Loop until left column is at least as high as the right column.
+    {%
+      \vbadness = 10000
+      \loop
+        \global\setbox3 = \copy0
+        \global\setbox1 = \vsplit3 to \dimen@
+      \ifdim\ht1<\ht3
+        \global\advance\dimen@ by 1pt
+      \repeat
+    }%
+    % Now the left column is in box 1, and the right column in box 3.
+    %
+    % Check whether the left column has come out higher than the page itself.  
+    % (Note that we have doubled \vsize for the double columns, so
+    % the actual height of the page is 0.5\vsize).
+    \ifdim2\ht1>\vsize
+      % It appears that we have been called upon to balance too much material.
+      % Output some of it with \doublecolumnout, leaving the rest on the page.
+      \setbox\PAGE=\box0
+      \doublecolumnout
+    \else
+      % Compare the heights of the two columns.
+      \ifdim4\ht1>5\ht3
+        % Column heights are too different, so don't make their bottoms
+        % flush with each other.
+        \setbox2=\vbox to \ht1 {\unvbox3\vfill}%
+        \setbox0=\vbox to \ht1 {\unvbox1\vfill}%
+      \else
+        % Make column bottoms flush with each other.
+        \setbox2=\vbox to\ht1{\unvbox3\unskip}%
+        \setbox0=\vbox to\ht1{\unvbox1\unskip}%
+      \fi
+      \global\setbox\balancedcolumns=\vbox{\pagesofar}%
+    \fi
+  \fi
+  %
+}
+\catcode`\@ = \other
+
+
+\message{sectioning,}
+% Chapters, sections, etc.
+
+% Let's start with @part.
+\outer\parseargdef\part{\partzzz{#1}}
+\def\partzzz#1{%
+  \chapoddpage
+  \null
+  \vskip.3\vsize  % move it down on the page a bit
+  \begingroup
+    \noindent \titlefonts\rm #1\par % the text
+    \let\lastnode=\empty      % no node to associate with
+    \writetocentry{part}{#1}{}% but put it in the toc
+    \headingsoff              % no headline or footline on the part page
+    % This outputs a mark at the end of the page that clears \thischapter
+    % and \thissection, as is done in \startcontents.
+    \let\pchapsepmacro\relax
+    \chapmacro{}{Yomitfromtoc}{}%
+    \chapoddpage
+  \endgroup
+}
+
+% \unnumberedno is an oxymoron.  But we count the unnumbered
+% sections so that we can refer to them unambiguously in the pdf
+% outlines by their "section number".  We avoid collisions with chapter
+% numbers by starting them at 10000.  (If a document ever has 10000
+% chapters, we're in trouble anyway, I'm sure.)
+\newcount\unnumberedno \unnumberedno = 10000
+\newcount\chapno
+\newcount\secno        \secno=0
+\newcount\subsecno     \subsecno=0
+\newcount\subsubsecno  \subsubsecno=0
+
+% This counter is funny since it counts through charcodes of letters A, B, ...
+\newcount\appendixno  \appendixno = `\@
+%
+% \def\appendixletter{\char\the\appendixno}
+% We do the following ugly conditional instead of the above simple
+% construct for the sake of pdftex, which needs the actual
+% letter in the expansion, not just typeset.
+%
+\def\appendixletter{%
+  \ifnum\appendixno=`A A%
+  \else\ifnum\appendixno=`B B%
+  \else\ifnum\appendixno=`C C%
+  \else\ifnum\appendixno=`D D%
+  \else\ifnum\appendixno=`E E%
+  \else\ifnum\appendixno=`F F%
+  \else\ifnum\appendixno=`G G%
+  \else\ifnum\appendixno=`H H%
+  \else\ifnum\appendixno=`I I%
+  \else\ifnum\appendixno=`J J%
+  \else\ifnum\appendixno=`K K%
+  \else\ifnum\appendixno=`L L%
+  \else\ifnum\appendixno=`M M%
+  \else\ifnum\appendixno=`N N%
+  \else\ifnum\appendixno=`O O%
+  \else\ifnum\appendixno=`P P%
+  \else\ifnum\appendixno=`Q Q%
+  \else\ifnum\appendixno=`R R%
+  \else\ifnum\appendixno=`S S%
+  \else\ifnum\appendixno=`T T%
+  \else\ifnum\appendixno=`U U%
+  \else\ifnum\appendixno=`V V%
+  \else\ifnum\appendixno=`W W%
+  \else\ifnum\appendixno=`X X%
+  \else\ifnum\appendixno=`Y Y%
+  \else\ifnum\appendixno=`Z Z%
+  % The \the is necessary, despite appearances, because \appendixletter is
+  % expanded while writing the .toc file.  \char\appendixno is not
+  % expandable, thus it is written literally, thus all appendixes come out
+  % with the same letter (or @) in the toc without it.
+  \else\char\the\appendixno
+  \fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi
+  \fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi}
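+%
+% For instance: \appendixno starts at the character code of `@', one before
+% `A', and each @appendix advances it by one (see \appendixzzz below), so
+% the first appendix gets the letter `A'.  The final \char\the\appendixno
+% branch is only reached beyond `Z'.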
+
+% Each @chapter defines these (using marks) as the number+name, number
+% and name of the chapter.  Page headings and footings can use
+% these.  @section does likewise.
+\def\thischapter{}
+\def\thischapternum{}
+\def\thischaptername{}
+\def\thissection{}
+\def\thissectionnum{}
+\def\thissectionname{}
+
+\newcount\absseclevel % used to calculate proper heading level
+\newcount\secbase\secbase=0 % @raisesections/@lowersections modify this count
+
+% @raisesections: treat @section as chapter, @subsection as section, etc.
+\def\raisesections{\global\advance\secbase by -1}
+\let\up=\raisesections % original BFox name
+
+% @lowersections: treat @chapter as section, @section as subsection, etc.
+\def\lowersections{\global\advance\secbase by 1}
+\let\down=\lowersections % original BFox name
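+%
+% Illustrative Texinfo usage (only a sketch; `details.texi' is a made-up
+% file name): demote the headings of an included file by one level.
+%   @lowersections
+%   @include details.texi  @c its @chapter commands now act like @section
+%   @raisesections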
+
+% We only go as deep as subsubsections.
+\chardef\maxseclevel = 3
+%
+% A numbered section within an unnumbered chapter changes to unnumbered too.
+% To achieve this, remember the highest-level ("biggest") unnumbered
+% sectioning command we are currently in:
+\chardef\unnlevel = \maxseclevel
+%
+% Trace whether the current chapter is an appendix or not:
+% \chapheadtype is "N" or "A", unnumbered chapters are ignored.
+\def\chapheadtype{N}
+
+% Choose a heading macro
+% #1 is heading type
+% #2 is heading level
+% #3 is text for heading
+\def\genhead#1#2#3{%
+  % Compute the abs. sec. level:
+  \absseclevel=#2
+  \advance\absseclevel by \secbase
+  % Make sure \absseclevel doesn't fall outside the range:
+  \ifnum \absseclevel < 0
+    \absseclevel = 0
+  \else
+    \ifnum \absseclevel > 3
+      \absseclevel = 3
+    \fi
+  \fi
+  % The heading type:
+  \def\headtype{#1}%
+  \if \headtype U%
+    \ifnum \absseclevel < \unnlevel
+      \chardef\unnlevel = \absseclevel
+    \fi
+  \else
+    % Check for appendix sections:
+    \ifnum \absseclevel = 0
+      \edef\chapheadtype{\headtype}%
+    \else
+      \if \headtype A\if \chapheadtype N%
+       \errmessage{@appendix... within a non-appendix chapter}%
+      \fi\fi
+    \fi
+    % Check for numbered within unnumbered:
+    \ifnum \absseclevel > \unnlevel
+      \def\headtype{U}%
+    \else
+      \chardef\unnlevel = 3
+    \fi
+  \fi
+  % Now print the heading:
+  \if \headtype U%
+    \ifcase\absseclevel
+       \unnumberedzzz{#3}%
+    \or \unnumberedseczzz{#3}%
+    \or \unnumberedsubseczzz{#3}%
+    \or \unnumberedsubsubseczzz{#3}%
+    \fi
+  \else
+    \if \headtype A%
+      \ifcase\absseclevel
+         \appendixzzz{#3}%
+      \or \appendixsectionzzz{#3}%
+      \or \appendixsubseczzz{#3}%
+      \or \appendixsubsubseczzz{#3}%
+      \fi
+    \else
+      \ifcase\absseclevel
+         \chapterzzz{#3}%
+      \or \seczzz{#3}%
+      \or \numberedsubseczzz{#3}%
+      \or \numberedsubsubseczzz{#3}%
+      \fi
+    \fi
+  \fi
+  \suppressfirstparagraphindent
+}
+
+% an interface:
+\def\numhead{\genhead N}
+\def\apphead{\genhead A}
+\def\unnmhead{\genhead U}
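+%
+% Worked example of the level computation in \genhead: with @lowersections
+% in effect (\secbase = 1), a level-0 command such as @chapter goes through
+% \numhead0, \absseclevel becomes 0 + 1 = 1, and the heading is typeset by
+% \seczzz, i.e. as a numbered section.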
+
+% @chapter, @appendix, @unnumbered.  Increment top-level counter, reset
+% all lower-level sectioning counters to zero.
+%
+% Also set \chaplevelprefix, which we prepend to @float sequence numbers
+% (e.g., figures), q.v.  By default (before any chapter), that is empty.
+\let\chaplevelprefix = \empty
+%
+\outer\parseargdef\chapter{\numhead0{#1}} % normally numhead0 calls chapterzzz
+\def\chapterzzz#1{%
+  % section resetting is \global in case the chapter is in a group, such
+  % as an @include file.
+  \global\secno=0 \global\subsecno=0 \global\subsubsecno=0
+    \global\advance\chapno by 1
+  %
+  % Used for \float.
+  \gdef\chaplevelprefix{\the\chapno.}%
+  \resetallfloatnos
+  %
+  % \putwordChapter can contain complex things in translations.
+  \toks0=\expandafter{\putwordChapter}%
+  \message{\the\toks0 \space \the\chapno}%
+  %
+  % Write the actual heading.
+  \chapmacro{#1}{Ynumbered}{\the\chapno}%
+  %
+  % So @section and the like are numbered underneath this chapter.
+  \global\let\section = \numberedsec
+  \global\let\subsection = \numberedsubsec
+  \global\let\subsubsection = \numberedsubsubsec
+}
+
+\outer\parseargdef\appendix{\apphead0{#1}} % normally calls appendixzzz
+%
+\def\appendixzzz#1{%
+  \global\secno=0 \global\subsecno=0 \global\subsubsecno=0
+    \global\advance\appendixno by 1
+  \gdef\chaplevelprefix{\appendixletter.}%
+  \resetallfloatnos
+  %
+  % \putwordAppendix can contain complex things in translations.
+  \toks0=\expandafter{\putwordAppendix}%
+  \message{\the\toks0 \space \appendixletter}%
+  %
+  \chapmacro{#1}{Yappendix}{\appendixletter}%
+  %
+  \global\let\section = \appendixsec
+  \global\let\subsection = \appendixsubsec
+  \global\let\subsubsection = \appendixsubsubsec
+}
+
+% normally unnmhead0 calls unnumberedzzz:
+\outer\parseargdef\unnumbered{\unnmhead0{#1}}
+\def\unnumberedzzz#1{%
+  \global\secno=0 \global\subsecno=0 \global\subsubsecno=0
+    \global\advance\unnumberedno by 1
+  %
+  % Since an unnumbered has no number, no prefix for figures.
+  \global\let\chaplevelprefix = \empty
+  \resetallfloatnos
+  %
+  % This used to be simply \message{#1}, but TeX fully expands the
+  % argument to \message.  Therefore, if #1 contained @-commands, TeX
+  % expanded them.  For example, in `@unnumbered The @cite{Book}', TeX
+  % expanded @cite (which turns out to cause errors because \cite is meant
+  % to be executed, not expanded).
+  %
+  % Anyway, we don't want the fully-expanded definition of @cite to appear
+  % as a result of the \message, we just want `@cite' itself.  We use
+  % \the<toks register> to achieve this: TeX expands \the<toks> only once,
+  % simply yielding the contents of <toks register>.  (We also do this for
+  % the toc entries.)
+  \toks0 = {#1}%
+  \message{(\the\toks0)}%
+  %
+  \chapmacro{#1}{Ynothing}{\the\unnumberedno}%
+  %
+  \global\let\section = \unnumberedsec
+  \global\let\subsection = \unnumberedsubsec
+  \global\let\subsubsection = \unnumberedsubsubsec
+}
+
+% @centerchap is like @unnumbered, but the heading is centered.
+\outer\parseargdef\centerchap{%
+  \let\centerparametersmaybe = \centerparameters
+  \unnmhead0{#1}%
+  \let\centerparametersmaybe = \relax
+}
+
+% @top is like @unnumbered.
+\let\top\unnumbered
+
+% Sections.
+% 
+\outer\parseargdef\numberedsec{\numhead1{#1}} % normally calls seczzz
+\def\seczzz#1{%
+  \global\subsecno=0 \global\subsubsecno=0  \global\advance\secno by 1
+  \sectionheading{#1}{sec}{Ynumbered}{\the\chapno.\the\secno}%
+}
+
+% normally calls appendixsectionzzz:
+\outer\parseargdef\appendixsection{\apphead1{#1}}
+\def\appendixsectionzzz#1{%
+  \global\subsecno=0 \global\subsubsecno=0  \global\advance\secno by 1
+  \sectionheading{#1}{sec}{Yappendix}{\appendixletter.\the\secno}%
+}
+\let\appendixsec\appendixsection
+
+% normally calls unnumberedseczzz:
+\outer\parseargdef\unnumberedsec{\unnmhead1{#1}}
+\def\unnumberedseczzz#1{%
+  \global\subsecno=0 \global\subsubsecno=0  \global\advance\secno by 1
+  \sectionheading{#1}{sec}{Ynothing}{\the\unnumberedno.\the\secno}%
+}
+
+% Subsections.
+% 
+% normally calls numberedsubseczzz:
+\outer\parseargdef\numberedsubsec{\numhead2{#1}}
+\def\numberedsubseczzz#1{%
+  \global\subsubsecno=0  \global\advance\subsecno by 1
+  \sectionheading{#1}{subsec}{Ynumbered}{\the\chapno.\the\secno.\the\subsecno}%
+}
+
+% normally calls appendixsubseczzz:
+\outer\parseargdef\appendixsubsec{\apphead2{#1}}
+\def\appendixsubseczzz#1{%
+  \global\subsubsecno=0  \global\advance\subsecno by 1
+  \sectionheading{#1}{subsec}{Yappendix}%
+                 {\appendixletter.\the\secno.\the\subsecno}%
+}
+
+% normally calls unnumberedsubseczzz:
+\outer\parseargdef\unnumberedsubsec{\unnmhead2{#1}}
+\def\unnumberedsubseczzz#1{%
+  \global\subsubsecno=0  \global\advance\subsecno by 1
+  \sectionheading{#1}{subsec}{Ynothing}%
+                 {\the\unnumberedno.\the\secno.\the\subsecno}%
+}
+
+% Subsubsections.
+% 
+% normally numberedsubsubseczzz:
+\outer\parseargdef\numberedsubsubsec{\numhead3{#1}}
+\def\numberedsubsubseczzz#1{%
+  \global\advance\subsubsecno by 1
+  \sectionheading{#1}{subsubsec}{Ynumbered}%
+                 {\the\chapno.\the\secno.\the\subsecno.\the\subsubsecno}%
+}
+
+% normally appendixsubsubseczzz:
+\outer\parseargdef\appendixsubsubsec{\apphead3{#1}}
+\def\appendixsubsubseczzz#1{%
+  \global\advance\subsubsecno by 1
+  \sectionheading{#1}{subsubsec}{Yappendix}%
+                 {\appendixletter.\the\secno.\the\subsecno.\the\subsubsecno}%
+}
+
+% normally unnumberedsubsubseczzz:
+\outer\parseargdef\unnumberedsubsubsec{\unnmhead3{#1}}
+\def\unnumberedsubsubseczzz#1{%
+  \global\advance\subsubsecno by 1
+  \sectionheading{#1}{subsubsec}{Ynothing}%
+                 {\the\unnumberedno.\the\secno.\the\subsecno.\the\subsubsecno}%
+}
+
+% These macros control what the section commands do, according
+% to what kind of chapter we are in (ordinary, appendix, or unnumbered).
+% Define them by default for a numbered chapter.
+\let\section = \numberedsec
+\let\subsection = \numberedsubsec
+\let\subsubsection = \numberedsubsubsec
+
+% Define @majorheading, @heading and @subheading
+
+\def\majorheading{%
+  {\advance\chapheadingskip by 10pt \chapbreak }%
+  \parsearg\chapheadingzzz
+}
+
+\def\chapheading{\chapbreak \parsearg\chapheadingzzz}
+\def\chapheadingzzz#1{%
+  \vbox{\chapfonts \raggedtitlesettings #1\par}%
+  \nobreak\bigskip \nobreak
+  \suppressfirstparagraphindent
+}
+
+% @heading, @subheading, @subsubheading.
+\parseargdef\heading{\sectionheading{#1}{sec}{Yomitfromtoc}{}
+  \suppressfirstparagraphindent}
+\parseargdef\subheading{\sectionheading{#1}{subsec}{Yomitfromtoc}{}
+  \suppressfirstparagraphindent}
+\parseargdef\subsubheading{\sectionheading{#1}{subsubsec}{Yomitfromtoc}{}
+  \suppressfirstparagraphindent}
+
+% These macros generate a chapter, section, etc. heading only
+% (including whitespace, linebreaking, etc. around it),
+% given all the information in convenient, parsed form.
+
+% Args are the skip and penalty (usually negative)
+\def\dobreak#1#2{\par\ifdim\lastskip<#1\removelastskip\penalty#2\vskip#1\fi}
+
+% Parameter controlling skip before chapter headings (if needed)
+\newskip\chapheadingskip
+
+% Define plain chapter starts, and page on/off switching for it.
+\def\chapbreak{\dobreak \chapheadingskip {-4000}}
+
+% Start a new page
+\def\chappager{\par\vfill\supereject}
+
+% \chapoddpage - start on an odd page for a new chapter
+% Because \domark is called before \chapoddpage, the filler page will
+% get the headings for the next chapter, which is wrong.  But we don't
+% care -- we just disable all headings on the filler page.
+\def\chapoddpage{%
+  \chappager
+  \ifodd\pageno \else
+    \begingroup
+      \headingsoff
+      \null
+      \chappager
+    \endgroup
+  \fi
+}
+
+\parseargdef\setchapternewpage{\csname CHAPPAG#1\endcsname}
+
+\def\CHAPPAGoff{%
+\global\let\contentsalignmacro = \chappager
+\global\let\pchapsepmacro=\chapbreak
+\global\let\pagealignmacro=\chappager}
+
+\def\CHAPPAGon{%
+\global\let\contentsalignmacro = \chappager
+\global\let\pchapsepmacro=\chappager
+\global\let\pagealignmacro=\chappager
+\global\def\HEADINGSon{\HEADINGSsingle}}
+
+\def\CHAPPAGodd{%
+\global\let\contentsalignmacro = \chapoddpage
+\global\let\pchapsepmacro=\chapoddpage
+\global\let\pagealignmacro=\chapoddpage
+\global\def\HEADINGSon{\HEADINGSdouble}}
+
+\CHAPPAGon
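+%
+% For instance, `@setchapternewpage odd' in the document expands \CHAPPAGodd
+% via the \csname construction above: chapters then start on odd pages and
+% \HEADINGSon is redefined to use \HEADINGSdouble.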
+
+% \chapmacro - Chapter opening.
+%
+% #1 is the text, #2 is the section type (Ynumbered, Ynothing,
+% Yappendix, Yomitfromtoc), #3 the chapter number.
+% Not used for @heading series.
+%
+% To test against our argument.
+\def\Ynothingkeyword{Ynothing}
+\def\Yappendixkeyword{Yappendix}
+\def\Yomitfromtockeyword{Yomitfromtoc}
+%
+\def\chapmacro#1#2#3{%
+  \expandafter\ifx\thisenv\titlepage\else
+    \checkenv{}% chapters, etc., should not start inside an environment.
+  \fi
+  % FIXME: \chapmacro is currently called from inside \titlepage when
+  % \setcontentsaftertitlepage to print the "Table of Contents" heading, but
+  % this should probably be done by \sectionheading with an option to print
+  % in chapter size.
+  %
+  % Insert the first mark before the heading break (see notes for \domark).
+  \let\prevchapterdefs=\lastchapterdefs
+  \let\prevsectiondefs=\lastsectiondefs
+  \gdef\lastsectiondefs{\gdef\thissectionname{}\gdef\thissectionnum{}%
+                        \gdef\thissection{}}%
+  %
+  \def\temptype{#2}%
+  \ifx\temptype\Ynothingkeyword
+    \gdef\lastchapterdefs{\gdef\thischaptername{#1}\gdef\thischapternum{}%
+                          \gdef\thischapter{\thischaptername}}%
+  \else\ifx\temptype\Yomitfromtockeyword
+    \gdef\lastchapterdefs{\gdef\thischaptername{#1}\gdef\thischapternum{}%
+                          \gdef\thischapter{}}%
+  \else\ifx\temptype\Yappendixkeyword
+    \toks0={#1}%
+    \xdef\lastchapterdefs{%
+      \gdef\noexpand\thischaptername{\the\toks0}%
+      \gdef\noexpand\thischapternum{\appendixletter}%
+      % \noexpand\putwordAppendix avoids expanding indigestible
+      % commands in some of the translations.
+      \gdef\noexpand\thischapter{\noexpand\putwordAppendix{}
+                                 \noexpand\thischapternum:
+                                 \noexpand\thischaptername}%
+    }%
+  \else
+    \toks0={#1}%
+    \xdef\lastchapterdefs{%
+      \gdef\noexpand\thischaptername{\the\toks0}%
+      \gdef\noexpand\thischapternum{\the\chapno}%
+      % \noexpand\putwordChapter avoids expanding indigestible
+      % commands in some of the translations.
+      \gdef\noexpand\thischapter{\noexpand\putwordChapter{}
+                                 \noexpand\thischapternum:
+                                 \noexpand\thischaptername}%
+    }%
+  \fi\fi\fi
+  %
+  % Output the mark.  Pass it through \safewhatsit, to take care of
+  % the preceding space.
+  \safewhatsit\domark
+  %
+  % Insert the chapter heading break.
+  \pchapsepmacro
+  %
+  % Now the second mark, after the heading break.  No break points
+  % between here and the heading.
+  \let\prevchapterdefs=\lastchapterdefs
+  \let\prevsectiondefs=\lastsectiondefs
+  \domark
+  %
+  {%
+    \chapfonts \rm
+    \let\footnote=\errfootnoteheading % give better error message
+    %
+    % Have to define \lastsection before calling \donoderef, because the
+    % xref code eventually uses it.  On the other hand, it has to be called
+    % after \pchapsepmacro, or the headline will change too soon.
+    \gdef\lastsection{#1}%
+    %
+    % Only insert the separating space if we have a chapter/appendix
+    % number, and don't print the unnumbered ``number''.
+    \ifx\temptype\Ynothingkeyword
+      \setbox0 = \hbox{}%
+      \def\toctype{unnchap}%
+    \else\ifx\temptype\Yomitfromtockeyword
+      \setbox0 = \hbox{}% contents like unnumbered, but no toc entry
+      \def\toctype{omit}%
+    \else\ifx\temptype\Yappendixkeyword
+      \setbox0 = \hbox{\putwordAppendix{} #3\enspace}%
+      \def\toctype{app}%
+    \else
+      \setbox0 = \hbox{#3\enspace}%
+      \def\toctype{numchap}%
+    \fi\fi\fi
+    %
+    % Write the toc entry for this chapter.  Must come before the
+    % \donoderef, because we include the current node name in the toc
+    % entry, and \donoderef resets it to empty.
+    \writetocentry{\toctype}{#1}{#3}%
+    %
+    % For pdftex, we have to write out the node definition (aka, make
+    % the pdfdest) after any page break, but before the actual text has
+    % been typeset.  If the destination for the pdf outline is after the
+    % text, then jumping from the outline may wind up with the text not
+    % being visible, for instance under high magnification.
+    \donoderef{#2}%
+    %
+    % Typeset the actual heading.
+    \nobreak % Avoid page breaks at the interline glue.
+    \vbox{\raggedtitlesettings \hangindent=\wd0 \centerparametersmaybe
+          \unhbox0 #1\par}%
+  }%
+  \nobreak\bigskip % no page break after a chapter title
+  \nobreak
+}
+
+% @centerchap -- centered and unnumbered.
+\let\centerparametersmaybe = \relax
+\def\centerparameters{%
+  \advance\rightskip by 3\rightskip
+  \leftskip = \rightskip
+  \parfillskip = 0pt
+}
+
+
+% Section titles.  These macros combine the section number parts and
+% call the generic \sectionheading to do the printing.
+%
+\newskip\secheadingskip
+\def\secheadingbreak{\dobreak \secheadingskip{-1000}}
+
+% Subsection titles.
+\newskip\subsecheadingskip
+\def\subsecheadingbreak{\dobreak \subsecheadingskip{-500}}
+
+% Subsubsection titles.
+\def\subsubsecheadingskip{\subsecheadingskip}
+\def\subsubsecheadingbreak{\subsecheadingbreak}
+
+
+% Print any size, any type, section title.
+%
+% #1 is the text of the title,
+% #2 is the section level (sec/subsec/subsubsec),
+% #3 is the section type (Ynumbered, Ynothing, Yappendix, Yomitfromtoc),
+% #4 is the section number.
+%
+\def\seckeyword{sec}
+%
+\def\sectionheading#1#2#3#4{%
+  {%
+    \def\sectionlevel{#2}%
+    \def\temptype{#3}%
+    %
+    % It is ok for the @heading series commands to appear inside an
+    % environment (it's been historically allowed, though the logic is
+    % dubious), but not the others.
+    \ifx\temptype\Yomitfromtockeyword\else
+      \checkenv{}% non-@*heading should not be in an environment.
+    \fi
+    \let\footnote=\errfootnoteheading
+    %
+    % Switch to the right set of fonts.
+    \csname #2fonts\endcsname \rm
+    %
+    % Insert first mark before the heading break (see notes for \domark).
+    \let\prevsectiondefs=\lastsectiondefs
+    \ifx\temptype\Ynothingkeyword
+      \ifx\sectionlevel\seckeyword
+        \gdef\lastsectiondefs{\gdef\thissectionname{#1}\gdef\thissectionnum{}%
+                              \gdef\thissection{\thissectionname}}%
+      \fi
+    \else\ifx\temptype\Yomitfromtockeyword
+      % Don't redefine \thissection.
+    \else\ifx\temptype\Yappendixkeyword
+      \ifx\sectionlevel\seckeyword
+        \toks0={#1}%
+        \xdef\lastsectiondefs{%
+          \gdef\noexpand\thissectionname{\the\toks0}%
+          \gdef\noexpand\thissectionnum{#4}%
+          % \noexpand\putwordSection avoids expanding indigestible
+          % commands in some of the translations.
+          \gdef\noexpand\thissection{\noexpand\putwordSection{}
+                                     \noexpand\thissectionnum:
+                                     \noexpand\thissectionname}%
+        }%
+      \fi
+    \else
+      \ifx\sectionlevel\seckeyword
+        \toks0={#1}%
+        \xdef\lastsectiondefs{%
+          \gdef\noexpand\thissectionname{\the\toks0}%
+          \gdef\noexpand\thissectionnum{#4}%
+          % \noexpand\putwordSection avoids expanding indigestible
+          % commands in some of the translations.
+          \gdef\noexpand\thissection{\noexpand\putwordSection{}
+                                     \noexpand\thissectionnum:
+                                     \noexpand\thissectionname}%
+        }%
+      \fi
+    \fi\fi\fi
+    %
+    % Go into vertical mode.  Usually we'll already be there, but we
+    % don't want the following whatsit to end up in a preceding paragraph
+    % if the document didn't happen to have a blank line.
+    \par
+    %
+    % Output the mark.  Pass it through \safewhatsit, to take care of
+    % the preceding space.
+    \safewhatsit\domark
+    %
+    % Insert space above the heading.
+    \csname #2headingbreak\endcsname
+    %
+    % Now the second mark, after the heading break.  No break points
+    % between here and the heading.
+    \global\let\prevsectiondefs=\lastsectiondefs
+    \domark
+    %
+    % Only insert the space after the number if we have a section number.
+    \ifx\temptype\Ynothingkeyword
+      \setbox0 = \hbox{}%
+      \def\toctype{unn}%
+      \gdef\lastsection{#1}%
+    \else\ifx\temptype\Yomitfromtockeyword
+      % for @headings -- no section number, don't include in toc,
+      % and don't redefine \lastsection.
+      \setbox0 = \hbox{}%
+      \def\toctype{omit}%
+      \let\sectionlevel=\empty
+    \else\ifx\temptype\Yappendixkeyword
+      \setbox0 = \hbox{#4\enspace}%
+      \def\toctype{app}%
+      \gdef\lastsection{#1}%
+    \else
+      \setbox0 = \hbox{#4\enspace}%
+      \def\toctype{num}%
+      \gdef\lastsection{#1}%
+    \fi\fi\fi
+    %
+    % Write the toc entry (before \donoderef).  See comments in \chapmacro.
+    \writetocentry{\toctype\sectionlevel}{#1}{#4}%
+    %
+    % Write the node reference (= pdf destination for pdftex).
+    % Again, see comments in \chapmacro.
+    \donoderef{#3}%
+    %
+    % Interline glue will be inserted when the vbox is completed.
+    % That glue will be a valid breakpoint for the page, since it'll be
+    % preceded by a whatsit (usually from the \donoderef, or from the
+    % \writetocentry if there was no node).  We don't want to allow that
+    % break, since then the whatsits could end up on page n while the
+    % section is on page n+1, thus toc/etc. are wrong.  Debian bug 276000.
+    \nobreak
+    %
+    % Output the actual section heading.
+    \vbox{\hyphenpenalty=10000 \tolerance=5000 \parindent=0pt \ptexraggedright
+          \hangindent=\wd0  % zero if no section number
+          \unhbox0 #1}%
+  }%
+  % Add extra space after the heading -- half of whatever came above it.
+  % Don't allow stretch, though.
+  \kern .5 \csname #2headingskip\endcsname
+  %
+  % Do not let the kern be a potential breakpoint, as it would be if it
+  % was followed by glue.
+  \nobreak
+  %
+  % We'll almost certainly start a paragraph next, so don't let that
+  % glue accumulate.  (Not a breakpoint because it's preceded by a
+  % discardable item.)  However, when a paragraph is not started next
+  % (\startdefun, \cartouche, \center, etc.), this needs to be wiped out
+  % or the negative glue will cause weirdly wrong output, typically
+  % obscuring the section heading with something else.
+  \vskip-\parskip
+  %
+  % This is so the last item on the main vertical list is a known
+  % \penalty > 10000, so \startdefun, etc., can recognize the situation
+  % and do the needful.
+  \penalty 10001
+}
+
+
+\message{toc,}
+% Table of contents.
+\newwrite\tocfile
+
+% Write an entry to the toc file, opening it if necessary.
+% Called from @chapter, etc.
+%
+% Example usage: \writetocentry{sec}{Section Name}{\the\chapno.\the\secno}
+% We append the current node name (if any) and page number as additional
+% arguments for the \{chap,sec,...}entry macros which will eventually
+% read this.  The node name is used in the pdf outlines as the
+% destination to jump to.
+%
+% We open the .toc file for writing here instead of at @setfilename (or
+% any other fixed time) so that @contents can be anywhere in the document.
+% But if #1 is `omit', then we don't do anything.  This is used for the
+% table of contents chapter openings themselves.
+%
+\newif\iftocfileopened
+\def\omitkeyword{omit}%
+%
+\def\writetocentry#1#2#3{%
+  \edef\writetoctype{#1}%
+  \ifx\writetoctype\omitkeyword \else
+    \iftocfileopened\else
+      \immediate\openout\tocfile = \jobname.toc
+      \global\tocfileopenedtrue
+    \fi
+    %
+    \iflinks
+      {\atdummies
+       \edef\temp{%
+         \write\tocfile{@#1entry{#2}{#3}{\lastnode}{\noexpand\folio}}}%
+       \temp
+      }%
+    \fi
+  \fi
+  %
+  % Tell \shipout to create a pdf destination on each page, if we're
+  % writing pdf.  These are used in the table of contents.  We can't
+  % just write one on every page because the title pages are numbered
+  % 1 and 2 (the page numbers aren't printed), and so are the first
+  % two pages of the document.  Thus, we'd have two destinations named
+  % `1', and two named `2'.
+  \ifpdf
+    \global\pdfmakepagedesttrue
+  \else
+    \ifx\XeTeXrevision\thisisundefined
+    \else
+      \global\pdfmakepagedesttrue
+    \fi
+  \fi
+}
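+%
+% The .toc file then contains Texinfo-like lines along the lines of (the
+% values here are illustrative only):
+%   @numchapentry{Overview}{1}{Overview}{3}
+% i.e. the name, number, node name and page number that the \...entry
+% macros below will read back.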
+
+
+% These characters do not print properly in the Computer Modern roman
+% fonts, so we must take special care.  This is more or less redundant
+% with the Texinfo input format setup at the end of this file.
+%
+\def\activecatcodes{%
+  \catcode`\"=\active
+  \catcode`\$=\active
+  \catcode`\<=\active
+  \catcode`\>=\active
+  \catcode`\\=\active
+  \catcode`\^=\active
+  \catcode`\_=\active
+  \catcode`\|=\active
+  \catcode`\~=\active
+}
+
+
+% Read the toc file, which is essentially Texinfo input.
+\def\readtocfile{%
+  \setupdatafile
+  \activecatcodes
+  \input \tocreadfilename
+}
+
+\newskip\contentsrightmargin \contentsrightmargin=1in
+\newcount\savepageno
+\newcount\lastnegativepageno \lastnegativepageno = -1
+
+% Prepare to read what we've written to \tocfile.
+%
+\def\startcontents#1{%
+  % If @setchapternewpage on, and @headings double, the contents should
+  % start on an odd page, unlike chapters.  Thus, we maintain
+  % \contentsalignmacro in parallel with \pagealignmacro.
+  % From: Torbjorn Granlund <address@hidden>
+  \contentsalignmacro
+  \immediate\closeout\tocfile
+  %
+  % Don't need to put `Contents' or `Short Contents' in the headline.
+  % It is abundantly clear what they are.
+  \chapmacro{#1}{Yomitfromtoc}{}%
+  %
+  \savepageno = \pageno
+  \begingroup                  % Set up to handle contents files properly.
+    \raggedbottom              % Worry more about breakpoints than the bottom.
+    \entryrightmargin=\contentsrightmargin % Don't use the full line length.
+    %
+    % Roman numerals for page numbers.
+    \ifnum \pageno>0 \global\pageno = \lastnegativepageno \fi
+}
+
+% redefined for the two-volume lispref.  We always output on
+% \jobname.toc even if this is redefined.
+%
+\def\tocreadfilename{\jobname.toc}
+
+% Normal (long) toc.
+%
+\def\contents{%
+  \startcontents{\putwordTOC}%
+    \openin 1 \tocreadfilename\space
+    \ifeof 1 \else
+      \readtocfile
+    \fi
+    \vfill \eject
+    \contentsalignmacro % in case @setchapternewpage odd is in effect
+    \ifeof 1 \else
+      \pdfmakeoutlines
+    \fi
+    \closein 1
+  \endgroup
+  \lastnegativepageno = \pageno
+  \global\pageno = \savepageno
+}
+
+% And just the chapters.
+\def\summarycontents{%
+  \startcontents{\putwordShortTOC}%
+    %
+    \let\partentry = \shortpartentry
+    \let\numchapentry = \shortchapentry
+    \let\appentry = \shortchapentry
+    \let\unnchapentry = \shortunnchapentry
+    % We want a true roman here for the page numbers.
+    \secfonts
+    \let\rm=\shortcontrm \let\bf=\shortcontbf
+    \let\sl=\shortcontsl \let\tt=\shortconttt
+    \rm
+    \hyphenpenalty = 10000
+    \advance\baselineskip by 1pt % Open it up a little.
+    \def\numsecentry##1##2##3##4{}
+    \let\appsecentry = \numsecentry
+    \let\unnsecentry = \numsecentry
+    \let\numsubsecentry = \numsecentry
+    \let\appsubsecentry = \numsecentry
+    \let\unnsubsecentry = \numsecentry
+    \let\numsubsubsecentry = \numsecentry
+    \let\appsubsubsecentry = \numsecentry
+    \let\unnsubsubsecentry = \numsecentry
+    \openin 1 \tocreadfilename\space
+    \ifeof 1 \else
+      \readtocfile
+    \fi
+    \closein 1
+    \vfill \eject
+    \contentsalignmacro % in case @setchapternewpage odd is in effect
+  \endgroup
+  \lastnegativepageno = \pageno
+  \global\pageno = \savepageno
+}
+\let\shortcontents = \summarycontents
+
+% Typeset the label for a chapter or appendix for the short contents.
+% The arg is, e.g., `A' for an appendix, or `3' for a chapter.
+%
+\def\shortchaplabel#1{%
+  % This space should be enough, since a single number is .5em, and the
+  % widest letter (M) is 1em, at least in the Computer Modern fonts.
+  % But use \hss just in case.
+  % (This space doesn't include the extra space that gets added after
+  % the label; that gets put in by \shortchapentry above.)
+  %
+  % We'd like to right-justify chapter numbers, but that looks strange
+  % with appendix letters.  And right-justifying numbers and
+% left-justifying letters looks strange when there are fewer than 10
+% chapters.  We would have to read the whole toc once to know how many chapters
+  % there are before deciding ...
+  \hbox to 1em{#1\hss}%
+}
+
+% These macros generate individual entries in the table of contents.
+% The first argument is the chapter or section name.
+% The last argument is the page number.
+% The arguments in between are the chapter number, section number, ...
+
+% Parts, in the main contents.  Replace the part number, which doesn't
+% exist, with an empty box.  Let's hope all the numbers have the same width.
+% Also ignore the page number, which is conventionally not printed.
+\def\numeralbox{\setbox0=\hbox{8}\hbox to \wd0{\hfil}}
+\def\partentry#1#2#3#4{%
+  % Add stretch and a bonus for breaking the page before the part heading.
+  % This reduces the chance of the page being broken immediately after the
+  % part heading, before a following chapter heading.
+  \vskip 0pt plus 5\baselineskip
+  \penalty-300
+  \vskip 0pt plus -5\baselineskip
+  \dochapentry{\numeralbox\labelspace#1}{}%
+}
+%
+% Parts, in the short toc.
+\def\shortpartentry#1#2#3#4{%
+  \penalty-300
+  \vskip.5\baselineskip plus.15\baselineskip minus.1\baselineskip
+  \shortchapentry{{\bf #1}}{\numeralbox}{}{}%
+}
+
+% Chapters, in the main contents.
+\def\numchapentry#1#2#3#4{\dochapentry{#2\labelspace#1}{#4}}
+
+% Chapters, in the short toc.
+% See comments in \dochapentry re vbox and related settings.
+\def\shortchapentry#1#2#3#4{%
+  \tocentry{\shortchaplabel{#2}\labelspace #1}{\doshortpageno\bgroup#4\egroup}%
+}
+
+% Appendices, in the main contents.
+% Need the word Appendix, and a fixed-size box.
+%
+\def\appendixbox#1{%
+  % We use M since it's probably the widest letter.
+  \setbox0 = \hbox{\putwordAppendix{} M}%
+  \hbox to \wd0{\putwordAppendix{} #1\hss}}
+%
+\def\appentry#1#2#3#4{\dochapentry{\appendixbox{#2}\hskip.7em#1}{#4}}
+
+% Unnumbered chapters.
+\def\unnchapentry#1#2#3#4{\dochapentry{#1}{#4}}
+\def\shortunnchapentry#1#2#3#4{\tocentry{#1}{\doshortpageno\bgroup#4\egroup}}
+
+% Sections.
+\def\numsecentry#1#2#3#4{\dosecentry{#2\labelspace#1}{#4}}
+\let\appsecentry=\numsecentry
+\def\unnsecentry#1#2#3#4{\dosecentry{#1}{#4}}
+
+% Subsections.
+\def\numsubsecentry#1#2#3#4{\dosubsecentry{#2\labelspace#1}{#4}}
+\let\appsubsecentry=\numsubsecentry
+\def\unnsubsecentry#1#2#3#4{\dosubsecentry{#1}{#4}}
+
+% And subsubsections.
+\def\numsubsubsecentry#1#2#3#4{\dosubsubsecentry{#2\labelspace#1}{#4}}
+\let\appsubsubsecentry=\numsubsubsecentry
+\def\unnsubsubsecentry#1#2#3#4{\dosubsubsecentry{#1}{#4}}
+
+% This parameter controls the indentation of the various levels.
+% Same as \defaultparindent.
+\newdimen\tocindent \tocindent = 15pt
+
+% Now for the actual typesetting. In all these, #1 is the text and #2 is the
+% page number.
+%
+% If the toc has to be broken over pages, we want it to be at chapters
+% if at all possible; hence the \penalty.
+\def\dochapentry#1#2{%
+   \penalty-300 \vskip1\baselineskip plus.33\baselineskip minus.25\baselineskip
+   \begingroup
+     % Move the page numbers slightly to the right
+     \advance\entryrightmargin by -0.05em
+     \chapentryfonts
+     \tocentry{#1}{\dopageno\bgroup#2\egroup}%
+   \endgroup
+   \nobreak\vskip .25\baselineskip plus.1\baselineskip
+}
+
+\def\dosecentry#1#2{\begingroup
+  \secentryfonts \leftskip=\tocindent
+  \tocentry{#1}{\dopageno\bgroup#2\egroup}%
+\endgroup}
+
+\def\dosubsecentry#1#2{\begingroup
+  \subsecentryfonts \leftskip=2\tocindent
+  \tocentry{#1}{\dopageno\bgroup#2\egroup}%
+\endgroup}
+
+\def\dosubsubsecentry#1#2{\begingroup
+  \subsubsecentryfonts \leftskip=3\tocindent
+  \tocentry{#1}{\dopageno\bgroup#2\egroup}%
+\endgroup}
+
+% We use the same \entry macro as for the index entries.
+\let\tocentry = \entry
+
+% Space between chapter (or whatever) number and the title.
+\def\labelspace{\hskip1em \relax}
+
+\def\dopageno#1{{\rm #1}}
+\def\doshortpageno#1{{\rm #1}}
+
+\def\chapentryfonts{\secfonts \rm}
+\def\secentryfonts{\textfonts}
+\def\subsecentryfonts{\textfonts}
+\def\subsubsecentryfonts{\textfonts}
+
+
+\message{environments,}
+% @foo ... @end foo.
+
+% @tex ... @end tex    escapes into raw TeX temporarily.
+% One exception: @ is still an escape character, so that @end tex works.
+% But \@ or @@ will get a plain @ character.
+
+\envdef\tex{%
+  \setupmarkupstyle{tex}%
+  \catcode `\\=0 \catcode `\{=1 \catcode `\}=2
+  \catcode `\$=3 \catcode `\&=4 \catcode `\#=6
+  \catcode `\^=7 \catcode `\_=8 \catcode `\~=\active \let~=\tie
+  \catcode `\%=14
+  \catcode `\+=\other
+  \catcode `\"=\other
+  \catcode `\|=\other
+  \catcode `\<=\other
+  \catcode `\>=\other
+  \catcode `\`=\other
+  \catcode `\'=\other
+  %
+  % ' is active in math mode (mathcode"8000).  So reset it, and all our
+  % other math active characters (just in case), to plain's definitions.
+  \mathactive
+  %
+  % Inverse of the list at the beginning of the file.
+  \let\b=\ptexb
+  \let\bullet=\ptexbullet
+  \let\c=\ptexc
+  \let\,=\ptexcomma
+  \let\.=\ptexdot
+  \let\dots=\ptexdots
+  \let\equiv=\ptexequiv
+  \let\!=\ptexexclam
+  \let\i=\ptexi
+  \let\indent=\ptexindent
+  \let\noindent=\ptexnoindent
+  \let\{=\ptexlbrace
+  \let\+=\tabalign
+  \let\}=\ptexrbrace
+  \let\/=\ptexslash
+  \let\sp=\ptexsp
+  \let\*=\ptexstar
+  %\let\sup=\ptexsup % do not redefine, we want @sup to work in math mode
+  \let\t=\ptext
+  \expandafter \let\csname top\endcsname=\ptextop  % we've made it outer
+  \let\frenchspacing=\plainfrenchspacing
+  %
+  \def\endldots{\mathinner{\ldots\ldots\ldots\ldots}}%
+  \def\enddots{\relax\ifmmode\endldots\else$\mathsurround=0pt \endldots\,$\fi}%
+  \def\@{@}%
+}
+% There is no need to define \Etex.
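+%
+% Illustrative use in a Texinfo source file (only a sketch):
+%   @tex
+%   $\sum_{i=1}^n i = {n(n+1)\over 2}$
+%   @end tex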
+
+% Define @lisp ... @end lisp.
+% @lisp environment forms a group so it can rebind things,
+% including the definition of @end lisp (which normally is erroneous).
+
+% Amount to narrow the margins by for @lisp.
+\newskip\lispnarrowing \lispnarrowing=0.4in
+
+% This is the definition that ^^M gets inside @lisp, @example, and other
+% such environments.  \null is better than a space, since it doesn't
+% have any width.
+\def\lisppar{\null\endgraf}
+
+% This space is always present above and below environments.
+\newskip\envskipamount \envskipamount = 0pt
+
+% Make spacing above and below the environment symmetrical.  We use \parskip here
+% to help in doing that, since in @example-like environments \parskip
+% is reset to zero; thus the \afterenvbreak inserts no space -- but the
+% start of the next paragraph will insert \parskip.
+%
+\def\aboveenvbreak{{%
+  % =10000 instead of <10000 because of a special case in \itemzzz and
+  % \sectionheading, q.v.
+  \ifnum \lastpenalty=10000 \else
+    \advance\envskipamount by \parskip
+    \endgraf
+    \ifdim\lastskip<\envskipamount
+      \removelastskip
+      \ifnum\lastpenalty<10000
+        % Penalize breaking before the environment, because preceding text
+        % often leads into it.
+        \penalty100
+      \fi
+      \vskip\envskipamount
+    \fi
+  \fi
+}}
+
+\def\afterenvbreak{{%
+  % =10000 instead of <10000 because of a special case in \itemzzz and
+  % \sectionheading, q.v.
+  \ifnum \lastpenalty=10000 \else
+    \advance\envskipamount by \parskip
+    \endgraf
+    \ifdim\lastskip<\envskipamount
+      \removelastskip
+      % it's not a good place to break if the last penalty was \nobreak
+      % or better ...
+      \ifnum\lastpenalty<10000 \penalty-50 \fi
+      \vskip\envskipamount
+    \fi
+  \fi
+}}
+
+% \nonarrowing is a flag.  If set, @lisp etc. don't narrow their margins; the
+% environment also clears the flag again, so that environments nested inside
+% it do the narrowing as usual.
+\let\nonarrowing=\relax
+
+% @cartouche ... @end cartouche: draw rectangle w/rounded corners around
+% environment contents.
+\font\circle=lcircle10
+\newdimen\circthick
+\newdimen\cartouter\newdimen\cartinner
+\newskip\normbskip\newskip\normpskip\newskip\normlskip
+\circthick=\fontdimen8\circle
+%
+\def\ctl{{\circle\char'013\hskip -6pt}}% 6pt from pl file: 1/2charwidth
+\def\ctr{{\hskip 6pt\circle\char'010}}
+\def\cbl{{\circle\char'012\hskip -6pt}}
+\def\cbr{{\hskip 6pt\circle\char'011}}
+\def\carttop{\hbox to \cartouter{\hskip\lskip
+        \ctl\leaders\hrule height\circthick\hfil\ctr
+        \hskip\rskip}}
+\def\cartbot{\hbox to \cartouter{\hskip\lskip
+        \cbl\leaders\hrule height\circthick\hfil\cbr
+        \hskip\rskip}}
+%
+\newskip\lskip\newskip\rskip
+
+\envdef\cartouche{%
+  \ifhmode\par\fi  % can't be in the midst of a paragraph.
+  \startsavinginserts
+  \lskip=\leftskip \rskip=\rightskip
+  \leftskip=0pt\rightskip=0pt % we want these *outside*.
+  \cartinner=\hsize \advance\cartinner by-\lskip
+  \advance\cartinner by-\rskip
+  \cartouter=\hsize
+  \advance\cartouter by 18.4pt % allow for 3pt kerns on either
+                               % side, and for 6pt waste from
+                               % each corner char, and rule thickness
+  \normbskip=\baselineskip \normpskip=\parskip \normlskip=\lineskip
+  %
+  % If this cartouche directly follows a sectioning command, we need the
+  % \parskip glue (backspaced over by default) or the cartouche can
+  % collide with the section heading.
+  \ifnum\lastpenalty>10000 \vskip\parskip \penalty\lastpenalty \fi
+  %
+  \setbox\groupbox=\vbox\bgroup
+      \baselineskip=0pt\parskip=0pt\lineskip=0pt
+      \carttop
+      \hbox\bgroup
+         \hskip\lskip
+         \vrule\kern3pt
+         \vbox\bgroup
+             \kern3pt
+             \hsize=\cartinner
+             \baselineskip=\normbskip
+             \lineskip=\normlskip
+             \parskip=\normpskip
+             \vskip -\parskip
+             \comment % For explanation, see the end of def\group.
+}
+\def\Ecartouche{%
+              \ifhmode\par\fi
+             \kern3pt
+         \egroup
+         \kern3pt\vrule
+         \hskip\rskip
+      \egroup
+      \cartbot
+  \egroup
+  \addgroupbox
+  \checkinserts
+}
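+%
+% Illustrative Texinfo usage (only a sketch):
+%   @cartouche
+%   This text is typeset inside a box with rounded corners.
+%   @end cartouche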
+
+
+% This macro is called at the beginning of all the @example variants,
+% inside a group.
+\newdimen\nonfillparindent
+\def\nonfillstart{%
+  \aboveenvbreak
+  \ifdim\hfuzz < 12pt \hfuzz = 12pt \fi % Don't be fussy
+  \sepspaces % Make spaces be word-separators rather than space tokens.
+  \let\par = \lisppar % don't ignore blank lines
+  \obeylines % each line of input is a line of output
+  \parskip = 0pt
+  % Turn off paragraph indentation but redefine \indent to emulate
+  % the normal \indent.
+  \nonfillparindent=\parindent
+  \parindent = 0pt
+  \let\indent\nonfillindent
+  %
+  \emergencystretch = 0pt % don't try to avoid overfull boxes
+  \ifx\nonarrowing\relax
+    \advance \leftskip by \lispnarrowing
+    \exdentamount=\lispnarrowing
+  \else
+    \let\nonarrowing = \relax
+  \fi
+  \let\exdent=\nofillexdent
+}
+
+\begingroup
+\obeyspaces
+% We want to swallow spaces (but not other tokens) after the fake
+% @indent in our nonfill-environments, where spaces are normally
+% active and set to @tie, resulting in them not being ignored after
+% @indent.
+\gdef\nonfillindent{\futurelet\temp\nonfillindentcheck}%
+\gdef\nonfillindentcheck{%
+\ifx\temp %
+\expandafter\nonfillindentgobble%
+\else%
+\leavevmode\nonfillindentbox%
+\fi%
+}%
+\endgroup
+\def\nonfillindentgobble#1{\nonfillindent}
+\def\nonfillindentbox{\hbox to \nonfillparindent{\hss}}
+
+% If you want all examples etc. small: @set dispenvsize small.
+% If you want even small examples the full size: @set dispenvsize nosmall.
+% This affects the following displayed environments:
+%    @example, @display, @format, @lisp
+%
+\def\smallword{small}
+\def\nosmallword{nosmall}
+\let\SETdispenvsize\relax
+\def\setnormaldispenv{%
+  \ifx\SETdispenvsize\smallword
+    % end paragraph for sake of leading, in case document has no blank
+    % line.  This is redundant with what happens in \aboveenvbreak, but
+    % we need to do it before changing the fonts, and it's inconvenient
+    % to change the fonts afterward.
+    \ifnum \lastpenalty=10000 \else \endgraf \fi
+    \smallexamplefonts \rm
+  \fi
+}
+\def\setsmalldispenv{%
+  \ifx\SETdispenvsize\nosmallword
+  \else
+    \ifnum \lastpenalty=10000 \else \endgraf \fi
+    \smallexamplefonts \rm
+  \fi
+}
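+%
+% So, for example, `@set dispenvsize small' in the document makes @example,
+% @display, @format and @lisp all use \smallexamplefonts, while
+% `@set dispenvsize nosmall' keeps even @smallexample at full size.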
+
+% We often define two environments, @foo and @smallfoo.
+% Let's do it in one command.  #1 is the env name, #2 the definition.
+\def\makedispenvdef#1#2{%
+  \expandafter\envdef\csname#1\endcsname {\setnormaldispenv #2}%
+  \expandafter\envdef\csname small#1\endcsname {\setsmalldispenv #2}%
+  \expandafter\let\csname E#1\endcsname \afterenvbreak
+  \expandafter\let\csname Esmall#1\endcsname \afterenvbreak
+}
+
+% Define two environments (#1 and #2) as synonyms, both using definition #3.
+\def\maketwodispenvdef#1#2#3{%
+  \makedispenvdef{#1}{#3}%
+  \makedispenvdef{#2}{#3}%
+}
+%
+% @lisp: indented, narrowed, typewriter font;
+% @example: same as @lisp.
+%
+% @smallexample and @smalllisp: use smaller fonts.
+% Originally contributed by Pavel@xerox.
+%
+\maketwodispenvdef{lisp}{example}{%
+  \nonfillstart
+  \tt\setupmarkupstyle{example}%
+  \let\kbdfont = \kbdexamplefont % Allow @kbd to do something special.
+  \gobble % eat return
+}
+% @display/@smalldisplay: same as @lisp except keep current font.
+%
+\makedispenvdef{display}{%
+  \nonfillstart
+  \gobble
+}
+
+% @format/@smallformat: same as @display except don't narrow margins.
+%
+\makedispenvdef{format}{%
+  \let\nonarrowing = t%
+  \nonfillstart
+  \gobble
+}
+
+% @flushleft: same as @format, but doesn't obey \SETdispenvsize.
+\envdef\flushleft{%
+  \let\nonarrowing = t%
+  \nonfillstart
+  \gobble
+}
+\let\Eflushleft = \afterenvbreak
+
+% @flushright.
+%
+\envdef\flushright{%
+  \let\nonarrowing = t%
+  \nonfillstart
+  \advance\leftskip by 0pt plus 1fill\relax
+  \gobble
+}
+\let\Eflushright = \afterenvbreak
+
+
+% @raggedright does more-or-less normal line breaking but no right
+% justification.  From plain.tex.  Don't stretch around special
+% characters in urls in this environment, since the stretch at the right
+% should be enough.
+\envdef\raggedright{%
+  \rightskip0pt plus2.4em \spaceskip.3333em \xspaceskip.5em\relax
+  \def\urefprestretchamount{0pt}%
+  \def\urefpoststretchamount{0pt}%
+}
+\let\Eraggedright\par
+
+\envdef\raggedleft{%
+  \parindent=0pt \leftskip0pt plus2em
+  \spaceskip.3333em \xspaceskip.5em \parfillskip=0pt
+  \hbadness=10000 % Last line will usually be underfull, so turn off
+                  % badness reporting.
+}
+\let\Eraggedleft\par
+
+\envdef\raggedcenter{%
+  \parindent=0pt \rightskip0pt plus1em \leftskip0pt plus1em
+  \spaceskip.3333em \xspaceskip.5em \parfillskip=0pt
+  \hbadness=10000 % Last line will usually be underfull, so turn off
+                  % badness reporting.
+}
+\let\Eraggedcenter\par
+
+
+% @quotation does normal linebreaking (hence we can't use \nonfillstart)
+% and narrows the margins.  We keep \parskip nonzero in general, since
+% we're doing normal filling.  So, when using \aboveenvbreak and
+% \afterenvbreak, temporarily make \parskip 0.
+%
+\makedispenvdef{quotation}{\quotationstart}
+%
+\def\quotationstart{%
+  \indentedblockstart % same as \indentedblock, but increase right margin too.
+  \ifx\nonarrowing\relax
+    \advance\rightskip by \lispnarrowing
+  \fi
+  \parsearg\quotationlabel
+}
+
+% We have retained a nonzero parskip for the environment, since we're
+% doing normal filling.
+%
+\def\Equotation{%
+  \par
+  \ifx\quotationauthor\thisisundefined\else
+    % indent a bit.
+    \leftline{\kern 2\leftskip \sl ---\quotationauthor}%
+  \fi
+  {\parskip=0pt \afterenvbreak}%
+}
+\def\Esmallquotation{\Equotation}
+
+% If we're given an argument, typeset it in bold with a colon after.
+\def\quotationlabel#1{%
+  \def\temp{#1}%
+  \ifx\temp\empty \else
+    {\bf #1: }%
+  \fi
+}
+
+% @indentedblock is like @quotation, but indents only on the left and
+% has no optional argument.
+% 
+\makedispenvdef{indentedblock}{\indentedblockstart}
+%
+\def\indentedblockstart{%
+  {\parskip=0pt \aboveenvbreak}% because \aboveenvbreak inserts \parskip
+  \parindent=0pt
+  %
+  % @cartouche defines \nonarrowing to inhibit narrowing at next level down.
+  \ifx\nonarrowing\relax
+    \advance\leftskip by \lispnarrowing
+    \exdentamount = \lispnarrowing
+  \else
+    \let\nonarrowing = \relax
+  \fi
+}
+
+% Keep a nonzero parskip for the environment, since we're doing normal filling.
+%
+\def\Eindentedblock{%
+  \par
+  {\parskip=0pt \afterenvbreak}%
+}
+\def\Esmallindentedblock{\Eindentedblock}
+
+
+% LaTeX-like @verbatim...@end verbatim and @verb{<char>...<char>}
+% If we want to allow any <char> as delimiter,
+% we need the curly braces so that makeinfo sees the @verb command, eg:
+% `@verbx...x' would look like the '@verbx' command.  --address@hidden
+%
+% [Knuth]: Donald Ervin Knuth, 1996.  The TeXbook.
+%
+% [Knuth] p.344; only we need to do the other characters Texinfo sets
+% active too.  Otherwise, they get lost as the first character on a
+% verbatim line.
+\def\dospecials{%
+  \do\ \do\\\do\{\do\}\do\$\do\&%
+  \do\#\do\^\do\^^K\do\_\do\^^A\do\%\do\~%
+  \do\<\do\>\do\|\do\@\do+\do\"%
+  % Don't do the quotes -- if we do, @set txicodequoteundirected and
+  % @set txicodequotebacktick will not have effect on @verb and
+  % @verbatim, and ?` and !` ligatures won't get disabled.
+  %\do\`\do\'%
+}
+%
+% [Knuth] p. 380
+\def\uncatcodespecials{%
+  \def\do##1{\catcode`##1=\other}\dospecials}
+%
+% Setup for the @verb command.
+%
+% Eight spaces for a tab
+\begingroup
+  \catcode`\^^I=\active
+  \gdef\tabeightspaces{\catcode`\^^I=\active\def^^I{\ \ \ \ \ \ \ \ }}
+\endgroup
+%
+\def\setupverb{%
+  \tt  % easiest (and conventionally used) font for verbatim
+  \def\par{\leavevmode\endgraf}%
+  \setupmarkupstyle{verb}%
+  \tabeightspaces
+  % Respect line breaks,
+  % print special symbols as themselves, and
+  % make each space count.
+  % Must do in this order:
+  \obeylines \uncatcodespecials \sepspaces
+}
+
+% Setup for the @verbatim environment
+%
+% Real tab expansion.
+\newdimen\tabw \setbox0=\hbox{\tt\space} \tabw=8\wd0 % tab amount
+%
+% We typeset each line of the verbatim in an \hbox, so we can handle
+% tabs.  The \global is in case the verbatim line starts with an accent,
+% or some other command that starts with a begin-group.  Otherwise, the
+% entire \verbbox would disappear at the corresponding end-group, before
+% it is typeset.  Meanwhile, we can't have nested verbatim commands
+% (can we?), so the \global won't be overwriting itself.
+\newbox\verbbox
+\def\starttabbox{\global\setbox\verbbox=\hbox\bgroup}
+%
+\begingroup
+  \catcode`\^^I=\active
+  \gdef\tabexpand{%
+    \catcode`\^^I=\active
+    \def^^I{\leavevmode\egroup
+      \dimen\verbbox=\wd\verbbox % the width so far, or since the previous tab
+      \divide\dimen\verbbox by\tabw
+      \multiply\dimen\verbbox by\tabw % compute previous multiple of \tabw
+      \advance\dimen\verbbox by\tabw  % advance to next multiple of \tabw
+      \wd\verbbox=\dimen\verbbox \box\verbbox \starttabbox
+    }%
+  }
+\endgroup
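+%
+% Worked example of the arithmetic above: if the material typeset so far is
+% 2.3\tabw wide, the \divide truncates to 2, the \multiply gives 2\tabw, and
+% the \advance gives 3\tabw, so the box is padded out to the next tab stop.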
+
+% start the verbatim environment.
+\def\setupverbatim{%
+  \let\nonarrowing = t%
+  \nonfillstart
+  \tt % easiest (and conventionally used) font for verbatim
+  % The \leavevmode here is for blank lines.  Otherwise, we would
+  % never \starttabbox and the \egroup would end verbatim mode.
+  \def\par{\leavevmode\egroup\box\verbbox\endgraf}%
+  \tabexpand
+  \setupmarkupstyle{verbatim}%
+  % Respect line breaks,
+  % print special symbols as themselves, and
+  % make each space count.
+  % Must do in this order:
+  \obeylines \uncatcodespecials \sepspaces
+  \everypar{\starttabbox}%
+}
+
+% Do the @verb magic: verbatim text is quoted by unique
+% delimiter characters.  Before first delimiter expect a
+% right brace, after last delimiter expect closing brace:
+%
+%    \def\doverb'{'<char>#1<char>'}'{#1}
+%
+% [Knuth] p. 382; only eat outer {}
+\begingroup
+  \catcode`[=1\catcode`]=2\catcode`\{=\other\catcode`\}=\other
+  \gdef\doverb{#1[\def\next##1#1}[##1\endgroup]\next]
+\endgroup
+%
+\def\verb{\begingroup\setupverb\doverb}
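+%
+% Illustrative usage (only a sketch): any character can serve as delimiter,
+% e.g.
+%   @verb{|@code{x} is printed literally here|}
+% typesets its argument verbatim in the typewriter font.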
+%
+%
+% Do the @verbatim magic: define the macro \doverbatim so that
+% the (first) argument ends when '@end verbatim' is reached, ie:
+%
+%     \def\doverbatim#1@end verbatim{#1}
+%
+% For Texinfo it's a lot easier than for LaTeX,
+% because texinfo's \verbatim doesn't stop at '\end{verbatim}':
+% we need not redefine '\', '{' and '}'.
+%
+% Inspired by LaTeX's verbatim command set [latex.ltx]
+%
+\begingroup
+  \catcode`\ =\active
+  \obeylines %
+  % Ignore everything up to the first ^^M, which is the newline at the end
+  % of the @verbatim input line itself.  Otherwise we get an extra blank
+  % line in the output.
+  \xdef\doverbatim#1^^M#2@end verbatim{#2\noexpand\end\gobble verbatim}%
+  % We really want {...\end verbatim} in the body of the macro, but
+  % without the active space; thus we have to use \xdef and \gobble.
+\endgroup
+%
+\envdef\verbatim{%
+    \setupverbatim\doverbatim
+}
+\let\Everbatim = \afterenvbreak
+
+
+% @verbatiminclude FILE - insert text of file in verbatim environment.
+%
+\def\verbatiminclude{\parseargusing\filenamecatcodes\doverbatiminclude}
+%
+\def\doverbatiminclude#1{%
+  {%
+    \makevalueexpandable
+    \setupverbatim
+    \indexnofonts       % Allow `@@' and other weird things in file names.
+    \wlog{texinfo.tex: doing @verbatiminclude of #1^^J}%
+    \input #1
+    \afterenvbreak
+  }%
+}
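+%
+% Illustrative usage (the file name is made up):
+%   @verbatiminclude sample-output.txt
+% inserts the file verbatim; \makevalueexpandable above should also let
+% @value{...} be used in the file name argument.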
+
+% @copying ... @end copying.
+% Save the text away for @insertcopying later.
+%
+% We save the uninterpreted tokens, rather than creating a box.
+% Saving the text in a box would be much easier, but then all the
+% typesetting commands (@smallbook, font changes, etc.) have to be done
+% beforehand -- and a) we want @copying to be done first in the source
+% file; b) letting users define the frontmatter in as flexible an order as
+% possible is desirable.
+%
+\def\copying{\checkenv{}\begingroup\scanargctxt\docopying}
+\def\docopying#1@end copying{\endgroup\def\copyingtext{#1}}
+%
+\def\insertcopying{%
+  \begingroup
+    \parindent = 0pt  % paragraph indentation looks wrong on title page
+    \scanexp\copyingtext
+  \endgroup
+}
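+%
+% Illustrative Texinfo usage (only a sketch): define the text once, early in
+% the source, and emit it later, e.g. on the copyright page:
+%   @copying
+%   Permission text for the manual goes here.
+%   @end copying
+%   ...
+%   @insertcopying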
+
+
+\message{defuns,}
+% @defun etc.
+
+\newskip\defbodyindent \defbodyindent=.4in
+\newskip\defargsindent \defargsindent=50pt
+\newskip\deflastargmargin \deflastargmargin=18pt
+\newcount\defunpenalty
+
+% Start the processing of @deffn:
+\def\startdefun{%
+  \ifnum\lastpenalty<10000
+    \medbreak
+    \defunpenalty=10003 % Will keep this @deffn together with the
+                        % following @def command, see below.
+  \else
+    % If there are two @def commands in a row, we'll have a \nobreak,
+    % which is there to keep the function description together with its
+    % header.  But if there's nothing but headers, we need to allow a
+    % break somewhere.  Check specifically for penalty 10002, inserted
+    % by \printdefunline, instead of 10000, since the sectioning
+    % commands also insert a nobreak penalty, and we don't want to allow
+    % a break between a section heading and a defun.
+    %
+    % As a further refinement, we avoid "club" headers by signalling
+    % with penalty of 10003 after the very first @deffn in the
+    % sequence (see above), and penalty of 10002 after any following
+    % @def command.
+    \ifnum\lastpenalty=10002 \penalty2000 \else \defunpenalty=10002 \fi
+    %
+    % Similarly, after a section heading, do not allow a break.
+    % But do insert the glue.
+    \medskip  % preceded by discardable penalty, so not a breakpoint
+  \fi
+  %
+  \parindent=0in
+  \advance\leftskip by \defbodyindent
+  \exdentamount=\defbodyindent
+}
+
+\def\dodefunx#1{%
+  % First, check whether we are in the right environment:
+  \checkenv#1%
+  %
+  % As above, allow line break if we have multiple x headers in a row.
+  % It's not a great place, though.
+  \ifnum\lastpenalty=10002 \penalty3000 \else \defunpenalty=10002 \fi
+  %
+  % And now, it's time to reuse the body of the original defun:
+  \expandafter\gobbledefun#1%
+}
+\def\gobbledefun#1\startdefun{}
+
+% \printdefunline \deffnheader{text}
+%
+\def\printdefunline#1#2{%
+  \begingroup
+    % call \deffnheader:
+    #1#2 \endheader
+    % common ending:
+    \interlinepenalty = 10000
+    \advance\rightskip by 0pt plus 1fil\relax
+    \endgraf
+    \nobreak\vskip -\parskip
+    \penalty\defunpenalty  % signal to \startdefun and \dodefunx
+    % Some of the @defun-type tags do not enable magic parentheses,
+    % rendering the following check redundant.  But we don't optimize.
+    \checkparencounts
+  \endgroup
+}
+
+\def\Edefun{\endgraf\medbreak}
+
+% \makedefun{deffn} creates \deffn, \deffnx and \Edeffn;
+% the only thing remaining is to define \deffnheader.
+%
+\def\makedefun#1{%
+  \expandafter\let\csname E#1\endcsname = \Edefun
+  \edef\temp{\noexpand\domakedefun
+    \makecsname{#1}\makecsname{#1x}\makecsname{#1header}}%
+  \temp
+}
+
+% \domakedefun \deffn \deffnx \deffnheader { (defn. of \deffnheader) }
+%
+% Define \deffn and \deffnx, without parameters.
+% \deffnheader has to be defined explicitly.
+%
+\def\domakedefun#1#2#3{%
+  \envdef#1{%
+    \startdefun
+    \doingtypefnfalse    % distinguish typed functions from all else
+    \parseargusing\activeparens{\printdefunline#3}%
+  }%
+  \def#2{\dodefunx#1}%
+  \def#3%
+}
+
+\newif\ifdoingtypefn       % doing typed function?
+\newif\ifrettypeownline    % typeset return type on its own line?
+
+% @deftypefnnewline on|off says whether the return type of typed functions
+% are printed on their own line.  This affects @deftypefn, @deftypefun,
+% @deftypeop, and @deftypemethod.
+% 
+\parseargdef\deftypefnnewline{%
+  \def\temp{#1}%
+  \ifx\temp\onword
+    \expandafter\let\csname SETtxideftypefnnl\endcsname
+      = \empty
+  \else\ifx\temp\offword
+    \expandafter\let\csname SETtxideftypefnnl\endcsname
+      = \relax
+  \else
+    \errhelp = \EMsimple
+    \errmessage{Unknown @txideftypefnnl value `\temp',
+                must be on|off}%
+  \fi\fi
+}
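+%
+% For illustration, a Texinfo source fragment (hypothetical identifiers)
+% that exercises this option might look like:
+%
+%   @deftypefnnewline on
+%   @deftypefn {Library Function} {struct stat *} my_stat (const char *@var{path})
+%   Return stat information for @var{path}.
+%   @end deftypefn
+%
+% With the option on, the return type {struct stat *} is set on a line of
+% its own above the function name.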
+
+% Untyped functions:
+
+% @deffn category name args
+\makedefun{deffn}{\deffngeneral{}}
+
+% @deffn category class name args
+\makedefun{defop}#1 {\defopon{#1\ \putwordon}}
+
+% \defopon {category on}class name args
+\def\defopon#1#2 {\deffngeneral{\putwordon\ \code{#2}}{#1\ \code{#2}} }
+
+% \deffngeneral {subind}category name args
+%
+\def\deffngeneral#1#2 #3 #4\endheader{%
+  % Remember that \dosubind{fn}{foo}{} is equivalent to \doind{fn}{foo}.
+  \dosubind{fn}{\code{#3}}{#1}%
+  \defname{#2}{}{#3}\magicamp\defunargs{#4\unskip}%
+}
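+%
+% For illustration, typical Texinfo usage of these commands (hypothetical
+% names) might be:
+%
+%   @deffn Command forward-word count
+%   Move point forward @var{count} words.
+%   @end deffn
+%
+%   @defop Method stack push item
+%   Push @var{item} onto the stack.
+%   @end defop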
+
+% Typed functions:
+
+% @deftypefn category type name args
+\makedefun{deftypefn}{\deftypefngeneral{}}
+
+% @deftypeop category class type name args
+\makedefun{deftypeop}#1 {\deftypeopon{#1\ \putwordon}}
+
+% \deftypeopon {category on}class type name args
+\def\deftypeopon#1#2 {\deftypefngeneral{\putwordon\ \code{#2}}{#1\ \code{#2}} }
+
+% \deftypefngeneral {subind}category type name args
+%
+\def\deftypefngeneral#1#2 #3 #4 #5\endheader{%
+  \dosubind{fn}{\code{#4}}{#1}%
+  \doingtypefntrue
+  \defname{#2}{#3}{#4}\defunargs{#5\unskip}%
+}
+
+% Typed variables:
+
+% @deftypevr category type var args
+\makedefun{deftypevr}{\deftypecvgeneral{}}
+
+% @deftypecv category class type var args
+\makedefun{deftypecv}#1 {\deftypecvof{#1\ \putwordof}}
+
+% \deftypecvof {category of}class type var args
+\def\deftypecvof#1#2 {\deftypecvgeneral{\putwordof\ \code{#2}}{#1\ \code{#2}} }
+
+% \deftypecvgeneral {subind}category type var args
+%
+\def\deftypecvgeneral#1#2 #3 #4 #5\endheader{%
+  \dosubind{vr}{\code{#4}}{#1}%
+  \defname{#2}{#3}{#4}\defunargs{#5\unskip}%
+}
+
+% Untyped variables:
+
+% @defvr category var args
+\makedefun{defvr}#1 {\deftypevrheader{#1} {} }
+
+% @defcv category class var args
+\makedefun{defcv}#1 {\defcvof{#1\ \putwordof}}
+
+% \defcvof {category of}class var args
+\def\defcvof#1#2 {\deftypecvof{#1}#2 {} }
+
+% Types:
+
+% @deftp category name args
+\makedefun{deftp}#1 #2 #3\endheader{%
+  \doind{tp}{\code{#2}}%
+  \defname{#1}{}{#2}\defunargs{#3\unskip}%
+}
+
+% Remaining @defun-like shortcuts:
+\makedefun{defun}{\deffnheader{\putwordDeffunc} }
+\makedefun{defmac}{\deffnheader{\putwordDefmac} }
+\makedefun{defspec}{\deffnheader{\putwordDefspec} }
+\makedefun{deftypefun}{\deftypefnheader{\putwordDeffunc} }
+\makedefun{defvar}{\defvrheader{\putwordDefvar} }
+\makedefun{defopt}{\defvrheader{\putwordDefopt} }
+\makedefun{deftypevar}{\deftypevrheader{\putwordDefvar} }
+\makedefun{defmethod}{\defopon\putwordMethodon}
+\makedefun{deftypemethod}{\deftypeopon\putwordMethodon}
+\makedefun{defivar}{\defcvof\putwordInstanceVariableof}
+\makedefun{deftypeivar}{\deftypecvof\putwordInstanceVariableof}
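+%
+% For illustration, the shortcuts above are used in Texinfo source like
+% this (hypothetical names); each supplies its category word automatically:
+%
+%   @defun list-length list
+%   Return the number of elements in @var{list}.
+%   @end defun
+%
+%   @defvar fill-column
+%   Column beyond which line-filling stops.
+%   @end defvar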
+
+% \defname, which formats the name of the @def (not the args).
+% #1 is the category, such as "Function".
+% #2 is the return type, if any.
+% #3 is the function name.
+%
+% We are followed by (but not passed) the arguments, if any.
+%
+\def\defname#1#2#3{%
+  \par
+  % Get the values of \leftskip and \rightskip as they were outside the @def...
+  \advance\leftskip by -\defbodyindent
+  %
+  % Determine if we are typesetting the return type of a typed function
+  % on a line by itself.
+  \rettypeownlinefalse
+  \ifdoingtypefn  % doing a typed function specifically?
+    % then check user option for putting return type on its own line:
+    \expandafter\ifx\csname SETtxideftypefnnl\endcsname\relax \else
+      \rettypeownlinetrue
+    \fi
+  \fi
+  %
+  % How we'll format the category name.  Putting it in brackets helps
+  % distinguish it from the body text that may end up on the next line
+  % just below it.
+  \def\temp{#1}%
+  \setbox0=\hbox{\kern\deflastargmargin \ifx\temp\empty\else [\rm\temp]\fi}
+  %
+  % Figure out line sizes for the paragraph shape.  We'll always have at
+  % least two.
+  \tempnum = 2
+  %
+  % The first line needs space for \box0; but if \rightskip is nonzero,
+  % we need only space for the part of \box0 which exceeds it:
+  \dimen0=\hsize  \advance\dimen0 by -\wd0  \advance\dimen0 by \rightskip
+  %
+  % If doing a return type on its own line, we'll have another line.
+  \ifrettypeownline
+    \advance\tempnum by 1
+    \def\maybeshapeline{0in \hsize}%
+  \else
+    \def\maybeshapeline{}%
+  \fi
+  %
+  % The continuations:
+  \dimen2=\hsize  \advance\dimen2 by -\defargsindent
+  %
+  % The final paragraph shape:
+  \parshape \tempnum  0in \dimen0  \maybeshapeline  \defargsindent \dimen2
+  %
+  % Put the category name at the right margin.
+  \noindent
+  \hbox to 0pt{%
+    \hfil\box0 \kern-\hsize
+    % \hsize has to be shortened this way:
+    \kern\leftskip
+    % Intentionally do not respect \rightskip, since we need the space.
+  }%
+  %
+  % Allow all lines to be underfull without complaint:
+  \tolerance=10000 \hbadness=10000
+  \exdentamount=\defbodyindent
+  {%
+    % defun fonts. We use typewriter by default (used to be bold) because:
+    % . we're printing identifiers, they should be in tt in principle.
+    % . in languages with many accents, such as Czech or French, it's
+    %   common to leave accents off identifiers.  The result looks ok in
+    %   tt, but exceedingly strange in rm.
+    % . we don't want -- and --- to be treated as ligatures.
+    % . this still does not fix the ?` and !` ligatures, but so far no
+    %   one has made identifiers using them :).
+    \df \tt
+    \def\temp{#2}% text of the return type
+    \ifx\temp\empty\else
+      \tclose{\temp}% typeset the return type
+      \ifrettypeownline
+        % put return type on its own line; prohibit line break following:
+        \hfil\vadjust{\nobreak}\break  
+      \else
+        \space  % type on same line, so just followed by a space
+      \fi
+    \fi           % no return type
+    #3% output function name
+  }%
+  {\rm\enskip}% hskip 0.5 em of \rmfont
+  %
+  \boldbrax
+  % arguments will be output next, if any.
+}
+
+% Print arguments in slanted roman (not ttsl), inconsistently with using
+% tt for the name.  This is because literal text is sometimes needed in
+% the argument list (groff manual), and ttsl and tt are not very
+% distinguishable.  Prevent hyphenation at `-' chars.
+%
+\def\defunargs#1{%
+  % use sl by default (not ttsl),
+  % tt for the names.
+  \df \sl \hyphenchar\font=0
+  %
+  % On the other hand, if an argument has two dashes (for instance), we
+  % want a way to get ttsl.  We used to recommend @var for that, so
+  % leave the code in, but it's strange for @var to lead to typewriter.
+  % Nowadays we recommend @code, since the difference between a ttsl hyphen
+  % and a tt hyphen is pretty tiny.  @code also disables ?` !`.
+  \def\var##1{{\setupmarkupstyle{var}\ttslanted{##1}}}%
+  #1%
+  \sl\hyphenchar\font=45
+}
+
+% We want ()&[] to print specially on the defun line.
+%
+\def\activeparens{%
+  \catcode`\(=\active \catcode`\)=\active
+  \catcode`\[=\active \catcode`\]=\active
+  \catcode`\&=\active
+}
+
+% Make control sequences which act like normal parenthesis chars.
+\let\lparen = ( \let\rparen = )
+
+% Be sure that we always have a definition for `(', etc.  For example,
+% if the fn name has parens in it, \boldbrax will not be in effect yet,
+% so TeX would otherwise complain about undefined control sequence.
+{
+  \activeparens
+  \global\let(=\lparen \global\let)=\rparen
+  \global\let[=\lbrack \global\let]=\rbrack
+  \global\let& = \&
+
+  \gdef\boldbrax{\let(=\opnr\let)=\clnr\let[=\lbrb\let]=\rbrb}
+  \gdef\magicamp{\let&=\amprm}
+}
+
+\newcount\parencount
+
+% If we encounter &foo, then turn on ()-hacking afterwards
+\newif\ifampseen
+\def\amprm#1 {\ampseentrue{\bf\&#1 }}
+
+\def\parenfont{%
+  \ifampseen
+    % At the first level, print parens in roman,
+    % otherwise use the default font.
+    \ifnum \parencount=1 \rm \fi
+  \else
+    % The \sf parens (in \boldbrax) actually are a little bolder than
+    % the contained text.  This is especially needed for [ and ] .
+    \sf
+  \fi
+}
+\def\infirstlevel#1{%
+  \ifampseen
+    \ifnum\parencount=1
+      #1%
+    \fi
+  \fi
+}
+\def\bfafterword#1 {#1 \bf}
+
+\def\opnr{%
+  \global\advance\parencount by 1
+  {\parenfont(}%
+  \infirstlevel \bfafterword
+}
+\def\clnr{%
+  {\parenfont)}%
+  \infirstlevel \sl
+  \global\advance\parencount by -1
+}
+
+\newcount\brackcount
+\def\lbrb{%
+  \global\advance\brackcount by 1
+  {\bf[}%
+}
+\def\rbrb{%
+  {\bf]}%
+  \global\advance\brackcount by -1
+}
+
+\def\checkparencounts{%
+  \ifnum\parencount=0 \else \badparencount \fi
+  \ifnum\brackcount=0 \else \badbrackcount \fi
+}
+% these should not use \errmessage; the glibc manual, at least, actually
+% has such constructs (when documenting function pointers).
+\def\badparencount{%
+  \message{Warning: unbalanced parentheses in @def...}%
+  \global\parencount=0
+}
+\def\badbrackcount{%
+  \message{Warning: unbalanced square brackets in @def...}%
+  \global\brackcount=0
+}
+
+
+\message{macros,}
+% @macro.
+
+% To do this right we need a feature of e-TeX, \scantokens,
+% which we arrange to emulate with a temporary file in ordinary TeX.
+\ifx\eTeXversion\thisisundefined
+  \newwrite\macscribble
+  \def\scantokens#1{%
+    \toks0={#1}%
+    \immediate\openout\macscribble=\jobname.tmp
+    \immediate\write\macscribble{\the\toks0}%
+    \immediate\closeout\macscribble
+    \input \jobname.tmp
+  }
+\fi
+
+% alias because \c means cedilla in @tex or @math
+\let\texinfoc=\c
+
+\newcount\savedcatcodeone
+\newcount\savedcatcodetwo
+
+% Used at the time of macro expansion.
+% Argument is macro body with arguments substituted
+\def\scanmacro#1{%
+  \newlinechar`\^^M
+  \def\xeatspaces{\eatspaces}%
+  %
+  % Temporarily undo catcode changes of \printindex.  Set catcode of @ to
+  % 0 so that @-commands in macro expansions aren't printed literally when 
+  % formatting an index file, where \ is used as the escape character.
+  \savedcatcodeone=\catcode`\@
+  \savedcatcodetwo=\catcode`\\
+  \catcode`\@=0
+  \catcode`\\=\active
+  %
+  % Process the macro body under the current catcode regime.
+  \scantokens{#1@texinfoc}%
+  %
+  \catcode`\@=\savedcatcodeone
+  \catcode`\\=\savedcatcodetwo
+  %
+  % The \texinfoc is to remove the \newlinechar added by \scantokens, and
+  % can be noticed by \parsearg.
+  %   We avoid surrounding the call to \scantokens with \bgroup and \egroup
+  % to allow macros to open or close groups themselves.
+}
+
+% Used for copying and captions
+\def\scanexp#1{%
+  \expandafter\scanmacro\expandafter{#1}%
+}
+
+\newcount\paramno   % Count of parameters
+\newtoks\macname    % Macro name
+\newif\ifrecursive  % Is it recursive?
+
+% List of all defined macros in the form
+%    \commondummyword\macro1\commondummyword\macro2...
+% Currently it also contains all @aliases; the list can be split
+% if there is a need.
+\def\macrolist{}
+
+% Add the macro to \macrolist
+\def\addtomacrolist#1{\expandafter \addtomacrolistxxx \csname#1\endcsname}
+\def\addtomacrolistxxx#1{%
+     \toks0 = \expandafter{\macrolist\commondummyword#1}%
+     \xdef\macrolist{\the\toks0}%
+}
+
+% Utility routines.
+% This does \let #1 = #2, with \csnames; that is,
+%   \let \csname#1\endcsname = \csname#2\endcsname
+% (except of course we have to play expansion games).
+%
+\def\cslet#1#2{%
+  \expandafter\let
+  \csname#1\expandafter\endcsname
+  \csname#2\endcsname
+}
+
+% Trim leading and trailing spaces off a string.
+% Concepts from aro-bend problem 15 (see CTAN).
+{\catcode`\@=11
+\gdef\eatspaces #1{\expandafter\trim@\expandafter{#1 }}
+\gdef\trim@ #1{\trim@@ @#1 @ #1 @ @@}
+\gdef\trim@@ #1@ #2@ #3@@{\trim@@@\empty #2 @}
+\def\unbrace#1{#1}
+\unbrace{\gdef\trim@@@ #1 } #2@{#1}
+}
+
+% Trim a single trailing ^^M off a string.
+{\catcode`\^^M=\other \catcode`\Q=3%
+\gdef\eatcr #1{\eatcra #1Q^^MQ}%
+\gdef\eatcra#1^^MQ{\eatcrb#1Q}%
+\gdef\eatcrb#1Q#2Q{#1}%
+}
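+% For illustration (hypothetical input): \eatspaces{  foo bar  } expands
+% to `foo bar' with the surrounding spaces removed, and \eatcr strips a
+% single trailing ^^M such as the one left by line-based parsing.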
+
+% Macro bodies are absorbed as an argument in a context where
+% all characters are catcode 10, 11 or 12, except \ which is active
+% (as in normal texinfo). It is necessary to change the definition of \
+% to recognize macro arguments; this is the job of \mbodybackslash.
+%
+% Non-ASCII encodings make 8-bit characters active, so un-activate
+% them to avoid their expansion.  Must do this non-globally, to
+% confine the change to the current group.
+%
+% It's necessary to have hard CRs when the macro is executed. This is
+% done by making ^^M (\endlinechar) catcode 12 when reading the macro
+% body, and then making it the \newlinechar in \scanmacro.
+%
+\def\scanctxt{% used as subroutine
+  \catcode`\"=\other
+  \catcode`\+=\other
+  \catcode`\<=\other
+  \catcode`\>=\other
+  \catcode`\^=\other
+  \catcode`\_=\other
+  \catcode`\|=\other
+  \catcode`\~=\other
+  \passthroughcharstrue
+}
+
+\def\scanargctxt{% used for copying and captions, not macros.
+  \scanctxt
+  \catcode`\@=\other
+  \catcode`\\=\other
+  \catcode`\^^M=\other
+}
+
+\def\macrobodyctxt{% used for @macro definitions
+  \scanctxt
+  \catcode`\ =\other
+  \catcode`\@=\other
+  \catcode`\{=\other
+  \catcode`\}=\other
+  \catcode`\^^M=\other
+  \usembodybackslash
+}
+
+% Used when scanning braced macro arguments.  Note, however, that catcode
+% changes here are ineffectual if the macro invocation was nested inside
+% an argument to another Texinfo command.
+\def\macroargctxt{%
+  \scanctxt
+  \catcode`\ =\active
+  \catcode`\^^M=\other
+  \catcode`\\=\active
+}
+
+\def\macrolineargctxt{% used for whole-line arguments without braces
+  \scanctxt
+  \catcode`\{=\other
+  \catcode`\}=\other
+}
+
+% \mbodybackslash is the definition of \ in @macro bodies.
+% It maps \foo\ => \csname macarg.foo\endcsname => #N
+% where N is the macro parameter number.
+% We define \csname macarg.\endcsname to be \realbackslash, so
+% \\ in macro replacement text gets you a backslash.
+%
+{\catcode`@=0 @catcode`@\=@active
+ @gdef@usembodybackslash{@let\=@mbodybackslash}
+ @gdef@mbodybackslash#1\{@csname macarg.#1@endcsname}
+}
+\expandafter\def\csname macarg.\endcsname{\realbackslash}
+
+\def\margbackslash#1{\char`\#1 }
+
+\def\macro{\recursivefalse\parsearg\macroxxx}
+\def\rmacro{\recursivetrue\parsearg\macroxxx}
+
+\def\macroxxx#1{%
+  \getargs{#1}% now \macname is the macname and \argl the arglist
+  \ifx\argl\empty       % no arguments
+     \paramno=0\relax
+  \else
+     \expandafter\parsemargdef \argl;%
+     \if\paramno>256\relax
+       \ifx\eTeXversion\thisisundefined
+         \errhelp = \EMsimple
+         \errmessage{You need eTeX to compile a file with macros with more than 256 arguments}
+       \fi
+     \fi
+  \fi
+  \if1\csname ismacro.\the\macname\endcsname
+     \message{Warning: redefining \the\macname}%
+  \else
+     \expandafter\ifx\csname \the\macname\endcsname \relax
+     \else \errmessage{Macro name \the\macname\space already defined}\fi
+     \global\cslet{macsave.\the\macname}{\the\macname}%
+     \global\expandafter\let\csname ismacro.\the\macname\endcsname=1%
+     \addtomacrolist{\the\macname}%
+  \fi
+  \begingroup \macrobodyctxt
+  \ifrecursive \expandafter\parsermacbody
+  \else \expandafter\parsemacbody
+  \fi}
+
+\parseargdef\unmacro{%
+  \if1\csname ismacro.#1\endcsname
+    \global\cslet{#1}{macsave.#1}%
+    \global\expandafter\let \csname ismacro.#1\endcsname=0%
+    % Remove the macro name from \macrolist:
+    \begingroup
+      \expandafter\let\csname#1\endcsname \relax
+      \let\commondummyword\unmacrodo
+      \xdef\macrolist{\macrolist}%
+    \endgroup
+  \else
+    \errmessage{Macro #1 not defined}%
+  \fi
+}
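+
+% For illustration, a Texinfo document (hypothetical macro and argument
+% names) defines, uses, and removes a macro like this; \name\ inside the
+% body refers to the parameter, and \\ yields a literal backslash:
+%
+%   @macro codeitem{name, desc}
+%   @item @code{\name\}: \desc\
+%   @end macro
+%
+%   @codeitem{GNUNET_OK, operation succeeded}
+%
+%   @unmacro codeitem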
+
+% Called by \do from \dounmacro on each macro.  The idea is to omit any
+% macro definitions that have been changed to \relax.
+%
+\def\unmacrodo#1{%
+  \ifx #1\relax
+    % remove this
+  \else
+    \noexpand\commondummyword \noexpand#1%
+  \fi
+}
+
+% \getargs -- Parse the arguments to a @macro line.  Set \macname to
+% the name of the macro, and \argl to the braced argument list.
+\def\getargs#1{\getargsxxx#1{}}
+\def\getargsxxx#1#{\getmacname #1 \relax\getmacargs}
+\def\getmacname#1 #2\relax{\macname={#1}}
+\def\getmacargs#1{\def\argl{#1}}
+% This made use of the feature that if the last token of a
+% <parameter list> is #, then the preceding argument is delimited by
+% an opening brace, and that opening brace is not consumed.
+
+% Parse the optional {params} list to @macro or @rmacro.
+% Set \paramno to the number of arguments,
+% and \paramlist to a parameter text for the macro (e.g. #1,#2,#3 for a
+% three-param macro.)  Define \macarg.BLAH for each BLAH in the params
+% list to some hook where the argument is to be expanded.  If there are
+% fewer than 10 arguments, that hook is replaced by ##N, where N is the
+% position in that list; that is to say, the macro arguments are defined
+% `a la TeX in the macro body.
+%
+% That gets used by \mbodybackslash (above).
+%
+% If there are 10 or more arguments, a different technique is used: see
+% \parsemmanyargdef.
+%
+\def\parsemargdef#1;{%
+  \paramno=0\def\paramlist{}%
+  \let\hash\relax
+  % \hash is redefined to `#' later to get it into definitions
+  \let\xeatspaces\relax
+  \parsemargdefxxx#1,;,%
+  \ifnum\paramno<10\relax\else
+    \paramno0\relax
+    \parsemmanyargdef@@#1,;,% 10 or more arguments
+  \fi
+}
+\def\parsemargdefxxx#1,{%
+  \if#1;\let\next=\relax
+  \else \let\next=\parsemargdefxxx
+    \advance\paramno by 1
+    \expandafter\edef\csname macarg.\eatspaces{#1}\endcsname
+        {\xeatspaces{\hash\the\paramno}}%
+    \edef\paramlist{\paramlist\hash\the\paramno,}%
+  \fi\next}
+
+% \parsemacbody, \parsermacbody
+%
+% Read recursive and nonrecursive macro bodies. (They're different since
+% rec and nonrec macros end differently.)
+% 
+% We are in \macrobodyctxt, and the \xdef causes backslashes in the macro
+% body to be transformed.
+% Set \macrobody to the body of the macro, and call \defmacro.
+%
+{\catcode`\ =\other\long\gdef\parsemacbody#1@end macro{%
+\xdef\macrobody{\eatcr{#1}}\endgroup\defmacro}}%
+{\catcode`\ =\other\long\gdef\parsermacbody#1@end rmacro{%
+\xdef\macrobody{\eatcr{#1}}\endgroup\defmacro}}%
+
+% Make @ a letter, so that we can make private-to-Texinfo macro names.
+\edef\texiatcatcode{\the\catcode`\@}
+\catcode `@=11\relax
+
+%%%%%%%%%%%%%% Code for > 10 arguments only   %%%%%%%%%%%%%%%%%%
+
+% If there are 10 or more arguments, a different technique is used, where the
+% hook remains in the body, and when the macro is to be expanded the body is
+% processed again to replace the arguments.
+%
+% In that case, the hook is \the\toks N-1, and we simply set \toks N-1 to the
+% argument N value and then \edef the body (nothing else will expand because of
+% the catcode regime under which the body was input).
+%
+% If you compile with TeX (not eTeX), and you have macros with 10 or more
+% arguments, no macro can have more than 256 arguments (else error).
+%
+% When there are 10 or more arguments, we parse the argument list again to
+% set new definitions for the \macarg.BLAH macros corresponding to each
+% BLAH argument.  The list already had to be parsed once in order to count
+% the arguments, and since macros with at most 9 arguments are far more
+% common than macros with 10 or more, defining the \macarg.BLAH macros
+% twice does not cost much processing power.
+\def\parsemmanyargdef@@#1,{%
+  \if#1;\let\next=\relax
+  \else 
+    \let\next=\parsemmanyargdef@@
+    \edef\tempb{\eatspaces{#1}}%
+    \expandafter\def\expandafter\tempa
+       \expandafter{\csname macarg.\tempb\endcsname}%
+    % Note that we need some extra \noexpand\noexpand, this is because we
+    % don't want \the  to be expanded in the \parsermacbody  as it uses an
+    % \xdef .
+    \expandafter\edef\tempa
+      {\noexpand\noexpand\noexpand\the\toks\the\paramno}%
+    \advance\paramno by 1\relax
+  \fi\next}
+
+
+\let\endargs@\relax
+\let\nil@\relax
+\def\nilm@{\nil@}%
+\long\def\nillm@{\nil@}%
+
+% This macro is expanded during the Texinfo macro expansion, not during its
+% definition.  It gets all the arguments' values and assigns them to macros
+% macarg.ARGNAME
+%
+% #1 is the macro name
+% #2 is the list of argument names
+% #3 is the list of argument values
+\def\getargvals@#1#2#3{%
+  \def\macargdeflist@{}%
+  \def\saveparamlist@{#2}% Need to keep a copy for parameter expansion.
+  \def\paramlist{#2,\nil@}%
+  \def\macroname{#1}%
+  \begingroup
+  \macroargctxt
+  \def\argvaluelist{#3,\nil@}%
+  \def\@tempa{#3}%
+  \ifx\@tempa\empty
+    \setemptyargvalues@
+  \else
+    \getargvals@@
+  \fi
+}
+\def\getargvals@@{%
+  \ifx\paramlist\nilm@
+      % Some sanity check needed here that \argvaluelist is also empty.
+      \ifx\argvaluelist\nillm@
+      \else
+        \errhelp = \EMsimple
+        \errmessage{Too many arguments in macro `\macroname'!}%
+      \fi
+      \let\next\macargexpandinbody@
+  \else
+    \ifx\argvaluelist\nillm@
+       % No more arguments values passed to macro.  Set remaining named-arg
+       % macros to empty.
+       \let\next\setemptyargvalues@
+    \else
+      % pop current arg name into \@tempb
+      \def\@tempa##1{\pop@{\@tempb}{\paramlist}##1\endargs@}%
+      \expandafter\@tempa\expandafter{\paramlist}%
+       % pop current argument value into \@tempc
+      \def\@tempa##1{\longpop@{\@tempc}{\argvaluelist}##1\endargs@}%
+      \expandafter\@tempa\expandafter{\argvaluelist}%
+       % Here \@tempb is the current arg name and \@tempc is the current arg value.
+       % First place the new argument macro definition into \@tempd
+       \expandafter\macname\expandafter{\@tempc}%
+       \expandafter\let\csname macarg.\@tempb\endcsname\relax
+       \expandafter\def\expandafter\@tempe\expandafter{%
+         \csname macarg.\@tempb\endcsname}%
+       \edef\@tempd{\long\def\@tempe{\the\macname}}%
+       \push@\@tempd\macargdeflist@
+       \let\next\getargvals@@
+    \fi
+  \fi
+  \next
+}
+
+\def\push@#1#2{%
+  \expandafter\expandafter\expandafter\def
+  \expandafter\expandafter\expandafter#2%
+  \expandafter\expandafter\expandafter{%
+  \expandafter#1#2}%
+}
+
+% Replace arguments by their values in the macro body, and place the result
+% in macro \@tempa.
+% 
+\def\macvalstoargs@{%
+  %  To do this we use the property that token registers that are \the'ed
+  % within an \edef  expand only once. So we are going to place all argument
+  % values into respective token registers.
+  %
+  % First we save the token context, and initialize argument numbering.
+  \begingroup
+    \paramno0\relax
+    % Then, for each argument number #N, we place the corresponding argument
+    % value into a new token list register \toks#N
+    \expandafter\putargsintokens@\saveparamlist@,;,%
+    % Then, we expand the body so that argument are replaced by their
+    % values. The trick for values not to be expanded themselves is that they
+    % are within tokens and that tokens expand only once in an \edef .
+    \edef\@tempc{\csname mac.\macroname .body\endcsname}%
+    % Now we restore the token stack pointer to free the token list registers
+    % which we have used, but we make sure that the expanded body is saved
+    % after the group.
+    \expandafter
+  \endgroup
+  \expandafter\def\expandafter\@tempa\expandafter{\@tempc}%
+  }
+
+% Define the named-macro outside of this group and then close this group. 
+% 
+\def\macargexpandinbody@{% 
+  \expandafter
+  \endgroup
+  \macargdeflist@
+  % First, replace the macro arguments in the body by their values; the
+  % result is in \@tempa .
+  \macvalstoargs@
+  % Then we point at the \norecurse or \gobble (for recursive) macro value
+  % with \@tempb .
+  \expandafter\let\expandafter\@tempb\csname mac.\macroname .recurse\endcsname
+  % Depending on whether it is recursive or not, we need some trailing
+  % \egroup .
+  \ifx\@tempb\gobble
+     \let\@tempc\relax
+  \else
+     \let\@tempc\egroup
+  \fi
+  % And now we do the real job:
+  \edef\@tempd{\noexpand\@tempb{\macroname}\noexpand\scanmacro{\@tempa}\@tempc}%
+  \@tempd
+}
+
+\def\putargsintokens@#1,{%
+  \if#1;\let\next\relax
+  \else
+    \let\next\putargsintokens@
+    % First we allocate the new token list register, and give it a temporary
+    % alias \@tempb .
+    \toksdef\@tempb\the\paramno
+    % Then we place the argument value into that token list register.
+    \expandafter\let\expandafter\@tempa\csname macarg.#1\endcsname
+    \expandafter\@tempb\expandafter{\@tempa}%
+    \advance\paramno by 1\relax
+  \fi
+  \next
+}
+
+% Trailing missing arguments are set to empty.
+% 
+\def\setemptyargvalues@{%
+  \ifx\paramlist\nilm@
+    \let\next\macargexpandinbody@
+  \else
+    \expandafter\setemptyargvaluesparser@\paramlist\endargs@
+    \let\next\setemptyargvalues@
+  \fi
+  \next
+}
+
+\def\setemptyargvaluesparser@#1,#2\endargs@{%
+  \expandafter\def\expandafter\@tempa\expandafter{%
+    \expandafter\def\csname macarg.#1\endcsname{}}%
+  \push@\@tempa\macargdeflist@
+  \def\paramlist{#2}%
+}
+
+% #1 is the element target macro
+% #2 is the list macro
+% #3,#4\endargs@ is the list value
+\def\pop@#1#2#3,#4\endargs@{%
+   \def#1{#3}%
+   \def#2{#4}%
+}
+\long\def\longpop@#1#2#3,#4\endargs@{%
+   \long\def#1{#3}%
+   \long\def#2{#4}%
+}
+
+
+%%%%%%%%%%%%%% End of code for > 10 arguments %%%%%%%%%%%%%%%%%%
+
+
+% This defines a Texinfo @macro or @rmacro, called by \parsemacbody.
+%    \macrobody has the body of the macro in it, with placeholders for
+% its parameters, looking like "\xeatspaces{\hash 1}".
+%    \paramno is the number of parameters
+%    \paramlist is a TeX parameter text, e.g. "#1,#2,#3,"
+% There are four cases: macros of zero, one, up to nine, and many arguments.
+% \xdef is used so that macro definitions will survive the file
+% they're defined in: @include reads the file inside a group.
+%
+\def\defmacro{%
+  \let\hash=##% convert placeholders to macro parameter chars
+  \ifnum\paramno=1
+    \def\xeatspaces##1{##1}%
+    % This removes the pair of braces around the argument.  We don't
+    % use \eatspaces, because this can cause ends of lines to be lost
+    % when the argument to \eatspaces is read, leading to line-based
+    % commands like "@itemize" not being read correctly.
+  \else
+    \let\xeatspaces\relax % suppress expansion
+  \fi
+  \ifcase\paramno
+  % 0
+    \expandafter\xdef\csname\the\macname\endcsname{%
+      \bgroup
+        \noexpand\spaceisspace
+        \noexpand\endlineisspace
+        \noexpand\expandafter % skip any whitespace after the macro name.
+        \expandafter\noexpand\csname\the\macname @@@\endcsname}%
+    \expandafter\xdef\csname\the\macname @@@\endcsname{%
+      \egroup
+      \noexpand\scanmacro{\macrobody}}%
+  \or % 1
+    \expandafter\xdef\csname\the\macname\endcsname{%
+       \bgroup
+       \noexpand\braceorline
+       \expandafter\noexpand\csname\the\macname @@@\endcsname}%
+    \expandafter\xdef\csname\the\macname @@@\endcsname##1{%
+      \egroup
+      \noexpand\scanmacro{\macrobody}%
+      }%
+  \else % at most 9
+    \ifnum\paramno<10\relax
+      % @MACNAME sets the context for reading the macro argument
+      % @MACNAME@@ gets the argument, processes backslashes and appends a 
+      % comma.
+      % @MACNAME@@@ removes braces surrounding the argument list.
+      % @MACNAME@@@@ scans the macro body with arguments substituted.
+      \expandafter\xdef\csname\the\macname\endcsname{%
+        \bgroup
+        \noexpand\expandafter  % This \expandafter skips any spaces after the
+        \noexpand\macroargctxt % macro before we change the catcode of space.
+        \noexpand\expandafter
+        \expandafter\noexpand\csname\the\macname @@\endcsname}%
+      \expandafter\xdef\csname\the\macname @@\endcsname##1{%
+          \noexpand\passargtomacro
+          \expandafter\noexpand\csname\the\macname @@@\endcsname{##1,}}%
+      \expandafter\xdef\csname\the\macname @@@\endcsname##1{%
+          \expandafter\noexpand\csname\the\macname @@@@\endcsname ##1}%
+      \expandafter\expandafter
+      \expandafter\xdef
+      \expandafter\expandafter
+        \csname\the\macname @@@@\endcsname\paramlist{%
+          \egroup\noexpand\scanmacro{\macrobody}}%
+    \else % 10 or more:
+      \expandafter\xdef\csname\the\macname\endcsname{%
+        \noexpand\getargvals@{\the\macname}{\argl}%
+      }%
+      \global\expandafter\let\csname mac.\the\macname .body\endcsname\macrobody
+      \global\expandafter\let\csname mac.\the\macname .recurse\endcsname\gobble
+    \fi
+  \fi}
+
+\catcode `\@\texiatcatcode\relax % end private-to-Texinfo catcodes
+
+\def\norecurse#1{\bgroup\cslet{#1}{macsave.#1}}
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%
+{\catcode`\@=0 \catcode`\\=13  % We need to manipulate \ so use @ as escape
+@catcode`@_=11  % private names
+@catcode`@!=11  % used as argument separator
+
+% \passargtomacro#1#2 -
+% Call #1 with a list of tokens #2, with any doubled backslashes in #2
+% compressed to one.
+%
+% This implementation works by expansion, and not execution (so we cannot use 
+% \def or similar).  This reduces the risk of this failing in contexts where 
+% complete expansion is done with no execution (for example, in writing out to 
+% an auxiliary file for an index entry).
+% 
+% State is kept in the input stream: the argument passed to
+% @look_ahead, @gobble_and_check_finish and @add_segment is
+%
+% THE_MACRO ARG_RESULT ! {PENDING_BS} NEXT_TOKEN  (... rest of input)
+%
+% where:
+% THE_MACRO - name of the macro we want to call
+% ARG_RESULT - argument list we build to pass to that macro
+% PENDING_BS - either a backslash or nothing
+% NEXT_TOKEN - used to look ahead in the input stream to see what's coming next
+
+@gdef@passargtomacro#1#2{%
+  @add_segment #1!{}@relax#2\@_finish\%
+}
+@gdef@_finish{@_finishx} @global@let@_finishx@relax
+
+% #1 - THE_MACRO ARG_RESULT
+% #2 - PENDING_BS
+% #3 - NEXT_TOKEN
+% #4 used to look ahead
+%
+% If the next token is not a backslash, process the rest of the argument; 
+% otherwise, remove the next token.
+@gdef@look_ahead#1!#2#3#4{%
+  @ifx#4\%
+   @expandafter@gobble_and_check_finish 
+  @else
+   @expandafter@add_segment
+  @fi#1!{#2}#4#4%
+}
+
+% #1 - THE_MACRO ARG_RESULT
+% #2 - PENDING_BS
+% #3 - NEXT_TOKEN
+% #4 should be a backslash, which is gobbled.
+% #5 looks ahead
+%
+% Double backslash found.  Add a single backslash, and look ahead.
+@gdef@gobble_and_check_finish#1!#2#3#4#5{%
+  @add_segment#1\!{}#5#5%
+}
+
+@gdef@is_fi{@fi}
+
+% #1 - THE_MACRO ARG_RESULT
+% #2 - PENDING_BS
+% #3 - NEXT_TOKEN
+% #4 is input stream until next backslash
+%
+% Input stream is either at the start of the argument, or just after a 
+% backslash sequence, either a lone backslash, or a doubled backslash.  
+% NEXT_TOKEN contains the first token in the input stream: if it is \finish, 
+% finish; otherwise, append to ARG_RESULT the segment of the argument up until
+% the next backslash.  PENDING_BACKSLASH contains a backslash to represent
+% a backslash just before the start of the input stream that has not been
+% added to ARG_RESULT.
+@gdef@add_segment#1!#2#3#4\{%
+@ifx#3@_finish
+  @call_the_macro#1!%
+@else
+  % append the pending backslash to the result, followed by the next segment
+  @expandafter@is_fi@look_ahead#1#2#4!{\}@fi
+  % this @fi is discarded by @look_ahead.
+  % we can't get rid of it with \expandafter because we don't know how 
+  % long #4 is.
+}
+
+% #1 - THE_MACRO
+% #2 - ARG_RESULT
+% #3 discards the rest of the conditional in @add_segment, and @is_fi ends the
+% conditional.
+@gdef@call_the_macro#1#2!#3@fi{@is_fi #1{#2}}
+
+}
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+% \braceorline MAC is used for a one-argument macro MAC.  It checks
+% whether the next non-whitespace character is a {.  It sets the context
+% for reading the argument (slightly different in the two cases).  Then,
+% to read the argument: in the whole-line case it calls the regular
+% \parsearg MAC; in the lbrace case, it calls \passargtomacro MAC.
+% 
+\def\braceorline#1{\let\macnamexxx=#1\futurelet\nchar\braceorlinexxx}
+\def\braceorlinexxx{%
+  \ifx\nchar\bgroup
+    \macroargctxt
+    \expandafter\passargtomacro
+  \else
+    \macrolineargctxt\expandafter\parsearg
+  \fi \macnamexxx}
+
+
+% @alias.
+% We need some trickery to remove the optional spaces around the equal
+% sign.  Make them active and then expand them all to nothing.
+%
+\def\alias{\parseargusing\obeyspaces\aliasxxx}
+\def\aliasxxx #1{\aliasyyy#1\relax}
+\def\aliasyyy #1=#2\relax{%
+  {%
+    \expandafter\let\obeyedspace=\empty
+    \addtomacrolist{#1}%
+    \xdef\next{\global\let\makecsname{#1}=\makecsname{#2}}%
+  }%
+  \next
+}
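+
+% For illustration (hypothetical command names), a document can write
+%
+%   @alias warning = strong
+%
+% and then use @warning{...} wherever @strong{...} would be accepted;
+% spaces around the `=' are permitted and ignored.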
+
+
+\message{cross references,}
+
+\newwrite\auxfile
+\newif\ifhavexrefs    % True if xref values are known.
+\newif\ifwarnedxrefs  % True if we warned once that they aren't known.
+
+% @inforef is relatively simple.
+\def\inforef #1{\inforefzzz #1,,,,**}
+\def\inforefzzz #1,#2,#3,#4**{%
+  \putwordSee{} \putwordInfo{} \putwordfile{} \file{\ignorespaces #3{}},
+  node \samp{\ignorespaces#1{}}}
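+
+% For illustration (hypothetical node and file names):
+%
+%   @inforef{Top, , texinfo}
+%
+% produces text of the form "See Info file `texinfo', node `Top'".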
+
+% @node's only job in TeX is to define \lastnode, which is used in
+% cross-references.  The @node line might or might not have commas, and
+% might or might not have spaces before the first comma, like:
+% @node foo , bar , ...
+% We don't want such trailing spaces in the node name.
+%
+\parseargdef\node{\checkenv{}\donode #1 ,\finishnodeparse}
+%
+% also remove a trailing comma, in case of something like this:
+% @node Help-Cross,  ,  , Cross-refs
+\def\donode#1 ,#2\finishnodeparse{\dodonode #1,\finishnodeparse}
+\def\dodonode#1,#2\finishnodeparse{\gdef\lastnode{#1}}
+
+\let\nwnode=\node
+\let\lastnode=\empty
+
+% Write a cross-reference definition for the current node.  #1 is the
+% type (Ynumbered, Yappendix, Ynothing).
+%
+\def\donoderef#1{%
+  \ifx\lastnode\empty\else
+    \setref{\lastnode}{#1}%
+    \global\let\lastnode=\empty
+  \fi
+}
+
+% @anchor{NAME} -- define xref target at arbitrary point.
+%
+\newcount\savesfregister
+%
+\def\savesf{\relax \ifhmode \savesfregister=\spacefactor \fi}
+\def\restoresf{\relax \ifhmode \spacefactor=\savesfregister \fi}
+\def\anchor#1{\savesf \setref{#1}{Ynothing}\restoresf \ignorespaces}
+
+% \setref{NAME}{SNT} defines a cross-reference point NAME (a node or an
+% anchor), which consists of three parts:
+% 1) NAME-title - the current sectioning name taken from \lastsection,
+%                 or the anchor name.
+% 2) NAME-snt   - section number and type, passed as the SNT arg, or
+%                 empty for anchors.
+% 3) NAME-pg    - the page number.
+%
+% This is called from \donoderef, \anchor, and \dofloat.  In the case of
+% floats, there is an additional part, which is not written here:
+% 4) NAME-lof   - the text as it should appear in a @listoffloats.
+%
+\def\setref#1#2{%
+  \pdfmkdest{#1}%
+  \iflinks
+    {%
+      \requireauxfile
+      \atdummies  % preserve commands, but don't expand them
+      % match definition in \xrdef, \refx, \xrefX.
+      \def\value##1{##1}%
+      \edef\writexrdef##1##2{%
+       \write\auxfile{@xrdef{#1-% #1 of \setref, expanded by the \edef
+         ##1}{##2}}% these are parameters of \writexrdef
+      }%
+      \toks0 = \expandafter{\lastsection}%
+      \immediate \writexrdef{title}{\the\toks0 }%
+      \immediate \writexrdef{snt}{\csname #2\endcsname}% \Ynumbered etc.
+      \safewhatsit{\writexrdef{pg}{\folio}}% will be written later, at \shipout
+    }%
+  \fi
+}
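+
+% For illustration, for a node or anchor named `mynode' (hypothetical),
+% the lines written to the .aux file are roughly of this form:
+%
+%   @xrdef{mynode-title}{Some Section Title}
+%   @xrdef{mynode-snt}{Section 1.2}
+%   @xrdef{mynode-pg}{12}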
+
+% @xrefautomaticsectiontitle on|off says whether @section(ing) names are used
+% automatically in xrefs, if the third arg is not explicitly specified.
+% This was provided as a "secret" @set xref-automatic-section-title
+% variable; now it's official.
+% 
+\parseargdef\xrefautomaticsectiontitle{%
+  \def\temp{#1}%
+  \ifx\temp\onword
+    \expandafter\let\csname SETxref-automatic-section-title\endcsname
+      = \empty
+  \else\ifx\temp\offword
+    \expandafter\let\csname SETxref-automatic-section-title\endcsname
+      = \relax
+  \else
+    \errhelp = \EMsimple
+    \errmessage{Unknown @xrefautomaticsectiontitle value `\temp',
+                must be on|off}%
+  \fi\fi
+}
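+
+% For illustration, placing
+%
+%   @xrefautomaticsectiontitle on
+%
+% near the start of a manual makes @xref{Some Node} (a hypothetical node
+% name) print the section title of that node instead of the node name.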
+
+% 
+% @xref, @pxref, and @ref generate cross-references.  For \xrefX, #1 is
+% the node name, #2 the name of the Info cross-reference, #3 the printed
+% node name, #4 the name of the Info file, #5 the name of the printed
+% manual.  All but the node name can be omitted.
+%
+\def\pxref{\putwordsee{} \xrefXX}
+\def\xref{\putwordSee{} \xrefXX}
+\def\ref{\xrefXX}
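+
+% For illustration (hypothetical node, file, and manual names):
+%
+%   @xref{Installing, , Installation hints, mydoc, My Manual}.
+%   (@pxref{Installing}) works inside parentheses; @ref{Installing}
+%   produces the bare reference with no leading "See".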
+
+\def\xrefXX#1{\def\xrefXXarg{#1}\futurelet\tokenafterxref\xrefXXX}
+\def\xrefXXX{\expandafter\xrefX\expandafter[\xrefXXarg,,,,,,,]}
+%
+\newbox\toprefbox
+\newbox\printedrefnamebox
+\newbox\infofilenamebox
+\newbox\printedmanualbox
+%
+\def\xrefX[#1,#2,#3,#4,#5,#6]{\begingroup
+  \unsepspaces
+  %
+  % Get args without leading/trailing spaces.
+  \def\printedrefname{\ignorespaces #3}%
+  \setbox\printedrefnamebox = \hbox{\printedrefname\unskip}%
+  %
+  \def\infofilename{\ignorespaces #4}%
+  \setbox\infofilenamebox = \hbox{\infofilename\unskip}%
+  %
+  \def\printedmanual{\ignorespaces #5}%
+  \setbox\printedmanualbox  = \hbox{\printedmanual\unskip}%
+  %
+  % If the printed reference name (arg #3) was not explicitly given in
+  % the @xref, figure out what we want to use.
+  \ifdim \wd\printedrefnamebox = 0pt
+    % No printed node name was explicitly given.
+    \expandafter\ifx\csname SETxref-automatic-section-title\endcsname \relax
+      % Not auto section-title: use node name inside the square brackets.
+      \def\printedrefname{\ignorespaces #1}%
+    \else
+      % Auto section-title: use chapter/section title inside
+      % the square brackets if we have it.
+      \ifdim \wd\printedmanualbox > 0pt
+        % It is in another manual, so we don't have it; use node name.
+        \def\printedrefname{\ignorespaces #1}%
+      \else
+        \ifhavexrefs
+          % We (should) know the real title if we have the xref values.
+          \def\printedrefname{\refx{#1-title}{}}%
+        \else
+          % Otherwise just copy the Info node name.
+          \def\printedrefname{\ignorespaces #1}%
+        \fi%
+      \fi
+    \fi
+  \fi
+  %
+  % Make link in pdf output.
+  \ifpdf
+    % For pdfTeX and LuaTeX
+    {\indexnofonts
+     \makevalueexpandable
+     \turnoffactive
+     % This expands tokens, so do it after making catcode changes, so _
+     % etc. don't get their TeX definitions.  This ignores all spaces in
+     % #4, including (wrongly) those in the middle of the filename.
+     \getfilename{#4}%
+     %
+     % This (wrongly) does not take account of leading or trailing
+     % spaces in #1, which should be ignored.
+     \setpdfdestname{#1}%
+     %
+     \ifx\pdfdestname\empty
+       \def\pdfdestname{Top}% no empty targets
+     \fi
+     %
+     \leavevmode
+     \startlink attr{/Border [0 0 0]}%
+     \ifnum\filenamelength>0
+       goto file{\the\filename.pdf} name{\pdfdestname}%
+     \else
+       goto name{\pdfmkpgn{\pdfdestname}}%
+     \fi
+    }%
+    \setcolor{\linkcolor}%
+  \else
+    \ifx\XeTeXrevision\thisisundefined
+    \else
+      % For XeTeX
+      {\indexnofonts
+       \makevalueexpandable
+       \turnoffactive
+       % This expands tokens, so do it after making catcode changes, so _
+       % etc. don't get their TeX definitions.  This ignores all spaces in
+       % #4, including (wrongly) those in the middle of the filename.
+       \getfilename{#4}%
+       %
+       % This (wrongly) does not take account of leading or trailing
+       % spaces in #1, which should be ignored.
+       \setpdfdestname{#1}%
+       %
+       \ifx\pdfdestname\empty
+         \def\pdfdestname{Top}% no empty targets
+       \fi
+       %
+       \leavevmode
+       \ifnum\filenamelength>0
+         % With default settings,
+         % XeTeX (xdvipdfmx) replaces link destination names with integers.
+         % In this case, the replaced destination names of
+         % remote PDFs are no longer known.  In order to avoid a replacement,
+         % you can use xdvipdfmx's command line option `-C 0x0010'.
+         % If you use XeTeX 0.99996+ (TeX Live 2016+),
+         % this command line option is no longer necessary
+         % because we can use the `dvipdfmx:config' special.
+         \special{pdf:bann << /Border [0 0 0] /Type /Annot /Subtype /Link /A
+           << /S /GoToR /F (\the\filename.pdf) /D (\pdfdestname) >> >>}%
+       \else
+         \special{pdf:bann << /Border [0 0 0] /Type /Annot /Subtype /Link /A
+           << /S /GoTo /D (\pdfdestname) >> >>}%
+       \fi
+      }%
+      \setcolor{\linkcolor}%
+    \fi
+  \fi
+  {%
+    % Have to otherify everything special to allow the \csname to
+    % include an _ in the xref name, etc.
+    \indexnofonts
+    \turnoffactive
+    \def\value##1{##1}%
+    \expandafter\global\expandafter\let\expandafter\Xthisreftitle
+      \csname XR#1-title\endcsname
+  }%
+  %
+  % Float references are printed completely differently: "Figure 1.2"
+  % instead of "[somenode], p.3".  \iffloat distinguishes them by
+  % \Xthisreftitle being set to a magic string.
+  \iffloat\Xthisreftitle
+    % If the user specified the print name (third arg) to the ref,
+    % print it instead of our usual "Figure 1.2".
+    \ifdim\wd\printedrefnamebox = 0pt
+      \refx{#1-snt}{}%
+    \else
+      \printedrefname
+    \fi
+    %
+    % If the user also gave the printed manual name (fifth arg), append
+    % "in MANUALNAME".
+    \ifdim \wd\printedmanualbox > 0pt
+      \space \putwordin{} \cite{\printedmanual}%
+    \fi
+  \else
+    % node/anchor (non-float) references.
+    % 
+    % If we use \unhbox to print the node names, TeX does not insert
+    % empty discretionaries after hyphens, which means that it will not
+    % find a line break at a hyphen in node names.  Since some manuals
+    % are best written with fairly long node names, containing hyphens,
+    % this is a loss.  Therefore, we give the text of the node name
+    % again, so it is as if TeX is seeing it for the first time.
+    % 
+    \ifdim \wd\printedmanualbox > 0pt
+      % Cross-manual reference with a printed manual name.
+      % 
+      \crossmanualxref{\cite{\printedmanual\unskip}}%
+    %
+    \else\ifdim \wd\infofilenamebox > 0pt
+      % Cross-manual reference with only an info filename (arg 4), no
+      % printed manual name (arg 5).  This is essentially the same as
+      % the case above; we output the filename, since we have nothing else.
+      % 
+      \crossmanualxref{\code{\infofilename\unskip}}%
+    %
+    \else
+      % Reference within this manual.
+      %
+      % _ (for example) has to be the character _ for the purposes of the
+      % control sequence corresponding to the node, but it has to expand
+      % into the usual \leavevmode...\vrule stuff for purposes of
+      % printing. So we \turnoffactive for the \refx-snt, back on for the
+      % printing, back off for the \refx-pg.
+      {\turnoffactive
+       % Only output a following space if the -snt ref is nonempty; for
+       % @unnumbered and @anchor, it won't be.
+       \setbox2 = \hbox{\ignorespaces \refx{#1-snt}{}}%
+       \ifdim \wd2 > 0pt \refx{#1-snt}\space\fi
+      }%
+      % output the `[mynode]' via the macro below so it can be overridden.
+      \xrefprintnodename\printedrefname
+      %
+      % But we always want a comma and a space:
+      ,\space
+      %
+      % output the `page 3'.
+      \turnoffactive \putwordpage\tie\refx{#1-pg}{}%
+      % Add a , if xref followed by a space
+      \if\space\noexpand\tokenafterxref ,%
+      \else\ifx\       \tokenafterxref ,% @TAB
+      \else\ifx\*\tokenafterxref ,%   @*
+      \else\ifx\ \tokenafterxref ,%   @SPACE
+      \else\ifx\
+                \tokenafterxref ,%    @NL
+      \else\ifx\tie\tokenafterxref ,% @tie
+      \fi\fi\fi\fi\fi\fi
+    \fi\fi
+  \fi
+  \endlink
+\endgroup}
+
+% Output a cross-manual xref to #1.  Used just above (twice).
+% 
+% Only include the text "Section ``foo'' in" if the foo is neither
+% missing or Top.  Thus, @xref{,,,foo,The Foo Manual} outputs simply
+% "see The Foo Manual", the idea being to refer to the whole manual.
+% 
+% But, this being TeX, we can't easily compare our node name against the
+% string "Top" while ignoring the possible spaces before and after in
+% the input.  By adding the arbitrary 7sp below, we make it much less
+% likely that a real node name would have the same width as "Top" (e.g.,
+% in a monospaced font).  Hopefully it will never happen in practice.
+% 
+% For the same basic reason, we retypeset the "Top" at every
+% reference, since the current font is indeterminate.
+% 
+\def\crossmanualxref#1{%
+  \setbox\toprefbox = \hbox{Top\kern7sp}%
+  \setbox2 = \hbox{\ignorespaces \printedrefname \unskip \kern7sp}%
+  \ifdim \wd2 > 7sp  % nonempty?
+    \ifdim \wd2 = \wd\toprefbox \else  % same as Top?
+      \putwordSection{} ``\printedrefname'' \putwordin{}\space
+    \fi
+  \fi
+  #1%
+}
+
+% This macro is called from \xrefX for the `[nodename]' part of xref
+% output.  It's a separate macro only so it can be changed more easily,
+% since square brackets don't work well in some documents.  Particularly
+% one that Bob is working on :).
+%
+\def\xrefprintnodename#1{[#1]}
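+% For illustration, a manual that cannot use square brackets could
+% override it, e.g. (hypothetical choice of delimiters):
+%   \def\xrefprintnodename#1{`#1'}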
+
+% Things referred to by \setref.
+%
+\def\Ynothing{}
+\def\Yomitfromtoc{}
+\def\Ynumbered{%
+  \ifnum\secno=0
+    \putwordChapter@tie \the\chapno
+  \else \ifnum\subsecno=0
+    \putwordSection@tie \the\chapno.\the\secno
+  \else \ifnum\subsubsecno=0
+    \putwordSection@tie \the\chapno.\the\secno.\the\subsecno
+  \else
+    \putwordSection@tie \the\chapno.\the\secno.\the\subsecno.\the\subsubsecno
+  \fi\fi\fi
+}
+\def\Yappendix{%
+  \ifnum\secno=0
+     \putwordAppendix@tie @char\the\appendixno{}%
+  \else \ifnum\subsecno=0
+     \putwordSection@tie @char\the\appendixno.\the\secno
+  \else \ifnum\subsubsecno=0
+    \putwordSection@tie @char\the\appendixno.\the\secno.\the\subsecno
+  \else
+    \putwordSection@tie
+      @char\the\appendixno.\the\secno.\the\subsecno.\the\subsubsecno
+  \fi\fi\fi
+}
+
+% \refx{NAME}{SUFFIX} - reference a cross-reference string named NAME.  SUFFIX 
+% is output afterwards if non-empty.
+\def\refx#1#2{%
+  \requireauxfile
+  {%
+    \indexnofonts
+    \otherbackslash
+    \def\value##1{##1}%
+    \expandafter\global\expandafter\let\expandafter\thisrefX
+      \csname XR#1\endcsname
+  }%
+  \ifx\thisrefX\relax
+    % If not defined, say something at least.
+    \angleleft un\-de\-fined\angleright
+    \iflinks
+      \ifhavexrefs
+        {\toks0 = {#1}% avoid expansion of possibly-complex value
+         \message{\linenumber Undefined cross reference `\the\toks0'.}}%
+      \else
+        \ifwarnedxrefs\else
+          \global\warnedxrefstrue
+          \message{Cross reference values unknown; you must run TeX again.}%
+        \fi
+      \fi
+    \fi
+  \else
+    % It's defined, so just use it.
+    \thisrefX
+  \fi
+  #2% Output the suffix in any case.
+}
+
+% This is the macro invoked by entries in the aux file.  Define a control 
+% sequence for a cross-reference target (we prepend XR to the control sequence 
+% name to avoid collisions).  The value is the page number.  If this is a float
+% type, we have more work to do.
+%
+\def\xrdef#1#2{%
+  {% Expand the node or anchor name to remove control sequences.
+   % \turnoffactive stops 8-bit characters being changed to commands
+   % like @'e.  \refx does the same to retrieve the value in the definition.
+    \indexnofonts
+    \turnoffactive
+    \def\value##1{##1}%
+    \xdef\safexrefname{#1}%
+  }%
+  %
+  \bgroup
+    \expandafter\gdef\csname XR\safexrefname\endcsname{#2}%
+  \egroup
+  % We put the \gdef inside a group to avoid the definitions building up on 
+  % TeX's save stack, which can cause it to run out of space for aux files with
+  % thousands of lines.  \gdef doesn't use the save stack, but \csname does
+  % when it defines an unknown control sequence as \relax. 
+  %
+  % Was that xref control sequence that we just defined for a float?
+  \expandafter\iffloat\csname XR\safexrefname\endcsname
+    % it was a float, and we have the (safe) float type in \iffloattype.
+    \expandafter\let\expandafter\floatlist
+      \csname floatlist\iffloattype\endcsname
+    %
+    % Is this the first time we've seen this float type?
+    \expandafter\ifx\floatlist\relax
+      \toks0 = {\do}% yes, so just \do
+    \else
+      % had it before, so preserve previous elements in list.
+      \toks0 = \expandafter{\floatlist\do}%
+    \fi
+    %
+    % Remember this xref in the control sequence \floatlistFLOATTYPE,
+    % for later use in \listoffloats.
+    \expandafter\xdef\csname floatlist\iffloattype\endcsname{\the\toks0
+      {\safexrefname}}%
+  \fi
+}
+
+% If working on a large document in chapters, it is convenient to
+% be able to disable indexing, cross-referencing, and contents, for test runs.
+% This is done with @novalidate at the beginning of the file.
+%
+\newif\iflinks \linkstrue % by default we want the aux files.
+\let\novalidate = \linksfalse
+
+% Used when writing to the aux file, or when using data from it.
+\def\requireauxfile{%
+  \iflinks
+    \tryauxfile
+    % Open the new aux file.  TeX will close it automatically at exit.
+    \immediate\openout\auxfile=\jobname.aux
+  \fi
+  \global\let\requireauxfile=\relax   % Only do this once.
+}
+
+% Read the last existing aux file, if any.  No error if none exists.
+%
+\def\tryauxfile{%
+  \openin 1 \jobname.aux
+  \ifeof 1 \else
+    \readdatafile{aux}%
+    \global\havexrefstrue
+  \fi
+  \closein 1
+}
+
+\def\setupdatafile{%
+  \catcode`\^^@=\other
+  \catcode`\^^A=\other
+  \catcode`\^^B=\other
+  \catcode`\^^C=\other
+  \catcode`\^^D=\other
+  \catcode`\^^E=\other
+  \catcode`\^^F=\other
+  \catcode`\^^G=\other
+  \catcode`\^^H=\other
+  \catcode`\^^K=\other
+  \catcode`\^^L=\other
+  \catcode`\^^N=\other
+  \catcode`\^^P=\other
+  \catcode`\^^Q=\other
+  \catcode`\^^R=\other
+  \catcode`\^^S=\other
+  \catcode`\^^T=\other
+  \catcode`\^^U=\other
+  \catcode`\^^V=\other
+  \catcode`\^^W=\other
+  \catcode`\^^X=\other
+  \catcode`\^^Z=\other
+  \catcode`\^^[=\other
+  \catcode`\^^\=\other
+  \catcode`\^^]=\other
+  \catcode`\^^^=\other
+  \catcode`\^^_=\other
+  % It was suggested to set the catcode of ^ to 7, which would allow ^^e4 etc.
+  % in xref tags, i.e., node names.  But since ^^e4 notation isn't
+  % supported in the main text, it doesn't seem desirable.  Furthermore,
+  % that is not enough: for node names that actually contain a ^
+  % character, we would end up writing a line like this: 'xrdef {'hat
+  % b-title}{'hat b} and \xrdef does a \csname...\endcsname on the first
+  % argument, and \hat is not an expandable control sequence.  It could
+  % all be worked out, but why?  Either we support ^^ or we don't.
+  %
+  % The other change necessary for this was to define \auxhat:
+  % \def\auxhat{\def^{'hat }}% extra space so ok if followed by letter
+  % and then to call \auxhat in \setq.
+  %
+  \catcode`\^=\other
+  %
+  % Special characters.  Should be turned off anyway, but...
+  \catcode`\~=\other
+  \catcode`\[=\other
+  \catcode`\]=\other
+  \catcode`\"=\other
+  \catcode`\_=\other
+  \catcode`\|=\other
+  \catcode`\<=\other
+  \catcode`\>=\other
+  \catcode`\$=\other
+  \catcode`\#=\other
+  \catcode`\&=\other
+  \catcode`\%=\other
+  \catcode`+=\other % avoid \+ for paranoia even though we've turned it off
+  %
+  % This is to support \ in node names and titles, since the \
+  % characters end up in a \csname.  It's easier than
+  % leaving it active and making its active definition an actual \
+  % character.  What I don't understand is why it works in the *value*
+  % of the xrdef.  Seems like it should be a catcode12 \, and that
+  % should not typeset properly.  But it works, so I'm moving on for
+  % now.  --karl, 15jan04.
+  \catcode`\\=\other
+  %
+  % @ is our escape character in .aux files, and we need braces.
+  \catcode`\{=1
+  \catcode`\}=2
+  \catcode`\@=0
+}
+
+\def\readdatafile#1{%
+\begingroup
+  \setupdatafile
+  \input\jobname.#1
+\endgroup}
+
+
+\message{insertions,}
+% including footnotes.
+
+\newcount \footnoteno
+
+% The trailing space in the following definition for supereject is
+% vital for proper filling; pages come out unaligned when you do a
+% pagealignmacro call if that space before the closing brace is
+% removed. (Generally, numeric constants should always be followed by a
+% space to prevent strange expansion errors.)
+\def\supereject{\par\penalty -20000\footnoteno =0 }
+
+% @footnotestyle is meaningful for Info output only.
+\let\footnotestyle=\comment
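+
+% For illustration, a footnote is written inline in the Texinfo source
+% (hypothetical text):
+%
+%   ...as discussed earlier.@footnote{See the next chapter for details.}
+%
+% The number is assigned automatically by the code below.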
+
+{\catcode `\@=11
+%
+% Auto-number footnotes.  Otherwise like plain.
+\gdef\footnote{%
+  \global\advance\footnoteno by \@ne
+  \edef\thisfootno{$^{\the\footnoteno}$}%
+  %
+  % In case the footnote comes at the end of a sentence, preserve the
+  % extra spacing after we do the footnote number.
+  \let\@sf\empty
+  \ifhmode\edef\@sf{\spacefactor\the\spacefactor}\ptexslash\fi
+  %
+  % Remove inadvertent blank space before typesetting the footnote number.
+  \unskip
+  \thisfootno\@sf
+  \dofootnote
+}%
+
+% Don't bother with the trickery in plain.tex to not require the
+% footnote text as a parameter.  Our footnotes don't need to be so general.
+%
+% Oh yes, they do; otherwise, @ifset (and anything else that uses
+% \parseargline) fails inside footnotes because the tokens are fixed when
+% the footnote is read.  --karl, 16nov96.
+%
+\gdef\dofootnote{%
+  \insert\footins\bgroup
+  %
+  % Nested footnotes are not supported in TeX, that would take a lot
+  % more work.  (\startsavinginserts does not suffice.)
+  \let\footnote=\errfootnotenest
+  %
+  % We want to typeset this text as a normal paragraph, even if the
+  % footnote reference occurs in (for example) a display environment.
+  % So reset some parameters.
+  \hsize=\txipagewidth
+  \interlinepenalty\interfootnotelinepenalty
+  \splittopskip\ht\strutbox % top baseline for broken footnotes
+  \splitmaxdepth\dp\strutbox
+  \floatingpenalty\@MM
+  \leftskip\z@skip
+  \rightskip\z@skip
+  \spaceskip\z@skip
+  \xspaceskip\z@skip
+  \parindent\defaultparindent
+  %
+  \smallfonts \rm
+  %
+  % Because we use hanging indentation in footnotes, a @noindent appears
+  % to exdent this text, so make it be a no-op.  makeinfo does not use
+  % hanging indentation so @noindent can still be needed within footnote
+  % text after an @example or the like (not that this is good style).
+  \let\noindent = \relax
+  %
+  % Hang the footnote text off the number.  Use \everypar in case the
+  % footnote extends for more than one paragraph.
+  \everypar = {\hang}%
+  \textindent{\thisfootno}%
+  %
+  % Don't crash into the line above the footnote text.  Since this
+  % expands into a box, it must come within the paragraph, lest it
+  % provide a place where TeX can split the footnote.
+  \footstrut
+  %
+  % Invoke rest of plain TeX footnote routine.
+  \futurelet\next\fo@t
+}
+}%end \catcode `\@=11
+
+\def\errfootnotenest{%
+  \errhelp=\EMsimple
+  \errmessage{Nested footnotes not supported in texinfo.tex,
+    even though they work in makeinfo; sorry}
+}
+
+\def\errfootnoteheading{%
+  \errhelp=\EMsimple
+  \errmessage{Footnotes in chapters, sections, etc., are not supported}
+}
+
+% In case a @footnote appears in a vbox, save the footnote text and create
+% the real \insert just after the vbox finished.  Otherwise, the insertion
+% would be lost.
+% Similarly, if a @footnote appears inside an alignment, save the footnote
+% text to a box and make the \insert when a row of the table is finished.
+% And the same can be done for other insert classes.  --kasal, 16nov03.
+%
+% Replace the \insert primitive by a cheating macro.
+% Deeper inside, just make sure that the saved insertions are not spilled
+% out prematurely.
+%
+\def\startsavinginserts{%
+  \ifx \insert\ptexinsert
+    \let\insert\saveinsert
+  \else
+    \let\checkinserts\relax
+  \fi
+}
+
+% This \insert replacement works for both \insert\footins{foo} and
+% \insert\footins\bgroup foo\egroup, but it doesn't work for \insert27{foo}.
+%
+\def\saveinsert#1{%
+  \edef\next{\noexpand\savetobox \makeSAVEname#1}%
+  \afterassignment\next
+  % swallow the left brace
+  \let\temp =
+}
+\def\makeSAVEname#1{\makecsname{SAVE\expandafter\gobble\string#1}}
+\def\savetobox#1{\global\setbox#1 = \vbox\bgroup \unvbox#1}
+
+\def\checksaveins#1{\ifvoid#1\else \placesaveins#1\fi}
+
+\def\placesaveins#1{%
+  \ptexinsert \csname\expandafter\gobblesave\string#1\endcsname
+    {\box#1}%
+}
+
+% eat @SAVE -- beware, all of them have catcode \other:
+{
+  \def\dospecials{\do S\do A\do V\do E} \uncatcodespecials  %  ;-)
+  \gdef\gobblesave @SAVE{}
+}
+
+% initialization:
+\def\newsaveins #1{%
+  \edef\next{\noexpand\newsaveinsX \makeSAVEname#1}%
+  \next
+}
+\def\newsaveinsX #1{%
+  \csname newbox\endcsname #1%
+  \expandafter\def\expandafter\checkinserts\expandafter{\checkinserts
+    \checksaveins #1}%
+}
+
+% initialize:
+\let\checkinserts\empty
+\newsaveins\footins
+\newsaveins\margin
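+% As a hypothetical illustration only: an additional insert class created
+% elsewhere with \newinsert\figins could be covered by this same machinery
+% simply by registering it here with `\newsaveins\figins'.  (Only \footins
+% and \margin are actually registered.)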
+
+
+% @image.  We use the macros from epsf.tex to support this.
+% If epsf.tex is not installed and @image is used, we complain.
+%
+% Check for and read epsf.tex up front.  If we read it only at @image
+% time, we might be inside a group, and then its definitions would get
+% undone and the next image would fail.
+\openin 1 = epsf.tex
+\ifeof 1 \else
+  % Do not bother showing banner with epsf.tex v2.7k (available in
+  % doc/epsf.tex and on ctan).
+  \def\epsfannounce{\toks0 = }%
+  \input epsf.tex
+\fi
+\closein 1
+%
+% We will only complain once about lack of epsf.tex.
+\newif\ifwarnednoepsf
+\newhelp\noepsfhelp{epsf.tex must be installed for images to
+  work.  It is also included in the Texinfo distribution, or you can get
+  it from https://ctan.org/texarchive/macros/texinfo/texinfo/doc/epsf.tex.}
+%
+\def\image#1{%
+  \ifx\epsfbox\thisisundefined
+    \ifwarnednoepsf \else
+      \errhelp = \noepsfhelp
+      \errmessage{epsf.tex not found, images will be ignored}%
+      \global\warnednoepsftrue
+    \fi
+  \else
+    \imagexxx #1,,,,,\finish
+  \fi
+}
+%
+% Arguments to @image:
+% #1 is (mandatory) image filename; we tack on .eps extension.
+% #2 is (optional) width, #3 is (optional) height.
+% #4 is (ignored optional) html alt text.
+% #5 is (ignored optional) extension.
+% #6 is just the usual extra ignored arg for parsing stuff.
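+%
+% An illustrative Texinfo usage sketch (file name, size and alt text are
+% invented):
+%   @image{figures/overview,3in,,overview diagram,png}
+% which is parsed here as #1=figures/overview, #2=3in, #3 empty, with the
+% alt text and extension arguments ignored for the TeX output.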
+\newif\ifimagevmode
+\def\imagexxx#1,#2,#3,#4,#5,#6\finish{\begingroup
+  \catcode`\^^M = 5     % in case we're inside an example
+  \normalturnoffactive  % allow _ et al. in names
+  \def\xprocessmacroarg{\eatspaces}% in case we are being used via a macro
+  % If the image is by itself, center it.
+  \ifvmode
+    \imagevmodetrue
+  \else \ifx\centersub\centerV
+    % for @center @image, we need a vbox so we can have our vertical space
+    \imagevmodetrue
+    \vbox\bgroup % vbox has better behavior than vtop here
+  \fi\fi
+  %
+  \ifimagevmode
+    \nobreak\medskip
+    % Usually we'll have text after the image which will insert
+    % \parskip glue, so insert it here too to equalize the space
+    % above and below.
+    \nobreak\vskip\parskip
+    \nobreak
+  \fi
+  %
+  % Leave vertical mode so that indentation from an enclosing
+  %  environment such as @quotation is respected.
+  % However, if we're at the top level, we don't want the
+  %  normal paragraph indentation.
+  % On the other hand, if we are in the case of @center @image, we don't
+  %  want to start a paragraph, which will create a hsize-width box and
+  %  eradicate the centering.
+  \ifx\centersub\centerV\else \noindent \fi
+  %
+  % Output the image.
+  \ifpdf
+    % For pdfTeX and LuaTeX <= 0.80
+    \dopdfimage{#1}{#2}{#3}%
+  \else
+    \ifx\XeTeXrevision\thisisundefined
+      % For epsf.tex
+      % \epsfbox itself resets \epsf?size at each figure.
+      \setbox0 = \hbox{\ignorespaces #2}%
+        \ifdim\wd0 > 0pt \epsfxsize=#2\relax \fi
+      \setbox0 = \hbox{\ignorespaces #3}%
+        \ifdim\wd0 > 0pt \epsfysize=#3\relax \fi
+      \epsfbox{#1.eps}%
+    \else
+      % For XeTeX
+      \doxeteximage{#1}{#2}{#3}%
+    \fi
+  \fi
+  %
+  \ifimagevmode
+    \medskip  % space after a standalone image
+  \fi  
+  \ifx\centersub\centerV \egroup \fi
+\endgroup}
+
+
+% @float FLOATTYPE,LABEL,LOC ... @end float for displayed figures, tables,
+% etc.  We don't actually implement floating yet; we always include the
+% float "here".  But it seemed the best name for the future.
+%
+\envparseargdef\float{\eatcommaspace\eatcommaspace\dofloat#1, , ,\finish}
+
+% There may be a space before second and/or third parameter; delete it.
+\def\eatcommaspace#1, {#1,}
+
+% #1 is the optional FLOATTYPE, the text label for this float, typically
+% "Figure", "Table", "Example", etc.  Can't contain commas.  If omitted,
+% this float will not be numbered and cannot be referred to.
+%
+% #2 is the optional xref label.  Also must be present for the float to
+% be referable.
+%
+% #3 is the optional positioning argument; for now, it is ignored.  It
+% will somehow specify the positions allowed to float to (here, top, bottom).
+%
+% We keep a separate counter for each FLOATTYPE, which we reset at each
+% chapter-level command.
+\let\resetallfloatnos=\empty
+%
+\def\dofloat#1,#2,#3,#4\finish{%
+  \let\thiscaption=\empty
+  \let\thisshortcaption=\empty
+  %
+  % don't lose footnotes inside @float.
+  %
+  % BEWARE: once floats actually start floating, we will have to issue a
+  % warning whenever an insert appears inside a float which could possibly
+  % float. --kasal, 26may04
+  %
+  \startsavinginserts
+  %
+  % We can't be used inside a paragraph.
+  \par
+  %
+  \vtop\bgroup
+    \def\floattype{#1}%
+    \def\floatlabel{#2}%
+    \def\floatloc{#3}% we do nothing with this yet.
+    %
+    \ifx\floattype\empty
+      \let\safefloattype=\empty
+    \else
+      {%
+        % the floattype might have accents or other special characters,
+        % but we need to use it in a control sequence name.
+        \indexnofonts
+        \turnoffactive
+        \xdef\safefloattype{\floattype}%
+      }%
+    \fi
+    %
+    % If label is given but no type, we handle that as the empty type.
+    \ifx\floatlabel\empty \else
+      % We want each FLOATTYPE to be numbered separately (Figure 1,
+      % Table 1, Figure 2, ...).  (And if no label, no number.)
+      %
+      \expandafter\getfloatno\csname\safefloattype floatno\endcsname
+      \global\advance\floatno by 1
+      %
+      {%
+        % This magic value for \lastsection is output by \setref as the
+        % XREFLABEL-title value.  \xrefX uses it to distinguish float
+        % labels (which have a completely different output format) from
+        % node and anchor labels.  And \xrdef uses it to construct the
+        % lists of floats.
+        %
+        \edef\lastsection{\floatmagic=\safefloattype}%
+        \setref{\floatlabel}{Yfloat}%
+      }%
+    \fi
+    %
+    % start with \parskip glue, I guess.
+    \vskip\parskip
+    %
+    % Don't suppress indentation if a float happens to start a section.
+    \restorefirstparagraphindent
+}
+
+% we have these possibilities:
+% @float Foo,lbl & @caption{Cap}: Foo 1.1: Cap
+% @float Foo,lbl & no caption:    Foo 1.1
+% @float Foo & @caption{Cap}:     Foo: Cap
+% @float Foo & no caption:        Foo
+% @float ,lbl & @caption{Cap}:    1.1: Cap
+% @float ,lbl & no caption:       1.1
+% @float & @caption{Cap}:         Cap
+% @float & no caption:
+%
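+% A concrete sketch (the label and caption text are invented):
+%   @float Figure,fig:totals
+%   ...
+%   @caption{Monthly totals}
+%   @end float
+% falls into the first case and would be captioned roughly as
+% "Figure 1.1: Monthly totals".
+%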
+\def\Efloat{%
+    \let\floatident = \empty
+    %
+    % In all cases, if we have a float type, it comes first.
+    \ifx\floattype\empty \else \def\floatident{\floattype}\fi
+    %
+    % If we have an xref label, the number comes next.
+    \ifx\floatlabel\empty \else
+      \ifx\floattype\empty \else % if also had float type, need tie first.
+        \appendtomacro\floatident{\tie}%
+      \fi
+      % the number.
+      \appendtomacro\floatident{\chaplevelprefix\the\floatno}%
+    \fi
+    %
+    % Start the printed caption with what we've constructed in
+    % \floatident, but keep it separate; we need \floatident again.
+    \let\captionline = \floatident
+    %
+    \ifx\thiscaption\empty \else
+      \ifx\floatident\empty \else
+        \appendtomacro\captionline{: }% had ident, so need a colon between
+      \fi
+      %
+      % caption text.
+      \appendtomacro\captionline{\scanexp\thiscaption}%
+    \fi
+    %
+    % If we have anything to print, print it, with space before.
+    % Eventually this needs to become an \insert.
+    \ifx\captionline\empty \else
+      \vskip.5\parskip
+      \captionline
+      %
+      % Space below caption.
+      \vskip\parskip
+    \fi
+    %
+    % If have an xref label, write the list of floats info.  Do this
+    % after the caption, to avoid chance of it being a breakpoint.
+    \ifx\floatlabel\empty \else
+      % Write the text that goes in the lof to the aux file as
+      % \floatlabel-lof.  Besides \floatident, we include the short
+      % caption if specified, else the full caption if specified, else nothing.
+      {%
+        \requireauxfile
+        \atdummies
+        %
+        \ifx\thisshortcaption\empty
+          \def\gtemp{\thiscaption}%
+        \else
+          \def\gtemp{\thisshortcaption}%
+        \fi
+        \immediate\write\auxfile{@xrdef{\floatlabel-lof}{\floatident
+          \ifx\gtemp\empty \else : \gtemp \fi}}%
+      }%
+    \fi
+  \egroup  % end of \vtop
+  %
+  \checkinserts
+}
+
+% Append the tokens #2 to the definition of macro #1, not expanding either.
+%
+\def\appendtomacro#1#2{%
+  \expandafter\def\expandafter#1\expandafter{#1#2}%
+}
+
+% @caption, @shortcaption
+%
+\def\caption{\docaption\thiscaption}
+\def\shortcaption{\docaption\thisshortcaption}
+\def\docaption{\checkenv\float \bgroup\scanargctxt\defcaption}
+\def\defcaption#1#2{\egroup \def#1{#2}}
+
+% The parameter is the control sequence identifying the counter we are
+% going to use.  Create it if it doesn't exist and assign it to \floatno.
+\def\getfloatno#1{%
+  \ifx#1\relax
+      % Haven't seen this figure type before.
+      \csname newcount\endcsname #1%
+      %
+      % Remember to reset this floatno at the next chap.
+      \expandafter\gdef\expandafter\resetallfloatnos
+        \expandafter{\resetallfloatnos #1=0 }%
+  \fi
+  \let\floatno#1%
+}
+
+% \setref calls this to get the XREFLABEL-snt value.  We want an @xref
+% to the FLOATLABEL to expand to "Figure 3.1".  We call \setref when we
+% first read the @float command.
+%
+\def\Yfloat{\floattype@tie \chaplevelprefix\the\floatno}%
+
+% Magic string used for the XREFLABEL-title value, so \xrefX can
+% distinguish floats from other xref types.
+\def\floatmagic{!!float!!}
+
+% #1 is the control sequence we are passed; we expand into a conditional
+% which is true if #1 represents a float ref.  That is, the magic
+% \lastsection value which we \setref above.
+%
+\def\iffloat#1{\expandafter\doiffloat#1==\finish}
+%
+% #1 is (maybe) the \floatmagic string.  If so, #2 will be the
+% (safe) float type for this float.  We set \iffloattype to #2.
+%
+\def\doiffloat#1=#2=#3\finish{%
+  \def\temp{#1}%
+  \def\iffloattype{#2}%
+  \ifx\temp\floatmagic
+}
+
+% @listoffloats FLOATTYPE - print a list of floats like a table of contents.
+%
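+% For example, `@listoffloats Figure' typesets one entry (the saved caption
+% text plus page number) for every `@float Figure,label' in the document.
+%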
+\parseargdef\listoffloats{%
+  \def\floattype{#1}% floattype
+  {%
+    % the floattype might have accents or other special characters,
+    % but we need to use it in a control sequence name.
+    \indexnofonts
+    \turnoffactive
+    \xdef\safefloattype{\floattype}%
+  }%
+  %
+  % \xrdef saves the floats as a \do-list in \floatlistSAFEFLOATTYPE.
+  \expandafter\ifx\csname floatlist\safefloattype\endcsname \relax
+    \ifhavexrefs
+      % if the user said @listoffloats foo but never @float foo.
+      \message{\linenumber No `\safefloattype' floats to list.}%
+    \fi
+  \else
+    \begingroup
+      \leftskip=\tocindent  % indent these entries like a toc
+      \let\do=\listoffloatsdo
+      \csname floatlist\safefloattype\endcsname
+    \endgroup
+  \fi
+}
+
+% This is called on each entry in a list of floats.  We're passed the
+% xref label, in the form LABEL-title, which is how we save it in the
+% aux file.  We strip off the -title and look up \XRLABEL-lof, which
+% has the text we're supposed to typeset here.
+%
+% Figures without xref labels will not be included in the list (since
+% they won't appear in the aux file).
+%
+\def\listoffloatsdo#1{\listoffloatsdoentry#1\finish}
+\def\listoffloatsdoentry#1-title\finish{{%
+  % Can't fully expand XR#1-lof because it can contain anything.  Just
+  % pass the control sequence.  On the other hand, XR#1-pg is just the
+  % page number, and we want to fully expand that so we can get a link
+  % in pdf output.
+  \toksA = \expandafter{\csname XR#1-lof\endcsname}%
+  %
+  % use the same \entry macro we use to generate the TOC and index.
+  \edef\writeentry{\noexpand\entry{\the\toksA}{\csname XR#1-pg\endcsname}}%
+  \writeentry
+}}
+
+
+\message{localization,}
+
+% For single-language documents, @documentlanguage is usually given very
+% early, just after @documentencoding.  Single argument is the language
+% (de) or locale (de_DE) abbreviation.
+%
+{
+  \catcode`\_ = \active
+  \globaldefs=1
+\parseargdef\documentlanguage{%
+  \tex % read txi-??.tex file in plain TeX.
+    % Read the file by the name they passed if it exists.
+    \let_ = \normalunderscore  % normal _ character for filename test
+    \openin 1 txi-#1.tex
+    \ifeof 1
+      \documentlanguagetrywithoutunderscore #1_\finish
+    \else
+      \globaldefs = 1  % everything in the txi-LL files needs to persist
+      \input txi-#1.tex
+    \fi
+    \closein 1
+  \endgroup % end raw TeX
+}
+%
+% If they passed de_DE, and txi-de_DE.tex doesn't exist,
+% try txi-de.tex.
+%
+\gdef\documentlanguagetrywithoutunderscore#1_#2\finish{%
+  \openin 1 txi-#1.tex
+  \ifeof 1
+    \errhelp = \nolanghelp
+    \errmessage{Cannot read language file txi-#1.tex}%
+  \else
+    \globaldefs = 1  % everything in the txi-LL files needs to persist
+    \input txi-#1.tex
+  \fi
+  \closein 1
+}
+}% end of special _ catcode
+%
+\newhelp\nolanghelp{The given language definition file cannot be found or
+is empty.  Maybe you need to install it?  Putting it in the current
+directory should work if nowhere else does.}
+
+% This macro is called from txi-??.tex files; the first argument is the
+% \language name to set (without the "\lang@" prefix), the second and
+% third args are \{left,right}hyphenmin.
+%
+% The language names to pass are determined when the format is built.
+% See the etex.log file created at that time, e.g.,
+% /usr/local/texlive/2008/texmf-var/web2c/pdftex/etex.log.
+%
+% With TeX Live 2008, etex now includes hyphenation patterns for all
+% available languages.  This means we can support hyphenation in
+% Texinfo, at least to some extent.  (This still doesn't solve the
+% accented characters problem.)
+%
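+% An illustrative call (the language name and hyphenmin values are whatever
+% the txi-LL.tex file supplies): `\txisetlanguage{german}{2}{2}' selects the
+% `german' hyphenation patterns, if the format knows them, and sets both
+% hyphenmin values to 2.
+%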
+\catcode`@=11
+\def\txisetlanguage#1#2#3{%
+  % do not set the language if the name is undefined in the current TeX.
+  \expandafter\ifx\csname lang@#1\endcsname \relax
+    \message{no patterns for #1}%
+  \else
+    \global\language = \csname lang@#1\endcsname
+  \fi
+  % but there is no harm in adjusting the hyphenmin values regardless.
+  \global\lefthyphenmin = #2\relax
+  \global\righthyphenmin = #3\relax
+}
+
+% XeTeX and LuaTeX can handle Unicode natively.
+% Their default I/O uses UTF-8 sequences instead of a byte-wise operation.
+% Other TeX engines' I/O (pdfTeX, etc.) is byte-wise.
+%
+\newif\iftxinativeunicodecapable
+\newif\iftxiusebytewiseio
+
+\ifx\XeTeXrevision\thisisundefined
+  \ifx\luatexversion\thisisundefined
+    \txinativeunicodecapablefalse
+    \txiusebytewiseiotrue
+  \else
+    \txinativeunicodecapabletrue
+    \txiusebytewiseiofalse
+  \fi
+\else
+  \txinativeunicodecapabletrue
+  \txiusebytewiseiofalse
+\fi
+
+% Set I/O by bytes instead of UTF-8 sequence for XeTeX and LuaTex
+% for non-UTF-8 (byte-wise) encodings.
+%
+\def\setbytewiseio{%
+  \ifx\XeTeXrevision\thisisundefined
+  \else
+    \XeTeXdefaultencoding "bytes"  % For subsequent files to be read
+    \XeTeXinputencoding "bytes"  % For document root file
+    % Unfortunately, there seems to be no corresponding XeTeX command for
+    % output encoding.  This is a problem for auxiliary index and TOC files.
+    % The only solution would be perhaps to write out @U{...} sequences in
+    % place of non-ASCII characters.
+  \fi
+
+  \ifx\luatexversion\thisisundefined
+  \else
+    \directlua{
+    local utf8_char, byte, gsub = unicode.utf8.char, string.byte, string.gsub
+    local function convert_char (char)
+      return utf8_char(byte(char))
+    end
+
+    local function convert_line (line)
+      return gsub(line, ".", convert_char)
+    end
+
+    callback.register("process_input_buffer", convert_line)
+
+    local function convert_line_out (line)
+      local line_out = ""
+      for c in string.utfvalues(line) do
+         line_out = line_out .. string.char(c)
+      end
+      return line_out
+    end
+
+    callback.register("process_output_buffer", convert_line_out)
+    }
+  \fi
+
+  \txiusebytewiseiotrue
+}
+
+
+% Helpers for encodings.
+% Set the catcode of characters 128 through 255 to the specified number.
+%
+\def\setnonasciicharscatcode#1{%
+   \count255=128
+   \loop\ifnum\count255<256
+      \global\catcode\count255=#1\relax
+      \advance\count255 by 1
+   \repeat
+}
+
+\def\setnonasciicharscatcodenonglobal#1{%
+   \count255=128
+   \loop\ifnum\count255<256
+      \catcode\count255=#1\relax
+      \advance\count255 by 1
+   \repeat
+}
+
+% @documentencoding sets the definition of non-ASCII characters
+% according to the specified encoding.
+%
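+% For example, `@documentencoding ISO-8859-1' makes the non-ASCII bytes
+% (128..255) active and installs \latonechardefs, while
+% `@documentencoding UTF-8' uses native Unicode handling or the UTF-8
+% byte-sequence machinery below, depending on the engine.
+%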
+\def\documentencoding{\parseargusing\filenamecatcodes\documentencodingzzz}
+\def\documentencodingzzz#1{%
+  %
+  % Encoding being declared for the document.
+  \def\declaredencoding{\csname #1.enc\endcsname}%
+  %
+  % Supported encodings: names converted to tokens in order to be able
+  % to compare them with \ifx.
+  \def\ascii{\csname US-ASCII.enc\endcsname}%
+  \def\latnine{\csname ISO-8859-15.enc\endcsname}%
+  \def\latone{\csname ISO-8859-1.enc\endcsname}%
+  \def\lattwo{\csname ISO-8859-2.enc\endcsname}%
+  \def\utfeight{\csname UTF-8.enc\endcsname}%
+  %
+  \ifx \declaredencoding \ascii
+     \asciichardefs
+  %
+  \else \ifx \declaredencoding \lattwo
+     \iftxinativeunicodecapable
+       \setbytewiseio
+     \fi
+     \setnonasciicharscatcode\active
+     \lattwochardefs
+  %
+  \else \ifx \declaredencoding \latone
+     \iftxinativeunicodecapable
+       \setbytewiseio
+     \fi
+     \setnonasciicharscatcode\active
+     \latonechardefs
+  %
+  \else \ifx \declaredencoding \latnine
+     \iftxinativeunicodecapable
+       \setbytewiseio
+     \fi
+     \setnonasciicharscatcode\active
+     \latninechardefs
+  %
+  \else \ifx \declaredencoding \utfeight
+     \iftxinativeunicodecapable
+       % For native Unicode handling (XeTeX and LuaTeX)
+       \nativeunicodechardefs
+     \else
+       % For treating UTF-8 as byte sequences (TeX, eTeX and pdfTeX)
+       \setnonasciicharscatcode\active
+       % since we already invoked \utfeightchardefs at the top level
+       % (below), do not re-invoke it, otherwise our check for duplicated
+       % definitions gets triggered.  Making non-ascii chars active is
+       % sufficient.
+     \fi
+  %
+  \else
+    \message{Ignoring unknown document encoding: #1.}%
+  %
+  \fi % utfeight
+  \fi % latnine
+  \fi % latone
+  \fi % lattwo
+  \fi % ascii
+  %
+  \ifx\XeTeXrevision\thisisundefined
+  \else
+    \ifx \declaredencoding \utfeight
+    \else
+      \ifx \declaredencoding \ascii
+      \else
+        \message{Warning: XeTeX with non-UTF-8 encodings cannot handle %
+        non-ASCII characters in auxiliary files.}%
+      \fi
+    \fi
+  \fi
+}
+
+% emacs-page
+% A message to be logged when using a character that isn't available
+% the default font encoding (OT1).
+%
+\def\missingcharmsg#1{\message{Character missing, sorry: #1.}}
+
+% Take account of \c (plain) vs. \, (Texinfo) difference.
+\def\cedilla#1{\ifx\c\ptexc\c{#1}\else\,{#1}\fi}
+
+% First, make active non-ASCII characters in order for them to be
+% correctly categorized when TeX reads the replacement text of
+% macros containing the character definitions.
+\setnonasciicharscatcode\active
+%
+
+\def\gdefchar#1#2{%
+\gdef#1{%
+   \ifpassthroughchars
+     \string#1%
+   \else
+     #2%
+   \fi
+}}
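+
+% For instance, `\gdefchar^^a7{\S}' below makes the active byte 0xA7 expand
+% to \S when typesetting, or to the literal character (via \string) when
+% characters are being passed through.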
+
+% Latin1 (ISO-8859-1) character definitions.
+\def\latonechardefs{%
+  \gdefchar^^a0{\tie}
+  \gdefchar^^a1{\exclamdown}
+  \gdefchar^^a2{{\tcfont \char162}} % cent
+  \gdefchar^^a3{\pounds{}}
+  \gdefchar^^a4{{\tcfont \char164}} % currency
+  \gdefchar^^a5{{\tcfont \char165}} % yen
+  \gdefchar^^a6{{\tcfont \char166}} % broken bar
+  \gdefchar^^a7{\S}
+  \gdefchar^^a8{\"{}}
+  \gdefchar^^a9{\copyright{}}
+  \gdefchar^^aa{\ordf}
+  \gdefchar^^ab{\guillemetleft{}}
+  \gdefchar^^ac{\ensuremath\lnot}
+  \gdefchar^^ad{\-}
+  \gdefchar^^ae{\registeredsymbol{}}
+  \gdefchar^^af{\={}}
+  %
+  \gdefchar^^b0{\textdegree}
+  \gdefchar^^b1{$\pm$}
+  \gdefchar^^b2{$^2$}
+  \gdefchar^^b3{$^3$}
+  \gdefchar^^b4{\'{}}
+  \gdefchar^^b5{$\mu$}
+  \gdefchar^^b6{\P}
+  \gdefchar^^b7{\ensuremath\cdot}
+  \gdefchar^^b8{\cedilla\ }
+  \gdefchar^^b9{$^1$}
+  \gdefchar^^ba{\ordm}
+  \gdefchar^^bb{\guillemetright{}}
+  \gdefchar^^bc{$1\over4$}
+  \gdefchar^^bd{$1\over2$}
+  \gdefchar^^be{$3\over4$}
+  \gdefchar^^bf{\questiondown}
+  %
+  \gdefchar^^c0{\`A}
+  \gdefchar^^c1{\'A}
+  \gdefchar^^c2{\^A}
+  \gdefchar^^c3{\~A}
+  \gdefchar^^c4{\"A}
+  \gdefchar^^c5{\ringaccent A}
+  \gdefchar^^c6{\AE}
+  \gdefchar^^c7{\cedilla C}
+  \gdefchar^^c8{\`E}
+  \gdefchar^^c9{\'E}
+  \gdefchar^^ca{\^E}
+  \gdefchar^^cb{\"E}
+  \gdefchar^^cc{\`I}
+  \gdefchar^^cd{\'I}
+  \gdefchar^^ce{\^I}
+  \gdefchar^^cf{\"I}
+  %
+  \gdefchar^^d0{\DH}
+  \gdefchar^^d1{\~N}
+  \gdefchar^^d2{\`O}
+  \gdefchar^^d3{\'O}
+  \gdefchar^^d4{\^O}
+  \gdefchar^^d5{\~O}
+  \gdefchar^^d6{\"O}
+  \gdefchar^^d7{$\times$}
+  \gdefchar^^d8{\O}
+  \gdefchar^^d9{\`U}
+  \gdefchar^^da{\'U}
+  \gdefchar^^db{\^U}
+  \gdefchar^^dc{\"U}
+  \gdefchar^^dd{\'Y}
+  \gdefchar^^de{\TH}
+  \gdefchar^^df{\ss}
+  %
+  \gdefchar^^e0{\`a}
+  \gdefchar^^e1{\'a}
+  \gdefchar^^e2{\^a}
+  \gdefchar^^e3{\~a}
+  \gdefchar^^e4{\"a}
+  \gdefchar^^e5{\ringaccent a}
+  \gdefchar^^e6{\ae}
+  \gdefchar^^e7{\cedilla c}
+  \gdefchar^^e8{\`e}
+  \gdefchar^^e9{\'e}
+  \gdefchar^^ea{\^e}
+  \gdefchar^^eb{\"e}
+  \gdefchar^^ec{\`{\dotless i}}
+  \gdefchar^^ed{\'{\dotless i}}
+  \gdefchar^^ee{\^{\dotless i}}
+  \gdefchar^^ef{\"{\dotless i}}
+  %
+  \gdefchar^^f0{\dh}
+  \gdefchar^^f1{\~n}
+  \gdefchar^^f2{\`o}
+  \gdefchar^^f3{\'o}
+  \gdefchar^^f4{\^o}
+  \gdefchar^^f5{\~o}
+  \gdefchar^^f6{\"o}
+  \gdefchar^^f7{$\div$}
+  \gdefchar^^f8{\o}
+  \gdefchar^^f9{\`u}
+  \gdefchar^^fa{\'u}
+  \gdefchar^^fb{\^u}
+  \gdefchar^^fc{\"u}
+  \gdefchar^^fd{\'y}
+  \gdefchar^^fe{\th}
+  \gdefchar^^ff{\"y}
+}
+
+% Latin9 (ISO-8859-15) encoding character definitions.
+\def\latninechardefs{%
+  % Encoding is almost identical to Latin1.
+  \latonechardefs
+  %
+  \gdefchar^^a4{\euro{}}
+  \gdefchar^^a6{\v S}
+  \gdefchar^^a8{\v s}
+  \gdefchar^^b4{\v Z}
+  \gdefchar^^b8{\v z}
+  \gdefchar^^bc{\OE}
+  \gdefchar^^bd{\oe}
+  \gdefchar^^be{\"Y}
+}
+
+% Latin2 (ISO-8859-2) character definitions.
+\def\lattwochardefs{%
+  \gdefchar^^a0{\tie}
+  \gdefchar^^a1{\ogonek{A}}
+  \gdefchar^^a2{\u{}}
+  \gdefchar^^a3{\L}
+  \gdefchar^^a4{\missingcharmsg{CURRENCY SIGN}}
+  \gdefchar^^a5{\v L}
+  \gdefchar^^a6{\'S}
+  \gdefchar^^a7{\S}
+  \gdefchar^^a8{\"{}}
+  \gdefchar^^a9{\v S}
+  \gdefchar^^aa{\cedilla S}
+  \gdefchar^^ab{\v T}
+  \gdefchar^^ac{\'Z}
+  \gdefchar^^ad{\-}
+  \gdefchar^^ae{\v Z}
+  \gdefchar^^af{\dotaccent Z}
+  %
+  \gdefchar^^b0{\textdegree{}}
+  \gdefchar^^b1{\ogonek{a}}
+  \gdefchar^^b2{\ogonek{ }}
+  \gdefchar^^b3{\l}
+  \gdefchar^^b4{\'{}}
+  \gdefchar^^b5{\v l}
+  \gdefchar^^b6{\'s}
+  \gdefchar^^b7{\v{}}
+  \gdefchar^^b8{\cedilla\ }
+  \gdefchar^^b9{\v s}
+  \gdefchar^^ba{\cedilla s}
+  \gdefchar^^bb{\v t}
+  \gdefchar^^bc{\'z}
+  \gdefchar^^bd{\H{}}
+  \gdefchar^^be{\v z}
+  \gdefchar^^bf{\dotaccent z}
+  %
+  \gdefchar^^c0{\'R}
+  \gdefchar^^c1{\'A}
+  \gdefchar^^c2{\^A}
+  \gdefchar^^c3{\u A}
+  \gdefchar^^c4{\"A}
+  \gdefchar^^c5{\'L}
+  \gdefchar^^c6{\'C}
+  \gdefchar^^c7{\cedilla C}
+  \gdefchar^^c8{\v C}
+  \gdefchar^^c9{\'E}
+  \gdefchar^^ca{\ogonek{E}}
+  \gdefchar^^cb{\"E}
+  \gdefchar^^cc{\v E}
+  \gdefchar^^cd{\'I}
+  \gdefchar^^ce{\^I}
+  \gdefchar^^cf{\v D}
+  %
+  \gdefchar^^d0{\DH}
+  \gdefchar^^d1{\'N}
+  \gdefchar^^d2{\v N}
+  \gdefchar^^d3{\'O}
+  \gdefchar^^d4{\^O}
+  \gdefchar^^d5{\H O}
+  \gdefchar^^d6{\"O}
+  \gdefchar^^d7{$\times$}
+  \gdefchar^^d8{\v R}
+  \gdefchar^^d9{\ringaccent U}
+  \gdefchar^^da{\'U}
+  \gdefchar^^db{\H U}
+  \gdefchar^^dc{\"U}
+  \gdefchar^^dd{\'Y}
+  \gdefchar^^de{\cedilla T}
+  \gdefchar^^df{\ss}
+  %
+  \gdefchar^^e0{\'r}
+  \gdefchar^^e1{\'a}
+  \gdefchar^^e2{\^a}
+  \gdefchar^^e3{\u a}
+  \gdefchar^^e4{\"a}
+  \gdefchar^^e5{\'l}
+  \gdefchar^^e6{\'c}
+  \gdefchar^^e7{\cedilla c}
+  \gdefchar^^e8{\v c}
+  \gdefchar^^e9{\'e}
+  \gdefchar^^ea{\ogonek{e}}
+  \gdefchar^^eb{\"e}
+  \gdefchar^^ec{\v e}
+  \gdefchar^^ed{\'{\dotless{i}}}
+  \gdefchar^^ee{\^{\dotless{i}}}
+  \gdefchar^^ef{\v d}
+  %
+  \gdefchar^^f0{\dh}
+  \gdefchar^^f1{\'n}
+  \gdefchar^^f2{\v n}
+  \gdefchar^^f3{\'o}
+  \gdefchar^^f4{\^o}
+  \gdefchar^^f5{\H o}
+  \gdefchar^^f6{\"o}
+  \gdefchar^^f7{$\div$}
+  \gdefchar^^f8{\v r}
+  \gdefchar^^f9{\ringaccent u}
+  \gdefchar^^fa{\'u}
+  \gdefchar^^fb{\H u}
+  \gdefchar^^fc{\"u}
+  \gdefchar^^fd{\'y}
+  \gdefchar^^fe{\cedilla t}
+  \gdefchar^^ff{\dotaccent{}}
+}
+
+% UTF-8 character definitions.
+%
+% This code to support UTF-8 is based on LaTeX's utf8.def, with some
+% changes for Texinfo conventions.  It is included here under the GPL by
+% permission from Frank Mittelbach and the LaTeX team.
+%
+\newcount\countUTFx
+\newcount\countUTFy
+\newcount\countUTFz
+
+\gdef\UTFviiiTwoOctets#1#2{\expandafter
+   \UTFviiiDefined\csname u8:#1\string #2\endcsname}
+%
+\gdef\UTFviiiThreeOctets#1#2#3{\expandafter
+   \UTFviiiDefined\csname u8:#1\string #2\string #3\endcsname}
+%
+\gdef\UTFviiiFourOctets#1#2#3#4{\expandafter
+   \UTFviiiDefined\csname u8:#1\string #2\string #3\string #4\endcsname}
+
+\gdef\UTFviiiDefined#1{%
+  \ifx #1\relax
+    \message{\linenumber Unicode char \string #1 not defined for Texinfo}%
+  \else
+    \expandafter #1%
+  \fi
+}
+
+% Give non-ASCII bytes the active definitions for processing UTF-8 sequences
+\begingroup
+  \catcode`\~13
+  \catcode`\$12
+  \catcode`\"12
+
+  % Loop from \countUTFx to \countUTFy, performing \UTFviiiTmp
+  % substituting ~ and $ with a character token of that value.
+  \def\UTFviiiLoop{%
+    \global\catcode\countUTFx\active
+    \uccode`\~\countUTFx
+    \uccode`\$\countUTFx
+    \uppercase\expandafter{\UTFviiiTmp}%
+    \advance\countUTFx by 1
+    \ifnum\countUTFx < \countUTFy
+      \expandafter\UTFviiiLoop
+    \fi}
+
+  % For bytes other than the first in a UTF-8 sequence.  Not expected to
+  % be expanded except when writing to auxiliary files.
+  \countUTFx = "80
+  \countUTFy = "C2
+  \def\UTFviiiTmp{%
+    \gdef~{%
+        \ifpassthroughchars $\fi}}%
+  \UTFviiiLoop
+
+  \countUTFx = "C2
+  \countUTFy = "E0
+  \def\UTFviiiTmp{%
+    \gdef~{%
+        \ifpassthroughchars $%
+        \else\expandafter\UTFviiiTwoOctets\expandafter$\fi}}%
+  \UTFviiiLoop
+
+  \countUTFx = "E0
+  \countUTFy = "F0
+  \def\UTFviiiTmp{%
+    \gdef~{%
+        \ifpassthroughchars $%
+        \else\expandafter\UTFviiiThreeOctets\expandafter$\fi}}%
+  \UTFviiiLoop
+
+  \countUTFx = "F0
+  \countUTFy = "F4
+  \def\UTFviiiTmp{%
+    \gdef~{%
+        \ifpassthroughchars $%
+        \else\expandafter\UTFviiiFourOctets\expandafter$\fi
+        }}%
+  \UTFviiiLoop
+\endgroup
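+
+% To summarize the loops above: bytes "80-"C1 become continuation bytes
+% (written out only when passing characters through), "C2-"DF start
+% two-octet sequences, "E0-"EF three-octet sequences, and "F0-"F3
+% four-octet sequences.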
+
+\def\globallet{\global\let} % save some \expandafter's below
+
+% @U{xxxx} to produce U+xxxx, if we support it.
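+% For example, `@U{00DF}' typesets an eszett: either via the uni:00DF
+% definition created from the table below (\ss), or, on a native-Unicode
+% engine, directly as that code point if the font has the glyph.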
+\def\U#1{%
+  \expandafter\ifx\csname uni:#1\endcsname \relax
+    \iftxinativeunicodecapable
+      % All Unicode characters can be used if native Unicode handling is
+      % active.  However, if the font does not have the glyph, the
+      % corresponding letters will simply be missing from the output.
+      \begingroup
+        \uccode`\.="#1\relax
+        \uppercase{.}
+      \endgroup
+    \else
+      \errhelp = \EMsimple     
+      \errmessage{Unicode character U+#1 not supported, sorry}%
+    \fi
+  \else
+    \csname uni:#1\endcsname
+  \fi
+}
+
+% These macros are used here to construct the name of a control
+% sequence to be defined.
+\def\UTFviiiTwoOctetsName#1#2{%
+  \csname u8:#1\string #2\endcsname}%
+\def\UTFviiiThreeOctetsName#1#2#3{%
+  \csname u8:#1\string #2\string #3\endcsname}%
+\def\UTFviiiFourOctetsName#1#2#3#4{%
+  \csname u8:#1\string #2\string #3\string #4\endcsname}%
+
+% For UTF-8 byte sequences (TeX, e-TeX and pdfTeX),
+% provide a definition macro to replace a Unicode character;
+% this gets used by the @U command
+%
+\begingroup
+  \catcode`\"=12
+  \catcode`\<=12
+  \catcode`\.=12
+  \catcode`\,=12
+  \catcode`\;=12
+  \catcode`\!=12
+  \catcode`\~=13
+  \gdef\DeclareUnicodeCharacterUTFviii#1#2{%
+    \countUTFz = "#1\relax
+    \begingroup
+      \parseXMLCharref
+    
+      % Give \u8:... its definition.  The sequence of seven \expandafter's
+      % expands after the \gdef three times, e.g.
+      %
+      % 1.  \UTFviiiTwoOctetsName B1 B2
+      % 2.  \csname u8:B1 \string B2 \endcsname
+      % 3.  \u8: B1 B2  (a single control sequence token)
+      %
+      \expandafter\expandafter
+      \expandafter\expandafter
+      \expandafter\expandafter
+      \expandafter\gdef       \UTFviiiTmp{#2}%
+      % 
+      \expandafter\ifx\csname uni:#1\endcsname \relax \else
+       \message{Internal error, already defined: #1}%
+      \fi
+      %
+      % define an additional control sequence for this code point.
+      \expandafter\globallet\csname uni:#1\endcsname \UTFviiiTmp
+    \endgroup}
+  %
+  % Given the value in \countUTFz as a Unicode code point, set \UTFviiiTmp
+  % to the corresponding UTF-8 sequence.
+  \gdef\parseXMLCharref{%
+    \ifnum\countUTFz < "A0\relax
+      \errhelp = \EMsimple
+      \errmessage{Cannot define Unicode char value < 00A0}%
+    \else\ifnum\countUTFz < "800\relax
+      \parseUTFviiiA,%
+      \parseUTFviiiB C\UTFviiiTwoOctetsName.,%
+    \else\ifnum\countUTFz < "10000\relax
+      \parseUTFviiiA;%
+      \parseUTFviiiA,%
+      \parseUTFviiiB E\UTFviiiThreeOctetsName.{,;}%
+    \else
+      \parseUTFviiiA;%
+      \parseUTFviiiA,%
+      \parseUTFviiiA!%
+      \parseUTFviiiB F\UTFviiiFourOctetsName.{!,;}%
+    \fi\fi\fi
+  }
+
+  % Extract a byte from the end of the UTF-8 representation of \countUTFx.
+  % It must be a non-initial byte in the sequence.
+  % Change \uccode of #1 for it to be used in \parseUTFviiiB as one
+  % of the bytes.
+  \gdef\parseUTFviiiA#1{%
+    \countUTFx = \countUTFz
+    \divide\countUTFz by 64
+    \countUTFy = \countUTFz  % Save to be the future value of \countUTFz.
+    \multiply\countUTFz by 64
+    
+    % \countUTFz is now \countUTFx with the last 6 bits cleared.  Subtract
+    % in order to get the last six bits.
+    \advance\countUTFx by -\countUTFz
+
+    % Convert this to the byte in the UTF-8 sequence.
+    \advance\countUTFx by 128
+    \uccode `#1\countUTFx
+    \countUTFz = \countUTFy}
+
+  % Used to put a UTF-8 byte sequence into \UTFviiiTmp
+  % #1 is the increment for \countUTFz to yield the first byte of the UTF-8
+  %    sequence.
+  % #2 is one of the \UTFviii*OctetsName macros.
+  % #3 is always a full stop (.)
+  % #4 is a template for the other bytes in the sequence.  The values for these
+  %    bytes are substituted here with \uppercase using the \uccode's.
+  \gdef\parseUTFviiiB#1#2#3#4{%
+    \advance\countUTFz by "#10\relax
+    \uccode `#3\countUTFz
+    \uppercase{\gdef\UTFviiiTmp{#2#3#4}}}
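+  % A worked example (for illustration): with \countUTFz = "E9 (U+00E9),
+  % \parseUTFviiiA computes 233 mod 64 = 41, i.e. continuation byte
+  % "80 + 41 = "A9, and leaves \countUTFz = 3; \parseUTFviiiB C... then adds
+  % "C0, giving lead byte "C3; the UTF-8 sequence is therefore the pair C3 A9.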
+\endgroup
+
+% For native Unicode handling (XeTeX and LuaTeX),
+% provide a definition macro that sets a catcode to `other' non-globally
+%
+\def\DeclareUnicodeCharacterNativeOther#1#2{%
+  \catcode"#1=\other
+}
+
+% https://en.wikipedia.org/wiki/Plane_(Unicode)#Basic_M
+% U+0000..U+007F = https://en.wikipedia.org/wiki/Basic_Latin_(Unicode_block)
+% U+0080..U+00FF = https://en.wikipedia.org/wiki/Latin-1_Supplement_(Unicode_block)
+% U+0100..U+017F = https://en.wikipedia.org/wiki/Latin_Extended-A
+% U+0180..U+024F = https://en.wikipedia.org/wiki/Latin_Extended-B
+% 
+% Many of our renditions are less than wonderful, and all the missing
+% characters are available somewhere.  Loading the necessary fonts
+% awaits user request.  We can't truly support Unicode without
+% reimplementing everything that's been done in LaTeX for many years,
+% plus probably using luatex or xetex, and who knows what else.
+% We won't be doing that here in this simple file.  But we can try to at
+% least make most of the characters not bomb out.
+%
+\def\unicodechardefs{%
+  \DeclareUnicodeCharacter{00A0}{\tie}%
+  \DeclareUnicodeCharacter{00A1}{\exclamdown}%
+  \DeclareUnicodeCharacter{00A2}{{\tcfont \char162}}% 0242=cent
+  \DeclareUnicodeCharacter{00A3}{\pounds{}}%
+  \DeclareUnicodeCharacter{00A4}{{\tcfont \char164}}% 0244=currency
+  \DeclareUnicodeCharacter{00A5}{{\tcfont \char165}}% 0245=yen
+  \DeclareUnicodeCharacter{00A6}{{\tcfont \char166}}% 0246=brokenbar
+  \DeclareUnicodeCharacter{00A7}{\S}%
+  \DeclareUnicodeCharacter{00A8}{\"{ }}%
+  \DeclareUnicodeCharacter{00A9}{\copyright{}}%
+  \DeclareUnicodeCharacter{00AA}{\ordf}%
+  \DeclareUnicodeCharacter{00AB}{\guillemetleft{}}%
+  \DeclareUnicodeCharacter{00AC}{\ensuremath\lnot}%
+  \DeclareUnicodeCharacter{00AD}{\-}%
+  \DeclareUnicodeCharacter{00AE}{\registeredsymbol{}}%
+  \DeclareUnicodeCharacter{00AF}{\={ }}%
+  %
+  \DeclareUnicodeCharacter{00B0}{\ringaccent{ }}%
+  \DeclareUnicodeCharacter{00B1}{\ensuremath\pm}%
+  \DeclareUnicodeCharacter{00B2}{$^2$}%
+  \DeclareUnicodeCharacter{00B3}{$^3$}%
+  \DeclareUnicodeCharacter{00B4}{\'{ }}%
+  \DeclareUnicodeCharacter{00B5}{$\mu$}%
+  \DeclareUnicodeCharacter{00B6}{\P}%
+  \DeclareUnicodeCharacter{00B7}{\ensuremath\cdot}%
+  \DeclareUnicodeCharacter{00B8}{\cedilla{ }}%
+  \DeclareUnicodeCharacter{00B9}{$^1$}%
+  \DeclareUnicodeCharacter{00BA}{\ordm}%
+  \DeclareUnicodeCharacter{00BB}{\guillemetright{}}%
+  \DeclareUnicodeCharacter{00BC}{$1\over4$}%
+  \DeclareUnicodeCharacter{00BD}{$1\over2$}%
+  \DeclareUnicodeCharacter{00BE}{$3\over4$}%
+  \DeclareUnicodeCharacter{00BF}{\questiondown}%
+  %
+  \DeclareUnicodeCharacter{00C0}{\`A}%
+  \DeclareUnicodeCharacter{00C1}{\'A}%
+  \DeclareUnicodeCharacter{00C2}{\^A}%
+  \DeclareUnicodeCharacter{00C3}{\~A}%
+  \DeclareUnicodeCharacter{00C4}{\"A}%
+  \DeclareUnicodeCharacter{00C5}{\AA}%
+  \DeclareUnicodeCharacter{00C6}{\AE}%
+  \DeclareUnicodeCharacter{00C7}{\cedilla{C}}%
+  \DeclareUnicodeCharacter{00C8}{\`E}%
+  \DeclareUnicodeCharacter{00C9}{\'E}%
+  \DeclareUnicodeCharacter{00CA}{\^E}%
+  \DeclareUnicodeCharacter{00CB}{\"E}%
+  \DeclareUnicodeCharacter{00CC}{\`I}%
+  \DeclareUnicodeCharacter{00CD}{\'I}%
+  \DeclareUnicodeCharacter{00CE}{\^I}%
+  \DeclareUnicodeCharacter{00CF}{\"I}%
+  %
+  \DeclareUnicodeCharacter{00D0}{\DH}%
+  \DeclareUnicodeCharacter{00D1}{\~N}%
+  \DeclareUnicodeCharacter{00D2}{\`O}%
+  \DeclareUnicodeCharacter{00D3}{\'O}%
+  \DeclareUnicodeCharacter{00D4}{\^O}%
+  \DeclareUnicodeCharacter{00D5}{\~O}%
+  \DeclareUnicodeCharacter{00D6}{\"O}%
+  \DeclareUnicodeCharacter{00D7}{\ensuremath\times}%
+  \DeclareUnicodeCharacter{00D8}{\O}%
+  \DeclareUnicodeCharacter{00D9}{\`U}%
+  \DeclareUnicodeCharacter{00DA}{\'U}%
+  \DeclareUnicodeCharacter{00DB}{\^U}%
+  \DeclareUnicodeCharacter{00DC}{\"U}%
+  \DeclareUnicodeCharacter{00DD}{\'Y}%
+  \DeclareUnicodeCharacter{00DE}{\TH}%
+  \DeclareUnicodeCharacter{00DF}{\ss}%
+  %
+  \DeclareUnicodeCharacter{00E0}{\`a}%
+  \DeclareUnicodeCharacter{00E1}{\'a}%
+  \DeclareUnicodeCharacter{00E2}{\^a}%
+  \DeclareUnicodeCharacter{00E3}{\~a}%
+  \DeclareUnicodeCharacter{00E4}{\"a}%
+  \DeclareUnicodeCharacter{00E5}{\aa}%
+  \DeclareUnicodeCharacter{00E6}{\ae}%
+  \DeclareUnicodeCharacter{00E7}{\cedilla{c}}%
+  \DeclareUnicodeCharacter{00E8}{\`e}%
+  \DeclareUnicodeCharacter{00E9}{\'e}%
+  \DeclareUnicodeCharacter{00EA}{\^e}%
+  \DeclareUnicodeCharacter{00EB}{\"e}%
+  \DeclareUnicodeCharacter{00EC}{\`{\dotless{i}}}%
+  \DeclareUnicodeCharacter{00ED}{\'{\dotless{i}}}%
+  \DeclareUnicodeCharacter{00EE}{\^{\dotless{i}}}%
+  \DeclareUnicodeCharacter{00EF}{\"{\dotless{i}}}%
+  %
+  \DeclareUnicodeCharacter{00F0}{\dh}%
+  \DeclareUnicodeCharacter{00F1}{\~n}%
+  \DeclareUnicodeCharacter{00F2}{\`o}%
+  \DeclareUnicodeCharacter{00F3}{\'o}%
+  \DeclareUnicodeCharacter{00F4}{\^o}%
+  \DeclareUnicodeCharacter{00F5}{\~o}%
+  \DeclareUnicodeCharacter{00F6}{\"o}%
+  \DeclareUnicodeCharacter{00F7}{\ensuremath\div}%
+  \DeclareUnicodeCharacter{00F8}{\o}%
+  \DeclareUnicodeCharacter{00F9}{\`u}%
+  \DeclareUnicodeCharacter{00FA}{\'u}%
+  \DeclareUnicodeCharacter{00FB}{\^u}%
+  \DeclareUnicodeCharacter{00FC}{\"u}%
+  \DeclareUnicodeCharacter{00FD}{\'y}%
+  \DeclareUnicodeCharacter{00FE}{\th}%
+  \DeclareUnicodeCharacter{00FF}{\"y}%
+  %
+  \DeclareUnicodeCharacter{0100}{\=A}%
+  \DeclareUnicodeCharacter{0101}{\=a}%
+  \DeclareUnicodeCharacter{0102}{\u{A}}%
+  \DeclareUnicodeCharacter{0103}{\u{a}}%
+  \DeclareUnicodeCharacter{0104}{\ogonek{A}}%
+  \DeclareUnicodeCharacter{0105}{\ogonek{a}}%
+  \DeclareUnicodeCharacter{0106}{\'C}%
+  \DeclareUnicodeCharacter{0107}{\'c}%
+  \DeclareUnicodeCharacter{0108}{\^C}%
+  \DeclareUnicodeCharacter{0109}{\^c}%
+  \DeclareUnicodeCharacter{010A}{\dotaccent{C}}%
+  \DeclareUnicodeCharacter{010B}{\dotaccent{c}}%
+  \DeclareUnicodeCharacter{010C}{\v{C}}%
+  \DeclareUnicodeCharacter{010D}{\v{c}}%
+  \DeclareUnicodeCharacter{010E}{\v{D}}%
+  \DeclareUnicodeCharacter{010F}{d'}%
+  %
+  \DeclareUnicodeCharacter{0110}{\DH}%
+  \DeclareUnicodeCharacter{0111}{\dh}%
+  \DeclareUnicodeCharacter{0112}{\=E}%
+  \DeclareUnicodeCharacter{0113}{\=e}%
+  \DeclareUnicodeCharacter{0114}{\u{E}}%
+  \DeclareUnicodeCharacter{0115}{\u{e}}%
+  \DeclareUnicodeCharacter{0116}{\dotaccent{E}}%
+  \DeclareUnicodeCharacter{0117}{\dotaccent{e}}%
+  \DeclareUnicodeCharacter{0118}{\ogonek{E}}%
+  \DeclareUnicodeCharacter{0119}{\ogonek{e}}%
+  \DeclareUnicodeCharacter{011A}{\v{E}}%
+  \DeclareUnicodeCharacter{011B}{\v{e}}%
+  \DeclareUnicodeCharacter{011C}{\^G}%
+  \DeclareUnicodeCharacter{011D}{\^g}%
+  \DeclareUnicodeCharacter{011E}{\u{G}}%
+  \DeclareUnicodeCharacter{011F}{\u{g}}%
+  %
+  \DeclareUnicodeCharacter{0120}{\dotaccent{G}}%
+  \DeclareUnicodeCharacter{0121}{\dotaccent{g}}%
+  \DeclareUnicodeCharacter{0122}{\cedilla{G}}%
+  \DeclareUnicodeCharacter{0123}{\cedilla{g}}%
+  \DeclareUnicodeCharacter{0124}{\^H}%
+  \DeclareUnicodeCharacter{0125}{\^h}%
+  \DeclareUnicodeCharacter{0126}{\missingcharmsg{H WITH STROKE}}%
+  \DeclareUnicodeCharacter{0127}{\missingcharmsg{h WITH STROKE}}%
+  \DeclareUnicodeCharacter{0128}{\~I}%
+  \DeclareUnicodeCharacter{0129}{\~{\dotless{i}}}%
+  \DeclareUnicodeCharacter{012A}{\=I}%
+  \DeclareUnicodeCharacter{012B}{\={\dotless{i}}}%
+  \DeclareUnicodeCharacter{012C}{\u{I}}%
+  \DeclareUnicodeCharacter{012D}{\u{\dotless{i}}}%
+  \DeclareUnicodeCharacter{012E}{\ogonek{I}}%
+  \DeclareUnicodeCharacter{012F}{\ogonek{i}}%
+  %
+  \DeclareUnicodeCharacter{0130}{\dotaccent{I}}%
+  \DeclareUnicodeCharacter{0131}{\dotless{i}}%
+  \DeclareUnicodeCharacter{0132}{IJ}%
+  \DeclareUnicodeCharacter{0133}{ij}%
+  \DeclareUnicodeCharacter{0134}{\^J}%
+  \DeclareUnicodeCharacter{0135}{\^{\dotless{j}}}%
+  \DeclareUnicodeCharacter{0136}{\cedilla{K}}%
+  \DeclareUnicodeCharacter{0137}{\cedilla{k}}%
+  \DeclareUnicodeCharacter{0138}{\ensuremath\kappa}%
+  \DeclareUnicodeCharacter{0139}{\'L}%
+  \DeclareUnicodeCharacter{013A}{\'l}%
+  \DeclareUnicodeCharacter{013B}{\cedilla{L}}%
+  \DeclareUnicodeCharacter{013C}{\cedilla{l}}%
+  \DeclareUnicodeCharacter{013D}{L'}% should kern
+  \DeclareUnicodeCharacter{013E}{l'}% should kern
+  \DeclareUnicodeCharacter{013F}{L\U{00B7}}%
+  %
+  \DeclareUnicodeCharacter{0140}{l\U{00B7}}%
+  \DeclareUnicodeCharacter{0141}{\L}%
+  \DeclareUnicodeCharacter{0142}{\l}%
+  \DeclareUnicodeCharacter{0143}{\'N}%
+  \DeclareUnicodeCharacter{0144}{\'n}%
+  \DeclareUnicodeCharacter{0145}{\cedilla{N}}%
+  \DeclareUnicodeCharacter{0146}{\cedilla{n}}%
+  \DeclareUnicodeCharacter{0147}{\v{N}}%
+  \DeclareUnicodeCharacter{0148}{\v{n}}%
+  \DeclareUnicodeCharacter{0149}{'n}%
+  \DeclareUnicodeCharacter{014A}{\missingcharmsg{ENG}}%
+  \DeclareUnicodeCharacter{014B}{\missingcharmsg{eng}}%
+  \DeclareUnicodeCharacter{014C}{\=O}%
+  \DeclareUnicodeCharacter{014D}{\=o}%
+  \DeclareUnicodeCharacter{014E}{\u{O}}%
+  \DeclareUnicodeCharacter{014F}{\u{o}}%
+  %
+  \DeclareUnicodeCharacter{0150}{\H{O}}%
+  \DeclareUnicodeCharacter{0151}{\H{o}}%
+  \DeclareUnicodeCharacter{0152}{\OE}%
+  \DeclareUnicodeCharacter{0153}{\oe}%
+  \DeclareUnicodeCharacter{0154}{\'R}%
+  \DeclareUnicodeCharacter{0155}{\'r}%
+  \DeclareUnicodeCharacter{0156}{\cedilla{R}}%
+  \DeclareUnicodeCharacter{0157}{\cedilla{r}}%
+  \DeclareUnicodeCharacter{0158}{\v{R}}%
+  \DeclareUnicodeCharacter{0159}{\v{r}}%
+  \DeclareUnicodeCharacter{015A}{\'S}%
+  \DeclareUnicodeCharacter{015B}{\'s}%
+  \DeclareUnicodeCharacter{015C}{\^S}%
+  \DeclareUnicodeCharacter{015D}{\^s}%
+  \DeclareUnicodeCharacter{015E}{\cedilla{S}}%
+  \DeclareUnicodeCharacter{015F}{\cedilla{s}}%
+  %
+  \DeclareUnicodeCharacter{0160}{\v{S}}%
+  \DeclareUnicodeCharacter{0161}{\v{s}}%
+  \DeclareUnicodeCharacter{0162}{\cedilla{T}}%
+  \DeclareUnicodeCharacter{0163}{\cedilla{t}}%
+  \DeclareUnicodeCharacter{0164}{\v{T}}%
+  \DeclareUnicodeCharacter{0165}{\v{t}}%
+  \DeclareUnicodeCharacter{0166}{\missingcharmsg{H WITH STROKE}}%
+  \DeclareUnicodeCharacter{0167}{\missingcharmsg{h WITH STROKE}}%
+  \DeclareUnicodeCharacter{0168}{\~U}%
+  \DeclareUnicodeCharacter{0169}{\~u}%
+  \DeclareUnicodeCharacter{016A}{\=U}%
+  \DeclareUnicodeCharacter{016B}{\=u}%
+  \DeclareUnicodeCharacter{016C}{\u{U}}%
+  \DeclareUnicodeCharacter{016D}{\u{u}}%
+  \DeclareUnicodeCharacter{016E}{\ringaccent{U}}%
+  \DeclareUnicodeCharacter{016F}{\ringaccent{u}}%
+  %
+  \DeclareUnicodeCharacter{0170}{\H{U}}%
+  \DeclareUnicodeCharacter{0171}{\H{u}}%
+  \DeclareUnicodeCharacter{0172}{\ogonek{U}}%
+  \DeclareUnicodeCharacter{0173}{\ogonek{u}}%
+  \DeclareUnicodeCharacter{0174}{\^W}%
+  \DeclareUnicodeCharacter{0175}{\^w}%
+  \DeclareUnicodeCharacter{0176}{\^Y}%
+  \DeclareUnicodeCharacter{0177}{\^y}%
+  \DeclareUnicodeCharacter{0178}{\"Y}%
+  \DeclareUnicodeCharacter{0179}{\'Z}%
+  \DeclareUnicodeCharacter{017A}{\'z}%
+  \DeclareUnicodeCharacter{017B}{\dotaccent{Z}}%
+  \DeclareUnicodeCharacter{017C}{\dotaccent{z}}%
+  \DeclareUnicodeCharacter{017D}{\v{Z}}%
+  \DeclareUnicodeCharacter{017E}{\v{z}}%
+  \DeclareUnicodeCharacter{017F}{\missingcharmsg{LONG S}}%
+  %
+  \DeclareUnicodeCharacter{01C4}{D\v{Z}}%
+  \DeclareUnicodeCharacter{01C5}{D\v{z}}%
+  \DeclareUnicodeCharacter{01C6}{d\v{z}}%
+  \DeclareUnicodeCharacter{01C7}{LJ}%
+  \DeclareUnicodeCharacter{01C8}{Lj}%
+  \DeclareUnicodeCharacter{01C9}{lj}%
+  \DeclareUnicodeCharacter{01CA}{NJ}%
+  \DeclareUnicodeCharacter{01CB}{Nj}%
+  \DeclareUnicodeCharacter{01CC}{nj}%
+  \DeclareUnicodeCharacter{01CD}{\v{A}}%
+  \DeclareUnicodeCharacter{01CE}{\v{a}}%
+  \DeclareUnicodeCharacter{01CF}{\v{I}}%
+  %
+  \DeclareUnicodeCharacter{01D0}{\v{\dotless{i}}}%
+  \DeclareUnicodeCharacter{01D1}{\v{O}}%
+  \DeclareUnicodeCharacter{01D2}{\v{o}}%
+  \DeclareUnicodeCharacter{01D3}{\v{U}}%
+  \DeclareUnicodeCharacter{01D4}{\v{u}}%
+  %
+  \DeclareUnicodeCharacter{01E2}{\={\AE}}%
+  \DeclareUnicodeCharacter{01E3}{\={\ae}}%
+  \DeclareUnicodeCharacter{01E6}{\v{G}}%
+  \DeclareUnicodeCharacter{01E7}{\v{g}}%
+  \DeclareUnicodeCharacter{01E8}{\v{K}}%
+  \DeclareUnicodeCharacter{01E9}{\v{k}}%
+  %
+  \DeclareUnicodeCharacter{01F0}{\v{\dotless{j}}}%
+  \DeclareUnicodeCharacter{01F1}{DZ}%
+  \DeclareUnicodeCharacter{01F2}{Dz}%
+  \DeclareUnicodeCharacter{01F3}{dz}%
+  \DeclareUnicodeCharacter{01F4}{\'G}%
+  \DeclareUnicodeCharacter{01F5}{\'g}%
+  \DeclareUnicodeCharacter{01F8}{\`N}%
+  \DeclareUnicodeCharacter{01F9}{\`n}%
+  \DeclareUnicodeCharacter{01FC}{\'{\AE}}%
+  \DeclareUnicodeCharacter{01FD}{\'{\ae}}%
+  \DeclareUnicodeCharacter{01FE}{\'{\O}}%
+  \DeclareUnicodeCharacter{01FF}{\'{\o}}%
+  %
+  \DeclareUnicodeCharacter{021E}{\v{H}}%
+  \DeclareUnicodeCharacter{021F}{\v{h}}%
+  %
+  \DeclareUnicodeCharacter{0226}{\dotaccent{A}}%
+  \DeclareUnicodeCharacter{0227}{\dotaccent{a}}%
+  \DeclareUnicodeCharacter{0228}{\cedilla{E}}%
+  \DeclareUnicodeCharacter{0229}{\cedilla{e}}%
+  \DeclareUnicodeCharacter{022E}{\dotaccent{O}}%
+  \DeclareUnicodeCharacter{022F}{\dotaccent{o}}%
+  %
+  \DeclareUnicodeCharacter{0232}{\=Y}%
+  \DeclareUnicodeCharacter{0233}{\=y}%
+  \DeclareUnicodeCharacter{0237}{\dotless{j}}%
+  %
+  \DeclareUnicodeCharacter{02DB}{\ogonek{ }}%
+  %
+  % Greek letters upper case
+  \DeclareUnicodeCharacter{0391}{{\it A}}%
+  \DeclareUnicodeCharacter{0392}{{\it B}}%
+  \DeclareUnicodeCharacter{0393}{\ensuremath{\mit\Gamma}}%
+  \DeclareUnicodeCharacter{0394}{\ensuremath{\mit\Delta}}%
+  \DeclareUnicodeCharacter{0395}{{\it E}}%
+  \DeclareUnicodeCharacter{0396}{{\it Z}}%
+  \DeclareUnicodeCharacter{0397}{{\it H}}%
+  \DeclareUnicodeCharacter{0398}{\ensuremath{\mit\Theta}}%
+  \DeclareUnicodeCharacter{0399}{{\it I}}%
+  \DeclareUnicodeCharacter{039A}{{\it K}}%
+  \DeclareUnicodeCharacter{039B}{\ensuremath{\mit\Lambda}}%
+  \DeclareUnicodeCharacter{039C}{{\it M}}%
+  \DeclareUnicodeCharacter{039D}{{\it N}}%
+  \DeclareUnicodeCharacter{039E}{\ensuremath{\mit\Xi}}%
+  \DeclareUnicodeCharacter{039F}{{\it O}}%
+  \DeclareUnicodeCharacter{03A0}{\ensuremath{\mit\Pi}}%
+  \DeclareUnicodeCharacter{03A1}{{\it P}}%
+  %\DeclareUnicodeCharacter{03A2}{} % none - corresponds to final sigma
+  \DeclareUnicodeCharacter{03A3}{\ensuremath{\mit\Sigma}}%
+  \DeclareUnicodeCharacter{03A4}{{\it T}}%
+  \DeclareUnicodeCharacter{03A5}{\ensuremath{\mit\Upsilon}}%
+  \DeclareUnicodeCharacter{03A6}{\ensuremath{\mit\Phi}}%
+  \DeclareUnicodeCharacter{03A7}{{\it X}}%
+  \DeclareUnicodeCharacter{03A8}{\ensuremath{\mit\Psi}}%
+  \DeclareUnicodeCharacter{03A9}{\ensuremath{\mit\Omega}}%
+  %
+  % Vowels with accents
+  \DeclareUnicodeCharacter{0390}{\ensuremath{\ddot{\acute\iota}}}%
+  \DeclareUnicodeCharacter{03AC}{\ensuremath{\acute\alpha}}%
+  \DeclareUnicodeCharacter{03AD}{\ensuremath{\acute\epsilon}}%
+  \DeclareUnicodeCharacter{03AE}{\ensuremath{\acute\eta}}%
+  \DeclareUnicodeCharacter{03AF}{\ensuremath{\acute\iota}}%
+  \DeclareUnicodeCharacter{03B0}{\ensuremath{\acute{\ddot\upsilon}}}%
+  %
+  % Standalone accent
+  \DeclareUnicodeCharacter{0384}{\ensuremath{\acute{\ }}}%
+  %
+  % Greek letters lower case
+  \DeclareUnicodeCharacter{03B1}{\ensuremath\alpha}%
+  \DeclareUnicodeCharacter{03B2}{\ensuremath\beta}%
+  \DeclareUnicodeCharacter{03B3}{\ensuremath\gamma}%
+  \DeclareUnicodeCharacter{03B4}{\ensuremath\delta}%
+  \DeclareUnicodeCharacter{03B5}{\ensuremath\epsilon}%
+  \DeclareUnicodeCharacter{03B6}{\ensuremath\zeta}%
+  \DeclareUnicodeCharacter{03B7}{\ensuremath\eta}%
+  \DeclareUnicodeCharacter{03B8}{\ensuremath\theta}%
+  \DeclareUnicodeCharacter{03B9}{\ensuremath\iota}%
+  \DeclareUnicodeCharacter{03BA}{\ensuremath\kappa}%
+  \DeclareUnicodeCharacter{03BB}{\ensuremath\lambda}%
+  \DeclareUnicodeCharacter{03BC}{\ensuremath\mu}%
+  \DeclareUnicodeCharacter{03BD}{\ensuremath\nu}%
+  \DeclareUnicodeCharacter{03BE}{\ensuremath\xi}%
+  \DeclareUnicodeCharacter{03BF}{{\it o}}% omicron
+  \DeclareUnicodeCharacter{03C0}{\ensuremath\pi}%
+  \DeclareUnicodeCharacter{03C1}{\ensuremath\rho}%
+  \DeclareUnicodeCharacter{03C2}{\ensuremath\varsigma}%
+  \DeclareUnicodeCharacter{03C3}{\ensuremath\sigma}%
+  \DeclareUnicodeCharacter{03C4}{\ensuremath\tau}%
+  \DeclareUnicodeCharacter{03C5}{\ensuremath\upsilon}%
+  \DeclareUnicodeCharacter{03C6}{\ensuremath\phi}%
+  \DeclareUnicodeCharacter{03C7}{\ensuremath\chi}%
+  \DeclareUnicodeCharacter{03C8}{\ensuremath\psi}%
+  \DeclareUnicodeCharacter{03C9}{\ensuremath\omega}%
+  %
+  % More Greek vowels with accents
+  \DeclareUnicodeCharacter{03CA}{\ensuremath{\ddot\iota}}%
+  \DeclareUnicodeCharacter{03CB}{\ensuremath{\ddot\upsilon}}%
+  \DeclareUnicodeCharacter{03CC}{\ensuremath{\acute o}}%
+  \DeclareUnicodeCharacter{03CD}{\ensuremath{\acute\upsilon}}%
+  \DeclareUnicodeCharacter{03CE}{\ensuremath{\acute\omega}}%
+  %
+  % Variant Greek letters
+  \DeclareUnicodeCharacter{03D1}{\ensuremath\vartheta}%
+  \DeclareUnicodeCharacter{03D6}{\ensuremath\varpi}%
+  \DeclareUnicodeCharacter{03F1}{\ensuremath\varrho}%
+  %
+  \DeclareUnicodeCharacter{1E02}{\dotaccent{B}}%
+  \DeclareUnicodeCharacter{1E03}{\dotaccent{b}}%
+  \DeclareUnicodeCharacter{1E04}{\udotaccent{B}}%
+  \DeclareUnicodeCharacter{1E05}{\udotaccent{b}}%
+  \DeclareUnicodeCharacter{1E06}{\ubaraccent{B}}%
+  \DeclareUnicodeCharacter{1E07}{\ubaraccent{b}}%
+  \DeclareUnicodeCharacter{1E0A}{\dotaccent{D}}%
+  \DeclareUnicodeCharacter{1E0B}{\dotaccent{d}}%
+  \DeclareUnicodeCharacter{1E0C}{\udotaccent{D}}%
+  \DeclareUnicodeCharacter{1E0D}{\udotaccent{d}}%
+  \DeclareUnicodeCharacter{1E0E}{\ubaraccent{D}}%
+  \DeclareUnicodeCharacter{1E0F}{\ubaraccent{d}}%
+  %
+  \DeclareUnicodeCharacter{1E1E}{\dotaccent{F}}%
+  \DeclareUnicodeCharacter{1E1F}{\dotaccent{f}}%
+  %
+  \DeclareUnicodeCharacter{1E20}{\=G}%
+  \DeclareUnicodeCharacter{1E21}{\=g}%
+  \DeclareUnicodeCharacter{1E22}{\dotaccent{H}}%
+  \DeclareUnicodeCharacter{1E23}{\dotaccent{h}}%
+  \DeclareUnicodeCharacter{1E24}{\udotaccent{H}}%
+  \DeclareUnicodeCharacter{1E25}{\udotaccent{h}}%
+  \DeclareUnicodeCharacter{1E26}{\"H}%
+  \DeclareUnicodeCharacter{1E27}{\"h}%
+  %
+  \DeclareUnicodeCharacter{1E30}{\'K}%
+  \DeclareUnicodeCharacter{1E31}{\'k}%
+  \DeclareUnicodeCharacter{1E32}{\udotaccent{K}}%
+  \DeclareUnicodeCharacter{1E33}{\udotaccent{k}}%
+  \DeclareUnicodeCharacter{1E34}{\ubaraccent{K}}%
+  \DeclareUnicodeCharacter{1E35}{\ubaraccent{k}}%
+  \DeclareUnicodeCharacter{1E36}{\udotaccent{L}}%
+  \DeclareUnicodeCharacter{1E37}{\udotaccent{l}}%
+  \DeclareUnicodeCharacter{1E3A}{\ubaraccent{L}}%
+  \DeclareUnicodeCharacter{1E3B}{\ubaraccent{l}}%
+  \DeclareUnicodeCharacter{1E3E}{\'M}%
+  \DeclareUnicodeCharacter{1E3F}{\'m}%
+  %
+  \DeclareUnicodeCharacter{1E40}{\dotaccent{M}}%
+  \DeclareUnicodeCharacter{1E41}{\dotaccent{m}}%
+  \DeclareUnicodeCharacter{1E42}{\udotaccent{M}}%
+  \DeclareUnicodeCharacter{1E43}{\udotaccent{m}}%
+  \DeclareUnicodeCharacter{1E44}{\dotaccent{N}}%
+  \DeclareUnicodeCharacter{1E45}{\dotaccent{n}}%
+  \DeclareUnicodeCharacter{1E46}{\udotaccent{N}}%
+  \DeclareUnicodeCharacter{1E47}{\udotaccent{n}}%
+  \DeclareUnicodeCharacter{1E48}{\ubaraccent{N}}%
+  \DeclareUnicodeCharacter{1E49}{\ubaraccent{n}}%
+  %
+  \DeclareUnicodeCharacter{1E54}{\'P}%
+  \DeclareUnicodeCharacter{1E55}{\'p}%
+  \DeclareUnicodeCharacter{1E56}{\dotaccent{P}}%
+  \DeclareUnicodeCharacter{1E57}{\dotaccent{p}}%
+  \DeclareUnicodeCharacter{1E58}{\dotaccent{R}}%
+  \DeclareUnicodeCharacter{1E59}{\dotaccent{r}}%
+  \DeclareUnicodeCharacter{1E5A}{\udotaccent{R}}%
+  \DeclareUnicodeCharacter{1E5B}{\udotaccent{r}}%
+  \DeclareUnicodeCharacter{1E5E}{\ubaraccent{R}}%
+  \DeclareUnicodeCharacter{1E5F}{\ubaraccent{r}}%
+  %
+  \DeclareUnicodeCharacter{1E60}{\dotaccent{S}}%
+  \DeclareUnicodeCharacter{1E61}{\dotaccent{s}}%
+  \DeclareUnicodeCharacter{1E62}{\udotaccent{S}}%
+  \DeclareUnicodeCharacter{1E63}{\udotaccent{s}}%
+  \DeclareUnicodeCharacter{1E6A}{\dotaccent{T}}%
+  \DeclareUnicodeCharacter{1E6B}{\dotaccent{t}}%
+  \DeclareUnicodeCharacter{1E6C}{\udotaccent{T}}%
+  \DeclareUnicodeCharacter{1E6D}{\udotaccent{t}}%
+  \DeclareUnicodeCharacter{1E6E}{\ubaraccent{T}}%
+  \DeclareUnicodeCharacter{1E6F}{\ubaraccent{t}}%
+  %
+  \DeclareUnicodeCharacter{1E7C}{\~V}%
+  \DeclareUnicodeCharacter{1E7D}{\~v}%
+  \DeclareUnicodeCharacter{1E7E}{\udotaccent{V}}%
+  \DeclareUnicodeCharacter{1E7F}{\udotaccent{v}}%
+  %
+  \DeclareUnicodeCharacter{1E80}{\`W}%
+  \DeclareUnicodeCharacter{1E81}{\`w}%
+  \DeclareUnicodeCharacter{1E82}{\'W}%
+  \DeclareUnicodeCharacter{1E83}{\'w}%
+  \DeclareUnicodeCharacter{1E84}{\"W}%
+  \DeclareUnicodeCharacter{1E85}{\"w}%
+  \DeclareUnicodeCharacter{1E86}{\dotaccent{W}}%
+  \DeclareUnicodeCharacter{1E87}{\dotaccent{w}}%
+  \DeclareUnicodeCharacter{1E88}{\udotaccent{W}}%
+  \DeclareUnicodeCharacter{1E89}{\udotaccent{w}}%
+  \DeclareUnicodeCharacter{1E8A}{\dotaccent{X}}%
+  \DeclareUnicodeCharacter{1E8B}{\dotaccent{x}}%
+  \DeclareUnicodeCharacter{1E8C}{\"X}%
+  \DeclareUnicodeCharacter{1E8D}{\"x}%
+  \DeclareUnicodeCharacter{1E8E}{\dotaccent{Y}}%
+  \DeclareUnicodeCharacter{1E8F}{\dotaccent{y}}%
+  %
+  \DeclareUnicodeCharacter{1E90}{\^Z}%
+  \DeclareUnicodeCharacter{1E91}{\^z}%
+  \DeclareUnicodeCharacter{1E92}{\udotaccent{Z}}%
+  \DeclareUnicodeCharacter{1E93}{\udotaccent{z}}%
+  \DeclareUnicodeCharacter{1E94}{\ubaraccent{Z}}%
+  \DeclareUnicodeCharacter{1E95}{\ubaraccent{z}}%
+  \DeclareUnicodeCharacter{1E96}{\ubaraccent{h}}%
+  \DeclareUnicodeCharacter{1E97}{\"t}%
+  \DeclareUnicodeCharacter{1E98}{\ringaccent{w}}%
+  \DeclareUnicodeCharacter{1E99}{\ringaccent{y}}%
+  %
+  \DeclareUnicodeCharacter{1EA0}{\udotaccent{A}}%
+  \DeclareUnicodeCharacter{1EA1}{\udotaccent{a}}%
+  %
+  \DeclareUnicodeCharacter{1EB8}{\udotaccent{E}}%
+  \DeclareUnicodeCharacter{1EB9}{\udotaccent{e}}%
+  \DeclareUnicodeCharacter{1EBC}{\~E}%
+  \DeclareUnicodeCharacter{1EBD}{\~e}%
+  %
+  \DeclareUnicodeCharacter{1ECA}{\udotaccent{I}}%
+  \DeclareUnicodeCharacter{1ECB}{\udotaccent{i}}%
+  \DeclareUnicodeCharacter{1ECC}{\udotaccent{O}}%
+  \DeclareUnicodeCharacter{1ECD}{\udotaccent{o}}%
+  %
+  \DeclareUnicodeCharacter{1EE4}{\udotaccent{U}}%
+  \DeclareUnicodeCharacter{1EE5}{\udotaccent{u}}%
+  %
+  \DeclareUnicodeCharacter{1EF2}{\`Y}%
+  \DeclareUnicodeCharacter{1EF3}{\`y}%
+  \DeclareUnicodeCharacter{1EF4}{\udotaccent{Y}}%
+  %
+  \DeclareUnicodeCharacter{1EF8}{\~Y}%
+  \DeclareUnicodeCharacter{1EF9}{\~y}%
+  %
+  % Punctuation
+  \DeclareUnicodeCharacter{2013}{--}%
+  \DeclareUnicodeCharacter{2014}{---}%
+  \DeclareUnicodeCharacter{2018}{\quoteleft{}}%
+  \DeclareUnicodeCharacter{2019}{\quoteright{}}%
+  \DeclareUnicodeCharacter{201A}{\quotesinglbase{}}%
+  \DeclareUnicodeCharacter{201C}{\quotedblleft{}}%
+  \DeclareUnicodeCharacter{201D}{\quotedblright{}}%
+  \DeclareUnicodeCharacter{201E}{\quotedblbase{}}%
+  \DeclareUnicodeCharacter{2020}{\ensuremath\dagger}%
+  \DeclareUnicodeCharacter{2021}{\ensuremath\ddagger}%
+  \DeclareUnicodeCharacter{2022}{\bullet{}}%
+  \DeclareUnicodeCharacter{202F}{\thinspace}%
+  \DeclareUnicodeCharacter{2026}{\dots{}}%
+  \DeclareUnicodeCharacter{2039}{\guilsinglleft{}}%
+  \DeclareUnicodeCharacter{203A}{\guilsinglright{}}%
+  %
+  \DeclareUnicodeCharacter{20AC}{\euro{}}%
+  %
+  \DeclareUnicodeCharacter{2192}{\expansion{}}%
+  \DeclareUnicodeCharacter{21D2}{\result{}}%
+  %
+  % Mathematical symbols
+  \DeclareUnicodeCharacter{2200}{\ensuremath\forall}%
+  \DeclareUnicodeCharacter{2203}{\ensuremath\exists}%
+  \DeclareUnicodeCharacter{2208}{\ensuremath\in}%
+  \DeclareUnicodeCharacter{2212}{\minus{}}%
+  \DeclareUnicodeCharacter{2217}{\ast}%
+  \DeclareUnicodeCharacter{221E}{\ensuremath\infty}%
+  \DeclareUnicodeCharacter{2225}{\ensuremath\parallel}%
+  \DeclareUnicodeCharacter{2227}{\ensuremath\wedge}%
+  \DeclareUnicodeCharacter{2229}{\ensuremath\cap}%
+  \DeclareUnicodeCharacter{2261}{\equiv{}}%
+  \DeclareUnicodeCharacter{2264}{\ensuremath\leq}%
+  \DeclareUnicodeCharacter{2265}{\ensuremath\geq}%
+  \DeclareUnicodeCharacter{2282}{\ensuremath\subset}%
+  \DeclareUnicodeCharacter{2287}{\ensuremath\supseteq}%
+  %
+  \DeclareUnicodeCharacter{2016}{\ensuremath\Vert}%
+  \DeclareUnicodeCharacter{2032}{\ensuremath\prime}%
+  \DeclareUnicodeCharacter{210F}{\ensuremath\hbar}%
+  \DeclareUnicodeCharacter{2111}{\ensuremath\Im}%
+  \DeclareUnicodeCharacter{2113}{\ensuremath\ell}%
+  \DeclareUnicodeCharacter{2118}{\ensuremath\wp}%
+  \DeclareUnicodeCharacter{211C}{\ensuremath\Re}%
+  \DeclareUnicodeCharacter{2135}{\ensuremath\aleph}%
+  \DeclareUnicodeCharacter{2190}{\ensuremath\leftarrow}%
+  \DeclareUnicodeCharacter{2191}{\ensuremath\uparrow}%
+  \DeclareUnicodeCharacter{2193}{\ensuremath\downarrow}%
+  \DeclareUnicodeCharacter{2194}{\ensuremath\leftrightarrow}%
+  \DeclareUnicodeCharacter{2195}{\ensuremath\updownarrow}%
+  \DeclareUnicodeCharacter{2196}{\ensuremath\nwarrow}%
+  \DeclareUnicodeCharacter{2197}{\ensuremath\nearrow}%
+  \DeclareUnicodeCharacter{2198}{\ensuremath\searrow}%
+  \DeclareUnicodeCharacter{2199}{\ensuremath\swarrow}%
+  \DeclareUnicodeCharacter{21A6}{\ensuremath\mapsto}%
+  \DeclareUnicodeCharacter{21A9}{\ensuremath\hookleftarrow}%
+  \DeclareUnicodeCharacter{21AA}{\ensuremath\hookrightarrow}%
+  \DeclareUnicodeCharacter{21BC}{\ensuremath\leftharpoonup}%
+  \DeclareUnicodeCharacter{21BD}{\ensuremath\leftharpoondown}%
+  \DeclareUnicodeCharacter{21C0}{\ensuremath\rightharpoonup}%
+  \DeclareUnicodeCharacter{21C1}{\ensuremath\rightharpoondown}%
+  \DeclareUnicodeCharacter{21CC}{\ensuremath\rightleftharpoons}%
+  \DeclareUnicodeCharacter{21D0}{\ensuremath\Leftarrow}%
+  \DeclareUnicodeCharacter{21D1}{\ensuremath\Uparrow}%
+  \DeclareUnicodeCharacter{21D3}{\ensuremath\Downarrow}%
+  \DeclareUnicodeCharacter{21D4}{\ensuremath\Leftrightarrow}%
+  \DeclareUnicodeCharacter{21D5}{\ensuremath\Updownarrow}%
+  \DeclareUnicodeCharacter{2202}{\ensuremath\partial}%
+  \DeclareUnicodeCharacter{2205}{\ensuremath\emptyset}%
+  \DeclareUnicodeCharacter{2207}{\ensuremath\nabla}%
+  \DeclareUnicodeCharacter{2209}{\ensuremath\notin}%
+  \DeclareUnicodeCharacter{220B}{\ensuremath\owns}%
+  \DeclareUnicodeCharacter{220F}{\ensuremath\prod}%
+  \DeclareUnicodeCharacter{2210}{\ensuremath\coprod}%
+  \DeclareUnicodeCharacter{2211}{\ensuremath\sum}%
+  \DeclareUnicodeCharacter{2213}{\ensuremath\mp}%
+  \DeclareUnicodeCharacter{2218}{\ensuremath\circ}%
+  \DeclareUnicodeCharacter{221A}{\ensuremath\surd}%
+  \DeclareUnicodeCharacter{221D}{\ensuremath\propto}%
+  \DeclareUnicodeCharacter{2220}{\ensuremath\angle}%
+  \DeclareUnicodeCharacter{2223}{\ensuremath\mid}%
+  \DeclareUnicodeCharacter{2228}{\ensuremath\vee}%
+  \DeclareUnicodeCharacter{222A}{\ensuremath\cup}%
+  \DeclareUnicodeCharacter{222B}{\ensuremath\smallint}%
+  \DeclareUnicodeCharacter{222E}{\ensuremath\oint}%
+  \DeclareUnicodeCharacter{223C}{\ensuremath\sim}%
+  \DeclareUnicodeCharacter{2240}{\ensuremath\wr}%
+  \DeclareUnicodeCharacter{2243}{\ensuremath\simeq}%
+  \DeclareUnicodeCharacter{2245}{\ensuremath\cong}%
+  \DeclareUnicodeCharacter{2248}{\ensuremath\approx}%
+  \DeclareUnicodeCharacter{224D}{\ensuremath\asymp}%
+  \DeclareUnicodeCharacter{2250}{\ensuremath\doteq}%
+  \DeclareUnicodeCharacter{2260}{\ensuremath\neq}%
+  \DeclareUnicodeCharacter{226A}{\ensuremath\ll}%
+  \DeclareUnicodeCharacter{226B}{\ensuremath\gg}%
+  \DeclareUnicodeCharacter{227A}{\ensuremath\prec}%
+  \DeclareUnicodeCharacter{227B}{\ensuremath\succ}%
+  \DeclareUnicodeCharacter{2283}{\ensuremath\supset}%
+  \DeclareUnicodeCharacter{2286}{\ensuremath\subseteq}%
+  \DeclareUnicodeCharacter{228E}{\ensuremath\uplus}%
+  \DeclareUnicodeCharacter{2291}{\ensuremath\sqsubseteq}%
+  \DeclareUnicodeCharacter{2292}{\ensuremath\sqsupseteq}%
+  \DeclareUnicodeCharacter{2293}{\ensuremath\sqcap}%
+  \DeclareUnicodeCharacter{2294}{\ensuremath\sqcup}%
+  \DeclareUnicodeCharacter{2295}{\ensuremath\oplus}%
+  \DeclareUnicodeCharacter{2296}{\ensuremath\ominus}%
+  \DeclareUnicodeCharacter{2297}{\ensuremath\otimes}%
+  \DeclareUnicodeCharacter{2298}{\ensuremath\oslash}%
+  \DeclareUnicodeCharacter{2299}{\ensuremath\odot}%
+  \DeclareUnicodeCharacter{22A2}{\ensuremath\vdash}%
+  \DeclareUnicodeCharacter{22A3}{\ensuremath\dashv}%
+  \DeclareUnicodeCharacter{22A4}{\ensuremath\ptextop}%
+  \DeclareUnicodeCharacter{22A5}{\ensuremath\bot}%
+  \DeclareUnicodeCharacter{22A8}{\ensuremath\models}%
+  \DeclareUnicodeCharacter{22C0}{\ensuremath\bigwedge}%
+  \DeclareUnicodeCharacter{22C1}{\ensuremath\bigvee}%
+  \DeclareUnicodeCharacter{22C2}{\ensuremath\bigcap}%
+  \DeclareUnicodeCharacter{22C3}{\ensuremath\bigcup}%
+  \DeclareUnicodeCharacter{22C4}{\ensuremath\diamond}%
+  \DeclareUnicodeCharacter{22C5}{\ensuremath\cdot}%
+  \DeclareUnicodeCharacter{22C6}{\ensuremath\star}%
+  \DeclareUnicodeCharacter{22C8}{\ensuremath\bowtie}%
+  \DeclareUnicodeCharacter{2308}{\ensuremath\lceil}%
+  \DeclareUnicodeCharacter{2309}{\ensuremath\rceil}%
+  \DeclareUnicodeCharacter{230A}{\ensuremath\lfloor}%
+  \DeclareUnicodeCharacter{230B}{\ensuremath\rfloor}%
+  \DeclareUnicodeCharacter{2322}{\ensuremath\frown}%
+  \DeclareUnicodeCharacter{2323}{\ensuremath\smile}%
+  %
+  \DeclareUnicodeCharacter{25B3}{\ensuremath\triangle}%
+  \DeclareUnicodeCharacter{25B7}{\ensuremath\triangleright}%
+  \DeclareUnicodeCharacter{25BD}{\ensuremath\bigtriangledown}%
+  \DeclareUnicodeCharacter{25C1}{\ensuremath\triangleleft}%
+  \DeclareUnicodeCharacter{25C7}{\ensuremath\diamond}%
+  \DeclareUnicodeCharacter{2660}{\ensuremath\spadesuit}%
+  \DeclareUnicodeCharacter{2661}{\ensuremath\heartsuit}%
+  \DeclareUnicodeCharacter{2662}{\ensuremath\diamondsuit}%
+  \DeclareUnicodeCharacter{2663}{\ensuremath\clubsuit}%
+  \DeclareUnicodeCharacter{266D}{\ensuremath\flat}%
+  \DeclareUnicodeCharacter{266E}{\ensuremath\natural}%
+  \DeclareUnicodeCharacter{266F}{\ensuremath\sharp}%
+  \DeclareUnicodeCharacter{26AA}{\ensuremath\bigcirc}%
+  \DeclareUnicodeCharacter{27B9}{\ensuremath\rangle}%
+  \DeclareUnicodeCharacter{27C2}{\ensuremath\perp}%
+  \DeclareUnicodeCharacter{27E8}{\ensuremath\langle}%
+  \DeclareUnicodeCharacter{27F5}{\ensuremath\longleftarrow}%
+  \DeclareUnicodeCharacter{27F6}{\ensuremath\longrightarrow}%
+  \DeclareUnicodeCharacter{27F7}{\ensuremath\longleftrightarrow}%
+  \DeclareUnicodeCharacter{27FC}{\ensuremath\longmapsto}%
+  \DeclareUnicodeCharacter{29F5}{\ensuremath\setminus}%
+  \DeclareUnicodeCharacter{2A00}{\ensuremath\bigodot}%
+  \DeclareUnicodeCharacter{2A01}{\ensuremath\bigoplus}%
+  \DeclareUnicodeCharacter{2A02}{\ensuremath\bigotimes}%
+  \DeclareUnicodeCharacter{2A04}{\ensuremath\biguplus}%
+  \DeclareUnicodeCharacter{2A06}{\ensuremath\bigsqcup}%
+  \DeclareUnicodeCharacter{2A3F}{\ensuremath\amalg}%
+  \DeclareUnicodeCharacter{2AAF}{\ensuremath\preceq}%
+  \DeclareUnicodeCharacter{2AB0}{\ensuremath\succeq}%
+  %
+  \global\mathchardef\checkmark="1370% actually the square root sign
+  \DeclareUnicodeCharacter{2713}{\ensuremath\checkmark}%
+}% end of \unicodechardefs
+
+% UTF-8 byte sequence (pdfTeX) definitions (for character replacement and
+% for the @U command).
+% This activates the settings that replace UTF-8 byte sequences.
+\def\utfeightchardefs{%
+  \let\DeclareUnicodeCharacter\DeclareUnicodeCharacterUTFviii
+  \unicodechardefs
+}
+
+% Whether the active definitions of non-ASCII characters expand to
+% non-active tokens with the same character code.  This is used to
+% write characters literally, instead of using active definitions for
+% printing the correct glyphs.
+\newif\ifpassthroughchars
+\passthroughcharsfalse
+
+% For native Unicode handling (XeTeX and LuaTeX),
+% provide a definition macro to replace/pass-through a Unicode character
+%
+\def\DeclareUnicodeCharacterNative#1#2{%
+  \catcode"#1=\active
+  \def\dodeclareunicodecharacternative##1##2##3{%
+    \begingroup
+      \uccode`\~="##2\relax
+      \uppercase{\gdef~}{%
+        \ifpassthroughchars
+          ##1%
+        \else
+          ##3%
+        \fi
+      }
+    \endgroup
+  }
+  \begingroup
+    \uccode`\.="#1\relax
+    \uppercase{\def\UTFNativeTmp{.}}%
+    \expandafter\dodeclareunicodecharacternative\UTFNativeTmp{#1}{#2}%
+  \endgroup
+}
+
+% Native Unicode handling (XeTeX and LuaTeX) character-replacement definitions.
+% This activates the settings that replace Unicode characters.
+\def\nativeunicodechardefs{%
+  \let\DeclareUnicodeCharacter\DeclareUnicodeCharacterNative
+  \unicodechardefs
+}
+
+% For native Unicode handling (XeTeX and LuaTeX),
+% make the character token expand
+% to the sequences given in \unicodechardefs for printing.
+\def\DeclareUnicodeCharacterNativeAtU#1#2{%
+  \def\UTFAtUTmp{#2}
+  \expandafter\globallet\csname uni:#1\endcsname \UTFAtUTmp
+}
+
+% @U command definitions for native Unicode handling (XeTeX and LuaTeX).
+\def\nativeunicodechardefsatu{%
+  \let\DeclareUnicodeCharacter\DeclareUnicodeCharacterNativeAtU
+  \unicodechardefs
+}
+
+% US-ASCII character definitions.
+\def\asciichardefs{% nothing need be done
+   \relax
+}
+
+% define all Unicode characters we know about, for the sake of @U.
+\iftxinativeunicodecapable
+  \nativeunicodechardefsatu
+\else
+  \utfeightchardefs
+\fi
+
+
+% Make non-ASCII characters printable again for compatibility with
+% existing Texinfo documents that may use them, even without declaring a
+% document encoding.
+%
+\setnonasciicharscatcode \other
+
+
+\message{formatting,}
+
+\newdimen\defaultparindent \defaultparindent = 15pt
+
+\chapheadingskip = 15pt plus 4pt minus 2pt
+\secheadingskip = 12pt plus 3pt minus 2pt
+\subsecheadingskip = 9pt plus 2pt minus 2pt
+
+% Prevent underfull vbox error messages.
+\vbadness = 10000
+
+% Don't be very finicky about underfull hboxes, either.
+\hbadness = 6666
+
+% Following George Bush, get rid of widows and orphans.
+\widowpenalty=10000
+\clubpenalty=10000
+
+% Use TeX 3.0's \emergencystretch to help line breaking, but if we're
+% using an old version of TeX, don't do anything.  We want the amount of
+% stretch added to depend on the line length, hence the dependence on
+% \hsize.  We call this whenever the paper size is set.
+%
+\def\setemergencystretch{%
+  \ifx\emergencystretch\thisisundefined
+    % Allow us to assign to \emergencystretch anyway.
+    \def\emergencystretch{\dimen0}%
+  \else
+    \emergencystretch = .15\hsize
+  \fi
+}
+
+% Parameters in order: 1) textheight; 2) textwidth;
+% 3) voffset; 4) hoffset; 5) binding offset; 6) topskip;
+% 7) physical page height; 8) physical page width.
+%
+% We also call \setleading{\textleading}, so the caller should define
+% \textleading.  The caller should also set \parskip.
+%
+\def\internalpagesizes#1#2#3#4#5#6#7#8{%
+  \voffset = #3\relax
+  \topskip = #6\relax
+  \splittopskip = \topskip
+  %
+  \vsize = #1\relax
+  \advance\vsize by \topskip
+  \outervsize = \vsize
+  \advance\outervsize by 2\topandbottommargin
+  \txipageheight = \vsize
+  %
+  \hsize = #2\relax
+  \outerhsize = \hsize
+  \advance\outerhsize by 0.5in
+  \txipagewidth = \hsize
+  %
+  \normaloffset = #4\relax
+  \bindingoffset = #5\relax
+  %
+  \ifpdf
+    \pdfpageheight #7\relax
+    \pdfpagewidth #8\relax
+    % if we don't reset these, they will remain at "1 true in" of
+    % whatever layout pdftex was dumped with.
+    \pdfhorigin = 1 true in
+    \pdfvorigin = 1 true in
+  \else
+    \ifx\XeTeXrevision\thisisundefined
+      \special{papersize=#8,#7}%
+    \else
+      \pdfpageheight #7\relax
+      \pdfpagewidth #8\relax
+      % XeTeX does not have \pdfhorigin and \pdfvorigin.
+    \fi
+  \fi
+  %
+  \setleading{\textleading}
+  %
+  \parindent = \defaultparindent
+  \setemergencystretch
+}
+
+% @letterpaper (the default).
+\def\letterpaper{{\globaldefs = 1
+  \parskip = 3pt plus 2pt minus 1pt
+  \textleading = 13.2pt
+  %
+  % If page is nothing but text, make it come out even.
+  \internalpagesizes{607.2pt}{6in}% that's 46 lines
+                    {\voffset}{.25in}%
+                    {\bindingoffset}{36pt}%
+                    {11in}{8.5in}%
+}}
+
+% Use @smallbook to reset parameters for 7x9.25 trim size.
+\def\smallbook{{\globaldefs = 1
+  \parskip = 2pt plus 1pt
+  \textleading = 12pt
+  %
+  \internalpagesizes{7.5in}{5in}%
+                    {-.2in}{0in}%
+                    {\bindingoffset}{16pt}%
+                    {9.25in}{7in}%
+  %
+  \lispnarrowing = 0.3in
+  \tolerance = 700
+  \contentsrightmargin = 0pt
+  \defbodyindent = .5cm
+}}
+
+% Use @smallerbook to reset parameters for 6x9 trim size.
+% (Just testing, parameters still in flux.)
+\def\smallerbook{{\globaldefs = 1
+  \parskip = 1.5pt plus 1pt
+  \textleading = 12pt
+  %
+  \internalpagesizes{7.4in}{4.8in}%
+                    {-.2in}{-.4in}%
+                    {0pt}{14pt}%
+                    {9in}{6in}%
+  %
+  \lispnarrowing = 0.25in
+  \tolerance = 700
+  \contentsrightmargin = 0pt
+  \defbodyindent = .4cm
+}}
+
+% Use @afourpaper to print on European A4 paper.
+\def\afourpaper{{\globaldefs = 1
+  \parskip = 3pt plus 2pt minus 1pt
+  \textleading = 13.2pt
+  %
+  % Double-side printing via postscript on Laserjet 4050
+  % prints double-sided nicely when \bindingoffset=10mm and \hoffset=-6mm.
+  % To change the settings for a different printer or situation, adjust
+  % \normaloffset until the front-side and back-side texts align.  Then
+  % do the same for \bindingoffset.  You can set these for testing in
+  % your texinfo source file like this:
+  % @tex
+  % \global\normaloffset = -6mm
+  % \global\bindingoffset = 10mm
+  % @end tex
+  \internalpagesizes{673.2pt}{160mm}% that's 51 lines
+                    {\voffset}{\hoffset}%
+                    {\bindingoffset}{44pt}%
+                    {297mm}{210mm}%
+  %
+  \tolerance = 700
+  \contentsrightmargin = 0pt
+  \defbodyindent = 5mm
+}}
+
+% Use @afivepaper to print on European A5 paper.
+% From address@hidden, 2 July 2000.
+% He also recommends making @example and @lisp be small.
+\def\afivepaper{{\globaldefs = 1
+  \parskip = 2pt plus 1pt minus 0.1pt
+  \textleading = 12.5pt
+  %
+  \internalpagesizes{160mm}{120mm}%
+                    {\voffset}{\hoffset}%
+                    {\bindingoffset}{8pt}%
+                    {210mm}{148mm}%
+  %
+  \lispnarrowing = 0.2in
+  \tolerance = 800
+  \contentsrightmargin = 0pt
+  \defbodyindent = 2mm
+  \tableindent = 12mm
+}}
+
+% A specific text layout, 24x15cm overall, intended for A4 paper.
+\def\afourlatex{{\globaldefs = 1
+  \afourpaper
+  \internalpagesizes{237mm}{150mm}%
+                    {\voffset}{4.6mm}%
+                    {\bindingoffset}{7mm}%
+                    {297mm}{210mm}%
+  %
+  % Must explicitly reset to 0 because we call \afourpaper.
+  \globaldefs = 0
+}}
+
+% Use @afourwide to print on A4 paper in landscape format.
+\def\afourwide{{\globaldefs = 1
+  \afourpaper
+  \internalpagesizes{241mm}{165mm}%
+                    {\voffset}{-2.95mm}%
+                    {\bindingoffset}{7mm}%
+                    {297mm}{210mm}%
+  \globaldefs = 0
+}}
+
+% @pagesizes TEXTHEIGHT[,TEXTWIDTH]
+% Perhaps we should allow setting the margins, \topskip, \parskip,
+% and/or leading, also. Or perhaps we should compute them somehow.
+%
+\parseargdef\pagesizes{\pagesizesyyy #1,,\finish}
+\def\pagesizesyyy#1,#2,#3\finish{{%
+  \setbox0 = \hbox{\ignorespaces #2}\ifdim\wd0 > 0pt \hsize=#2\relax \fi
+  \globaldefs = 1
+  %
+  \parskip = 3pt plus 2pt minus 1pt
+  \setleading{\textleading}%
+  %
+  \dimen0 = #1\relax
+  \advance\dimen0 by \voffset
+  \advance\dimen0 by 1in % reference point for DVI is 1 inch from top of page
+  %
+  \dimen2 = \hsize
+  \advance\dimen2 by \normaloffset
+  \advance\dimen2 by 1in % reference point is 1 inch from left edge of page
+  %
+  \internalpagesizes{#1}{\hsize}%
+                    {\voffset}{\normaloffset}%
+                    {\bindingoffset}{44pt}%
+                    {\dimen0}{\dimen2}%
+}}
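+% For example (hypothetical values, not taken from any particular manual),
+% a Texinfo source could say
+%   @pagesizes 9.5in, 6.5in
+% to request a 9.5in text height and a 6.5in text width.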
+
+% Set default to letter.
+%
+\letterpaper
+
+% Default value of \hfuzz, for suppressing warnings about overfull hboxes.
+\hfuzz = 1pt
+
+
+\message{and turning on texinfo input format.}
+
+\def^^L{\par} % remove \outer, so ^L can appear in an @comment
+
+% DEL is a comment character, in case @c does not suffice.
+\catcode`\^^? = 14
+
+% Define macros to output various characters with catcode for normal text.
+\catcode`\"=\other \def\normaldoublequote{"}
+\catcode`\$=\other \def\normaldollar{$}%$ font-lock fix
+\catcode`\+=\other \def\normalplus{+}
+\catcode`\<=\other \def\normalless{<}
+\catcode`\>=\other \def\normalgreater{>}
+\catcode`\^=\other \def\normalcaret{^}
+\catcode`\_=\other \def\normalunderscore{_}
+\catcode`\|=\other \def\normalverticalbar{|}
+\catcode`\~=\other \def\normaltilde{~}
+
+% This macro is used to make a character print one way in \tt
+% (where it can probably be output as-is), and another way in other fonts,
+% where something hairier probably needs to be done.
+%
+% #1 is what to print if we are indeed using \tt; #2 is what to print
+% otherwise.  Since all the Computer Modern typewriter fonts have zero
+% interword stretch (and shrink), and it is reasonable to expect all
+% typewriter fonts to have this, we can check that font parameter.
+%
+\def\ifusingtt#1#2{\ifdim \fontdimen3\font=0pt #1\else #2\fi}
+
+% Same as above, but check for italic font.  Actually this also catches
+% non-italic slanted fonts since it is impossible to distinguish them from
+% italic fonts.  But since this is only used by $ and it uses \sl anyway
+% this is not a problem.
+\def\ifusingit#1#2{\ifdim \fontdimen1\font>0pt #1\else #2\fi}
+
+% Set catcodes for Texinfo file
+
+% Active characters for printing the wanted glyph.
+% Most of these we simply print from the \tt font, but for some, we can
+% use math or other variants that look better in normal text.
+%
+\catcode`\"=\active
+\def\activedoublequote{{\tt\char34}}
+\let"=\activedoublequote
+\catcode`\~=\active \def\activetilde{{\tt\char126}} \let~ = \activetilde
+\chardef\hatchar=`\^
+\catcode`\^=\active \def\activehat{{\tt \hatchar}} \let^ = \activehat
+
+\catcode`\_=\active
+\def_{\ifusingtt\normalunderscore\_}
+\def\_{\leavevmode \kern.07em \vbox{\hrule width.3em height.1ex}\kern .07em }
+\let\realunder=_
+
+\catcode`\|=\active \def|{{\tt\char124}}
+
+\chardef \less=`\<
+\catcode`\<=\active \def\activeless{{\tt \less}}\let< = \activeless
+\chardef \gtr=`\>
+\catcode`\>=\active \def\activegtr{{\tt \gtr}}\let> = \activegtr
+\catcode`\+=\active \def+{{\tt \char 43}}
+\catcode`\$=\active \def${\ifusingit{{\sl\$}}\normaldollar}%$ font-lock fix
+\catcode`\-=\active \let-=\normaldash
+
+
+% used for headline/footline in the output routine, in case the page
+% breaks in the middle of an @tex block.
+\def\texinfochars{%
+  \let< = \activeless
+  \let> = \activegtr
+  \let~ = \activetilde 
+  \let^ = \activehat
+  \markupsetuplqdefault \markupsetuprqdefault 
+  \let\b = \strong
+  \let\i = \smartitalic
+  % in principle, all other definitions in \tex have to be undone too.
+}
+
+% Used sometimes to turn off (effectively) the active characters even after
+% parsing them.
+\def\turnoffactive{%
+  \normalturnoffactive
+  \otherbackslash
+}
+
+\catcode`\@=0
+
+% \backslashcurfont outputs one backslash character in current font,
+% as in \char`\\.
+\global\chardef\backslashcurfont=`\\
+\global\let\rawbackslashxx=\backslashcurfont  % let existing .??s files work
+
+% \realbackslash is an actual character `\' with catcode other, and
+% \doublebackslash is two of them (for the pdf outlines).
+{\catcode`\\=\other @gdef@realbackslash{\} @gdef@doublebackslash{\\}}
+
+% In Texinfo, backslash is an active character; it prints the backslash
+% in fixed width font.
+\catcode`\\=\active  % @ for escape char from now on.
+
+% Print a typewriter backslash.  For math mode, we can't simply use
+% \backslashcurfont: the story here is that in math mode, the \char
+% of \backslashcurfont ends up printing the roman \ from the math symbol
+% font (because \char in math mode uses the \mathcode, and plain.tex
+% sets \mathcode`\\="026E).  Hence we use an explicit \mathchar,
+% which is the decimal equivalent of "715c (class 7, e.g., use \fam;
+% ignored family value; char position "5C).  We can't use " for the
+% usual hex value because it has already been made active.
+
+@def@ttbackslash{{@tt @ifmmode @mathchar29020 @else @backslashcurfont @fi}}
+@let@backslashchar = @ttbackslash % @backslashchar{} is for user documents.
+
+% \rawbackslash defines an active \ to do \backslashcurfont.
+% \otherbackslash defines an active \ to be a literal `\' character with
+% catcode other.  We switch back and forth between these.
+@gdef@rawbackslash{@let\=@backslashcurfont}
+@gdef@otherbackslash{@let\=@realbackslash}
+
+% Same as @turnoffactive except outputs \ as {\tt\char`\\} instead of
+% the literal character `\'.
+%
+{@catcode`- = @active
+ @gdef@normalturnoffactive{%
+   @passthroughcharstrue
+   @let-=@normaldash
+   @let"=@normaldoublequote
+   @let$=@normaldollar %$ font-lock fix
+   @let+=@normalplus
+   @let<=@normalless
+   @let>=@normalgreater
+   @let^=@normalcaret
+   @let_=@normalunderscore
+   @let|=@normalverticalbar
+   @let~=@normaltilde
+   @let\=@ttbackslash
+   @markupsetuplqdefault
+   @markupsetuprqdefault
+   @unsepspaces
+ }
+}
+
+% If a .fmt file is being used, characters that might appear in a file
+% name cannot be active until we have parsed the command line.
+% So turn them off again, and have @fixbackslash turn them back on.
+@catcode`+=@other @catcode`@_=@other
+
+% \enablebackslashhack - allow file to begin `\input texinfo'
+%
+% If a .fmt file is being used, we don't want the `\input texinfo' to show up.
+% That is what \eatinput is for; after that, the `\' should revert to printing
+% a backslash.
+% If the file did not have a `\input texinfo', then it is turned off after
+% the first line; otherwise the first `\' in the file would cause an error.
+% This is used on the very last line of this file, texinfo.tex.
+% We also use @c to call @fixbackslash, in case ends of lines are hidden.
+{
+@catcode`@^=7
+@catcode`@^^M=13@gdef@enablebackslashhack{%
+  @global@let\ = @eatinput%
+  @catcode`@^^M=13%
+  @def@c{@fixbackslash@c}%
+  % Definition for the newline at the end of this file.
+  @def ^^M{@let^^M@secondlinenl}%
+  % Definition for a newline in the main Texinfo file.
+  @gdef @secondlinenl{@fixbackslash}%
+  % In case the first line has a whole-line command on it
+  @let@originalparsearg@parsearg
+  @def@parsearg{@fixbackslash@originalparsearg}
+}}
+
+{@catcode`@^=7 @catcode`@^^M=13%
+@gdef@eatinput input texinfo#1^^M{@fixbackslash}}
+
+% Emergency active definition of newline, in case an active newline token
+% appears by mistake.
+{@catcode`@^=7 @catcode13=13%
+@gdef@enableemergencynewline{%
+  @gdef^^M{%
+    @par%
+    %<warning: active newline>@par%
+}}}
+
+
+@gdef@fixbackslash{%
+  @ifx\@eatinput @let\ = @ttbackslash @fi
+  @catcode13=5 % regular end of line
+  @enableemergencynewline
+  @let@c=@texinfoc
+  @let@parsearg@originalparsearg
+  % Also turn back on active characters that might appear in the input
+  % file name, in case not using a pre-dumped format.
+  @catcode`+=@active
+  @catcode`@_=@active
+  %
+  % If texinfo.cnf is present on the system, read it.
+  % Useful for site-wide @afourpaper, etc.  This macro, @fixbackslash, gets
+  % called at the beginning of every Texinfo file.  Not opening texinfo.cnf
+  % directly in this file, texinfo.tex, makes it possible to make a format
+  % file for Texinfo.
+  %
+  @openin 1 texinfo.cnf
+  @ifeof 1 @else @input texinfo.cnf @fi
+  @closein 1
+}
+
+
+% Say @foo, not \foo, in error messages.
+@escapechar = `@@
+
+% These (along with & and #) are made active for url-breaking, so need
+% active definitions as the normal characters.
+@def@normaldot{.}
+@def@normalquest{?}
+@def@normalslash{/}
+
+% These look ok in all fonts, so just make them not special.
+% @hashchar{} gets its own user-level command, because of #line.
+@catcode`@& = @other @def@normalamp{&}
+@catcode`@# = @other @def@normalhash{#}
+@catcode`@% = @other @def@normalpercent{%}
+
+@let @hashchar = @normalhash
+
+@c Finally, make ` and ' active, so that txicodequoteundirected and
+@c txicodequotebacktick work right in, e.g., @w{@code{`foo'}}.  If we
+@c don't make ` and ' active, @code will not get them as active chars.
+@c Do this last of all since we use ` in the previous @catcode assignments.
+@catcode`@'=@active
+@catcode`@`=@active
+@markupsetuplqdefault
+@markupsetuprqdefault
+
+@c Local variables:
+@c eval: (add-hook 'write-file-hooks 'time-stamp)
+@c page-delimiter: "^\\\\message\\|emacs-page"
+@c time-stamp-start: "def\\\\texinfoversion{"
+@c time-stamp-format: "%:y-%02m-%02d.%02H"
+@c time-stamp-end: "}"
+@c End:
+
+@c vim:sw=2:
+
+@enablebackslashhack
diff --git a/doc/version-manual.texi b/doc/version-manual.texi
new file mode 100644
index 0000000..21031dd
--- /dev/null
+++ b/doc/version-manual.texi
@@ -0,0 +1,4 @@
+@set UPDATED 18 April 2019
+@set UPDATED-MONTH April 2019
+@set EDITION 0.5.0
+@set VERSION 0.5.0
diff --git a/m4/ax_lib_postgresql.m4 b/m4/ax_lib_postgresql.m4
new file mode 100644
index 0000000..11b6991
--- /dev/null
+++ b/m4/ax_lib_postgresql.m4
@@ -0,0 +1,155 @@
+# ===========================================================================
+#     http://www.gnu.org/software/autoconf-archive/ax_lib_postgresql.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+#   AX_LIB_POSTGRESQL([MINIMUM-VERSION])
+#
+# DESCRIPTION
+#
+#   This macro tests for the availability of the PostgreSQL 'libpq' library
+#   of a particular version or newer.
+#
+#   The AX_LIB_POSTGRESQL macro takes a single, optional argument. If no
+#   required version is passed, the macro does not run the version test.
+#
+#   The --with-postgresql option takes one of three possible values:
+#
+#   no - do not check for PostgreSQL client library
+#
+#   yes - do check for PostgreSQL library in standard locations (pg_config
+#   should be in the PATH)
+#
+#   path - complete path to pg_config utility, use this option if pg_config
+#   can't be found in the PATH
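+#
+#   For example (hypothetical install prefix):
+#
+#     ./configure --with-postgresql=/usr/local/pgsql/bin/pg_config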
+#
+#   This macro calls:
+#
+#     AC_SUBST(POSTGRESQL_CPPFLAGS)
+#     AC_SUBST(POSTGRESQL_LDFLAGS)
+#     AC_SUBST(POSTGRESQL_VERSION)
+#
+#   And sets:
+#
+#     HAVE_POSTGRESQL
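+#
+#   A minimal configure.ac usage sketch (illustrative only; the minimum
+#   version and how the results are consumed are assumptions, not part of
+#   this repository):
+#
+#     AX_LIB_POSTGRESQL([9.5])
+#     CPPFLAGS="$CPPFLAGS $POSTGRESQL_CPPFLAGS"
+#     LDFLAGS="$LDFLAGS $POSTGRESQL_LDFLAGS"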
+#
+# LICENSE
+#
+#   Copyright (c) 2008 Mateusz Loskot <address@hidden>
+#
+#   Copying and distribution of this file, with or without modification, are
+#   permitted in any medium without royalty provided the copyright notice
+#   and this notice are preserved. This file is offered as-is, without any
+#   warranty.
+
+#serial 9
+
+AC_DEFUN([AX_LIB_POSTGRESQL],
+[
+    AC_ARG_WITH([postgresql],
+        AS_HELP_STRING([--with-postgresql=@<:@ARG@:>@],
+            [use PostgreSQL library @<:@default=yes@:>@, optionally specify path to pg_config]
+        ),
+        [
+        if test "$withval" = "no"; then
+            want_postgresql="no"
+        elif test "$withval" = "yes"; then
+            want_postgresql="yes"
+        else
+            want_postgresql="yes"
+            PG_CONFIG="$withval"
+        fi
+        ],
+        [want_postgresql="yes"]
+    )
+
+    POSTGRESQL_CPPFLAGS=""
+    POSTGRESQL_LDFLAGS=""
+    POSTGRESQL_VERSION=""
+
+    dnl
+    dnl Check PostgreSQL libraries (libpq)
+    dnl
+
+    if test "$want_postgresql" = "yes"; then
+
+        if test -z "$PG_CONFIG" -o test; then
+            AC_PATH_PROG([PG_CONFIG], [pg_config], [])
+        fi
+
+        if test ! -x "$PG_CONFIG"; then
+            dnl AC_MSG_ERROR([$PG_CONFIG does not exist or it is not an executable file])
+            PG_CONFIG="no"
+            found_postgresql="no"
+        fi
+
+        if test "$PG_CONFIG" != "no"; then
+            AC_MSG_CHECKING([for PostgreSQL libraries])
+
+            POSTGRESQL_CPPFLAGS="-I`$PG_CONFIG --includedir`"
+            POSTGRESQL_LDFLAGS="-L`$PG_CONFIG --libdir`"
+
+            POSTGRESQL_VERSION=`$PG_CONFIG --version | sed -e 's#PostgreSQL ##' | awk '{print $1}'`
+
+            AC_DEFINE([HAVE_POSTGRESQL], [1],
+                [Define to 1 if PostgreSQL libraries are available])
+
+            found_postgresql="yes"
+            AC_MSG_RESULT([yes])
+        else
+            found_postgresql="no"
+            AC_MSG_RESULT([no])
+        fi
+    fi
+
+    dnl
+    dnl Check if required version of PostgreSQL is available
+    dnl
+
+
+    postgresql_version_req=ifelse([$1], [], [], [$1])
+
+    if test "$found_postgresql" = "yes" -a -n "$postgresql_version_req"; then
+
+        AC_MSG_CHECKING([if PostgreSQL version $POSTGRESQL_VERSION is >= $postgresql_version_req])
+
+        dnl Decompose required version string of PostgreSQL
+        dnl and calculate its number representation
+        postgresql_version_req_major=`expr $postgresql_version_req : '\([[0-9]]*\)'`
+        postgresql_version_req_minor=`expr $postgresql_version_req : '[[0-9]]*\.\([[0-9]]*\)'`
+        postgresql_version_req_micro=`expr $postgresql_version_req : '[[0-9]]*\.[[0-9]]*\.\([[0-9]]*\)'`
+        if test "x$postgresql_version_req_micro" = "x"; then
+            postgresql_version_req_micro="0"
+        fi
+
+        postgresql_version_req_number=`expr $postgresql_version_req_major \* 1000000 \
+                                   \+ $postgresql_version_req_minor \* 1000 \
+                                   \+ $postgresql_version_req_micro`
+
+        dnl Decompose version string of installed PostgreSQL
+        dnl and calculate its number representation
+        postgresql_version_major=`expr $POSTGRESQL_VERSION : '\([[0-9]]*\)'`
+        postgresql_version_minor=`expr $POSTGRESQL_VERSION : '[[0-9]]*\.\([[0-9]]*\)'`
+        postgresql_version_micro=`expr $POSTGRESQL_VERSION : '[[0-9]]*\.[[0-9]]*\.\([[0-9]]*\)'`
+        if test "x$postgresql_version_micro" = "x"; then
+            postgresql_version_micro="0"
+        fi
+
+        postgresql_version_number=`expr $postgresql_version_major \* 1000000 \
+                                   \+ $postgresql_version_minor \* 1000 \
+                                   \+ $postgresql_version_micro`
+
+        postgresql_version_check=`expr $postgresql_version_number \>\= $postgresql_version_req_number`
+        if test "$postgresql_version_check" = "1"; then
+            AC_MSG_RESULT([yes])
+        else
+            AC_MSG_RESULT([no])
+        fi
+    fi
+
+    AC_SUBST([POSTGRESQL_VERSION])
+    AC_SUBST([POSTGRESQL_CPPFLAGS])
+    AC_SUBST([POSTGRESQL_LDFLAGS])
+])
diff --git a/m4/ax_prog_doxygen.m4 b/m4/ax_prog_doxygen.m4
new file mode 100644
index 0000000..a371f7f
--- /dev/null
+++ b/m4/ax_prog_doxygen.m4
@@ -0,0 +1,586 @@
+# ===========================================================================
+#     https://www.gnu.org/software/autoconf-archive/ax_prog_doxygen.html
+# ===========================================================================
+#
+# SYNOPSIS
+#
+#   DX_INIT_DOXYGEN(PROJECT-NAME, [DOXYFILE-PATH], [OUTPUT-DIR], ...)
+#   DX_DOXYGEN_FEATURE(ON|OFF)
+#   DX_DOT_FEATURE(ON|OFF)
+#   DX_HTML_FEATURE(ON|OFF)
+#   DX_CHM_FEATURE(ON|OFF)
+#   DX_CHI_FEATURE(ON|OFF)
+#   DX_MAN_FEATURE(ON|OFF)
+#   DX_RTF_FEATURE(ON|OFF)
+#   DX_XML_FEATURE(ON|OFF)
+#   DX_PDF_FEATURE(ON|OFF)
+#   DX_PS_FEATURE(ON|OFF)
+#
+# DESCRIPTION
+#
+#   The DX_*_FEATURE macros control the default setting for the given
+#   Doxygen feature. Supported features are 'DOXYGEN' itself, 'DOT' for
+#   generating graphics, 'HTML' for plain HTML, 'CHM' for compressed HTML
+#   help (for MS users), 'CHI' for generating a separate .chi file by the
+#   .chm file, and 'MAN', 'RTF', 'XML', 'PDF' and 'PS' for the appropriate
+#   output formats. The environment variable DOXYGEN_PAPER_SIZE may be
+#   specified to override the default 'a4wide' paper size.
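+#
+#   For example (illustrative), to build A4 output one could run:
+#
+#     ./configure DOXYGEN_PAPER_SIZE=a4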
+#
+#   By default, HTML, PDF and PS documentation is generated as this seems to
+#   be the most popular and portable combination. MAN pages created by
+#   Doxygen are usually problematic, though by picking an appropriate subset
+#   and doing some massaging they might be better than nothing. CHM and RTF
+#   are specific for MS (note that you can't generate both HTML and CHM at
+#   the same time). The XML is rather useless unless you apply specialized
+#   post-processing to it.
+#
+#   The macros mainly control the default state of the feature. The user can
+#   override the default by specifying --enable or --disable. The macros
+#   ensure that contradictory flags are not given (e.g.,
+#   --enable-doxygen-html and --enable-doxygen-chm,
+#   --enable-doxygen-anything with --disable-doxygen, etc.) Finally, each
+#   feature will be automatically disabled (with a warning) if the required
+#   programs are missing.
+#
+#   Once all the feature defaults have been specified, call DX_INIT_DOXYGEN
+#   with the following parameters: a one-word name for the project for use
+#   as a filename base etc., an optional configuration file name (the
+#   default is '$(srcdir)/Doxyfile', the same as Doxygen's default), and an
+#   optional output directory name (the default is 'doxygen-doc'). To run
+#   doxygen multiple times for different configuration files and output
+#   directories, provide more parameters: the second, fourth, sixth, etc.
+#   parameters are configuration file names and the third, fifth, seventh,
+#   etc. parameters are output directories. No checking is done to catch
+#   duplicates.
+#
+#   Automake Support
+#
+#   The DX_RULES substitution can be used to add all needed rules to the
+#   Makefile. Note that this is a substitution without being a variable:
+#   only the @DX_RULES@ syntax will work.
+#
+#   The provided targets are:
+#
+#     doxygen-doc: Generate all doxygen documentation.
+#
+#     doxygen-run: Run doxygen, which will generate some of the
+#                  documentation (HTML, CHM, CHI, MAN, RTF, XML)
+#                  but will not do the post processing required
+#                  for the rest of it (PS, PDF).
+#
+#     doxygen-ps:  Generate doxygen PostScript documentation.
+#
+#     doxygen-pdf: Generate doxygen PDF documentation.
+#
+#   Note that by default these are not integrated into the automake targets.
+#   If doxygen is used to generate man pages, you can achieve this
+#   integration by setting man3_MANS to the list of man pages generated and
+#   then adding the dependency:
+#
+#     $(man3_MANS): doxygen-doc
+#
+#   This will cause make to run doxygen and generate all the documentation.
+#
+#   The following variable is intended for use in Makefile.am:
+#
+#     DX_CLEANFILES = everything to clean.
+#
+#   Then add this variable to MOSTLYCLEANFILES.
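+#
+#   A minimal usage sketch (illustrative only; the project name, feature
+#   choices and output directory are assumptions, not part of this
+#   repository):
+#
+#     configure.ac:
+#       DX_DOXYGEN_FEATURE(ON)
+#       DX_HTML_FEATURE(ON)
+#       DX_PDF_FEATURE(OFF)
+#       DX_PS_FEATURE(OFF)
+#       DX_INIT_DOXYGEN([myproject], [Doxyfile], [doxygen-doc])
+#
+#     Makefile.am:
+#       @DX_RULES@
+#       MOSTLYCLEANFILES = $(DX_CLEANFILES)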
+#
+# LICENSE
+#
+#   Copyright (c) 2009 Oren Ben-Kiki <address@hidden>
+#   Copyright (c) 2015 Olaf Mandel <address@hidden>
+#
+#   Copying and distribution of this file, with or without modification, are
+#   permitted in any medium without royalty provided the copyright notice
+#   and this notice are preserved. This file is offered as-is, without any
+#   warranty.
+
+#serial 23
+
+## ----------##
+## Defaults. ##
+## ----------##
+
+DX_ENV=""
+AC_DEFUN([DX_FEATURE_doc],  ON)
+AC_DEFUN([DX_FEATURE_dot],  OFF)
+AC_DEFUN([DX_FEATURE_man],  OFF)
+AC_DEFUN([DX_FEATURE_html], ON)
+AC_DEFUN([DX_FEATURE_chm],  OFF)
+AC_DEFUN([DX_FEATURE_chi],  OFF)
+AC_DEFUN([DX_FEATURE_rtf],  OFF)
+AC_DEFUN([DX_FEATURE_xml],  OFF)
+AC_DEFUN([DX_FEATURE_pdf],  ON)
+AC_DEFUN([DX_FEATURE_ps],   ON)
+
+## --------------- ##
+## Private macros. ##
+## --------------- ##
+
+# DX_ENV_APPEND(VARIABLE, VALUE)
+# ------------------------------
+# Append VARIABLE="VALUE" to DX_ENV for invoking doxygen and add it
+# as a substitution (but not a Makefile variable). The substitution
+# is skipped if the variable name is VERSION.
+AC_DEFUN([DX_ENV_APPEND],
+[AC_SUBST([DX_ENV], ["$DX_ENV $1='$2'"])dnl
+m4_if([$1], [VERSION], [], [AC_SUBST([$1], [$2])dnl
+AM_SUBST_NOTMAKE([$1])])dnl
+])
+
+# DX_DIRNAME_EXPR
+# ---------------
+# Expand into a shell expression that prints the directory part of a path.
+AC_DEFUN([DX_DIRNAME_EXPR],
+         [[expr ".$1" : '\(\.\)[^/]*$' \| "x$1" : 'x\(.*\)/[^/]*$']])
+
+# DX_IF_FEATURE(FEATURE, IF-ON, IF-OFF)
+# -------------------------------------
+# Expands according to the M4 (static) status of the feature.
+AC_DEFUN([DX_IF_FEATURE], [ifelse(DX_FEATURE_$1, ON, [$2], [$3])])
+
+# DX_REQUIRE_PROG(VARIABLE, PROGRAM)
+# ----------------------------------
+# Require the specified program to be found for the DX_CURRENT_FEATURE to work.
+AC_DEFUN([DX_REQUIRE_PROG], [
+AC_PATH_TOOL([$1], [$2])
+if test "$DX_FLAG_[]DX_CURRENT_FEATURE$$1" = 1; then
+    AC_MSG_WARN([$2 not found - will not DX_CURRENT_DESCRIPTION])
+    AC_SUBST(DX_FLAG_[]DX_CURRENT_FEATURE, 0)
+fi
+])
+
+# DX_TEST_FEATURE(FEATURE)
+# ------------------------
+# Expand to a shell expression testing whether the feature is active.
+AC_DEFUN([DX_TEST_FEATURE], [test "$DX_FLAG_$1" = 1])
+
+# DX_CHECK_DEPEND(REQUIRED_FEATURE, REQUIRED_STATE)
+# -------------------------------------------------
+# Verify that a required feature has the right state before trying to turn on
+# the DX_CURRENT_FEATURE.
+AC_DEFUN([DX_CHECK_DEPEND], [
+test "$DX_FLAG_$1" = "$2" \
+|| AC_MSG_ERROR([doxygen-DX_CURRENT_FEATURE ifelse([$2], 1,
+                            requires, contradicts) doxygen-DX_CURRENT_FEATURE])
+])
+
+# DX_CLEAR_DEPEND(FEATURE, REQUIRED_FEATURE, REQUIRED_STATE)
+# ----------------------------------------------------------
+# Turn off the DX_CURRENT_FEATURE if the required feature is off.
+AC_DEFUN([DX_CLEAR_DEPEND], [
+test "$DX_FLAG_$1" = "$2" || AC_SUBST(DX_FLAG_[]DX_CURRENT_FEATURE, 0)
+])
+
+# DX_FEATURE_ARG(FEATURE, DESCRIPTION,
+#                CHECK_DEPEND, CLEAR_DEPEND,
+#                REQUIRE, DO-IF-ON, DO-IF-OFF)
+# --------------------------------------------
+# Parse the command-line option controlling a feature. CHECK_DEPEND is called
+# if the user explicitly turns the feature on (and invokes DX_CHECK_DEPEND),
+# otherwise CLEAR_DEPEND is called to turn off the default state if a required
+# feature is disabled (using DX_CLEAR_DEPEND). REQUIRE performs additional
+# requirement tests (DX_REQUIRE_PROG). Finally, an automake flag is set and
+# DO-IF-ON or DO-IF-OFF are called according to the final state of the feature.
+AC_DEFUN([DX_ARG_ABLE], [
+    AC_DEFUN([DX_CURRENT_FEATURE], [$1])
+    AC_DEFUN([DX_CURRENT_DESCRIPTION], [$2])
+    AC_ARG_ENABLE(doxygen-$1,
+                  [AS_HELP_STRING(DX_IF_FEATURE([$1], [--disable-doxygen-$1],
+                                                      [--enable-doxygen-$1]),
+                                  DX_IF_FEATURE([$1], [don't $2], [$2]))],
+                  [
+case "$enableval" in
+#(
+y|Y|yes|Yes|YES)
+    AC_SUBST([DX_FLAG_$1], 1)
+    $3
+;; #(
+n|N|no|No|NO)
+    AC_SUBST([DX_FLAG_$1], 0)
+;; #(
+*)
+    AC_MSG_ERROR([invalid value '$enableval' given to doxygen-$1])
+;;
+esac
+], [
+AC_SUBST([DX_FLAG_$1], [DX_IF_FEATURE([$1], 1, 0)])
+$4
+])
+if DX_TEST_FEATURE([$1]); then
+    $5
+    :
+fi
+if DX_TEST_FEATURE([$1]); then
+    $6
+    :
+else
+    $7
+    :
+fi
+])
+
+## -------------- ##
+## Public macros. ##
+## -------------- ##
+
+# DX_XXX_FEATURE(DEFAULT_STATE)
+# -----------------------------
+AC_DEFUN([DX_DOXYGEN_FEATURE], [AC_DEFUN([DX_FEATURE_doc],  [$1])])
+AC_DEFUN([DX_DOT_FEATURE],     [AC_DEFUN([DX_FEATURE_dot], [$1])])
+AC_DEFUN([DX_MAN_FEATURE],     [AC_DEFUN([DX_FEATURE_man],  [$1])])
+AC_DEFUN([DX_HTML_FEATURE],    [AC_DEFUN([DX_FEATURE_html], [$1])])
+AC_DEFUN([DX_CHM_FEATURE],     [AC_DEFUN([DX_FEATURE_chm],  [$1])])
+AC_DEFUN([DX_CHI_FEATURE],     [AC_DEFUN([DX_FEATURE_chi],  [$1])])
+AC_DEFUN([DX_RTF_FEATURE],     [AC_DEFUN([DX_FEATURE_rtf],  [$1])])
+AC_DEFUN([DX_XML_FEATURE],     [AC_DEFUN([DX_FEATURE_xml],  [$1])])
+AC_DEFUN([DX_XML_FEATURE],     [AC_DEFUN([DX_FEATURE_xml],  [$1])])
+AC_DEFUN([DX_PDF_FEATURE],     [AC_DEFUN([DX_FEATURE_pdf],  [$1])])
+AC_DEFUN([DX_PS_FEATURE],      [AC_DEFUN([DX_FEATURE_ps],   [$1])])
+
+# DX_INIT_DOXYGEN(PROJECT, [CONFIG-FILE], [OUTPUT-DOC-DIR], ...)
+# --------------------------------------------------------------
+# PROJECT also serves as the base name for the documentation files.
+# The default CONFIG-FILE is "$(srcdir)/Doxyfile" and OUTPUT-DOC-DIR is
+# "doxygen-doc".
+# More arguments are interpreted as interleaved CONFIG-FILE and
+# OUTPUT-DOC-DIR values.
+AC_DEFUN([DX_INIT_DOXYGEN], [
+
+# Files:
+AC_SUBST([DX_PROJECT], [$1])
+AC_SUBST([DX_CONFIG], ['ifelse([$2], [], [$(srcdir)/Doxyfile], [$2])'])
+AC_SUBST([DX_DOCDIR], ['ifelse([$3], [], [doxygen-doc], [$3])'])
+m4_if(m4_eval(3 < m4_count($@)), 1, [m4_for([DX_i], 4, m4_count($@), 2,
+      [AC_SUBST([DX_CONFIG]m4_eval(DX_i[/2]),
+                'm4_default_nblank_quoted(m4_argn(DX_i, $@),
+                                          [$(srcdir)/Doxyfile])')])])dnl
+m4_if(m4_eval(3 < m4_count($@)), 1, [m4_for([DX_i], 5, m4_count($@,), 2,
+      [AC_SUBST([DX_DOCDIR]m4_eval([(]DX_i[-1)/2]),
+                'm4_default_nblank_quoted(m4_argn(DX_i, $@),
+                                          [doxygen-doc])')])])dnl
+m4_define([DX_loop], m4_dquote(m4_if(m4_eval(3 < m4_count($@)), 1,
+          [m4_for([DX_i], 4, m4_count($@), 2, [, m4_eval(DX_i[/2])])],
+          [])))dnl
+
+# Environment variables used inside doxygen.cfg:
+DX_ENV_APPEND(SRCDIR, $srcdir)
+DX_ENV_APPEND(PROJECT, $DX_PROJECT)
+DX_ENV_APPEND(VERSION, $PACKAGE_VERSION)
+
+# Doxygen itself:
+DX_ARG_ABLE(doc, [generate any doxygen documentation],
+            [],
+            [],
+            [DX_REQUIRE_PROG([DX_DOXYGEN], doxygen)
+             DX_REQUIRE_PROG([DX_PERL], perl)],
+            [DX_ENV_APPEND(PERL_PATH, $DX_PERL)])
+
+# Dot for graphics:
+DX_ARG_ABLE(dot, [generate graphics for doxygen documentation],
+            [DX_CHECK_DEPEND(doc, 1)],
+            [DX_CLEAR_DEPEND(doc, 1)],
+            [DX_REQUIRE_PROG([DX_DOT], dot)],
+            [DX_ENV_APPEND(HAVE_DOT, YES)
+             DX_ENV_APPEND(DOT_PATH, [`DX_DIRNAME_EXPR($DX_DOT)`])],
+            [DX_ENV_APPEND(HAVE_DOT, NO)])
+
+# Man pages generation:
+DX_ARG_ABLE(man, [generate doxygen manual pages],
+            [DX_CHECK_DEPEND(doc, 1)],
+            [DX_CLEAR_DEPEND(doc, 1)],
+            [],
+            [DX_ENV_APPEND(GENERATE_MAN, YES)],
+            [DX_ENV_APPEND(GENERATE_MAN, NO)])
+
+# RTF file generation:
+DX_ARG_ABLE(rtf, [generate doxygen RTF documentation],
+            [DX_CHECK_DEPEND(doc, 1)],
+            [DX_CLEAR_DEPEND(doc, 1)],
+            [],
+            [DX_ENV_APPEND(GENERATE_RTF, YES)],
+            [DX_ENV_APPEND(GENERATE_RTF, NO)])
+
+# XML file generation:
+DX_ARG_ABLE(xml, [generate doxygen XML documentation],
+            [DX_CHECK_DEPEND(doc, 1)],
+            [DX_CLEAR_DEPEND(doc, 1)],
+            [],
+            [DX_ENV_APPEND(GENERATE_XML, YES)],
+            [DX_ENV_APPEND(GENERATE_XML, NO)])
+
+# (Compressed) HTML help generation:
+DX_ARG_ABLE(chm, [generate doxygen compressed HTML help documentation],
+            [DX_CHECK_DEPEND(doc, 1)],
+            [DX_CLEAR_DEPEND(doc, 1)],
+            [DX_REQUIRE_PROG([DX_HHC], hhc)],
+            [DX_ENV_APPEND(HHC_PATH, $DX_HHC)
+             DX_ENV_APPEND(GENERATE_HTML, YES)
+             DX_ENV_APPEND(GENERATE_HTMLHELP, YES)],
+            [DX_ENV_APPEND(GENERATE_HTMLHELP, NO)])
+
+# Separate CHI file generation.
+DX_ARG_ABLE(chi, [generate doxygen separate compressed HTML help index file],
+            [DX_CHECK_DEPEND(chm, 1)],
+            [DX_CLEAR_DEPEND(chm, 1)],
+            [],
+            [DX_ENV_APPEND(GENERATE_CHI, YES)],
+            [DX_ENV_APPEND(GENERATE_CHI, NO)])
+
+# Plain HTML pages generation:
+DX_ARG_ABLE(html, [generate doxygen plain HTML documentation],
+            [DX_CHECK_DEPEND(doc, 1) DX_CHECK_DEPEND(chm, 0)],
+            [DX_CLEAR_DEPEND(doc, 1) DX_CLEAR_DEPEND(chm, 0)],
+            [],
+            [DX_ENV_APPEND(GENERATE_HTML, YES)],
+            [DX_TEST_FEATURE(chm) || DX_ENV_APPEND(GENERATE_HTML, NO)])
+
+# PostScript file generation:
+DX_ARG_ABLE(ps, [generate doxygen PostScript documentation],
+            [DX_CHECK_DEPEND(doc, 1)],
+            [DX_CLEAR_DEPEND(doc, 1)],
+            [DX_REQUIRE_PROG([DX_LATEX], latex)
+             DX_REQUIRE_PROG([DX_MAKEINDEX], makeindex)
+             DX_REQUIRE_PROG([DX_DVIPS], dvips)
+             DX_REQUIRE_PROG([DX_EGREP], egrep)])
+
+# PDF file generation:
+DX_ARG_ABLE(pdf, [generate doxygen PDF documentation],
+            [DX_CHECK_DEPEND(doc, 1)],
+            [DX_CLEAR_DEPEND(doc, 1)],
+            [DX_REQUIRE_PROG([DX_PDFLATEX], pdflatex)
+             DX_REQUIRE_PROG([DX_MAKEINDEX], makeindex)
+             DX_REQUIRE_PROG([DX_EGREP], egrep)])
+
+# LaTeX generation for PS and/or PDF:
+if DX_TEST_FEATURE(ps) || DX_TEST_FEATURE(pdf); then
+    DX_ENV_APPEND(GENERATE_LATEX, YES)
+else
+    DX_ENV_APPEND(GENERATE_LATEX, NO)
+fi
+
+# Paper size for PS and/or PDF:
+AC_ARG_VAR(DOXYGEN_PAPER_SIZE,
+           [a4wide (default), a4, letter, legal or executive])
+case "$DOXYGEN_PAPER_SIZE" in
+#(
+"")
+    AC_SUBST(DOXYGEN_PAPER_SIZE, "")
+;; #(
+a4wide|a4|letter|legal|executive)
+    DX_ENV_APPEND(PAPER_SIZE, $DOXYGEN_PAPER_SIZE)
+;; #(
+*)
+    AC_MSG_ERROR([unknown DOXYGEN_PAPER_SIZE='$DOXYGEN_PAPER_SIZE'])
+;;
+esac
+
+# Rules:
+AS_IF([[test $DX_FLAG_html -eq 1]],
+[[DX_SNIPPET_html="## ------------------------------- ##
+## Rules specific for HTML output. ##
+## ------------------------------- ##
+
+DX_CLEAN_HTML = \$(DX_DOCDIR)/html]dnl
+m4_foreach([DX_i], [m4_shift(DX_loop)], [[\\
+                \$(DX_DOCDIR]DX_i[)/html]])[
+
+"]],
+[[DX_SNIPPET_html=""]])
+AS_IF([[test $DX_FLAG_chi -eq 1]],
+[[DX_SNIPPET_chi="
+DX_CLEAN_CHI = \$(DX_DOCDIR)/\$(PACKAGE).chi]dnl
+m4_foreach([DX_i], [m4_shift(DX_loop)], [[\\
+               \$(DX_DOCDIR]DX_i[)/\$(PACKAGE).chi]])["]],
+[[DX_SNIPPET_chi=""]])
+AS_IF([[test $DX_FLAG_chm -eq 1]],
+[[DX_SNIPPET_chm="## ------------------------------ ##
+## Rules specific for CHM output. ##
+## ------------------------------ ##
+
+DX_CLEAN_CHM = \$(DX_DOCDIR)/chm]dnl
+m4_foreach([DX_i], [m4_shift(DX_loop)], [[\\
+               \$(DX_DOCDIR]DX_i[)/chm]])[\
+${DX_SNIPPET_chi}
+
+"]],
+[[DX_SNIPPET_chm=""]])
+AS_IF([[test $DX_FLAG_man -eq 1]],
+[[DX_SNIPPET_man="## ------------------------------ ##
+## Rules specific for MAN output. ##
+## ------------------------------ ##
+
+DX_CLEAN_MAN = \$(DX_DOCDIR)/man]dnl
+m4_foreach([DX_i], [m4_shift(DX_loop)], [[\\
+               \$(DX_DOCDIR]DX_i[)/man]])[
+
+"]],
+[[DX_SNIPPET_man=""]])
+AS_IF([[test $DX_FLAG_rtf -eq 1]],
+[[DX_SNIPPET_rtf="## ------------------------------ ##
+## Rules specific for RTF output. ##
+## ------------------------------ ##
+
+DX_CLEAN_RTF = \$(DX_DOCDIR)/rtf]dnl
+m4_foreach([DX_i], [m4_shift(DX_loop)], [[\\
+               \$(DX_DOCDIR]DX_i[)/rtf]])[
+
+"]],
+[[DX_SNIPPET_rtf=""]])
+AS_IF([[test $DX_FLAG_xml -eq 1]],
+[[DX_SNIPPET_xml="## ------------------------------ ##
+## Rules specific for XML output. ##
+## ------------------------------ ##
+
+DX_CLEAN_XML = \$(DX_DOCDIR)/xml]dnl
+m4_foreach([DX_i], [m4_shift(DX_loop)], [[\\
+               \$(DX_DOCDIR]DX_i[)/xml]])[
+
+"]],
+[[DX_SNIPPET_xml=""]])
+AS_IF([[test $DX_FLAG_ps -eq 1]],
+[[DX_SNIPPET_ps="## ----------------------------- ##
+## Rules specific for PS output. ##
+## ----------------------------- ##
+
+DX_CLEAN_PS = \$(DX_DOCDIR)/\$(PACKAGE).ps]dnl
+m4_foreach([DX_i], [m4_shift(DX_loop)], [[\\
+              \$(DX_DOCDIR]DX_i[)/\$(PACKAGE).ps]])[
+
+DX_PS_GOAL = doxygen-ps
+
+doxygen-ps: \$(DX_CLEAN_PS)
+
+]m4_foreach([DX_i], [DX_loop],
+[[\$(DX_DOCDIR]DX_i[)/\$(PACKAGE).ps: \$(DX_DOCDIR]DX_i[)/\$(PACKAGE).tag
+       \$(DX_V_LATEX)cd \$(DX_DOCDIR]DX_i[)/latex; \\
+       rm -f *.aux *.toc *.idx *.ind *.ilg *.log *.out; \\
+       \$(DX_LATEX) refman.tex; \\
+       \$(DX_MAKEINDEX) refman.idx; \\
+       \$(DX_LATEX) refman.tex; \\
+       countdown=5; \\
+       while \$(DX_EGREP) 'Rerun (LaTeX|to get cross-references right)' \\
+                         refman.log > /dev/null 2>&1 \\
+          && test \$\$countdown -gt 0; do \\
+           \$(DX_LATEX) refman.tex; \\
+            countdown=\`expr \$\$countdown - 1\`; \\
+       done; \\
+       \$(DX_DVIPS) -o ../\$(PACKAGE).ps refman.dvi
+
+]])["]],
+[[DX_SNIPPET_ps=""]])
+AS_IF([[test $DX_FLAG_pdf -eq 1]],
+[[DX_SNIPPET_pdf="## ------------------------------ ##
+## Rules specific for PDF output. ##
+## ------------------------------ ##
+
+DX_CLEAN_PDF = \$(DX_DOCDIR)/\$(PACKAGE).pdf]dnl
+m4_foreach([DX_i], [m4_shift(DX_loop)], [[\\
+               \$(DX_DOCDIR]DX_i[)/\$(PACKAGE).pdf]])[
+
+DX_PDF_GOAL = doxygen-pdf
+
+doxygen-pdf: \$(DX_CLEAN_PDF)
+
+]m4_foreach([DX_i], [DX_loop],
+[[\$(DX_DOCDIR]DX_i[)/\$(PACKAGE).pdf: \$(DX_DOCDIR]DX_i[)/\$(PACKAGE).tag
+       \$(DX_V_LATEX)cd \$(DX_DOCDIR]DX_i[)/latex; \\
+       rm -f *.aux *.toc *.idx *.ind *.ilg *.log *.out; \\
+       \$(DX_PDFLATEX) refman.tex; \\
+       \$(DX_MAKEINDEX) refman.idx; \\
+       \$(DX_PDFLATEX) refman.tex; \\
+       countdown=5; \\
+       while \$(DX_EGREP) 'Rerun (LaTeX|to get cross-references right)' \\
+                         refman.log > /dev/null 2>&1 \\
+          && test \$\$countdown -gt 0; do \\
+           \$(DX_PDFLATEX) refman.tex; \\
+           countdown=\`expr \$\$countdown - 1\`; \\
+       done; \\
+       mv refman.pdf ../\$(PACKAGE).pdf
+
+]])["]],
+[[DX_SNIPPET_pdf=""]])
+AS_IF([[test $DX_FLAG_ps -eq 1 -o $DX_FLAG_pdf -eq 1]],
+[[DX_SNIPPET_latex="## ------------------------------------------------- ##
+## Rules specific for LaTeX (shared for PS and PDF). ##
+## ------------------------------------------------- ##
+
+DX_V_LATEX = \$(_DX_v_LATEX_\$(V))
+_DX_v_LATEX_ = \$(_DX_v_LATEX_\$(AM_DEFAULT_VERBOSITY))
+_DX_v_LATEX_0 = @echo \"  LATEX \" \$][@;
+
+DX_CLEAN_LATEX = \$(DX_DOCDIR)/latex]dnl
+m4_foreach([DX_i], [m4_shift(DX_loop)], [[\\
+                 \$(DX_DOCDIR]DX_i[)/latex]])[
+
+"]],
+[[DX_SNIPPET_latex=""]])
+
+AS_IF([[test $DX_FLAG_doc -eq 1]],
+[[DX_SNIPPET_doc="## --------------------------------- ##
+## Format-independent Doxygen rules. ##
+## --------------------------------- ##
+
+${DX_SNIPPET_html}\
+${DX_SNIPPET_chm}\
+${DX_SNIPPET_man}\
+${DX_SNIPPET_rtf}\
+${DX_SNIPPET_xml}\
+${DX_SNIPPET_ps}\
+${DX_SNIPPET_pdf}\
+${DX_SNIPPET_latex}\
+DX_V_DXGEN = \$(_DX_v_DXGEN_\$(V))
+_DX_v_DXGEN_ = \$(_DX_v_DXGEN_\$(AM_DEFAULT_VERBOSITY))
+_DX_v_DXGEN_0 = @echo \"  DXGEN \" \$<;
+
+.PHONY: doxygen-run doxygen-doc \$(DX_PS_GOAL) \$(DX_PDF_GOAL)
+
+.INTERMEDIATE: doxygen-run \$(DX_PS_GOAL) \$(DX_PDF_GOAL)
+
+doxygen-run:]m4_foreach([DX_i], [DX_loop],
+                         [[ \$(DX_DOCDIR]DX_i[)/\$(PACKAGE).tag]])[
+
+doxygen-doc: doxygen-run \$(DX_PS_GOAL) \$(DX_PDF_GOAL)
+
+]m4_foreach([DX_i], [DX_loop],
+[[\$(DX_DOCDIR]DX_i[)/\$(PACKAGE).tag: \$(DX_CONFIG]DX_i[) \$(pkginclude_HEADERS)
+       \$(A""M_V_at)rm -rf \$(DX_DOCDIR]DX_i[)
+       \$(DX_V_DXGEN)\$(DX_ENV) DOCDIR=\$(DX_DOCDIR]DX_i[) \$(DX_DOXYGEN) \$(DX_CONFIG]DX_i[)
+       \$(A""M_V_at)echo Timestamp >\$][@
+
+]])dnl
+[DX_CLEANFILES = \\]
+m4_foreach([DX_i], [DX_loop],
+[[     \$(DX_DOCDIR]DX_i[)/doxygen_sqlite3.db \\
+       \$(DX_DOCDIR]DX_i[)/\$(PACKAGE).tag \\
+]])dnl
+[      -r \\
+       \$(DX_CLEAN_HTML) \\
+       \$(DX_CLEAN_CHM) \\
+       \$(DX_CLEAN_CHI) \\
+       \$(DX_CLEAN_MAN) \\
+       \$(DX_CLEAN_RTF) \\
+       \$(DX_CLEAN_XML) \\
+       \$(DX_CLEAN_PS) \\
+       \$(DX_CLEAN_PDF) \\
+       \$(DX_CLEAN_LATEX)"]],
+[[DX_SNIPPET_doc=""]])
+AC_SUBST([DX_RULES],
+["${DX_SNIPPET_doc}"])dnl
+AM_SUBST_NOTMAKE([DX_RULES])
+
+#For debugging:
+#echo DX_FLAG_doc=$DX_FLAG_doc
+#echo DX_FLAG_dot=$DX_FLAG_dot
+#echo DX_FLAG_man=$DX_FLAG_man
+#echo DX_FLAG_html=$DX_FLAG_html
+#echo DX_FLAG_chm=$DX_FLAG_chm
+#echo DX_FLAG_chi=$DX_FLAG_chi
+#echo DX_FLAG_rtf=$DX_FLAG_rtf
+#echo DX_FLAG_xml=$DX_FLAG_xml
+#echo DX_FLAG_pdf=$DX_FLAG_pdf
+#echo DX_FLAG_ps=$DX_FLAG_ps
+#echo DX_ENV=$DX_ENV
+])
diff --git a/m4/libcurl.m4 b/m4/libcurl.m4
new file mode 100644
index 0000000..a84077a
--- /dev/null
+++ b/m4/libcurl.m4
@@ -0,0 +1,251 @@
+# LIBCURL_CHECK_CONFIG ([DEFAULT-ACTION], [MINIMUM-VERSION],
+#                       [ACTION-IF-YES], [ACTION-IF-NO])
+# ----------------------------------------------------------
+#      David Shaw <address@hidden>   May-09-2006
+#
+# Checks for libcurl.  DEFAULT-ACTION is the string yes or no to
+# specify whether to default to --with-libcurl or --without-libcurl.
+# If not supplied, DEFAULT-ACTION is yes.  MINIMUM-VERSION is the
+# minimum version of libcurl to accept.  Pass the version as a regular
+# version number like 7.10.1. If not supplied, any version is
+# accepted.  ACTION-IF-YES is a list of shell commands to run if
+# libcurl was successfully found and passed the various tests.
+# ACTION-IF-NO is a list of shell commands that are run otherwise.
+# Note that using --without-libcurl does run ACTION-IF-NO.
+#
+# This macro #defines HAVE_LIBCURL if a working libcurl setup is
+# found, and sets @LIBCURL@ and @LIBCURL_CPPFLAGS@ to the necessary
+# values.  Other useful defines are LIBCURL_FEATURE_xxx where xxx are
+# the various features supported by libcurl, and LIBCURL_PROTOCOL_yyy
+# where yyy are the various protocols supported by libcurl.  Both xxx
+# and yyy are capitalized.  See the list of AH_TEMPLATEs at the top of
+# the macro for the complete list of possible defines.  Shell
+# variables $libcurl_feature_xxx and $libcurl_protocol_yyy are also
+# defined to 'yes' for those features and protocols that were found.
+# Note that xxx and yyy keep the same capitalization as in the
+# curl-config list (e.g. it's "HTTP" and not "http").
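+#
+# For example (illustrative), configure.ac code could then test one of the
+# shell variables:
+#
+#   if test "x$libcurl_protocol_HTTPS" = xyes ; then
+#      AC_MSG_NOTICE([libcurl supports HTTPS])
+#   fi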
+#
+# Users may override the detected values by doing something like:
+# LIBCURL="-lcurl" LIBCURL_CPPFLAGS="-I/usr/myinclude" ./configure
+#
+# For the sake of sanity, this macro assumes that any libcurl that is
+# found is after version 7.7.2, the first version that included the
+# curl-config script.  Note that it is very important for people
+# packaging binary versions of libcurl to include this script!
+# Without curl-config, we can only guess what protocols are available,
+# or use curl_version_info to figure it out at runtime.
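+#
+# A minimal configure.ac sketch (illustrative only; the minimum version and
+# the error message are assumptions, not taken from this repository):
+#
+#   LIBCURL_CHECK_CONFIG([yes], [7.34.0], [],
+#                        [AC_MSG_ERROR([libcurl 7.34.0 or newer is required])])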
+
+AC_DEFUN([LIBCURL_CHECK_CONFIG],
+[
+  AH_TEMPLATE([LIBCURL_FEATURE_SSL],[Defined if libcurl supports SSL])
+  AH_TEMPLATE([LIBCURL_FEATURE_KRB4],[Defined if libcurl supports KRB4])
+  AH_TEMPLATE([LIBCURL_FEATURE_IPV6],[Defined if libcurl supports IPv6])
+  AH_TEMPLATE([LIBCURL_FEATURE_LIBZ],[Defined if libcurl supports libz])
+  AH_TEMPLATE([LIBCURL_FEATURE_ASYNCHDNS],[Defined if libcurl supports AsynchDNS])
+  AH_TEMPLATE([LIBCURL_FEATURE_IDN],[Defined if libcurl supports IDN])
+  AH_TEMPLATE([LIBCURL_FEATURE_SSPI],[Defined if libcurl supports SSPI])
+  AH_TEMPLATE([LIBCURL_FEATURE_NTLM],[Defined if libcurl supports NTLM])
+
+  AH_TEMPLATE([LIBCURL_PROTOCOL_HTTP],[Defined if libcurl supports HTTP])
+  AH_TEMPLATE([LIBCURL_PROTOCOL_HTTPS],[Defined if libcurl supports HTTPS])
+  AH_TEMPLATE([LIBCURL_PROTOCOL_FTP],[Defined if libcurl supports FTP])
+  AH_TEMPLATE([LIBCURL_PROTOCOL_FTPS],[Defined if libcurl supports FTPS])
+  AH_TEMPLATE([LIBCURL_PROTOCOL_FILE],[Defined if libcurl supports FILE])
+  AH_TEMPLATE([LIBCURL_PROTOCOL_TELNET],[Defined if libcurl supports TELNET])
+  AH_TEMPLATE([LIBCURL_PROTOCOL_LDAP],[Defined if libcurl supports LDAP])
+  AH_TEMPLATE([LIBCURL_PROTOCOL_DICT],[Defined if libcurl supports DICT])
+  AH_TEMPLATE([LIBCURL_PROTOCOL_TFTP],[Defined if libcurl supports TFTP])
+  AH_TEMPLATE([LIBCURL_PROTOCOL_RTSP],[Defined if libcurl supports RTSP])
+  AH_TEMPLATE([LIBCURL_PROTOCOL_POP3],[Defined if libcurl supports POP3])
+  AH_TEMPLATE([LIBCURL_PROTOCOL_IMAP],[Defined if libcurl supports IMAP])
+  AH_TEMPLATE([LIBCURL_PROTOCOL_SMTP],[Defined if libcurl supports SMTP])
+
+  AC_ARG_WITH(libcurl,
+     AC_HELP_STRING([--with-libcurl=PREFIX],[look for the curl library in PREFIX/lib and headers in PREFIX/include]),
+     [_libcurl_with=$withval],[_libcurl_with=ifelse([$1],,[yes],[$1])])
+
+  if test "$_libcurl_with" != "no" ; then
+
+     AC_PROG_AWK
+
+     _libcurl_version_parse="eval $AWK '{split(\$NF,A,\".\"); X=256*256*A[[1]]+256*A[[2]]+A[[3]]; print X;}'"
+
+     _libcurl_try_link=yes
+
+     if test -d "$_libcurl_with" ; then
+        LIBCURL_CPPFLAGS="-I$withval/include"
+        _libcurl_ldflags="-L$withval/lib"
+        AC_PATH_PROG([_libcurl_config],[curl-config],[],
+                     ["$withval/bin"])
+     else
+        AC_PATH_PROG([_libcurl_config],[curl-config],[],[$PATH])
+     fi
+
+     if test x$_libcurl_config != "x" ; then
+        AC_CACHE_CHECK([for the version of libcurl],
+           [libcurl_cv_lib_curl_version],
+           [libcurl_cv_lib_curl_version=`$_libcurl_config --version | $AWK '{print $[]2}'`])
+
+        _libcurl_version=`echo $libcurl_cv_lib_curl_version | $_libcurl_version_parse`
+        _libcurl_wanted=`echo ifelse([$2],,[0],[$2]) | $_libcurl_version_parse`
+
+        if test $_libcurl_wanted -gt 0 ; then
+           AC_CACHE_CHECK([for libcurl >= version $2],
+              [libcurl_cv_lib_version_ok],
+              [
+              if test $_libcurl_version -ge $_libcurl_wanted ; then
+                 libcurl_cv_lib_version_ok=yes
+              else
+                 libcurl_cv_lib_version_ok=no
+              fi
+              ])
+        fi
+
+        if test $_libcurl_wanted -eq 0 || test x$libcurl_cv_lib_version_ok = xyes ; then
+           if test x"$LIBCURL_CPPFLAGS" = "x" ; then
+              LIBCURL_CPPFLAGS=`$_libcurl_config --cflags`
+           fi
+           if test x"$LIBCURL" = "x" ; then
+              LIBCURL=`$_libcurl_config --libs`
+
+              # This is so silly, but Apple actually has a bug in their
+              # curl-config script.  Fixed in Tiger, but there are still
+              # lots of Panther installs around.
+              case "${host}" in
+                 powerpc-apple-darwin7*)
+                    LIBCURL=`echo $LIBCURL | sed -e 's|-arch i386||g'`
+                 ;;
+              esac
+           fi
+
+           # All curl-config scripts support --feature
+           _libcurl_features=`$_libcurl_config --feature`
+
+           # Is it modern enough to have --protocols? (7.12.4)
+           if test $_libcurl_version -ge 461828 ; then
+              _libcurl_protocols=`$_libcurl_config --protocols`
+           fi
+        else
+           _libcurl_try_link=no
+        fi
+
+        unset _libcurl_wanted
+     fi
+
+     if test $_libcurl_try_link = yes ; then
+
+        # we didn't find curl-config, so let's see if the user-supplied
+        # link line (or failing that, "-lcurl") is enough.
+        LIBCURL=${LIBCURL-"$_libcurl_ldflags -lcurl"}
+
+        AC_CACHE_CHECK([whether libcurl is usable],
+           [libcurl_cv_lib_curl_usable],
+           [
+           _libcurl_save_cppflags=$CPPFLAGS
+           CPPFLAGS="$LIBCURL_CPPFLAGS $CPPFLAGS"
+           _libcurl_save_libs=$LIBS
+           LIBS="$LIBCURL $LIBS"
+
+           AC_LINK_IFELSE([AC_LANG_PROGRAM([[#include <curl/curl.h>]],[[
+/* Try and use a few common options to force a failure if we are
+   missing symbols or can't link. */
+int x;
+curl_easy_setopt(NULL,CURLOPT_URL,NULL);
+x=CURL_ERROR_SIZE;
+x=CURLOPT_WRITEFUNCTION;
+x=CURLOPT_WRITEDATA;
+x=CURLOPT_ERRORBUFFER;
+x=CURLOPT_STDERR;
+x=CURLOPT_VERBOSE;
+if (x) ;
+]])],libcurl_cv_lib_curl_usable=yes,libcurl_cv_lib_curl_usable=no)
+
+           CPPFLAGS=$_libcurl_save_cppflags
+           LIBS=$_libcurl_save_libs
+           unset _libcurl_save_cppflags
+           unset _libcurl_save_libs
+           ])
+
+        if test $libcurl_cv_lib_curl_usable = yes ; then
+
+           # Does curl_free() exist in this version of libcurl?
+           # If not, fake it with free()
+
+           _libcurl_save_cppflags=$CPPFLAGS
+           CPPFLAGS="$CPPFLAGS $LIBCURL_CPPFLAGS"
+           _libcurl_save_libs=$LIBS
+           LIBS="$LIBS $LIBCURL"
+
+           AC_CHECK_FUNC(curl_free,,
+              AC_DEFINE(curl_free,free,
+                [Define curl_free() as free() if our version of curl lacks curl_free.]))
+
+           CPPFLAGS=$_libcurl_save_cppflags
+           LIBS=$_libcurl_save_libs
+           unset _libcurl_save_cppflags
+           unset _libcurl_save_libs
+
+           AC_DEFINE(HAVE_LIBCURL,1,
+             [Define to 1 if you have a functional curl library.])
+           AC_SUBST(LIBCURL_CPPFLAGS)
+           AC_SUBST(LIBCURL)
+
+           for _libcurl_feature in $_libcurl_features ; do
+              AC_DEFINE_UNQUOTED(AS_TR_CPP(libcurl_feature_$_libcurl_feature),[1])
+              eval AS_TR_SH(libcurl_feature_$_libcurl_feature)=yes
+           done
+
+           if test "x$_libcurl_protocols" = "x" ; then
+
+              # We don't have --protocols, so just assume that all
+              # protocols are available
+              _libcurl_protocols="HTTP FTP FILE TELNET LDAP DICT TFTP"
+
+              if test x$libcurl_feature_SSL = xyes ; then
+                 _libcurl_protocols="$_libcurl_protocols HTTPS"
+
+                 # FTPS wasn't standards-compliant until version
+                 # 7.11.0 (0x070b00 == 461568)
+                 if test $_libcurl_version -ge 461568; then
+                    _libcurl_protocols="$_libcurl_protocols FTPS"
+                 fi
+              fi
+
+              # RTSP, IMAP, POP3 and SMTP were added in
+              # 7.20.0 (0x071400 == 463872)
+              if test $_libcurl_version -ge 463872; then
+                 _libcurl_protocols="$_libcurl_protocols RTSP IMAP POP3 SMTP"
+              fi
+           fi
+
+           for _libcurl_protocol in $_libcurl_protocols ; do
+              AC_DEFINE_UNQUOTED(AS_TR_CPP(libcurl_protocol_$_libcurl_protocol),[1])
+              eval AS_TR_SH(libcurl_protocol_$_libcurl_protocol)=yes
+           done
+        else
+           unset LIBCURL
+           unset LIBCURL_CPPFLAGS
+        fi
+     fi
+
+     unset _libcurl_try_link
+     unset _libcurl_version_parse
+     unset _libcurl_config
+     unset _libcurl_feature
+     unset _libcurl_features
+     unset _libcurl_protocol
+     unset _libcurl_protocols
+     unset _libcurl_version
+     unset _libcurl_ldflags
+  fi
+
+  if test x$_libcurl_with = xno || test x$libcurl_cv_lib_curl_usable != xyes ; then
+     # This is the IF-NO path
+     ifelse([$4],,:,[$4])
+  else
+     # This is the IF-YES path
+     ifelse([$3],,:,[$3])
+  fi
+
+  unset _libcurl_with
+])dnl
diff --git a/m4/libgnurl.m4 b/m4/libgnurl.m4
new file mode 100644
index 0000000..69aa166
--- /dev/null
+++ b/m4/libgnurl.m4
@@ -0,0 +1,250 @@
+# LIBGNURL_CHECK_CONFIG ([DEFAULT-ACTION], [MINIMUM-VERSION],
+#                       [ACTION-IF-YES], [ACTION-IF-NO])
+# ----------------------------------------------------------
+#      David Shaw <address@hidden>   May-09-2006
+#
+# Checks for libgnurl.  DEFAULT-ACTION is the string yes or no to
+# specify whether to default to --with-libgnurl or --without-libgnurl.
+# If not supplied, DEFAULT-ACTION is yes.  MINIMUM-VERSION is the
+# minimum version of libgnurl to accept.  Pass the version as a regular
+# version number like 7.10.1. If not supplied, any version is
+# accepted.  ACTION-IF-YES is a list of shell commands to run if
+# libgnurl was successfully found and passed the various tests.
+# ACTION-IF-NO is a list of shell commands that are run otherwise.
+# Note that using --without-libgnurl does run ACTION-IF-NO.
+#
+# This macro #defines HAVE_LIBGNURL if a working libgnurl setup is
+# found, and sets @LIBGNURL@ and @LIBGNURL_CPPFLAGS@ to the necessary
+# values.  Other useful defines are LIBGNURL_FEATURE_xxx where xxx are
+# the various features supported by libgnurl, and LIBGNURL_PROTOCOL_yyy
+# where yyy are the various protocols supported by libgnurl.  Both xxx
+# and yyy are capitalized.  See the list of AH_TEMPLATEs at the top of
+# the macro for the complete list of possible defines.  Shell
+# variables $libgnurl_feature_xxx and $libgnurl_protocol_yyy are also
+# defined to 'yes' for those features and protocols that were found.
+# Note that xxx and yyy keep the same capitalization as in the
+# gnurl-config list (e.g. it's "HTTP" and not "http").
+#
+# Users may override the detected values by doing something like:
+# LIBGNURL="-lgnurl" LIBGNURL_CPPFLAGS="-I/usr/myinclude" ./configure
+#
+# For the sake of sanity, this macro assumes that any libgnurl that is
+# found is after version 7.7.2, the first version that included the
+# gnurl-config script.  Note that it is very important for people
+# packaging binary versions of libgnurl to include this script!
+# Without gnurl-config, we can only guess what protocols are available,
+# or use gnurl_version_info to figure it out at runtime.
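+#
+# For illustration only, a configure.ac consuming this macro might contain
+# something like the following (the version number is just an example):
+#
+#   LIBGNURL_CHECK_CONFIG([yes], [7.34.0],
+#     [AC_MSG_NOTICE([libgnurl found])],
+#     [AC_MSG_ERROR([libgnurl >= 7.34.0 is required])])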
+
+AC_DEFUN([LIBGNURL_CHECK_CONFIG],
+[
+  AH_TEMPLATE([LIBGNURL_FEATURE_SSL],[Defined if libgnurl supports SSL])
+  AH_TEMPLATE([LIBGNURL_FEATURE_KRB4],[Defined if libgnurl supports KRB4])
+  AH_TEMPLATE([LIBGNURL_FEATURE_IPV6],[Defined if libgnurl supports IPv6])
+  AH_TEMPLATE([LIBGNURL_FEATURE_LIBZ],[Defined if libgnurl supports libz])
+  AH_TEMPLATE([LIBGNURL_FEATURE_ASYNCHDNS],[Defined if libgnurl supports AsynchDNS])
+  AH_TEMPLATE([LIBGNURL_FEATURE_IDN],[Defined if libgnurl supports IDN])
+  AH_TEMPLATE([LIBGNURL_FEATURE_SSPI],[Defined if libgnurl supports SSPI])
+  AH_TEMPLATE([LIBGNURL_FEATURE_NTLM],[Defined if libgnurl supports NTLM])
+
+  AH_TEMPLATE([LIBGNURL_PROTOCOL_HTTP],[Defined if libgnurl supports HTTP])
+  AH_TEMPLATE([LIBGNURL_PROTOCOL_HTTPS],[Defined if libgnurl supports HTTPS])
+  AH_TEMPLATE([LIBGNURL_PROTOCOL_FTP],[Defined if libgnurl supports FTP])
+  AH_TEMPLATE([LIBGNURL_PROTOCOL_FTPS],[Defined if libgnurl supports FTPS])
+  AH_TEMPLATE([LIBGNURL_PROTOCOL_FILE],[Defined if libgnurl supports FILE])
+  AH_TEMPLATE([LIBGNURL_PROTOCOL_TELNET],[Defined if libgnurl supports TELNET])
+  AH_TEMPLATE([LIBGNURL_PROTOCOL_LDAP],[Defined if libgnurl supports LDAP])
+  AH_TEMPLATE([LIBGNURL_PROTOCOL_DICT],[Defined if libgnurl supports DICT])
+  AH_TEMPLATE([LIBGNURL_PROTOCOL_TFTP],[Defined if libgnurl supports TFTP])
+  AH_TEMPLATE([LIBGNURL_PROTOCOL_RTSP],[Defined if libgnurl supports RTSP])
+  AH_TEMPLATE([LIBGNURL_PROTOCOL_POP3],[Defined if libgnurl supports POP3])
+  AH_TEMPLATE([LIBGNURL_PROTOCOL_IMAP],[Defined if libgnurl supports IMAP])
+  AH_TEMPLATE([LIBGNURL_PROTOCOL_SMTP],[Defined if libgnurl supports SMTP])
+
+  AC_ARG_WITH(libgnurl,
+     AC_HELP_STRING([--with-libgnurl=PREFIX],[look for the gnurl library in PREFIX/lib and headers in PREFIX/include]),
+     [_libgnurl_with=$withval],[_libgnurl_with=ifelse([$1],,[yes],[$1])])
+
+  if test "$_libgnurl_with" != "no" ; then
+
+     AC_PROG_AWK
+
+     _libgnurl_version_parse="eval $AWK '{split(\$NF,A,\".\"); X=256*256*A[[1]]+256*A[[2]]+A[[3]]; print X;}'"
+
+     _libgnurl_try_link=yes
+
+     if test -d "$_libgnurl_with" ; then
+        LIBGNURL_CPPFLAGS="-I$withval/include"
+        _libgnurl_ldflags="-L$withval/lib"
+        AC_PATH_PROG([_libgnurl_config],[gnurl-config],[],
+                     ["$withval/bin"])
+     else
+        AC_PATH_PROG([_libgnurl_config],[gnurl-config],[],[$PATH])
+     fi
+
+     if test x$_libgnurl_config != "x" ; then
+        AC_CACHE_CHECK([for the version of libgnurl],
+           [libgnurl_cv_lib_gnurl_version],
+           [libgnurl_cv_lib_gnurl_version=`$_libgnurl_config --version | $AWK '{print $[]2}'`])
+
+        _libgnurl_version=`echo $libgnurl_cv_lib_gnurl_version | $_libgnurl_version_parse`
+        _libgnurl_wanted=`echo ifelse([$2],,[0],[$2]) | $_libgnurl_version_parse`
+
+        if test $_libgnurl_wanted -gt 0 ; then
+           AC_CACHE_CHECK([for libgnurl >= version $2],
+              [libgnurl_cv_lib_version_ok],
+              [
+              if test $_libgnurl_version -ge $_libgnurl_wanted ; then
+                 libgnurl_cv_lib_version_ok=yes
+              else
+                 libgnurl_cv_lib_version_ok=no
+              fi
+              ])
+        fi
+
+        if test $_libgnurl_wanted -eq 0 || test x$libgnurl_cv_lib_version_ok = xyes ; then
+           if test x"$LIBGNURL_CPPFLAGS" = "x" ; then
+              LIBGNURL_CPPFLAGS=`$_libgnurl_config --cflags`
+           fi
+           if test x"$LIBGNURL" = "x" ; then
+              LIBGNURL=`$_libgnurl_config --libs`
+
+              # This is so silly, but Apple actually has a bug in their
+              # gnurl-config script.  Fixed in Tiger, but there are still
+              # lots of Panther installs around.
+              case "${host}" in
+                 powerpc-apple-darwin7*)
+                    LIBGNURL=`echo $LIBGNURL | sed -e 's|-arch i386||g'`
+                 ;;
+              esac
+           fi
+
+           # All gnurl-config scripts support --feature
+           _libgnurl_features=`$_libgnurl_config --feature`
+
+           # Is it modern enough to have --protocols? (7.12.4)
+           if test $_libgnurl_version -ge 461828 ; then
+              _libgnurl_protocols=`$_libgnurl_config --protocols`
+           fi
+        else
+           _libgnurl_try_link=no
+        fi
+
+        unset _libgnurl_wanted
+     fi
+
+     if test $_libgnurl_try_link = yes ; then
+
+        # we didn't find gnurl-config, so let's see if the user-supplied
+        # link line (or failing that, "-lgnurl") is enough.
+        LIBGNURL=${LIBGNURL-"$_libgnurl_ldflags -lgnurl"}
+
+        AC_CACHE_CHECK([whether libgnurl is usable],
+           [libgnurl_cv_lib_gnurl_usable],
+           [
+           _libgnurl_save_cppflags=$CPPFLAGS
+           CPPFLAGS="$LIBGNURL_CPPFLAGS $CPPFLAGS"
+           _libgnurl_save_libs=$LIBS
+           LIBS="$LIBGNURL $LIBS"
+
+           AC_LINK_IFELSE([AC_LANG_PROGRAM([#include <curl/curl.h>],[
+/* Try and use a few common options to force a failure if we are
+   missing symbols or can't link. */
+int x;
+curl_easy_setopt(NULL,CURLOPT_URL,NULL);
+x=CURL_ERROR_SIZE;
+x=CURLOPT_WRITEFUNCTION;
+x=CURLOPT_FILE;
+x=CURLOPT_ERRORBUFFER;
+x=CURLOPT_STDERR;
+x=CURLOPT_VERBOSE;
+])],libgnurl_cv_lib_gnurl_usable=yes,libgnurl_cv_lib_gnurl_usable=no)
+
+           CPPFLAGS=$_libgnurl_save_cppflags
+           LIBS=$_libgnurl_save_libs
+           unset _libgnurl_save_cppflags
+           unset _libgnurl_save_libs
+           ])
+
+        if test $libgnurl_cv_lib_gnurl_usable = yes ; then
+
+           # Does curl_free() exist in this version of libgnurl?
+           # If not, fake it with free()
+
+           _libgnurl_save_cppflags=$CPPFLAGS
+           CPPFLAGS="$CPPFLAGS $LIBGNURL_CPPFLAGS"
+           _libgnurl_save_libs=$LIBS
+           LIBS="$LIBS $LIBGNURL"
+
+           AC_CHECK_FUNC(curl_free,,
+              AC_DEFINE(curl_free,free,
+                [Define curl_free() as free() if our version of gnurl lacks curl_free.]))
+
+           CPPFLAGS=$_libgnurl_save_cppflags
+           LIBS=$_libgnurl_save_libs
+           unset _libgnurl_save_cppflags
+           unset _libgnurl_save_libs
+
+           AC_DEFINE(HAVE_LIBGNURL,1,
+             [Define to 1 if you have a functional gnurl library.])
+           AC_SUBST(LIBGNURL_CPPFLAGS)
+           AC_SUBST(LIBGNURL)
+
+           for _libgnurl_feature in $_libgnurl_features ; do
+              AC_DEFINE_UNQUOTED(AS_TR_CPP(libgnurl_feature_$_libgnurl_feature),[1])
+              eval AS_TR_SH(libgnurl_feature_$_libgnurl_feature)=yes
+           done
+
+           if test "x$_libgnurl_protocols" = "x" ; then
+
+              # We don't have --protocols, so just assume that all
+              # protocols are available
+              _libgnurl_protocols="HTTP FTP FILE TELNET LDAP DICT TFTP"
+
+              if test x$libgnurl_feature_SSL = xyes ; then
+                 _libgnurl_protocols="$_libgnurl_protocols HTTPS"
+
+                 # FTPS wasn't standards-compliant until version
+                 # 7.11.0 (0x070b00 == 461568)
+                 if test $_libgnurl_version -ge 461568; then
+                    _libgnurl_protocols="$_libgnurl_protocols FTPS"
+                 fi
+              fi
+
+              # RTSP, IMAP, POP3 and SMTP were added in
+              # 7.20.0 (0x071400 == 463872)
+              if test $_libgnurl_version -ge 463872; then
+                 _libgnurl_protocols="$_libgnurl_protocols RTSP IMAP POP3 SMTP"
+              fi
+           fi
+
+           for _libgnurl_protocol in $_libgnurl_protocols ; do
+              AC_DEFINE_UNQUOTED(AS_TR_CPP(libgnurl_protocol_$_libgnurl_protocol),[1])
+              eval AS_TR_SH(libgnurl_protocol_$_libgnurl_protocol)=yes
+           done
+        else
+           unset LIBGNURL
+           unset LIBGNURL_CPPFLAGS
+        fi
+     fi
+
+     unset _libgnurl_try_link
+     unset _libgnurl_version_parse
+     unset _libgnurl_config
+     unset _libgnurl_feature
+     unset _libgnurl_features
+     unset _libgnurl_protocol
+     unset _libgnurl_protocols
+     unset _libgnurl_version
+     unset _libgnurl_ldflags
+  fi
+
+  if test x$_libgnurl_with = xno || test x$libgnurl_cv_lib_gnurl_usable != xyes ; then
+     # This is the IF-NO path
+     ifelse([$4],,:,[$4])
+  else
+     # This is the IF-YES path
+     ifelse([$3],,:,[$3])
+  fi
+
+  unset _libgnurl_with
+])dnl
diff --git a/m4/libtool.m4 b/m4/libtool.m4
new file mode 100644
index 0000000..c81e669
--- /dev/null
+++ b/m4/libtool.m4
@@ -0,0 +1,8388 @@
+# libtool.m4 - Configure libtool for the host system. -*-Autoconf-*-
+#
+#   Copyright (C) 1996-2001, 2003-2015 Free Software Foundation, Inc.
+#   Written by Gordon Matzigkeit, 1996
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+m4_define([_LT_COPYING], [dnl
+# Copyright (C) 2014 Free Software Foundation, Inc.
+# This is free software; see the source for copying conditions.  There is NO
+# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+
+# GNU Libtool is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# As a special exception to the GNU General Public License, if you
+# distribute this file as part of a program or library that is built
+# using GNU Libtool, you may include this file under the  same
+# distribution terms that you use for the rest of that program.
+#
+# GNU Libtool is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+])
+
+# serial 58 LT_INIT
+
+
+# LT_PREREQ(VERSION)
+# ------------------
+# Complain and exit if this libtool version is less than VERSION.
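+# Example (version number purely illustrative):
+#   LT_PREREQ([2.4])
+#   LT_INIT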
+m4_defun([LT_PREREQ],
+[m4_if(m4_version_compare(m4_defn([LT_PACKAGE_VERSION]), [$1]), -1,
+       [m4_default([$3],
+                  [m4_fatal([Libtool version $1 or higher is required],
+                            63)])],
+       [$2])])
+
+
+# _LT_CHECK_BUILDDIR
+# ------------------
+# Complain if the absolute build directory name contains unusual characters
+m4_defun([_LT_CHECK_BUILDDIR],
+[case `pwd` in
+  *\ * | *\    *)
+    AC_MSG_WARN([Libtool does not cope well with whitespace in `pwd`]) ;;
+esac
+])
+
+
+# LT_INIT([OPTIONS])
+# ------------------
+AC_DEFUN([LT_INIT],
+[AC_PREREQ([2.62])dnl We use AC_PATH_PROGS_FEATURE_CHECK
+AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl
+AC_BEFORE([$0], [LT_LANG])dnl
+AC_BEFORE([$0], [LT_OUTPUT])dnl
+AC_BEFORE([$0], [LTDL_INIT])dnl
+m4_require([_LT_CHECK_BUILDDIR])dnl
+
+dnl Autoconf doesn't catch unexpanded LT_ macros by default:
+m4_pattern_forbid([^_?LT_[A-Z_]+$])dnl
+m4_pattern_allow([^(_LT_EOF|LT_DLGLOBAL|LT_DLLAZY_OR_NOW|LT_MULTI_MODULE)$])dnl
+dnl aclocal doesn't pull ltoptions.m4, ltsugar.m4, or ltversion.m4
+dnl unless we require an AC_DEFUNed macro:
+AC_REQUIRE([LTOPTIONS_VERSION])dnl
+AC_REQUIRE([LTSUGAR_VERSION])dnl
+AC_REQUIRE([LTVERSION_VERSION])dnl
+AC_REQUIRE([LTOBSOLETE_VERSION])dnl
+m4_require([_LT_PROG_LTMAIN])dnl
+
+_LT_SHELL_INIT([SHELL=${CONFIG_SHELL-/bin/sh}])
+
+dnl Parse OPTIONS
+_LT_SET_OPTIONS([$0], [$1])
+
+# This can be used to rebuild libtool when needed
+LIBTOOL_DEPS=$ltmain
+
+# Always use our own libtool.
+LIBTOOL='$(SHELL) $(top_builddir)/libtool'
+AC_SUBST(LIBTOOL)dnl
+
+_LT_SETUP
+
+# Only expand once:
+m4_define([LT_INIT])
+])# LT_INIT
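+
+# For illustration, configure.ac normally invokes this as plain 'LT_INIT' or
+# with options such as 'LT_INIT([disable-static])'; the recognized options are
+# handled by _LT_SET_OPTIONS (see ltoptions.m4).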
+
+# Old names:
+AU_ALIAS([AC_PROG_LIBTOOL], [LT_INIT])
+AU_ALIAS([AM_PROG_LIBTOOL], [LT_INIT])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_PROG_LIBTOOL], [])
+dnl AC_DEFUN([AM_PROG_LIBTOOL], [])
+
+
+# _LT_PREPARE_CC_BASENAME
+# -----------------------
+m4_defun([_LT_PREPARE_CC_BASENAME], [
+# Calculate cc_basename.  Skip known compiler wrappers and cross-prefix.
+func_cc_basename ()
+{
+    for cc_temp in @S|@*""; do
+      case $cc_temp in
+        compile | *[[\\/]]compile | ccache | *[[\\/]]ccache ) ;;
+        distcc | *[[\\/]]distcc | purify | *[[\\/]]purify ) ;;
+        \-*) ;;
+        *) break;;
+      esac
+    done
+    func_cc_basename_result=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"`
+}
+])# _LT_PREPARE_CC_BASENAME
+
+
+# _LT_CC_BASENAME(CC)
+# -------------------
+# It would be clearer to call AC_REQUIREs from _LT_PREPARE_CC_BASENAME,
+# but that macro is also expanded into generated libtool script, which
+# arranges for $SED and $ECHO to be set by different means.
+m4_defun([_LT_CC_BASENAME],
+[m4_require([_LT_PREPARE_CC_BASENAME])dnl
+AC_REQUIRE([_LT_DECL_SED])dnl
+AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])dnl
+func_cc_basename $1
+cc_basename=$func_cc_basename_result
+])
+
+
+# _LT_FILEUTILS_DEFAULTS
+# ----------------------
+# It is okay to use these file commands and assume they have been set
+# sensibly after 'm4_require([_LT_FILEUTILS_DEFAULTS])'.
+m4_defun([_LT_FILEUTILS_DEFAULTS],
+[: ${CP="cp -f"}
+: ${MV="mv -f"}
+: ${RM="rm -f"}
+])# _LT_FILEUTILS_DEFAULTS
+
+
+# _LT_SETUP
+# ---------
+m4_defun([_LT_SETUP],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+AC_REQUIRE([_LT_PREPARE_SED_QUOTE_VARS])dnl
+AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])dnl
+
+_LT_DECL([], [PATH_SEPARATOR], [1], [The PATH separator for the build system])dnl
+dnl
+_LT_DECL([], [host_alias], [0], [The host system])dnl
+_LT_DECL([], [host], [0])dnl
+_LT_DECL([], [host_os], [0])dnl
+dnl
+_LT_DECL([], [build_alias], [0], [The build system])dnl
+_LT_DECL([], [build], [0])dnl
+_LT_DECL([], [build_os], [0])dnl
+dnl
+AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([LT_PATH_LD])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+dnl
+AC_REQUIRE([AC_PROG_LN_S])dnl
+test -z "$LN_S" && LN_S="ln -s"
+_LT_DECL([], [LN_S], [1], [Whether we need soft or hard links])dnl
+dnl
+AC_REQUIRE([LT_CMD_MAX_LEN])dnl
+_LT_DECL([objext], [ac_objext], [0], [Object file suffix (normally "o")])dnl
+_LT_DECL([], [exeext], [0], [Executable file suffix (normally "")])dnl
+dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_CHECK_SHELL_FEATURES])dnl
+m4_require([_LT_PATH_CONVERSION_FUNCTIONS])dnl
+m4_require([_LT_CMD_RELOAD])dnl
+m4_require([_LT_CHECK_MAGIC_METHOD])dnl
+m4_require([_LT_CHECK_SHAREDLIB_FROM_LINKLIB])dnl
+m4_require([_LT_CMD_OLD_ARCHIVE])dnl
+m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
+m4_require([_LT_WITH_SYSROOT])dnl
+m4_require([_LT_CMD_TRUNCATE])dnl
+
+_LT_CONFIG_LIBTOOL_INIT([
+# See if we are running on zsh, and set the options that allow our
+# commands through without removal of \ escapes INIT.
+if test -n "\${ZSH_VERSION+set}"; then
+   setopt NO_GLOB_SUBST
+fi
+])
+if test -n "${ZSH_VERSION+set}"; then
+   setopt NO_GLOB_SUBST
+fi
+
+_LT_CHECK_OBJDIR
+
+m4_require([_LT_TAG_COMPILER])dnl
+
+case $host_os in
+aix3*)
+  # AIX sometimes has problems with the GCC collect2 program.  For some
+  # reason, if we set the COLLECT_NAMES environment variable, the problems
+  # vanish in a puff of smoke.
+  if test set != "${COLLECT_NAMES+set}"; then
+    COLLECT_NAMES=
+    export COLLECT_NAMES
+  fi
+  ;;
+esac
+
+# Global variables:
+ofile=libtool
+can_build_shared=yes
+
+# All known linkers require a '.a' archive for static linking (except MSVC,
+# which needs '.lib').
+libext=a
+
+with_gnu_ld=$lt_cv_prog_gnu_ld
+
+old_CC=$CC
+old_CFLAGS=$CFLAGS
+
+# Set sane defaults for various variables
+test -z "$CC" && CC=cc
+test -z "$LTCC" && LTCC=$CC
+test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS
+test -z "$LD" && LD=ld
+test -z "$ac_objext" && ac_objext=o
+
+_LT_CC_BASENAME([$compiler])
+
+# Only perform the check for file, if the check method requires it
+test -z "$MAGIC_CMD" && MAGIC_CMD=file
+case $deplibs_check_method in
+file_magic*)
+  if test "$file_magic_cmd" = '$MAGIC_CMD'; then
+    _LT_PATH_MAGIC
+  fi
+  ;;
+esac
+
+# Use C for the default configuration in the libtool script
+LT_SUPPORTED_TAG([CC])
+_LT_LANG_C_CONFIG
+_LT_LANG_DEFAULT_CONFIG
+_LT_CONFIG_COMMANDS
+])# _LT_SETUP
+
+
+# _LT_PREPARE_SED_QUOTE_VARS
+# --------------------------
+# Define a few sed substitution that help us do robust quoting.
+m4_defun([_LT_PREPARE_SED_QUOTE_VARS],
+[# Backslashify metacharacters that are still active within
+# double-quoted strings.
+sed_quote_subst='s/\([["`$\\]]\)/\\\1/g'
+
+# Same as above, but do not quote variable references.
+double_quote_subst='s/\([["`\\]]\)/\\\1/g'
+
+# Sed substitution to delay expansion of an escaped shell variable in a
+# double_quote_subst'ed string.
+delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g'
+
+# Sed substitution to delay expansion of an escaped single quote.
+delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g'
+
+# Sed substitution to avoid accidental globbing in evaled expressions
+no_glob_subst='s/\*/\\\*/g'
+])
+
+# _LT_PROG_LTMAIN
+# ---------------
+# Note that this code is called both from 'configure', and 'config.status'
+# now that we use AC_CONFIG_COMMANDS to generate libtool.  Notably,
+# 'config.status' has no value for ac_aux_dir unless we are using Automake,
+# so we pass a copy along to make sure it has a sensible value anyway.
+m4_defun([_LT_PROG_LTMAIN],
+[m4_ifdef([AC_REQUIRE_AUX_FILE], [AC_REQUIRE_AUX_FILE([ltmain.sh])])dnl
+_LT_CONFIG_LIBTOOL_INIT([ac_aux_dir='$ac_aux_dir'])
+ltmain=$ac_aux_dir/ltmain.sh
+])# _LT_PROG_LTMAIN
+
+
+## ------------------------------------- ##
+## Accumulate code for creating libtool. ##
+## ------------------------------------- ##
+
+# So that we can recreate a full libtool script including additional
+# tags, we accumulate the chunks of code to send to AC_CONFIG_COMMANDS
+# in macros and then make a single call at the end using the 'libtool'
+# label.
+
+
+# _LT_CONFIG_LIBTOOL_INIT([INIT-COMMANDS])
+# ----------------------------------------
+# Register INIT-COMMANDS to be passed to AC_CONFIG_COMMANDS later.
+m4_define([_LT_CONFIG_LIBTOOL_INIT],
+[m4_ifval([$1],
+          [m4_append([_LT_OUTPUT_LIBTOOL_INIT],
+                     [$1
+])])])
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_INIT])
+
+
+# _LT_CONFIG_LIBTOOL([COMMANDS])
+# ------------------------------
+# Register COMMANDS to be passed to AC_CONFIG_COMMANDS later.
+m4_define([_LT_CONFIG_LIBTOOL],
+[m4_ifval([$1],
+          [m4_append([_LT_OUTPUT_LIBTOOL_COMMANDS],
+                     [$1
+])])])
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS])
+
+
+# _LT_CONFIG_SAVE_COMMANDS([COMMANDS], [INIT_COMMANDS])
+# -----------------------------------------------------
+m4_defun([_LT_CONFIG_SAVE_COMMANDS],
+[_LT_CONFIG_LIBTOOL([$1])
+_LT_CONFIG_LIBTOOL_INIT([$2])
+])
+
+
+# _LT_FORMAT_COMMENT([COMMENT])
+# -----------------------------
+# Add leading comment marks to the start of each line, and a trailing
+# full-stop to the whole comment if one is not present already.
+m4_define([_LT_FORMAT_COMMENT],
+[m4_ifval([$1], [
+m4_bpatsubst([m4_bpatsubst([$1], [^ *], [# ])],
+              [['`$\]], [\\\&])]m4_bmatch([$1], [[!?.]$], [], [.])
+)])
+
+
+
+## ------------------------ ##
+## FIXME: Eliminate VARNAME ##
+## ------------------------ ##
+
+
+# _LT_DECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION], [IS-TAGGED?])
+# -------------------------------------------------------------------
+# CONFIGNAME is the name given to the value in the libtool script.
+# VARNAME is the (base) name used in the configure script.
+# VALUE may be 0, 1 or 2 for a computed quote escaped value based on
+# VARNAME.  Any other value will be used directly.
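+# For example, the call '_LT_DECL([], [LN_S], [1], [Whether we need soft or
+# hard links])' later in this file records the configure-time value of $LN_S
+# (quote-escaped, since VALUE is 1) for the generated libtool script.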
+m4_define([_LT_DECL],
+[lt_if_append_uniq([lt_decl_varnames], [$2], [, ],
+    [lt_dict_add_subkey([lt_decl_dict], [$2], [libtool_name],
+       [m4_ifval([$1], [$1], [$2])])
+    lt_dict_add_subkey([lt_decl_dict], [$2], [value], [$3])
+    m4_ifval([$4],
+       [lt_dict_add_subkey([lt_decl_dict], [$2], [description], [$4])])
+    lt_dict_add_subkey([lt_decl_dict], [$2],
+       [tagged?], [m4_ifval([$5], [yes], [no])])])
+])
+
+
+# _LT_TAGDECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION])
+# --------------------------------------------------------
+m4_define([_LT_TAGDECL], [_LT_DECL([$1], [$2], [$3], [$4], [yes])])
+
+
+# lt_decl_tag_varnames([SEPARATOR], [VARNAME1...])
+# ------------------------------------------------
+m4_define([lt_decl_tag_varnames],
+[_lt_decl_filter([tagged?], [yes], $@)])
+
+
+# _lt_decl_filter(SUBKEY, VALUE, [SEPARATOR], [VARNAME1..])
+# ---------------------------------------------------------
+m4_define([_lt_decl_filter],
+[m4_case([$#],
+  [0], [m4_fatal([$0: too few arguments: $#])],
+  [1], [m4_fatal([$0: too few arguments: $#: $1])],
+  [2], [lt_dict_filter([lt_decl_dict], [$1], [$2], [], lt_decl_varnames)],
+  [3], [lt_dict_filter([lt_decl_dict], [$1], [$2], [$3], lt_decl_varnames)],
+  [lt_dict_filter([lt_decl_dict], $@)])[]dnl
+])
+
+
+# lt_decl_quote_varnames([SEPARATOR], [VARNAME1...])
+# --------------------------------------------------
+m4_define([lt_decl_quote_varnames],
+[_lt_decl_filter([value], [1], $@)])
+
+
+# lt_decl_dquote_varnames([SEPARATOR], [VARNAME1...])
+# ---------------------------------------------------
+m4_define([lt_decl_dquote_varnames],
+[_lt_decl_filter([value], [2], $@)])
+
+
+# lt_decl_varnames_tagged([SEPARATOR], [VARNAME1...])
+# ---------------------------------------------------
+m4_define([lt_decl_varnames_tagged],
+[m4_assert([$# <= 2])dnl
+_$0(m4_quote(m4_default([$1], [[, ]])),
+    m4_ifval([$2], [[$2]], [m4_dquote(lt_decl_tag_varnames)]),
+    m4_split(m4_normalize(m4_quote(_LT_TAGS)), [ ]))])
+m4_define([_lt_decl_varnames_tagged],
+[m4_ifval([$3], [lt_combine([$1], [$2], [_], $3)])])
+
+
+# lt_decl_all_varnames([SEPARATOR], [VARNAME1...])
+# ------------------------------------------------
+m4_define([lt_decl_all_varnames],
+[_$0(m4_quote(m4_default([$1], [[, ]])),
+     m4_if([$2], [],
+          m4_quote(lt_decl_varnames),
+       m4_quote(m4_shift($@))))[]dnl
+])
+m4_define([_lt_decl_all_varnames],
+[lt_join($@, lt_decl_varnames_tagged([$1],
+                       lt_decl_tag_varnames([[, ]], m4_shift($@))))dnl
+])
+
+
+# _LT_CONFIG_STATUS_DECLARE([VARNAME])
+# ------------------------------------
+# Quote a variable value, and forward it to 'config.status' so that its
+# declaration there will have the same value as in 'configure'.  VARNAME
+# must have a single quote delimited value for this to work.
+m4_define([_LT_CONFIG_STATUS_DECLARE],
+[$1='`$ECHO "$][$1" | $SED "$delay_single_quote_subst"`'])
+
+
+# _LT_CONFIG_STATUS_DECLARATIONS
+# ------------------------------
+# We delimit libtool config variables with single quotes, so when
+# we write them to config.status, we have to be sure to quote all
+# embedded single quotes properly.  In configure, this macro expands
+# each variable declared with _LT_DECL (and _LT_TAGDECL) into:
+#
+#    <var>='`$ECHO "$<var>" | $SED "$delay_single_quote_subst"`'
+m4_defun([_LT_CONFIG_STATUS_DECLARATIONS],
+[m4_foreach([_lt_var], m4_quote(lt_decl_all_varnames),
+    [m4_n([_LT_CONFIG_STATUS_DECLARE(_lt_var)])])])
+
+
+# _LT_LIBTOOL_TAGS
+# ----------------
+# Output comment and list of tags supported by the script
+m4_defun([_LT_LIBTOOL_TAGS],
+[_LT_FORMAT_COMMENT([The names of the tagged configurations supported by this script])dnl
+available_tags='_LT_TAGS'dnl
+])
+
+
+# _LT_LIBTOOL_DECLARE(VARNAME, [TAG])
+# -----------------------------------
+# Extract the dictionary values for VARNAME (optionally with TAG) and
+# expand to a commented shell variable setting:
+#
+#    # Some comment about what VAR is for.
+#    visible_name=$lt_internal_name
+m4_define([_LT_LIBTOOL_DECLARE],
+[_LT_FORMAT_COMMENT(m4_quote(lt_dict_fetch([lt_decl_dict], [$1],
+                                          [description])))[]dnl
+m4_pushdef([_libtool_name],
+    m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [libtool_name])))[]dnl
+m4_case(m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [value])),
+    [0], [_libtool_name=[$]$1],
+    [1], [_libtool_name=$lt_[]$1],
+    [2], [_libtool_name=$lt_[]$1],
+    [_libtool_name=lt_dict_fetch([lt_decl_dict], [$1], [value])])[]dnl
+m4_ifval([$2], [_$2])[]m4_popdef([_libtool_name])[]dnl
+])
+
+
+# _LT_LIBTOOL_CONFIG_VARS
+# -----------------------
+# Produce commented declarations of non-tagged libtool config variables
+# suitable for insertion in the LIBTOOL CONFIG section of the 'libtool'
+# script.  Tagged libtool config variables (even for the LIBTOOL CONFIG
+# section) are produced by _LT_LIBTOOL_TAG_VARS.
+m4_defun([_LT_LIBTOOL_CONFIG_VARS],
+[m4_foreach([_lt_var],
+    m4_quote(_lt_decl_filter([tagged?], [no], [], lt_decl_varnames)),
+    [m4_n([_LT_LIBTOOL_DECLARE(_lt_var)])])])
+
+
+# _LT_LIBTOOL_TAG_VARS(TAG)
+# -------------------------
+m4_define([_LT_LIBTOOL_TAG_VARS],
+[m4_foreach([_lt_var], m4_quote(lt_decl_tag_varnames),
+    [m4_n([_LT_LIBTOOL_DECLARE(_lt_var, [$1])])])])
+
+
+# _LT_TAGVAR(VARNAME, [TAGNAME])
+# ------------------------------
+m4_define([_LT_TAGVAR], [m4_ifval([$2], [$1_$2], [$1])])
+
+
+# _LT_CONFIG_COMMANDS
+# -------------------
+# Send accumulated output to $CONFIG_STATUS.  Thanks to the lists of
+# variables for single and double quote escaping we saved from calls
+# to _LT_DECL, we can put quote escaped variables declarations
+# into 'config.status', and then the shell code to quote escape them in
+# for loops in 'config.status'.  Finally, any additional code accumulated
+# from calls to _LT_CONFIG_LIBTOOL_INIT is expanded.
+m4_defun([_LT_CONFIG_COMMANDS],
+[AC_PROVIDE_IFELSE([LT_OUTPUT],
+       dnl If the libtool generation code has been placed in $CONFIG_LT,
+       dnl instead of duplicating it all over again into config.status,
+       dnl then we will have config.status run $CONFIG_LT later, so it
+       dnl needs to know what name is stored there:
+        [AC_CONFIG_COMMANDS([libtool],
+            [$SHELL $CONFIG_LT || AS_EXIT(1)], [CONFIG_LT='$CONFIG_LT'])],
+    dnl If the libtool generation code is destined for config.status,
+    dnl expand the accumulated commands and init code now:
+    [AC_CONFIG_COMMANDS([libtool],
+        [_LT_OUTPUT_LIBTOOL_COMMANDS], [_LT_OUTPUT_LIBTOOL_COMMANDS_INIT])])
+])#_LT_CONFIG_COMMANDS
+
+
+# Initialize.
+m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS_INIT],
+[
+
+# The HP-UX ksh and POSIX shell print the target directory to stdout
+# if CDPATH is set.
+(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
+
+sed_quote_subst='$sed_quote_subst'
+double_quote_subst='$double_quote_subst'
+delay_variable_subst='$delay_variable_subst'
+_LT_CONFIG_STATUS_DECLARATIONS
+LTCC='$LTCC'
+LTCFLAGS='$LTCFLAGS'
+compiler='$compiler_DEFAULT'
+
+# A function that is used when there is no print builtin or printf.
+func_fallback_echo ()
+{
+  eval 'cat <<_LTECHO_EOF
+\$[]1
+_LTECHO_EOF'
+}
+
+# Quote evaled strings.
+for var in lt_decl_all_varnames([[ \
+]], lt_decl_quote_varnames); do
+    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+    *[[\\\\\\\`\\"\\\$]]*)
+      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED 
\\"\\\$sed_quote_subst\\"\\\`\\\\\\"" ## exclude from sc_prohibit_nested_quotes
+      ;;
+    *)
+      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+      ;;
+    esac
+done
+
+# Double-quote double-evaled strings.
+for var in lt_decl_all_varnames([[ \
+]], lt_decl_dquote_varnames); do
+    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
+    *[[\\\\\\\`\\"\\\$]]*)
+      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e 
\\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e 
\\"\\\$delay_variable_subst\\"\\\`\\\\\\"" ## exclude from 
sc_prohibit_nested_quotes
+      ;;
+    *)
+      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
+      ;;
+    esac
+done
+
+_LT_OUTPUT_LIBTOOL_INIT
+])
+
+# _LT_GENERATED_FILE_INIT(FILE, [COMMENT])
+# ------------------------------------
+# Generate a child script FILE with all initialization necessary to
+# reuse the environment learned by the parent script, and make the
+# file executable.  If COMMENT is supplied, it is inserted after the
+# '#!' sequence but before initialization text begins.  After this
+# macro, additional text can be appended to FILE to form the body of
+# the child script.  The macro ends with non-zero status if the
+# file could not be fully written (such as if the disk is full).
+m4_ifdef([AS_INIT_GENERATED],
+[m4_defun([_LT_GENERATED_FILE_INIT],[AS_INIT_GENERATED($@)])],
+[m4_defun([_LT_GENERATED_FILE_INIT],
+[m4_require([AS_PREPARE])]dnl
+[m4_pushdef([AS_MESSAGE_LOG_FD])]dnl
+[lt_write_fail=0
+cat >$1 <<_ASEOF || lt_write_fail=1
+#! $SHELL
+# Generated by $as_me.
+$2
+SHELL=\${CONFIG_SHELL-$SHELL}
+export SHELL
+_ASEOF
+cat >>$1 <<\_ASEOF || lt_write_fail=1
+AS_SHELL_SANITIZE
+_AS_PREPARE
+exec AS_MESSAGE_FD>&1
+_ASEOF
+test 0 = "$lt_write_fail" && chmod +x $1[]dnl
+m4_popdef([AS_MESSAGE_LOG_FD])])])# _LT_GENERATED_FILE_INIT
+
+# LT_OUTPUT
+# ---------
+# This macro allows early generation of the libtool script (before
+# AC_OUTPUT is called), in case it is used in configure for compilation
+# tests.
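+#
+# Hypothetical configure.ac usage:
+#   LT_INIT
+#   LT_OUTPUT
+#   # later checks can now invoke the freshly generated ./libtool stub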
+AC_DEFUN([LT_OUTPUT],
+[: ${CONFIG_LT=./config.lt}
+AC_MSG_NOTICE([creating $CONFIG_LT])
+_LT_GENERATED_FILE_INIT(["$CONFIG_LT"],
+[# Run this file to recreate a libtool stub with the current configuration.])
+
+cat >>"$CONFIG_LT" <<\_LTEOF
+lt_cl_silent=false
+exec AS_MESSAGE_LOG_FD>>config.log
+{
+  echo
+  AS_BOX([Running $as_me.])
+} >&AS_MESSAGE_LOG_FD
+
+lt_cl_help="\
+'$as_me' creates a local libtool stub from the current configuration,
+for use in further configure time tests before the real libtool is
+generated.
+
+Usage: $[0] [[OPTIONS]]
+
+  -h, --help      print this help, then exit
+  -V, --version   print version number, then exit
+  -q, --quiet     do not print progress messages
+  -d, --debug     don't remove temporary files
+
+Report bugs to <address@hidden>."
+
+lt_cl_version="\
+m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])config.lt[]dnl
+m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION])
+configured by $[0], generated by m4_PACKAGE_STRING.
+
+Copyright (C) 2011 Free Software Foundation, Inc.
+This config.lt script is free software; the Free Software Foundation
+gives unlimited permission to copy, distribute and modify it."
+
+while test 0 != $[#]
+do
+  case $[1] in
+    --version | --v* | -V )
+      echo "$lt_cl_version"; exit 0 ;;
+    --help | --h* | -h )
+      echo "$lt_cl_help"; exit 0 ;;
+    --debug | --d* | -d )
+      debug=: ;;
+    --quiet | --q* | --silent | --s* | -q )
+      lt_cl_silent=: ;;
+
+    -*) AC_MSG_ERROR([unrecognized option: $[1]
+Try '$[0] --help' for more information.]) ;;
+
+    *) AC_MSG_ERROR([unrecognized argument: $[1]
+Try '$[0] --help' for more information.]) ;;
+  esac
+  shift
+done
+
+if $lt_cl_silent; then
+  exec AS_MESSAGE_FD>/dev/null
+fi
+_LTEOF
+
+cat >>"$CONFIG_LT" <<_LTEOF
+_LT_OUTPUT_LIBTOOL_COMMANDS_INIT
+_LTEOF
+
+cat >>"$CONFIG_LT" <<\_LTEOF
+AC_MSG_NOTICE([creating $ofile])
+_LT_OUTPUT_LIBTOOL_COMMANDS
+AS_EXIT(0)
+_LTEOF
+chmod +x "$CONFIG_LT"
+
+# configure is writing to config.log, but config.lt does its own redirection,
+# appending to config.log, which fails on DOS, as config.log is still kept
+# open by configure.  Here we exec the FD to /dev/null, effectively closing
+# config.log, so it can be properly (re)opened and appended to by config.lt.
+lt_cl_success=:
+test yes = "$silent" &&
+  lt_config_lt_args="$lt_config_lt_args --quiet"
+exec AS_MESSAGE_LOG_FD>/dev/null
+$SHELL "$CONFIG_LT" $lt_config_lt_args || lt_cl_success=false
+exec AS_MESSAGE_LOG_FD>>config.log
+$lt_cl_success || AS_EXIT(1)
+])# LT_OUTPUT
+
+
+# _LT_CONFIG(TAG)
+# ---------------
+# If TAG is the built-in tag, create an initial libtool script with a
+# default configuration from the untagged config vars.  Otherwise add code
+# to config.status for appending the configuration named by TAG from the
+# matching tagged config vars.
+m4_defun([_LT_CONFIG],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+_LT_CONFIG_SAVE_COMMANDS([
+  m4_define([_LT_TAG], m4_if([$1], [], [C], [$1]))dnl
+  m4_if(_LT_TAG, [C], [
+    # See if we are running on zsh, and set the options that allow our
+    # commands through without removal of \ escapes.
+    if test -n "${ZSH_VERSION+set}"; then
+      setopt NO_GLOB_SUBST
+    fi
+
+    cfgfile=${ofile}T
+    trap "$RM \"$cfgfile\"; exit 1" 1 2 15
+    $RM "$cfgfile"
+
+    cat <<_LT_EOF >> "$cfgfile"
+#! $SHELL
+# Generated automatically by $as_me ($PACKAGE) $VERSION
+# NOTE: Changes made to this file will be lost: look at ltmain.sh.
+
+# Provide generalized library-building support services.
+# Written by Gordon Matzigkeit, 1996
+
+_LT_COPYING
+_LT_LIBTOOL_TAGS
+
+# Configured defaults for sys_lib_dlsearch_path munging.
+: \${LT_SYS_LIBRARY_PATH="$configure_time_lt_sys_library_path"}
+
+# ### BEGIN LIBTOOL CONFIG
+_LT_LIBTOOL_CONFIG_VARS
+_LT_LIBTOOL_TAG_VARS
+# ### END LIBTOOL CONFIG
+
+_LT_EOF
+
+    cat <<'_LT_EOF' >> "$cfgfile"
+
+# ### BEGIN FUNCTIONS SHARED WITH CONFIGURE
+
+_LT_PREPARE_MUNGE_PATH_LIST
+_LT_PREPARE_CC_BASENAME
+
+# ### END FUNCTIONS SHARED WITH CONFIGURE
+
+_LT_EOF
+
+  case $host_os in
+  aix3*)
+    cat <<\_LT_EOF >> "$cfgfile"
+# AIX sometimes has problems with the GCC collect2 program.  For some
+# reason, if we set the COLLECT_NAMES environment variable, the problems
+# vanish in a puff of smoke.
+if test set != "${COLLECT_NAMES+set}"; then
+  COLLECT_NAMES=
+  export COLLECT_NAMES
+fi
+_LT_EOF
+    ;;
+  esac
+
+  _LT_PROG_LTMAIN
+
+  # We use sed instead of cat because bash on DJGPP gets confused if
+  # it finds mixed CR/LF and LF-only lines.  Since sed operates in
+  # text mode, it properly converts lines to CR/LF.  This bash problem
+  # is reportedly fixed, but why not run on old versions too?
+  sed '$q' "$ltmain" >> "$cfgfile" \
+     || (rm -f "$cfgfile"; exit 1)
+
+   mv -f "$cfgfile" "$ofile" ||
+    (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
+  chmod +x "$ofile"
+],
+[cat <<_LT_EOF >> "$ofile"
+
+dnl Unfortunately we have to use $1 here, since _LT_TAG is not expanded
+dnl in a comment (ie after a #).
+# ### BEGIN LIBTOOL TAG CONFIG: $1
+_LT_LIBTOOL_TAG_VARS(_LT_TAG)
+# ### END LIBTOOL TAG CONFIG: $1
+_LT_EOF
+])dnl /m4_if
+],
+[m4_if([$1], [], [
+    PACKAGE='$PACKAGE'
+    VERSION='$VERSION'
+    RM='$RM'
+    ofile='$ofile'], [])
+])dnl /_LT_CONFIG_SAVE_COMMANDS
+])# _LT_CONFIG
+
+
+# LT_SUPPORTED_TAG(TAG)
+# ---------------------
+# Trace this macro to discover what tags are supported by the libtool
+# --tag option, using:
+#    autoconf --trace 'LT_SUPPORTED_TAG:$1'
+AC_DEFUN([LT_SUPPORTED_TAG], [])
+
+
+# C support is built-in for now
+m4_define([_LT_LANG_C_enabled], [])
+m4_define([_LT_TAGS], [])
+
+
+# LT_LANG(LANG)
+# -------------
+# Enable libtool support for the given language if not already enabled.
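+# For example, 'LT_LANG([C++])' or 'LT_LANG([Windows Resource])' may be called
+# from configure.ac after LT_INIT; the accepted names are those listed in the
+# m4_case below.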
+AC_DEFUN([LT_LANG],
+[AC_BEFORE([$0], [LT_OUTPUT])dnl
+m4_case([$1],
+  [C],                 [_LT_LANG(C)],
+  [C++],               [_LT_LANG(CXX)],
+  [Go],                        [_LT_LANG(GO)],
+  [Java],              [_LT_LANG(GCJ)],
+  [Fortran 77],                [_LT_LANG(F77)],
+  [Fortran],           [_LT_LANG(FC)],
+  [Windows Resource],  [_LT_LANG(RC)],
+  [m4_ifdef([_LT_LANG_]$1[_CONFIG],
+    [_LT_LANG($1)],
+    [m4_fatal([$0: unsupported language: "$1"])])])dnl
+])# LT_LANG
+
+
+# _LT_LANG(LANGNAME)
+# ------------------
+m4_defun([_LT_LANG],
+[m4_ifdef([_LT_LANG_]$1[_enabled], [],
+  [LT_SUPPORTED_TAG([$1])dnl
+  m4_append([_LT_TAGS], [$1 ])dnl
+  m4_define([_LT_LANG_]$1[_enabled], [])dnl
+  _LT_LANG_$1_CONFIG($1)])dnl
+])# _LT_LANG
+
+
+m4_ifndef([AC_PROG_GO], [
+############################################################
+# NOTE: This macro has been submitted for inclusion into   #
+#  GNU Autoconf as AC_PROG_GO.  When it is available in    #
+#  a released version of Autoconf we should remove this    #
+#  macro and use it instead.                               #
+############################################################
+m4_defun([AC_PROG_GO],
+[AC_LANG_PUSH(Go)dnl
+AC_ARG_VAR([GOC],     [Go compiler command])dnl
+AC_ARG_VAR([GOFLAGS], [Go compiler flags])dnl
+_AC_ARG_VAR_LDFLAGS()dnl
+AC_CHECK_TOOL(GOC, gccgo)
+if test -z "$GOC"; then
+  if test -n "$ac_tool_prefix"; then
+    AC_CHECK_PROG(GOC, [${ac_tool_prefix}gccgo], [${ac_tool_prefix}gccgo])
+  fi
+fi
+if test -z "$GOC"; then
+  AC_CHECK_PROG(GOC, gccgo, gccgo, false)
+fi
+])#m4_defun
+])#m4_ifndef
+
+
+# _LT_LANG_DEFAULT_CONFIG
+# -----------------------
+m4_defun([_LT_LANG_DEFAULT_CONFIG],
+[AC_PROVIDE_IFELSE([AC_PROG_CXX],
+  [LT_LANG(CXX)],
+  [m4_define([AC_PROG_CXX], defn([AC_PROG_CXX])[LT_LANG(CXX)])])
+
+AC_PROVIDE_IFELSE([AC_PROG_F77],
+  [LT_LANG(F77)],
+  [m4_define([AC_PROG_F77], defn([AC_PROG_F77])[LT_LANG(F77)])])
+
+AC_PROVIDE_IFELSE([AC_PROG_FC],
+  [LT_LANG(FC)],
+  [m4_define([AC_PROG_FC], defn([AC_PROG_FC])[LT_LANG(FC)])])
+
+dnl The call to [A][M_PROG_GCJ] is quoted like that to stop aclocal
+dnl pulling things in needlessly.
+AC_PROVIDE_IFELSE([AC_PROG_GCJ],
+  [LT_LANG(GCJ)],
+  [AC_PROVIDE_IFELSE([A][M_PROG_GCJ],
+    [LT_LANG(GCJ)],
+    [AC_PROVIDE_IFELSE([LT_PROG_GCJ],
+      [LT_LANG(GCJ)],
+      [m4_ifdef([AC_PROG_GCJ],
+       [m4_define([AC_PROG_GCJ], defn([AC_PROG_GCJ])[LT_LANG(GCJ)])])
+       m4_ifdef([A][M_PROG_GCJ],
+       [m4_define([A][M_PROG_GCJ], defn([A][M_PROG_GCJ])[LT_LANG(GCJ)])])
+       m4_ifdef([LT_PROG_GCJ],
+       [m4_define([LT_PROG_GCJ], defn([LT_PROG_GCJ])[LT_LANG(GCJ)])])])])])
+
+AC_PROVIDE_IFELSE([AC_PROG_GO],
+  [LT_LANG(GO)],
+  [m4_define([AC_PROG_GO], defn([AC_PROG_GO])[LT_LANG(GO)])])
+
+AC_PROVIDE_IFELSE([LT_PROG_RC],
+  [LT_LANG(RC)],
+  [m4_define([LT_PROG_RC], defn([LT_PROG_RC])[LT_LANG(RC)])])
+])# _LT_LANG_DEFAULT_CONFIG
+
+# Obsolete macros:
+AU_DEFUN([AC_LIBTOOL_CXX], [LT_LANG(C++)])
+AU_DEFUN([AC_LIBTOOL_F77], [LT_LANG(Fortran 77)])
+AU_DEFUN([AC_LIBTOOL_FC], [LT_LANG(Fortran)])
+AU_DEFUN([AC_LIBTOOL_GCJ], [LT_LANG(Java)])
+AU_DEFUN([AC_LIBTOOL_RC], [LT_LANG(Windows Resource)])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_CXX], [])
+dnl AC_DEFUN([AC_LIBTOOL_F77], [])
+dnl AC_DEFUN([AC_LIBTOOL_FC], [])
+dnl AC_DEFUN([AC_LIBTOOL_GCJ], [])
+dnl AC_DEFUN([AC_LIBTOOL_RC], [])
+
+
+# _LT_TAG_COMPILER
+# ----------------
+m4_defun([_LT_TAG_COMPILER],
+[AC_REQUIRE([AC_PROG_CC])dnl
+
+_LT_DECL([LTCC], [CC], [1], [A C compiler])dnl
+_LT_DECL([LTCFLAGS], [CFLAGS], [1], [LTCC compiler flags])dnl
+_LT_TAGDECL([CC], [compiler], [1], [A language specific compiler])dnl
+_LT_TAGDECL([with_gcc], [GCC], [0], [Is the compiler the GNU compiler?])dnl
+
+# If no C compiler was specified, use CC.
+LTCC=${LTCC-"$CC"}
+
+# If no C compiler flags were specified, use CFLAGS.
+LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
+
+# Allow CC to be a program name with arguments.
+compiler=$CC
+])# _LT_TAG_COMPILER
+
+
+# _LT_COMPILER_BOILERPLATE
+# ------------------------
+# Check for compiler boilerplate output or warnings with
+# the simple compiler test code.
+m4_defun([_LT_COMPILER_BOILERPLATE],
+[m4_require([_LT_DECL_SED])dnl
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_compile_test_code" >conftest.$ac_ext
+eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_compiler_boilerplate=`cat conftest.err`
+$RM conftest*
+])# _LT_COMPILER_BOILERPLATE
+
+
+# _LT_LINKER_BOILERPLATE
+# ----------------------
+# Check for linker boilerplate output or warnings with
+# the simple link test code.
+m4_defun([_LT_LINKER_BOILERPLATE],
+[m4_require([_LT_DECL_SED])dnl
+ac_outfile=conftest.$ac_objext
+echo "$lt_simple_link_test_code" >conftest.$ac_ext
+eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
+_lt_linker_boilerplate=`cat conftest.err`
+$RM -r conftest*
+])# _LT_LINKER_BOILERPLATE
+
+# _LT_REQUIRED_DARWIN_CHECKS
+# -------------------------
+m4_defun_once([_LT_REQUIRED_DARWIN_CHECKS],[
+  case $host_os in
+    rhapsody* | darwin*)
+    AC_CHECK_TOOL([DSYMUTIL], [dsymutil], [:])
+    AC_CHECK_TOOL([NMEDIT], [nmedit], [:])
+    AC_CHECK_TOOL([LIPO], [lipo], [:])
+    AC_CHECK_TOOL([OTOOL], [otool], [:])
+    AC_CHECK_TOOL([OTOOL64], [otool64], [:])
+    _LT_DECL([], [DSYMUTIL], [1],
+      [Tool to manipulate archived DWARF debug symbol files on Mac OS X])
+    _LT_DECL([], [NMEDIT], [1],
+      [Tool to change global to local symbols on Mac OS X])
+    _LT_DECL([], [LIPO], [1],
+      [Tool to manipulate fat objects and archives on Mac OS X])
+    _LT_DECL([], [OTOOL], [1],
+      [ldd/readelf like tool for Mach-O binaries on Mac OS X])
+    _LT_DECL([], [OTOOL64], [1],
+      [ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4])
+
+    AC_CACHE_CHECK([for -single_module linker flag],[lt_cv_apple_cc_single_mod],
+      [lt_cv_apple_cc_single_mod=no
+      if test -z "$LT_MULTI_MODULE"; then
+       # By default we will add the -single_module flag. You can override
+       # by either setting the environment variable LT_MULTI_MODULE
+       # non-empty at configure time, or by adding -multi_module to the
+       # link flags.
+       rm -rf libconftest.dylib*
+       echo "int foo(void){return 1;}" > conftest.c
+       echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+-dynamiclib -Wl,-single_module conftest.c" >&AS_MESSAGE_LOG_FD
+       $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
+         -dynamiclib -Wl,-single_module conftest.c 2>conftest.err
+        _lt_result=$?
+       # If there is a non-empty error log, and "single_module"
+       # appears in it, assume the flag caused a linker warning
+        if test -s conftest.err && $GREP single_module conftest.err; then
+         cat conftest.err >&AS_MESSAGE_LOG_FD
+       # Otherwise, if the output was created with a 0 exit code from
+       # the compiler, it worked.
+       elif test -f libconftest.dylib && test 0 = "$_lt_result"; then
+         lt_cv_apple_cc_single_mod=yes
+       else
+         cat conftest.err >&AS_MESSAGE_LOG_FD
+       fi
+       rm -rf libconftest.dylib*
+       rm -f conftest.*
+      fi])
+
+    AC_CACHE_CHECK([for -exported_symbols_list linker flag],
+      [lt_cv_ld_exported_symbols_list],
+      [lt_cv_ld_exported_symbols_list=no
+      save_LDFLAGS=$LDFLAGS
+      echo "_main" > conftest.sym
+      LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym"
+      AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])],
+       [lt_cv_ld_exported_symbols_list=yes],
+       [lt_cv_ld_exported_symbols_list=no])
+       LDFLAGS=$save_LDFLAGS
+    ])
+
+    AC_CACHE_CHECK([for -force_load linker flag],[lt_cv_ld_force_load],
+      [lt_cv_ld_force_load=no
+      cat > conftest.c << _LT_EOF
+int forced_loaded() { return 2;}
+_LT_EOF
+      echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&AS_MESSAGE_LOG_FD
+      $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&AS_MESSAGE_LOG_FD
+      echo "$AR cru libconftest.a conftest.o" >&AS_MESSAGE_LOG_FD
+      $AR cru libconftest.a conftest.o 2>&AS_MESSAGE_LOG_FD
+      echo "$RANLIB libconftest.a" >&AS_MESSAGE_LOG_FD
+      $RANLIB libconftest.a 2>&AS_MESSAGE_LOG_FD
+      cat > conftest.c << _LT_EOF
+int main() { return 0;}
+_LT_EOF
+      echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c 
-Wl,-force_load,./libconftest.a" >&AS_MESSAGE_LOG_FD
+      $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err
+      _lt_result=$?
+      if test -s conftest.err && $GREP force_load conftest.err; then
+       cat conftest.err >&AS_MESSAGE_LOG_FD
+      elif test -f conftest && test 0 = "$_lt_result" && $GREP forced_load conftest >/dev/null 2>&1; then
+       lt_cv_ld_force_load=yes
+      else
+       cat conftest.err >&AS_MESSAGE_LOG_FD
+      fi
+        rm -f conftest.err libconftest.a conftest conftest.c
+        rm -rf conftest.dSYM
+    ])
+    case $host_os in
+    rhapsody* | darwin1.[[012]])
+      _lt_dar_allow_undefined='$wl-undefined ${wl}suppress' ;;
+    darwin1.*)
+      _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;;
+    darwin*) # darwin 5.x on
+      # if running on 10.5 or later, the deployment target defaults
+      # to the OS version, if on x86, and 10.4, the deployment
+      # target defaults to 10.4. Don't you love it?
+      case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in
+       10.0,*86*-darwin8*|10.0,*-darwin[[91]]*)
+         _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;;
+       10.[[012]][[,.]]*)
+         _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;;
+       10.*)
+         _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;;
+      esac
+    ;;
+  esac
+    if test yes = "$lt_cv_apple_cc_single_mod"; then
+      _lt_dar_single_mod='$single_module'
+    fi
+    if test yes = "$lt_cv_ld_exported_symbols_list"; then
+      _lt_dar_export_syms=' $wl-exported_symbols_list,$output_objdir/$libname-symbols.expsym'
+    else
+      _lt_dar_export_syms='~$NMEDIT -s $output_objdir/$libname-symbols.expsym $lib'
+    fi
+    if test : != "$DSYMUTIL" && test no = "$lt_cv_ld_force_load"; then
+      _lt_dsymutil='~$DSYMUTIL $lib || :'
+    else
+      _lt_dsymutil=
+    fi
+    ;;
+  esac
+])
+
+
+# _LT_DARWIN_LINKER_FEATURES([TAG])
+# ---------------------------------
+# Checks for linker and compiler features on darwin
+m4_defun([_LT_DARWIN_LINKER_FEATURES],
+[
+  m4_require([_LT_REQUIRED_DARWIN_CHECKS])
+  _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+  _LT_TAGVAR(hardcode_direct, $1)=no
+  _LT_TAGVAR(hardcode_automatic, $1)=yes
+  _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+  if test yes = "$lt_cv_ld_force_load"; then
+    _LT_TAGVAR(whole_archive_flag_spec, $1)='`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience $wl-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`'
+    m4_case([$1], [F77], [_LT_TAGVAR(compiler_needs_object, $1)=yes],
+                  [FC],  [_LT_TAGVAR(compiler_needs_object, $1)=yes])
+  else
+    _LT_TAGVAR(whole_archive_flag_spec, $1)=''
+  fi
+  _LT_TAGVAR(link_all_deplibs, $1)=yes
+  _LT_TAGVAR(allow_undefined_flag, $1)=$_lt_dar_allow_undefined
+  case $cc_basename in
+     ifort*|nagfor*) _lt_dar_can_shared=yes ;;
+     *) _lt_dar_can_shared=$GCC ;;
+  esac
+  if test yes = "$_lt_dar_can_shared"; then
+    output_verbose_link_cmd=func_echo_all
+    _LT_TAGVAR(archive_cmds, $1)="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dsymutil"
+    _LT_TAGVAR(module_cmds, $1)="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dsymutil"
+    _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dar_export_syms$_lt_dsymutil"
+    _LT_TAGVAR(module_expsym_cmds, $1)="sed -e 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dar_export_syms$_lt_dsymutil"
+    m4_if([$1], [CXX],
+[   if test yes != "$lt_cv_apple_cc_single_mod"; then
+      _LT_TAGVAR(archive_cmds, $1)="\$CC -r -keep_private_externs -nostdlib -o \$lib-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$lib-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring$_lt_dsymutil"
+      _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \$lib-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$lib-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring$_lt_dar_export_syms$_lt_dsymutil"
+    fi
+],[])
+  else
+  _LT_TAGVAR(ld_shlibs, $1)=no
+  fi
+])
+
+# _LT_SYS_MODULE_PATH_AIX([TAGNAME])
+# ----------------------------------
+# Links a minimal program and checks the executable
+# for the system default hardcoded library path. In most cases,
+# this is /usr/lib:/lib, but when the MPI compilers are used
+# the locations of the communication and MPI libs are included too.
+# If we don't find anything, use the default library path according
+# to the aix ld manual.
+# Store the results from the different compilers for each TAGNAME.
+# Allow overriding them for all tags through lt_cv_aix_libpath.
+m4_defun([_LT_SYS_MODULE_PATH_AIX],
+[m4_require([_LT_DECL_SED])dnl
+if test set = "${lt_cv_aix_libpath+set}"; then
+  aix_libpath=$lt_cv_aix_libpath
+else
+  AC_CACHE_VAL([_LT_TAGVAR([lt_cv_aix_libpath_], [$1])],
+  [AC_LINK_IFELSE([AC_LANG_PROGRAM],[
+  lt_aix_libpath_sed='[
+      /Import File Strings/,/^$/ {
+         /^0/ {
+             s/^0  *\([^ ]*\) *$/\1/
+             p
+         }
+      }]'
+  _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+  # Check for a 64-bit object if we didn't find anything.
+  if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then
+    _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
+  fi],[])
+  if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then
+    _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=/usr/lib:/lib
+  fi
+  ])
+  aix_libpath=$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])
+fi
+])# _LT_SYS_MODULE_PATH_AIX
+
+
+# _LT_SHELL_INIT(ARG)
+# -------------------
+m4_define([_LT_SHELL_INIT],
+[m4_divert_text([M4SH-INIT], [$1
+])])# _LT_SHELL_INIT
+
+
+
+# _LT_PROG_ECHO_BACKSLASH
+# -----------------------
+# Find how we can fake an echo command that does not interpret backslash.
+# In particular, with Autoconf 2.60 or later we add some code to the start
+# of the generated configure script that will find a shell with a builtin
+# printf (that we can use as an echo command).
+m4_defun([_LT_PROG_ECHO_BACKSLASH],
+[ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO
+ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+
+AC_MSG_CHECKING([how to print strings])
+# Test print first, because it will be a builtin if present.
+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
+   test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
+  ECHO='print -r --'
+elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
+  ECHO='printf %s\n'
+else
+  # Use this function as a fallback that always works.
+  func_fallback_echo ()
+  {
+    eval 'cat <<_LTECHO_EOF
+$[]1
+_LTECHO_EOF'
+  }
+  ECHO='func_fallback_echo'
+fi
+
+# func_echo_all arg...
+# Invoke $ECHO with all args, space-separated.
+func_echo_all ()
+{
+    $ECHO "$*"
+}
+
+case $ECHO in
+  printf*) AC_MSG_RESULT([printf]) ;;
+  print*) AC_MSG_RESULT([print -r]) ;;
+  *) AC_MSG_RESULT([cat]) ;;
+esac
+
+m4_ifdef([_AS_DETECT_SUGGESTED],
+[_AS_DETECT_SUGGESTED([
+  test -n "${ZSH_VERSION+set}${BASH_VERSION+set}" || (
+    ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
+    ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO
+    ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
+    PATH=/empty FPATH=/empty; export PATH FPATH
+    test "X`printf %s $ECHO`" = "X$ECHO" \
+      || test "X`print -r -- $ECHO`" = "X$ECHO" )])])
+
+_LT_DECL([], [SHELL], [1], [Shell to use when invoking shell scripts])
+_LT_DECL([], [ECHO], [1], [An echo program that protects backslashes])
+])# _LT_PROG_ECHO_BACKSLASH
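
The behaviour being probed here is easy to reproduce by hand: in some shells
the echo builtin interprets backslash escapes, which would mangle libtool's
command strings. A minimal sketch (output annotations are illustrative):

    s='foo\nbar'
    echo "$s"            # dash and some ksh builds print two lines here
    printf '%s\n' "$s"   # always prints the literal foo\nbar
    print -r -- "$s"     # ksh/zsh builtin, also literal; tried first above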
+
+
+# _LT_WITH_SYSROOT
+# ----------------
+AC_DEFUN([_LT_WITH_SYSROOT],
+[AC_MSG_CHECKING([for sysroot])
+AC_ARG_WITH([sysroot],
+[AS_HELP_STRING([--with-sysroot@<:@=DIR@:>@],
+  [Search for dependent libraries within DIR (or the compiler's sysroot
+   if not specified).])],
+[], [with_sysroot=no])
+
+dnl lt_sysroot will always be passed unquoted.  We quote it here
+dnl in case the user passed a directory name.
+lt_sysroot=
+case $with_sysroot in #(
+ yes)
+   if test yes = "$GCC"; then
+     lt_sysroot=`$CC --print-sysroot 2>/dev/null`
+   fi
+   ;; #(
+ /*)
+   lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"`
+   ;; #(
+ no|'')
+   ;; #(
+ *)
+   AC_MSG_RESULT([$with_sysroot])
+   AC_MSG_ERROR([The sysroot must be an absolute path.])
+   ;;
+esac
+
+ AC_MSG_RESULT([${lt_sysroot:-no}])
+_LT_DECL([], [lt_sysroot], [0], [The root where to search for ]dnl
+[dependent libraries, and where our libraries should be installed.])])
+
+# _LT_ENABLE_LOCK
+# ---------------
+m4_defun([_LT_ENABLE_LOCK],
+[AC_ARG_ENABLE([libtool-lock],
+  [AS_HELP_STRING([--disable-libtool-lock],
+    [avoid locking (might break parallel builds)])])
+test no = "$enable_libtool_lock" || enable_libtool_lock=yes
+
+# Some flags need to be propagated to the compiler or linker for good
+# libtool support.
+case $host in
+ia64-*-hpux*)
+  # Find out what ABI is being produced by ac_compile, and set mode
+  # options accordingly.
+  echo 'int i;' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    case `/usr/bin/file conftest.$ac_objext` in
+      *ELF-32*)
+       HPUX_IA64_MODE=32
+       ;;
+      *ELF-64*)
+       HPUX_IA64_MODE=64
+       ;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+*-*-irix6*)
+  # Find out what ABI is being produced by ac_compile, and set linker
+  # options accordingly.
+  echo '[#]line '$LINENO' "configure"' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    if test yes = "$lt_cv_prog_gnu_ld"; then
+      case `/usr/bin/file conftest.$ac_objext` in
+       *32-bit*)
+         LD="${LD-ld} -melf32bsmip"
+         ;;
+       *N32*)
+         LD="${LD-ld} -melf32bmipn32"
+         ;;
+       *64-bit*)
+         LD="${LD-ld} -melf64bmip"
+       ;;
+      esac
+    else
+      case `/usr/bin/file conftest.$ac_objext` in
+       *32-bit*)
+         LD="${LD-ld} -32"
+         ;;
+       *N32*)
+         LD="${LD-ld} -n32"
+         ;;
+       *64-bit*)
+         LD="${LD-ld} -64"
+         ;;
+      esac
+    fi
+  fi
+  rm -rf conftest*
+  ;;
+
+mips64*-*linux*)
+  # Find out what ABI is being produced by ac_compile, and set linker
+  # options accordingly.
+  echo '[#]line '$LINENO' "configure"' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    emul=elf
+    case `/usr/bin/file conftest.$ac_objext` in
+      *32-bit*)
+       emul="${emul}32"
+       ;;
+      *64-bit*)
+       emul="${emul}64"
+       ;;
+    esac
+    case `/usr/bin/file conftest.$ac_objext` in
+      *MSB*)
+       emul="${emul}btsmip"
+       ;;
+      *LSB*)
+       emul="${emul}ltsmip"
+       ;;
+    esac
+    case `/usr/bin/file conftest.$ac_objext` in
+      *N32*)
+       emul="${emul}n32"
+       ;;
+    esac
+    LD="${LD-ld} -m $emul"
+  fi
+  rm -rf conftest*
+  ;;
+
+x86_64-*kfreebsd*-gnu|x86_64-*linux*|powerpc*-*linux*| \
+s390*-*linux*|s390*-*tpf*|sparc*-*linux*)
+  # Find out what ABI is being produced by ac_compile, and set linker
+  # options accordingly.  Note that the listed cases only cover the
+  # situations where additional linker options are needed (such as when
+  # doing 32-bit compilation for a host where ld defaults to 64-bit, or
+  # vice versa); the common cases where no linker options are needed do
+  # not appear in the list.
+  echo 'int i;' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    case `/usr/bin/file conftest.o` in
+      *32-bit*)
+       case $host in
+         x86_64-*kfreebsd*-gnu)
+           LD="${LD-ld} -m elf_i386_fbsd"
+           ;;
+         x86_64-*linux*)
+           case `/usr/bin/file conftest.o` in
+             *x86-64*)
+               LD="${LD-ld} -m elf32_x86_64"
+               ;;
+             *)
+               LD="${LD-ld} -m elf_i386"
+               ;;
+           esac
+           ;;
+         powerpc64le-*linux*)
+           LD="${LD-ld} -m elf32lppclinux"
+           ;;
+         powerpc64-*linux*)
+           LD="${LD-ld} -m elf32ppclinux"
+           ;;
+         s390x-*linux*)
+           LD="${LD-ld} -m elf_s390"
+           ;;
+         sparc64-*linux*)
+           LD="${LD-ld} -m elf32_sparc"
+           ;;
+       esac
+       ;;
+      *64-bit*)
+       case $host in
+         x86_64-*kfreebsd*-gnu)
+           LD="${LD-ld} -m elf_x86_64_fbsd"
+           ;;
+         x86_64-*linux*)
+           LD="${LD-ld} -m elf_x86_64"
+           ;;
+         powerpcle-*linux*)
+           LD="${LD-ld} -m elf64lppc"
+           ;;
+         powerpc-*linux*)
+           LD="${LD-ld} -m elf64ppc"
+           ;;
+         s390*-*linux*|s390*-*tpf*)
+           LD="${LD-ld} -m elf64_s390"
+           ;;
+         sparc*-*linux*)
+           LD="${LD-ld} -m elf64_sparc"
+           ;;
+       esac
+       ;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+
+*-*-sco3.2v5*)
+  # On SCO OpenServer 5, we need -belf to get full-featured binaries.
+  SAVE_CFLAGS=$CFLAGS
+  CFLAGS="$CFLAGS -belf"
+  AC_CACHE_CHECK([whether the C compiler needs -belf], lt_cv_cc_needs_belf,
+    [AC_LANG_PUSH(C)
+     AC_LINK_IFELSE([AC_LANG_PROGRAM([[]],[[]])],[lt_cv_cc_needs_belf=yes],[lt_cv_cc_needs_belf=no])
+     AC_LANG_POP])
+  if test yes != "$lt_cv_cc_needs_belf"; then
+    # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf
+    CFLAGS=$SAVE_CFLAGS
+  fi
+  ;;
+*-*solaris*)
+  # Find out what ABI is being produced by ac_compile, and set linker
+  # options accordingly.
+  echo 'int i;' > conftest.$ac_ext
+  if AC_TRY_EVAL(ac_compile); then
+    case `/usr/bin/file conftest.o` in
+    *64-bit*)
+      case $lt_cv_prog_gnu_ld in
+      yes*)
+        case $host in
+        i?86-*-solaris*|x86_64-*-solaris*)
+          LD="${LD-ld} -m elf_x86_64"
+          ;;
+        sparc*-*-solaris*)
+          LD="${LD-ld} -m elf64_sparc"
+          ;;
+        esac
+        # GNU ld 2.21 introduced _sol2 emulations.  Use them if available.
+        if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then
+          LD=${LD-ld}_sol2
+        fi
+        ;;
+      *)
+       if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then
+         LD="${LD-ld} -64"
+       fi
+       ;;
+      esac
+      ;;
+    esac
+  fi
+  rm -rf conftest*
+  ;;
+esac
+
+need_locks=$enable_libtool_lock
+])# _LT_ENABLE_LOCK
+
+
+# _LT_PROG_AR
+# -----------
+m4_defun([_LT_PROG_AR],
+[AC_CHECK_TOOLS(AR, [ar], false)
+: ${AR=ar}
+: ${AR_FLAGS=cru}
+_LT_DECL([], [AR], [1], [The archiver])
+_LT_DECL([], [AR_FLAGS], [1], [Flags to create an archive])
+
+AC_CACHE_CHECK([for archiver @FILE support], [lt_cv_ar_at_file],
+  [lt_cv_ar_at_file=no
+   AC_COMPILE_IFELSE([AC_LANG_PROGRAM],
+     [echo conftest.$ac_objext > conftest.lst
+      lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&AS_MESSAGE_LOG_FD'
+      AC_TRY_EVAL([lt_ar_try])
+      if test 0 -eq "$ac_status"; then
+       # Ensure the archiver fails upon bogus file names.
+       rm -f conftest.$ac_objext libconftest.a
+       AC_TRY_EVAL([lt_ar_try])
+       if test 0 -ne "$ac_status"; then
+          lt_cv_ar_at_file=@
+        fi
+      fi
+      rm -f conftest.* libconftest.a
+     ])
+  ])
+
+if test no = "$lt_cv_ar_at_file"; then
+  archiver_list_spec=
+else
+  archiver_list_spec=$lt_cv_ar_at_file
+fi
+_LT_DECL([], [archiver_list_spec], [1],
+  [How to feed a file listing to the archiver])
+])# _LT_PROG_AR
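
The @FILE probe corresponds to the following archiver usage, which GNU ar
accepts (object and archive names are placeholders):

    printf '%s\n' foo.o bar.o > objs.lst
    ar cru libdemo.a @objs.lst   # @objs.lst is expanded to the listed objects
    ranlib libdemo.a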
+
+
+# _LT_CMD_OLD_ARCHIVE
+# -------------------
+m4_defun([_LT_CMD_OLD_ARCHIVE],
+[_LT_PROG_AR
+
+AC_CHECK_TOOL(STRIP, strip, :)
+test -z "$STRIP" && STRIP=:
+_LT_DECL([], [STRIP], [1], [A symbol stripping program])
+
+AC_CHECK_TOOL(RANLIB, ranlib, :)
+test -z "$RANLIB" && RANLIB=:
+_LT_DECL([], [RANLIB], [1],
+    [Commands used to install an old-style archive])
+
+# Determine commands to create old-style static archives.
+old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs'
+old_postinstall_cmds='chmod 644 $oldlib'
+old_postuninstall_cmds=
+
+if test -n "$RANLIB"; then
+  case $host_os in
+  bitrig* | openbsd*)
+    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib"
+    ;;
+  *)
+    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$tool_oldlib"
+    ;;
+  esac
+  old_archive_cmds="$old_archive_cmds~\$RANLIB \$tool_oldlib"
+fi
+
+case $host_os in
+  darwin*)
+    lock_old_archive_extraction=yes ;;
+  *)
+    lock_old_archive_extraction=no ;;
+esac
+_LT_DECL([], [old_postinstall_cmds], [2])
+_LT_DECL([], [old_postuninstall_cmds], [2])
+_LT_TAGDECL([], [old_archive_cmds], [2],
+    [Commands used to build an old-style archive])
+_LT_DECL([], [lock_old_archive_extraction], [0],
+    [Whether to use a lock for old archive extraction])
+])# _LT_CMD_OLD_ARCHIVE
+
+
+# _LT_COMPILER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS,
+#              [OUTPUT-FILE], [ACTION-SUCCESS], [ACTION-FAILURE])
+# ----------------------------------------------------------------
+# Check whether the given compiler option works
+AC_DEFUN([_LT_COMPILER_OPTION],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_SED])dnl
+AC_CACHE_CHECK([$1], [$2],
+  [$2=no
+   m4_if([$4], , [ac_outfile=conftest.$ac_objext], [ac_outfile=$4])
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+   lt_compiler_flag="$3"  ## exclude from sc_useless_quotes_in_assignment
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   # The option is referenced via a variable to avoid confusing sed.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD)
+   (eval "$lt_compile" 2>conftest.err)
+   ac_status=$?
+   cat conftest.err >&AS_MESSAGE_LOG_FD
+   echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD
+   if (exit $ac_status) && test -s "$ac_outfile"; then
+     # The compiler can only warn and ignore the option if not recognized
+     # So say no if there are warnings other than the usual output.
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp
+     $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+     if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
+       $2=yes
+     fi
+   fi
+   $RM conftest*
+])
+
+if test yes = "[$]$2"; then
+    m4_if([$5], , :, [$5])
+else
+    m4_if([$6], , :, [$6])
+fi
+])# _LT_COMPILER_OPTION
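
Concretely, rule (1) above rewrites the still-unexpanded compile template so
that the probed flag lands after the last *FLAGS variable, e.g. (sketch)

    $CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext

becomes

    $CC -c $CFLAGS $CPPFLAGS $lt_compiler_flag conftest.$ac_ext

and the rewritten string is then eval'd, with stderr captured in conftest.err
so that compilers which merely warn about an unknown flag are still counted
as not supporting it.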
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_COMPILER_OPTION], [_LT_COMPILER_OPTION])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_COMPILER_OPTION], [])
+
+
+# _LT_LINKER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS,
+#                  [ACTION-SUCCESS], [ACTION-FAILURE])
+# ----------------------------------------------------
+# Check whether the given linker option works
+AC_DEFUN([_LT_LINKER_OPTION],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_SED])dnl
+AC_CACHE_CHECK([$1], [$2],
+  [$2=no
+   save_LDFLAGS=$LDFLAGS
+   LDFLAGS="$LDFLAGS $3"
+   echo "$lt_simple_link_test_code" > conftest.$ac_ext
+   if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
+     # The linker can only warn and ignore the option if not recognized
+     # So say no if there are warnings
+     if test -s conftest.err; then
+       # Append any errors to the config.log.
+       cat conftest.err 1>&AS_MESSAGE_LOG_FD
+       $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp
+       $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
+       if diff conftest.exp conftest.er2 >/dev/null; then
+         $2=yes
+       fi
+     else
+       $2=yes
+     fi
+   fi
+   $RM -r conftest*
+   LDFLAGS=$save_LDFLAGS
+])
+
+if test yes = "[$]$2"; then
+    m4_if([$4], , :, [$4])
+else
+    m4_if([$5], , :, [$5])
+fi
+])# _LT_LINKER_OPTION
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_LINKER_OPTION], [_LT_LINKER_OPTION])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_LINKER_OPTION], [])
+
+
+# LT_CMD_MAX_LEN
+#---------------
+AC_DEFUN([LT_CMD_MAX_LEN],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+# find the maximum length of command line arguments
+AC_MSG_CHECKING([the maximum length of command line arguments])
+AC_CACHE_VAL([lt_cv_sys_max_cmd_len], [dnl
+  i=0
+  teststring=ABCD
+
+  case $build_os in
+  msdosdjgpp*)
+    # On DJGPP, this test can blow up pretty badly due to problems in libc
+    # (any single argument exceeding 2000 bytes causes a buffer overrun
+    # during glob expansion).  Even if it were fixed, the result of this
+    # check would be larger than it should be.
+    lt_cv_sys_max_cmd_len=12288;    # 12K is about right
+    ;;
+
+  gnu*)
+    # Under GNU Hurd, this test is not required because there is
+    # no limit to the length of command line arguments.
+    # Libtool will interpret -1 as no limit whatsoever
+    lt_cv_sys_max_cmd_len=-1;
+    ;;
+
+  cygwin* | mingw* | cegcc*)
+    # On Win9x/ME, this test blows up -- it succeeds, but takes
+    # about 5 minutes as the teststring grows exponentially.
+    # Worse, since 9x/ME are not pre-emptively multitasking,
+    # you end up with a "frozen" computer, even though with patience
+    # the test eventually succeeds (with a max line length of 256k).
+    # Instead, let's just punt: use the minimum linelength reported by
+    # all of the supported platforms: 8192 (on NT/2K/XP).
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  mint*)
+    # On MiNT this can take a long time and run out of memory.
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  amigaos*)
+    # On AmigaOS with pdksh, this test takes hours, literally.
+    # So we just punt and use a minimum line length of 8192.
+    lt_cv_sys_max_cmd_len=8192;
+    ;;
+
+  bitrig* | darwin* | dragonfly* | freebsd* | netbsd* | openbsd*)
+    # This has been around since 386BSD, at least.  Likely further.
+    if test -x /sbin/sysctl; then
+      lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax`
+    elif test -x /usr/sbin/sysctl; then
+      lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax`
+    else
+      lt_cv_sys_max_cmd_len=65536      # usable default for all BSDs
+    fi
+    # And add a safety zone
+    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+    ;;
+
+  interix*)
+    # We know the value 262144 and hardcode it with a safety zone (like BSD)
+    lt_cv_sys_max_cmd_len=196608
+    ;;
+
+  os2*)
+    # The test takes a long time on OS/2.
+    lt_cv_sys_max_cmd_len=8192
+    ;;
+
+  osf*)
+    # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure
+    # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not
+    # nice to cause kernel panics, so let's avoid the loop below.
+    # First set a reasonable default.
+    lt_cv_sys_max_cmd_len=16384
+    #
+    if test -x /sbin/sysconfig; then
+      case `/sbin/sysconfig -q proc exec_disable_arg_limit` in
+        *1*) lt_cv_sys_max_cmd_len=-1 ;;
+      esac
+    fi
+    ;;
+  sco3.2v5*)
+    lt_cv_sys_max_cmd_len=102400
+    ;;
+  sysv5* | sco5v6* | sysv4.2uw2*)
+    kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null`
+    if test -n "$kargmax"; then
+      lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[[        ]]//'`
+    else
+      lt_cv_sys_max_cmd_len=32768
+    fi
+    ;;
+  *)
+    lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null`
+    if test -n "$lt_cv_sys_max_cmd_len" && \
+       test undefined != "$lt_cv_sys_max_cmd_len"; then
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
+    else
+      # Make teststring a little bigger before we do anything with it.
+      # a 1K string should be a reasonable start.
+      for i in 1 2 3 4 5 6 7 8; do
+        teststring=$teststring$teststring
+      done
+      SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}}
+      # If test is not a shell built-in, we'll probably end up computing a
+      # maximum length that is only half of the actual maximum length, but
+      # we can't tell.
+      while { test X`env echo "$teststring$teststring" 2>/dev/null` \
+                = "X$teststring$teststring"; } >/dev/null 2>&1 &&
+             test 17 != "$i" # 1/2 MB should be enough
+      do
+        i=`expr $i + 1`
+        teststring=$teststring$teststring
+      done
+      # Only check the string length outside the loop.
+      lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1`
+      teststring=
+      # Add a significant safety factor because C++ compilers can tack on
+      # massive amounts of additional arguments before passing them to the
+      # linker.  It appears as though 1/2 is a usable value.
+      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2`
+    fi
+    ;;
+  esac
+])
+if test -n "$lt_cv_sys_max_cmd_len"; then
+  AC_MSG_RESULT($lt_cv_sys_max_cmd_len)
+else
+  AC_MSG_RESULT(none)
+fi
+max_cmd_len=$lt_cv_sys_max_cmd_len
+_LT_DECL([], [max_cmd_len], [0],
+    [What is the maximum length of a command?])
+])# LT_CMD_MAX_LEN
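
For the generic case at the end, the computation boils down to (sketch; the
value getconf reports differs per system):

    len=`getconf ARG_MAX 2>/dev/null`   # e.g. 2097152 on many Linux systems
    len=`expr $len \/ 4`                # keep a wide safety margin,
    len=`expr $len \* 3`                # i.e. use 3/4 of the reported limit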
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_SYS_MAX_CMD_LEN], [LT_CMD_MAX_LEN])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_SYS_MAX_CMD_LEN], [])
+
+
+# _LT_HEADER_DLFCN
+# ----------------
+m4_defun([_LT_HEADER_DLFCN],
+[AC_CHECK_HEADERS([dlfcn.h], [], [], [AC_INCLUDES_DEFAULT])dnl
+])# _LT_HEADER_DLFCN
+
+
+# _LT_TRY_DLOPEN_SELF (ACTION-IF-TRUE, ACTION-IF-TRUE-W-USCORE,
+#                      ACTION-IF-FALSE, ACTION-IF-CROSS-COMPILING)
+# ----------------------------------------------------------------
+m4_defun([_LT_TRY_DLOPEN_SELF],
+[m4_require([_LT_HEADER_DLFCN])dnl
+if test yes = "$cross_compiling"; then :
+  [$4]
+else
+  lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
+  lt_status=$lt_dlunknown
+  cat > conftest.$ac_ext <<_LT_EOF
+[#line $LINENO "configure"
+#include "confdefs.h"
+
+#if HAVE_DLFCN_H
+#include <dlfcn.h>
+#endif
+
+#include <stdio.h>
+
+#ifdef RTLD_GLOBAL
+#  define LT_DLGLOBAL          RTLD_GLOBAL
+#else
+#  ifdef DL_GLOBAL
+#    define LT_DLGLOBAL                DL_GLOBAL
+#  else
+#    define LT_DLGLOBAL                0
+#  endif
+#endif
+
+/* We may have to define LT_DLLAZY_OR_NOW in the command line if we
+   find out it does not work in some platform. */
+#ifndef LT_DLLAZY_OR_NOW
+#  ifdef RTLD_LAZY
+#    define LT_DLLAZY_OR_NOW           RTLD_LAZY
+#  else
+#    ifdef DL_LAZY
+#      define LT_DLLAZY_OR_NOW         DL_LAZY
+#    else
+#      ifdef RTLD_NOW
+#        define LT_DLLAZY_OR_NOW       RTLD_NOW
+#      else
+#        ifdef DL_NOW
+#          define LT_DLLAZY_OR_NOW     DL_NOW
+#        else
+#          define LT_DLLAZY_OR_NOW     0
+#        endif
+#      endif
+#    endif
+#  endif
+#endif
+
+/* When -fvisibility=hidden is used, assume the code has been annotated
+   correspondingly for the symbols needed.  */
+#if defined __GNUC__ && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
+int fnord () __attribute__((visibility("default")));
+#endif
+
+int fnord () { return 42; }
+int main ()
+{
+  void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
+  int status = $lt_dlunknown;
+
+  if (self)
+    {
+      if (dlsym (self,"fnord"))       status = $lt_dlno_uscore;
+      else
+        {
+         if (dlsym( self,"_fnord"))  status = $lt_dlneed_uscore;
+          else puts (dlerror ());
+       }
+      /* dlclose (self); */
+    }
+  else
+    puts (dlerror ());
+
+  return status;
+}]
+_LT_EOF
+  if AC_TRY_EVAL(ac_link) && test -s "conftest$ac_exeext" 2>/dev/null; then
+    (./conftest; exit; ) >&AS_MESSAGE_LOG_FD 2>/dev/null
+    lt_status=$?
+    case x$lt_status in
+      x$lt_dlno_uscore) $1 ;;
+      x$lt_dlneed_uscore) $2 ;;
+      x$lt_dlunknown|x*) $3 ;;
+    esac
+  else :
+    # compilation failed
+    $3
+  fi
+fi
+rm -fr conftest*
+])# _LT_TRY_DLOPEN_SELF
+
+
+# LT_SYS_DLOPEN_SELF
+# ------------------
+AC_DEFUN([LT_SYS_DLOPEN_SELF],
+[m4_require([_LT_HEADER_DLFCN])dnl
+if test yes != "$enable_dlopen"; then
+  enable_dlopen=unknown
+  enable_dlopen_self=unknown
+  enable_dlopen_self_static=unknown
+else
+  lt_cv_dlopen=no
+  lt_cv_dlopen_libs=
+
+  case $host_os in
+  beos*)
+    lt_cv_dlopen=load_add_on
+    lt_cv_dlopen_libs=
+    lt_cv_dlopen_self=yes
+    ;;
+
+  mingw* | pw32* | cegcc*)
+    lt_cv_dlopen=LoadLibrary
+    lt_cv_dlopen_libs=
+    ;;
+
+  cygwin*)
+    lt_cv_dlopen=dlopen
+    lt_cv_dlopen_libs=
+    ;;
+
+  darwin*)
+    # if libdl is installed we need to link against it
+    AC_CHECK_LIB([dl], [dlopen],
+               [lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl],[
+    lt_cv_dlopen=dyld
+    lt_cv_dlopen_libs=
+    lt_cv_dlopen_self=yes
+    ])
+    ;;
+
+  tpf*)
+    # Don't try to run any link tests for TPF.  We know it's impossible
+    # because TPF is a cross-compiler, and we know how we open DSOs.
+    lt_cv_dlopen=dlopen
+    lt_cv_dlopen_libs=
+    lt_cv_dlopen_self=no
+    ;;
+
+  *)
+    AC_CHECK_FUNC([shl_load],
+         [lt_cv_dlopen=shl_load],
+      [AC_CHECK_LIB([dld], [shl_load],
+           [lt_cv_dlopen=shl_load lt_cv_dlopen_libs=-ldld],
+       [AC_CHECK_FUNC([dlopen],
+             [lt_cv_dlopen=dlopen],
+         [AC_CHECK_LIB([dl], [dlopen],
+               [lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl],
+           [AC_CHECK_LIB([svld], [dlopen],
+                 [lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-lsvld],
+             [AC_CHECK_LIB([dld], [dld_link],
+                   [lt_cv_dlopen=dld_link lt_cv_dlopen_libs=-ldld])
+             ])
+           ])
+         ])
+       ])
+      ])
+    ;;
+  esac
+
+  if test no = "$lt_cv_dlopen"; then
+    enable_dlopen=no
+  else
+    enable_dlopen=yes
+  fi
+
+  case $lt_cv_dlopen in
+  dlopen)
+    save_CPPFLAGS=$CPPFLAGS
+    test yes = "$ac_cv_header_dlfcn_h" && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H"
+
+    save_LDFLAGS=$LDFLAGS
+    wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\"
+
+    save_LIBS=$LIBS
+    LIBS="$lt_cv_dlopen_libs $LIBS"
+
+    AC_CACHE_CHECK([whether a program can dlopen itself],
+         lt_cv_dlopen_self, [dnl
+         _LT_TRY_DLOPEN_SELF(
+           lt_cv_dlopen_self=yes, lt_cv_dlopen_self=yes,
+           lt_cv_dlopen_self=no, lt_cv_dlopen_self=cross)
+    ])
+
+    if test yes = "$lt_cv_dlopen_self"; then
+      wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\"
+      AC_CACHE_CHECK([whether a statically linked program can dlopen itself],
+         lt_cv_dlopen_self_static, [dnl
+         _LT_TRY_DLOPEN_SELF(
+           lt_cv_dlopen_self_static=yes, lt_cv_dlopen_self_static=yes,
+           lt_cv_dlopen_self_static=no,  lt_cv_dlopen_self_static=cross)
+      ])
+    fi
+
+    CPPFLAGS=$save_CPPFLAGS
+    LDFLAGS=$save_LDFLAGS
+    LIBS=$save_LIBS
+    ;;
+  esac
+
+  case $lt_cv_dlopen_self in
+  yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;;
+  *) enable_dlopen_self=unknown ;;
+  esac
+
+  case $lt_cv_dlopen_self_static in
+  yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;;
+  *) enable_dlopen_self_static=unknown ;;
+  esac
+fi
+_LT_DECL([dlopen_support], [enable_dlopen], [0],
+        [Whether dlopen is supported])
+_LT_DECL([dlopen_self], [enable_dlopen_self], [0],
+        [Whether dlopen of programs is supported])
+_LT_DECL([dlopen_self_static], [enable_dlopen_self_static], [0],
+        [Whether dlopen of statically linked programs is supported])
+])# LT_SYS_DLOPEN_SELF
+
+# Old name:
+AU_ALIAS([AC_LIBTOOL_DLOPEN_SELF], [LT_SYS_DLOPEN_SELF])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_DLOPEN_SELF], [])
+
+
+# _LT_COMPILER_C_O([TAGNAME])
+# ---------------------------
+# Check to see if options -c and -o are simultaneously supported by compiler.
+# This macro does not hard code the compiler like AC_PROG_CC_C_O.
+m4_defun([_LT_COMPILER_C_O],
+[m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+AC_CACHE_CHECK([if $compiler supports -c -o file.$ac_objext],
+  [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)],
+  [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=no
+   $RM -r conftest 2>/dev/null
+   mkdir conftest
+   cd conftest
+   mkdir out
+   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+   lt_compiler_flag="-o out/conftest2.$ac_objext"
+   # Insert the option either (1) after the last *FLAGS variable, or
+   # (2) before a word containing "conftest.", or (3) at the end.
+   # Note that $ac_compile itself does not contain backslashes and begins
+   # with a dollar sign (not a hyphen), so the echo should work correctly.
+   lt_compile=`echo "$ac_compile" | $SED \
+   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
+   -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \
+   -e 's:$: $lt_compiler_flag:'`
+   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD)
+   (eval "$lt_compile" 2>out/conftest.err)
+   ac_status=$?
+   cat out/conftest.err >&AS_MESSAGE_LOG_FD
+   echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD
+   if (exit $ac_status) && test -s out/conftest2.$ac_objext
+   then
+     # The compiler can only warn and ignore the option if not recognized
+     # So say no if there are warnings
+     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
+     $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
+     if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
+       _LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes
+     fi
+   fi
+   chmod u+w . 2>&AS_MESSAGE_LOG_FD
+   $RM conftest*
+   # SGI C++ compiler will create directory out/ii_files/ for
+   # template instantiation
+   test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
+   $RM out/* && rmdir out
+   cd ..
+   $RM -r conftest
+   $RM conftest*
+])
+_LT_TAGDECL([compiler_c_o], [lt_cv_prog_compiler_c_o], [1],
+       [Does compiler simultaneously support -c and -o options?])
+])# _LT_COMPILER_C_O
+
+
+# _LT_COMPILER_FILE_LOCKS([TAGNAME])
+# ----------------------------------
+# Check to see if we can do hard links to lock some files if needed
+m4_defun([_LT_COMPILER_FILE_LOCKS],
+[m4_require([_LT_ENABLE_LOCK])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+_LT_COMPILER_C_O([$1])
+
+hard_links=nottested
+if test no = "$_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)" && test no != 
"$need_locks"; then
+  # do not overwrite the value of need_locks provided by the user
+  AC_MSG_CHECKING([if we can lock with hard links])
+  hard_links=yes
+  $RM conftest*
+  ln conftest.a conftest.b 2>/dev/null && hard_links=no
+  touch conftest.a
+  ln conftest.a conftest.b 2>&5 || hard_links=no
+  ln conftest.a conftest.b 2>/dev/null && hard_links=no
+  AC_MSG_RESULT([$hard_links])
+  if test no = "$hard_links"; then
+    AC_MSG_WARN(['$CC' does not support '-c -o', so 'make -j' may be unsafe])
+    need_locks=warn
+  fi
+else
+  need_locks=no
+fi
+_LT_DECL([], [need_locks], [1], [Must we lock files when doing compilation?])
+])# _LT_COMPILER_FILE_LOCKS
+
+
+# _LT_CHECK_OBJDIR
+# ----------------
+m4_defun([_LT_CHECK_OBJDIR],
+[AC_CACHE_CHECK([for objdir], [lt_cv_objdir],
+[rm -f .libs 2>/dev/null
+mkdir .libs 2>/dev/null
+if test -d .libs; then
+  lt_cv_objdir=.libs
+else
+  # MS-DOS does not allow filenames that begin with a dot.
+  lt_cv_objdir=_libs
+fi
+rmdir .libs 2>/dev/null])
+objdir=$lt_cv_objdir
+_LT_DECL([], [objdir], [0],
+         [The name of the directory that contains temporary libtool files])dnl
+m4_pattern_allow([LT_OBJDIR])dnl
+AC_DEFINE_UNQUOTED([LT_OBJDIR], "$lt_cv_objdir/",
+  [Define to the sub-directory where libtool stores uninstalled libraries.])
+])# _LT_CHECK_OBJDIR
+
+
+# _LT_LINKER_HARDCODE_LIBPATH([TAGNAME])
+# --------------------------------------
+# Check hardcoding attributes.
+m4_defun([_LT_LINKER_HARDCODE_LIBPATH],
+[AC_MSG_CHECKING([how to hardcode library paths into programs])
+_LT_TAGVAR(hardcode_action, $1)=
+if test -n "$_LT_TAGVAR(hardcode_libdir_flag_spec, $1)" ||
+   test -n "$_LT_TAGVAR(runpath_var, $1)" ||
+   test yes = "$_LT_TAGVAR(hardcode_automatic, $1)"; then
+
+  # We can hardcode non-existent directories.
+  if test no != "$_LT_TAGVAR(hardcode_direct, $1)" &&
+     # If the only mechanism to avoid hardcoding is shlibpath_var, we
+     # have to relink, otherwise we might link with an installed library
+     # when we should be linking with a yet-to-be-installed one
+     ## test no != "$_LT_TAGVAR(hardcode_shlibpath_var, $1)" &&
+     test no != "$_LT_TAGVAR(hardcode_minus_L, $1)"; then
+    # Linking always hardcodes the temporary library directory.
+    _LT_TAGVAR(hardcode_action, $1)=relink
+  else
+    # We can link without hardcoding, and we can hardcode nonexisting dirs.
+    _LT_TAGVAR(hardcode_action, $1)=immediate
+  fi
+else
+  # We cannot hardcode anything, or else we can only hardcode existing
+  # directories.
+  _LT_TAGVAR(hardcode_action, $1)=unsupported
+fi
+AC_MSG_RESULT([$_LT_TAGVAR(hardcode_action, $1)])
+
+if test relink = "$_LT_TAGVAR(hardcode_action, $1)" ||
+   test yes = "$_LT_TAGVAR(inherit_rpath, $1)"; then
+  # Fast installation is not supported
+  enable_fast_install=no
+elif test yes = "$shlibpath_overrides_runpath" ||
+     test no = "$enable_shared"; then
+  # Fast installation is not necessary
+  enable_fast_install=needless
+fi
+_LT_TAGDECL([], [hardcode_action], [0],
+    [How to hardcode a shared library path into an executable])
+])# _LT_LINKER_HARDCODE_LIBPATH
+
+
+# _LT_CMD_STRIPLIB
+# ----------------
+m4_defun([_LT_CMD_STRIPLIB],
+[m4_require([_LT_DECL_EGREP])
+striplib=
+old_striplib=
+AC_MSG_CHECKING([whether stripping libraries is possible])
+if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then
+  test -z "$old_striplib" && old_striplib="$STRIP --strip-debug"
+  test -z "$striplib" && striplib="$STRIP --strip-unneeded"
+  AC_MSG_RESULT([yes])
+else
+# FIXME - insert some real tests, host_os isn't really good enough
+  case $host_os in
+  darwin*)
+    if test -n "$STRIP"; then
+      striplib="$STRIP -x"
+      old_striplib="$STRIP -S"
+      AC_MSG_RESULT([yes])
+    else
+      AC_MSG_RESULT([no])
+    fi
+    ;;
+  *)
+    AC_MSG_RESULT([no])
+    ;;
+  esac
+fi
+_LT_DECL([], [old_striplib], [1], [Commands to strip libraries])
+_LT_DECL([], [striplib], [1])
+])# _LT_CMD_STRIPLIB
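
In terms of plain commands, the two variables set here roughly correspond to
the following (GNU binutils strip shown; the darwin branch uses -S and -x
instead, and library names are placeholders):

    strip --strip-debug    libdemo.a      # old_striplib, for static archives
    strip --strip-unneeded libdemo.so.1   # striplib, for shared libraries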
+
+
+# _LT_PREPARE_MUNGE_PATH_LIST
+# ---------------------------
+# Make sure func_munge_path_list() is defined correctly.
+m4_defun([_LT_PREPARE_MUNGE_PATH_LIST],
+[[# func_munge_path_list VARIABLE PATH
+# -----------------------------------
+# VARIABLE is name of variable containing _space_ separated list of
+# directories to be munged by the contents of PATH, which is string
+# having a format:
+# "DIR[:DIR]:"
+#       string "DIR[ DIR]" will be prepended to VARIABLE
+# ":DIR[:DIR]"
+#       string "DIR[ DIR]" will be appended to VARIABLE
+# "DIRP[:DIRP]::[DIRA:]DIRA"
+#       string "DIRP[ DIRP]" will be prepended to VARIABLE and string
+#       "DIRA[ DIRA]" will be appended to VARIABLE
+# "DIR[:DIR]"
+#       VARIABLE will be replaced by "DIR[ DIR]"
+func_munge_path_list ()
+{
+    case x@S|@2 in
+    x)
+        ;;
+    *:)
+        eval @S|@1=\"`$ECHO @S|@2 | $SED 's/:/ /g'` \@S|@@S|@1\"
+        ;;
+    x:*)
+        eval @S|@1=\"\@S|@@S|@1 `$ECHO @S|@2 | $SED 's/:/ /g'`\"
+        ;;
+    *::*)
+        eval @S|@1=\"\@S|@@S|@1\ `$ECHO @S|@2 | $SED -e 's/.*:://' -e 's/:/ /g'`\"
+        eval @S|@1=\"`$ECHO @S|@2 | $SED -e 's/::.*//' -e 's/:/ /g'`\ \@S|@@S|@1\"
+        ;;
+    *)
+        eval @S|@1=\"`$ECHO @S|@2 | $SED 's/:/ /g'`\"
+        ;;
+    esac
+}
+]])# _LT_PREPARE_MUNGE_PATH_LIST
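
A usage sketch of func_munge_path_list as it appears in the generated
configure script (placeholder directories; whitespace in the results is
approximate):

    libpath=/usr/lib
    func_munge_path_list libpath '/opt/lib:'    # prepend -> /opt/lib /usr/lib
    func_munge_path_list libpath ':/extra/lib'  # append  -> /opt/lib /usr/lib /extra/lib
    func_munge_path_list libpath '/a::/z'       # both    -> /a /opt/lib /usr/lib /extra/lib /z
    func_munge_path_list libpath '/only/this'   # replace -> /only/this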
+
+
+# _LT_SYS_DYNAMIC_LINKER([TAG])
+# -----------------------------
+# PORTME Fill in your ld.so characteristics
+m4_defun([_LT_SYS_DYNAMIC_LINKER],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_OBJDUMP])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_CHECK_SHELL_FEATURES])dnl
+m4_require([_LT_PREPARE_MUNGE_PATH_LIST])dnl
+AC_MSG_CHECKING([dynamic linker characteristics])
+m4_if([$1],
+       [], [
+if test yes = "$GCC"; then
+  case $host_os in
+    darwin*) lt_awk_arg='/^libraries:/,/LR/' ;;
+    *) lt_awk_arg='/^libraries:/' ;;
+  esac
+  case $host_os in
+    mingw* | cegcc*) lt_sed_strip_eq='s|=\([[A-Za-z]]:\)|\1|g' ;;
+    *) lt_sed_strip_eq='s|=/|/|g' ;;
+  esac
+  lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq`
+  case $lt_search_path_spec in
+  *\;*)
+    # if the path contains ";" then we assume it to be the separator
+    # otherwise default to the standard path separator (i.e. ":") - it is
+    # assumed that no part of a normal pathname contains ";" but that should
+    # be okay in the real world where ";" in dirpaths is itself problematic.
+    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'`
+    ;;
+  *)
+    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"`
+    ;;
+  esac
+  # Ok, now we have the path, separated by spaces, we can step through it
+  # and add multilib dir if necessary...
+  lt_tmp_lt_search_path_spec=
+  lt_multi_os_dir=/`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null`
+  # ...but if some path component already ends with the multilib dir we assume
+  # that all is fine and trust -print-search-dirs as is (GCC 4.2? or newer).
+  case "$lt_multi_os_dir; $lt_search_path_spec " in
+  "/; "* | "/.; "* | "/./; "* | *"$lt_multi_os_dir "* | *"$lt_multi_os_dir/ "*)
+    lt_multi_os_dir=
+    ;;
+  esac
+  for lt_sys_path in $lt_search_path_spec; do
+    if test -d "$lt_sys_path$lt_multi_os_dir"; then
+      lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path$lt_multi_os_dir"
+    elif test -n "$lt_multi_os_dir"; then
+      test -d "$lt_sys_path" && \
+       lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path"
+    fi
+  done
+  lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk '
+BEGIN {RS = " "; FS = "/|\n";} {
+  lt_foo = "";
+  lt_count = 0;
+  for (lt_i = NF; lt_i > 0; lt_i--) {
+    if ($lt_i != "" && $lt_i != ".") {
+      if ($lt_i == "..") {
+        lt_count++;
+      } else {
+        if (lt_count == 0) {
+          lt_foo = "/" $lt_i lt_foo;
+        } else {
+          lt_count--;
+        }
+      }
+    }
+  }
+  if (lt_foo != "") { lt_freq[[lt_foo]]++; }
+  if (lt_freq[[lt_foo]] == 1) { print lt_foo; }
+}'`
+  # AWK program above erroneously prepends '/' to C:/dos/paths
+  # for these hosts.
+  case $host_os in
+    mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\
+      $SED 's|/\([[A-Za-z]]:\)|\1|g'` ;;
+  esac
+  sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP`
+else
+  sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib"
+fi])
+library_names_spec=
+libname_spec='lib$name'
+soname_spec=
+shrext_cmds=.so
+postinstall_cmds=
+postuninstall_cmds=
+finish_cmds=
+finish_eval=
+shlibpath_var=
+shlibpath_overrides_runpath=unknown
+version_type=none
+dynamic_linker="$host_os ld.so"
+sys_lib_dlsearch_path_spec="/lib /usr/lib"
+need_lib_prefix=unknown
+hardcode_into_libs=no
+
+# when you set need_version to no, make sure it does not cause -set_version
+# flags to be left without arguments
+need_version=unknown
+
+AC_ARG_VAR([LT_SYS_LIBRARY_PATH],
+[User-defined run-time library search path.])
+
+case $host_os in
+aix3*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='$libname$release$shared_ext$versuffix $libname.a'
+  shlibpath_var=LIBPATH
+
+  # AIX 3 has no versioning support, so we append a major version to the name.
+  soname_spec='$libname$release$shared_ext$major'
+  ;;
+
+aix[[4-9]]*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  hardcode_into_libs=yes
+  if test ia64 = "$host_cpu"; then
+    # AIX 5 supports IA64
+    library_names_spec='$libname$release$shared_ext$major $libname$release$shared_ext$versuffix $libname$shared_ext'
+    shlibpath_var=LD_LIBRARY_PATH
+  else
+    # With GCC up to 2.95.x, collect2 would create an import file
+    # for dependence libraries.  The import file would start with
+    # the line '#! .'.  This would cause the generated library to
+    # depend on '.', always an invalid library.  This was fixed in
+    # development snapshots of GCC prior to 3.0.
+    case $host_os in
+      aix4 | aix4.[[01]] | aix4.[[01]].*)
+      if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)'
+          echo ' yes '
+          echo '#endif'; } | $CC -E - | $GREP yes > /dev/null; then
+       :
+      else
+       can_build_shared=no
+      fi
+      ;;
+    esac
+    # Using Import Files as archive members, it is possible to support
+    # filename-based versioning of shared library archives on AIX. While
+    # this would work for both with and without runtime linking, it will
+    # prevent static linking of such archives. So we do filename-based
+    # shared library versioning with .so extension only, which is used
+    # when both runtime linking and shared linking is enabled.
+    # Unfortunately, runtime linking may impact performance, so we do
+    # not want this to be the default eventually. Also, we use the
+    # versioned .so libs for executables only if there is the -brtl
+    # linker flag in LDFLAGS as well, or --with-aix-soname=svr4 only.
+    # To allow for filename-based versioning support, we need to create
+    # libNAME.so.V as an archive file, containing:
+    # *) an Import File, referring to the versioned filename of the
+    #    archive as well as the shared archive member, telling the
+    #    bitwidth (32 or 64) of that shared object, and providing the
+    #    list of exported symbols of that shared object, eventually
+    #    decorated with the 'weak' keyword
+    # *) the shared object with the F_LOADONLY flag set, to really avoid
+    #    it being seen by the linker.
+    # At run time we better use the real file rather than another symlink,
+    # but for link time we create the symlink libNAME.so -> libNAME.so.V
+
+    case $with_aix_soname,$aix_use_runtimelinking in
+    # AIX (on Power*) has no versioning support, so currently we cannot hardcode correct
+    # soname into executable. Probably we can add versioning support to
+    # collect2, so additional links can be useful in future.
+    aix,yes) # traditional libtool
+      dynamic_linker='AIX unversionable lib.so'
+      # If using run time linking (on AIX 4.2 or later) use lib<name>.so
+      # instead of lib<name>.a to let people know that these are not
+      # typical AIX shared libraries.
+      library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+      ;;
+    aix,no) # traditional AIX only
+      dynamic_linker='AIX lib.a[(]lib.so.V[)]'
+      # We preserve .a as extension for shared libraries through AIX4.2
+      # and later when we are not doing run time linking.
+      library_names_spec='$libname$release.a $libname.a'
+      soname_spec='$libname$release$shared_ext$major'
+      ;;
+    svr4,*) # full svr4 only
+      dynamic_linker="AIX lib.so.V[(]$shared_archive_member_spec.o[)]"
+      library_names_spec='$libname$release$shared_ext$major $libname$shared_ext'
+      # We do not specify a path in Import Files, so LIBPATH fires.
+      shlibpath_overrides_runpath=yes
+      ;;
+    *,yes) # both, prefer svr4
+      dynamic_linker="AIX lib.so.V[(]$shared_archive_member_spec.o[)], 
lib.a[(]lib.so.V[)]"
+      library_names_spec='$libname$release$shared_ext$major 
$libname$shared_ext'
+      # unpreferred sharedlib libNAME.a needs extra handling
+      postinstall_cmds='test -n "$linkname" || linkname="$realname"~func_stripname "" ".so" "$linkname"~$install_shared_prog "$dir/$func_stripname_result.$libext" "$destdir/$func_stripname_result.$libext"~test -z "$tstripme" || test -z "$striplib" || $striplib "$destdir/$func_stripname_result.$libext"'
+      postuninstall_cmds='for n in $library_names $old_library; do :; done~func_stripname "" ".so" "$n"~test "$func_stripname_result" = "$n" || func_append rmfiles " $odir/$func_stripname_result.$libext"'
+      # We do not specify a path in Import Files, so LIBPATH fires.
+      shlibpath_overrides_runpath=yes
+      ;;
+    *,no) # both, prefer aix
+      dynamic_linker="AIX lib.a[(]lib.so.V[)], 
lib.so.V[(]$shared_archive_member_spec.o[)]"
+      library_names_spec='$libname$release.a $libname.a'
+      soname_spec='$libname$release$shared_ext$major'
+      # unpreferred sharedlib libNAME.so.V and symlink libNAME.so need extra handling
+      postinstall_cmds='test -z "$dlname" || $install_shared_prog $dir/$dlname $destdir/$dlname~test -z "$tstripme" || test -z "$striplib" || $striplib $destdir/$dlname~test -n "$linkname" || linkname=$realname~func_stripname "" ".a" "$linkname"~(cd "$destdir" && $LN_S -f $dlname $func_stripname_result.so)'
+      postuninstall_cmds='test -z "$dlname" || func_append rmfiles " $odir/$dlname"~for n in $old_library $library_names; do :; done~func_stripname "" ".a" "$n"~func_append rmfiles " $odir/$func_stripname_result.so"'
+      ;;
+    esac
+    shlibpath_var=LIBPATH
+  fi
+  ;;
+
+amigaos*)
+  case $host_cpu in
+  powerpc)
+    # Since July 2007 AmigaOS4 officially supports .so libraries.
+    # When compiling the executable, add -use-dynld -Lsobjs: to the compileline.
+    library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+    ;;
+  m68k)
+    library_names_spec='$libname.ixlibrary $libname.a'
+    # Create ${libname}_ixlibrary.a entries in /sys/libs.
+    finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([[^/]]*\)\.ixlibrary$%\1%'\''`; $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done'
+    ;;
+  esac
+  ;;
+
+beos*)
+  library_names_spec='$libname$shared_ext'
+  dynamic_linker="$host_os ld.so"
+  shlibpath_var=LIBRARY_PATH
+  ;;
+
+bsdi[[45]]*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_version=no
+  library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+  soname_spec='$libname$release$shared_ext$major'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib 
/usr/local/lib"
+  sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib"
+  # the default ld.so.conf also contains /usr/contrib/lib and
+  # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow
+  # libtool to hard-code these into programs
+  ;;
+
+cygwin* | mingw* | pw32* | cegcc*)
+  version_type=windows
+  shrext_cmds=.dll
+  need_version=no
+  need_lib_prefix=no
+
+  case $GCC,$cc_basename in
+  yes,*)
+    # gcc
+    library_names_spec='$libname.dll.a'
+    # DLL is installed to $(libdir)/../bin by postinstall_cmds
+    postinstall_cmds='base_file=`basename \$file`~
+      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~
+      dldir=$destdir/`dirname \$dlpath`~
+      test -d \$dldir || mkdir -p \$dldir~
+      $install_prog $dir/$dlname \$dldir/$dlname~
+      chmod a+x \$dldir/$dlname~
+      if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then
+        eval '\''$striplib \$dldir/$dlname'\'' || exit \$?;
+      fi'
+    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
+      dlpath=$dir/\$dldll~
+       $RM \$dlpath'
+    shlibpath_overrides_runpath=yes
+
+    case $host_os in
+    cygwin*)
+      # Cygwin DLLs use 'cyg' prefix rather than 'lib'
+      soname_spec='`echo $libname | sed -e 's/^lib/cyg/'``echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext'
+m4_if([$1], [],[
+      sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api"])
+      ;;
+    mingw* | cegcc*)
+      # MinGW DLLs use traditional 'lib' prefix
+      soname_spec='$libname`echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext'
+      ;;
+    pw32*)
+      # pw32 DLLs use 'pw' prefix rather than 'lib'
+      library_names_spec='`echo $libname | sed -e 's/^lib/pw/'``echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext'
+      ;;
+    esac
+    dynamic_linker='Win32 ld.exe'
+    ;;
+
+  *,cl*)
+    # Native MSVC
+    libname_spec='$name'
+    soname_spec='$libname`echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext'
+    library_names_spec='$libname.dll.lib'
+
+    case $build_os in
+    mingw*)
+      sys_lib_search_path_spec=
+      lt_save_ifs=$IFS
+      IFS=';'
+      for lt_path in $LIB
+      do
+        IFS=$lt_save_ifs
+        # Let DOS variable expansion print the short 8.3 style file name.
+        lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
+        sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
+      done
+      IFS=$lt_save_ifs
+      # Convert to MSYS style.
+      sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([[a-zA-Z]]\\):| /\\1|g' -e 's|^ ||'`
+      ;;
+    cygwin*)
+      # Convert to unix form, then to dos form, then back to unix form
+      # but this time dos style (no spaces!) so that the unix form looks
+      # like /cygdrive/c/PROGRA~1:/cygdr...
+      sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
+      sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
+      sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
+      ;;
+    *)
+      sys_lib_search_path_spec=$LIB
+      if $ECHO "$sys_lib_search_path_spec" | [$GREP ';[c-zC-Z]:/' >/dev/null]; then
+        # It is most probably a Windows format PATH.
+        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
+      else
+        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
+      fi
+      # FIXME: find the short name or the path components, as spaces are
+      # common. (e.g. "Program Files" -> "PROGRA~1")
+      ;;
+    esac
+
+    # DLL is installed to $(libdir)/../bin by postinstall_cmds
+    postinstall_cmds='base_file=`basename \$file`~
+      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~
+      dldir=$destdir/`dirname \$dlpath`~
+      test -d \$dldir || mkdir -p \$dldir~
+      $install_prog $dir/$dlname \$dldir/$dlname'
+    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
+      dlpath=$dir/\$dldll~
+       $RM \$dlpath'
+    shlibpath_overrides_runpath=yes
+    dynamic_linker='Win32 link.exe'
+    ;;
+
+  *)
+    # Assume MSVC wrapper
+    library_names_spec='$libname`echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext $libname.lib'
+    dynamic_linker='Win32 ld.exe'
+    ;;
+  esac
+  # FIXME: first we should search . and the directory the executable is in
+  shlibpath_var=PATH
+  ;;
+
+darwin* | rhapsody*)
+  dynamic_linker="$host_os dyld"
+  version_type=darwin
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='$libname$release$major$shared_ext $libname$shared_ext'
+  soname_spec='$libname$release$major$shared_ext'
+  shlibpath_overrides_runpath=yes
+  shlibpath_var=DYLD_LIBRARY_PATH
+  shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
+m4_if([$1], [],[
+  sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"])
+  sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
+  ;;
+
+dgux*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+  soname_spec='$libname$release$shared_ext$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  ;;
+
+freebsd* | dragonfly*)
+  # DragonFly does not have aout.  When/if they implement a new
+  # versioning mechanism, adjust this.
+  if test -x /usr/bin/objformat; then
+    objformat=`/usr/bin/objformat`
+  else
+    case $host_os in
+    freebsd[[23]].*) objformat=aout ;;
+    *) objformat=elf ;;
+    esac
+  fi
+  version_type=freebsd-$objformat
+  case $version_type in
+    freebsd-elf*)
+      library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+      soname_spec='$libname$release$shared_ext$major'
+      need_version=no
+      need_lib_prefix=no
+      ;;
+    freebsd-*)
+      library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix'
+      need_version=yes
+      ;;
+  esac
+  shlibpath_var=LD_LIBRARY_PATH
+  case $host_os in
+  freebsd2.*)
+    shlibpath_overrides_runpath=yes
+    ;;
+  freebsd3.[[01]]* | freebsdelf3.[[01]]*)
+    shlibpath_overrides_runpath=yes
+    hardcode_into_libs=yes
+    ;;
+  freebsd3.[[2-9]]* | freebsdelf3.[[2-9]]* | \
+  freebsd4.[[0-5]] | freebsdelf4.[[0-5]] | freebsd4.1.1 | freebsdelf4.1.1)
+    shlibpath_overrides_runpath=no
+    hardcode_into_libs=yes
+    ;;
+  *) # from 4.6 on, and DragonFly
+    shlibpath_overrides_runpath=yes
+    hardcode_into_libs=yes
+    ;;
+  esac
+  ;;
+
+haiku*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  dynamic_linker="$host_os runtime_loader"
+  library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+  soname_spec='$libname$release$shared_ext$major'
+  shlibpath_var=LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib'
+  hardcode_into_libs=yes
+  ;;
+
+hpux9* | hpux10* | hpux11*)
+  # Give a soname corresponding to the major version so that dld.sl refuses to
+  # link against other versions.
+  version_type=sunos
+  need_lib_prefix=no
+  need_version=no
+  case $host_cpu in
+  ia64*)
+    shrext_cmds='.so'
+    hardcode_into_libs=yes
+    dynamic_linker="$host_os dld.so"
+    shlibpath_var=LD_LIBRARY_PATH
+    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+    library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+    soname_spec='$libname$release$shared_ext$major'
+    if test 32 = "$HPUX_IA64_MODE"; then
+      sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 
/usr/local/lib"
+      sys_lib_dlsearch_path_spec=/usr/lib/hpux32
+    else
+      sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64"
+      sys_lib_dlsearch_path_spec=/usr/lib/hpux64
+    fi
+    ;;
+  hppa*64*)
+    shrext_cmds='.sl'
+    hardcode_into_libs=yes
+    dynamic_linker="$host_os dld.sl"
+    shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH
+    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
+    library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+    soname_spec='$libname$release$shared_ext$major'
+    sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64"
+    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+    ;;
+  *)
+    shrext_cmds='.sl'
+    dynamic_linker="$host_os dld.sl"
+    shlibpath_var=SHLIB_PATH
+    shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH
+    library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+    soname_spec='$libname$release$shared_ext$major'
+    ;;
+  esac
+  # HP-UX runs *really* slowly unless shared libraries are mode 555, ...
+  postinstall_cmds='chmod 555 $lib'
+  # or fails outright, so override atomically:
+  install_override_mode=555
+  ;;
+
+interix[[3-9]]*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+  soname_spec='$libname$release$shared_ext$major'
+  dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+irix5* | irix6* | nonstopux*)
+  case $host_os in
+    nonstopux*) version_type=nonstopux ;;
+    *)
+       if test yes = "$lt_cv_prog_gnu_ld"; then
+               version_type=linux # correct to gnu/linux during the next big refactor
+       else
+               version_type=irix
+       fi ;;
+  esac
+  need_lib_prefix=no
+  need_version=no
+  soname_spec='$libname$release$shared_ext$major'
+  library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$release$shared_ext $libname$shared_ext'
+  case $host_os in
+  irix5* | nonstopux*)
+    libsuff= shlibsuff=
+    ;;
+  *)
+    case $LD in # libtool.m4 will add one of these switches to LD
+    *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ")
+      libsuff= shlibsuff= libmagic=32-bit;;
+    *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ")
+      libsuff=32 shlibsuff=N32 libmagic=N32;;
+    *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ")
+      libsuff=64 shlibsuff=64 libmagic=64-bit;;
+    *) libsuff= shlibsuff= libmagic=never-match;;
+    esac
+    ;;
+  esac
+  shlibpath_var=LD_LIBRARY${shlibsuff}_PATH
+  shlibpath_overrides_runpath=no
+  sys_lib_search_path_spec="/usr/lib$libsuff /lib$libsuff 
/usr/local/lib$libsuff"
+  sys_lib_dlsearch_path_spec="/usr/lib$libsuff /lib$libsuff"
+  hardcode_into_libs=yes
+  ;;
+
+# No shared lib support for Linux oldld, aout, or coff.
+linux*oldld* | linux*aout* | linux*coff*)
+  dynamic_linker=no
+  ;;
+
+linux*android*)
+  version_type=none # Android doesn't support versioned libraries.
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='$libname$release$shared_ext'
+  soname_spec='$libname$release$shared_ext'
+  finish_cmds=
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+
+  # This implies no fast_install, which is unacceptable.
+  # Some rework will be needed to allow for fast_install
+  # before this can be enabled.
+  hardcode_into_libs=yes
+
+  dynamic_linker='Android linker'
+  # Don't embed -rpath directories since the linker doesn't support them.
+  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+  ;;
+
+# This must be glibc/ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+  soname_spec='$libname$release$shared_ext$major'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+
+  # Some binutils ld are patched to set DT_RUNPATH
+  AC_CACHE_VAL([lt_cv_shlibpath_overrides_runpath],
+    [lt_cv_shlibpath_overrides_runpath=no
+    save_LDFLAGS=$LDFLAGS
+    save_libdir=$libdir
+    eval "libdir=/foo; wl=\"$_LT_TAGVAR(lt_prog_compiler_wl, $1)\"; \
+        LDFLAGS=\"\$LDFLAGS $_LT_TAGVAR(hardcode_libdir_flag_spec, $1)\""
+    AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])],
+      [AS_IF([ ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null],
+        [lt_cv_shlibpath_overrides_runpath=yes])])
+    LDFLAGS=$save_LDFLAGS
+    libdir=$save_libdir
+    ])
+  shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath
+
+  # This implies no fast_install, which is unacceptable.
+  # Some rework will be needed to allow for fast_install
+  # before this can be enabled.
+  hardcode_into_libs=yes
+
+  # Ideally, we could use ldconfig to report *all* directories which are
+  # searched for libraries, however this is still not possible.  Aside from not
+  # being certain /sbin/ldconfig is available, command
+  # 'ldconfig -N -X -v | grep ^/' on 64bit Fedora does not report /usr/lib64,
+  # even though it is searched at run-time.  Try to do the best guess by
+  # appending ld.so.conf contents (and includes) to the search path.
+  if test -f /etc/ld.so.conf; then
+    lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \[$]2)); skip = 1; } { if (!skip) print \[$]0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[  ]*hwcap[        ]/d;s/[:,      ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '`
+    sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra"
+  fi
+
+  # We used to test for /lib/ld.so.1 and disable shared libraries on
+  # powerpc, because MkLinux only supported shared libraries with the
+  # GNU dynamic linker.  Since this was broken with cross compilers,
+  # most powerpc-linux boxes support dynamic linking these days and
+  # people can always --disable-shared, the test was removed, and we
+  # assume the GNU/Linux dynamic linker is in use.
+  dynamic_linker='GNU/Linux ld.so'
+  ;;
+
+netbsdelf*-gnu)
+  version_type=linux
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
+  soname_spec='${libname}${release}${shared_ext}$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  dynamic_linker='NetBSD ld.elf_so'
+  ;;
+
+netbsd*)
+  version_type=sunos
+  need_lib_prefix=no
+  need_version=no
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+    library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix'
+    finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+    dynamic_linker='NetBSD (a.out) ld.so'
+  else
+    library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+    soname_spec='$libname$release$shared_ext$major'
+    dynamic_linker='NetBSD ld.elf_so'
+  fi
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  ;;
+
+newsos6)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  ;;
+
+*nto* | *qnx*)
+  version_type=qnx
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+  soname_spec='$libname$release$shared_ext$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  dynamic_linker='ldqnx.so'
+  ;;
+
+openbsd* | bitrig*)
+  version_type=sunos
+  sys_lib_dlsearch_path_spec=/usr/lib
+  need_lib_prefix=no
+  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then
+    need_version=no
+  else
+    need_version=yes
+  fi
+  library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix'
+  finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  ;;
+
+os2*)
+  libname_spec='$name'
+  version_type=windows
+  shrext_cmds=.dll
+  need_version=no
+  need_lib_prefix=no
+  # OS/2 can only load a DLL with a base name of 8 characters or less.
+  soname_spec='`test -n "$os2dllname" && libname="$os2dllname";
+    v=$($ECHO $release$versuffix | tr -d .-);
+    n=$($ECHO $libname | cut -b -$((8 - ${#v})) | tr . _);
+    $ECHO $n$v`$shared_ext'
+  library_names_spec='${libname}_dll.$libext'
+  dynamic_linker='OS/2 ld.exe'
+  shlibpath_var=BEGINLIBPATH
+  sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib"
+  sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+  postinstall_cmds='base_file=`basename \$file`~
+    dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; $ECHO \$dlname'\''`~
+    dldir=$destdir/`dirname \$dlpath`~
+    test -d \$dldir || mkdir -p \$dldir~
+    $install_prog $dir/$dlname \$dldir/$dlname~
+    chmod a+x \$dldir/$dlname~
+    if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then
+      eval '\''$striplib \$dldir/$dlname'\'' || exit \$?;
+    fi'
+  postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; $ECHO \$dlname'\''`~
+    dlpath=$dir/\$dldll~
+    $RM \$dlpath'
+  ;;
+
+osf3* | osf4* | osf5*)
+  version_type=osf
+  need_lib_prefix=no
+  need_version=no
+  soname_spec='$libname$release$shared_ext$major'
+  library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+  shlibpath_var=LD_LIBRARY_PATH
+  sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib"
+  sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+  ;;
+
+rdos*)
+  dynamic_linker=no
+  ;;
+
+solaris*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+  soname_spec='$libname$release$shared_ext$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  # ldd complains unless libraries are executable
+  postinstall_cmds='chmod +x $lib'
+  ;;
+
+sunos4*)
+  version_type=sunos
+  library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix'
+  finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  if test yes = "$with_gnu_ld"; then
+    need_lib_prefix=no
+  fi
+  need_version=yes
+  ;;
+
+sysv4 | sysv4.3*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+  soname_spec='$libname$release$shared_ext$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  case $host_vendor in
+    sni)
+      shlibpath_overrides_runpath=no
+      need_lib_prefix=no
+      runpath_var=LD_RUN_PATH
+      ;;
+    siemens)
+      need_lib_prefix=no
+      ;;
+    motorola)
+      need_lib_prefix=no
+      need_version=no
+      shlibpath_overrides_runpath=no
+      sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib'
+      ;;
+  esac
+  ;;
+
+sysv4*MP*)
+  if test -d /usr/nec; then
+    version_type=linux # correct to gnu/linux during the next big refactor
+    library_names_spec='$libname$shared_ext.$versuffix $libname$shared_ext.$major $libname$shared_ext'
+    soname_spec='$libname$shared_ext.$major'
+    shlibpath_var=LD_LIBRARY_PATH
+  fi
+  ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+  version_type=sco
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext $libname$shared_ext'
+  soname_spec='$libname$release$shared_ext$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=yes
+  hardcode_into_libs=yes
+  if test yes = "$with_gnu_ld"; then
+    sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib'
+  else
+    sys_lib_search_path_spec='/usr/ccs/lib /usr/lib'
+    case $host_os in
+      sco3.2v5*)
+        sys_lib_search_path_spec="$sys_lib_search_path_spec /lib"
+       ;;
+    esac
+  fi
+  sys_lib_dlsearch_path_spec='/usr/lib'
+  ;;
+
+tpf*)
+  # TPF is a cross-target only.  Preferred cross-host = GNU/Linux.
+  version_type=linux # correct to gnu/linux during the next big refactor
+  need_lib_prefix=no
+  need_version=no
+  library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+  shlibpath_var=LD_LIBRARY_PATH
+  shlibpath_overrides_runpath=no
+  hardcode_into_libs=yes
+  ;;
+
+uts4*)
+  version_type=linux # correct to gnu/linux during the next big refactor
+  library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
+  soname_spec='$libname$release$shared_ext$major'
+  shlibpath_var=LD_LIBRARY_PATH
+  ;;
+
+*)
+  dynamic_linker=no
+  ;;
+esac
+AC_MSG_RESULT([$dynamic_linker])
+test no = "$dynamic_linker" && can_build_shared=no
+
+variables_saved_for_relink="PATH $shlibpath_var $runpath_var"
+if test yes = "$GCC"; then
+  variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
+fi
+
+if test set = "${lt_cv_sys_lib_search_path_spec+set}"; then
+  sys_lib_search_path_spec=$lt_cv_sys_lib_search_path_spec
+fi
+
+if test set = "${lt_cv_sys_lib_dlsearch_path_spec+set}"; then
+  sys_lib_dlsearch_path_spec=$lt_cv_sys_lib_dlsearch_path_spec
+fi
+
+# remember unaugmented sys_lib_dlsearch_path content for libtool script decls...
+configure_time_dlsearch_path=$sys_lib_dlsearch_path_spec
+
+# ... but it needs LT_SYS_LIBRARY_PATH munging for other configure-time code
+func_munge_path_list sys_lib_dlsearch_path_spec "$LT_SYS_LIBRARY_PATH"
+
+# to be used as default LT_SYS_LIBRARY_PATH value in generated libtool
+configure_time_lt_sys_library_path=$LT_SYS_LIBRARY_PATH
+
+_LT_DECL([], [variables_saved_for_relink], [1],
+    [Variables whose values should be saved in libtool wrapper scripts and
+    restored at link time])
+_LT_DECL([], [need_lib_prefix], [0],
+    [Do we need the "lib" prefix for modules?])
+_LT_DECL([], [need_version], [0], [Do we need a version for libraries?])
+_LT_DECL([], [version_type], [0], [Library versioning type])
+_LT_DECL([], [runpath_var], [0],  [Shared library runtime path variable])
+_LT_DECL([], [shlibpath_var], [0],[Shared library path variable])
+_LT_DECL([], [shlibpath_overrides_runpath], [0],
+    [Is shlibpath searched before the hard-coded library search path?])
+_LT_DECL([], [libname_spec], [1], [Format of library name prefix])
+_LT_DECL([], [library_names_spec], [1],
+    [[List of archive names.  First name is the real one, the rest are links.
+    The last name is the one that the linker finds with -lNAME]])
+_LT_DECL([], [soname_spec], [1],
+    [[The coded name of the library, if different from the real name]])
+_LT_DECL([], [install_override_mode], [1],
+    [Permission mode override for installation of shared libraries])
+_LT_DECL([], [postinstall_cmds], [2],
+    [Command to use after installation of a shared archive])
+_LT_DECL([], [postuninstall_cmds], [2],
+    [Command to use after uninstallation of a shared archive])
+_LT_DECL([], [finish_cmds], [2],
+    [Commands used to finish a libtool library installation in a directory])
+_LT_DECL([], [finish_eval], [1],
+    [[As "finish_cmds", except a single script fragment to be evaled but
+    not shown]])
+_LT_DECL([], [hardcode_into_libs], [0],
+    [Whether we should hardcode library paths into libraries])
+_LT_DECL([], [sys_lib_search_path_spec], [2],
+    [Compile-time system search path for libraries])
+_LT_DECL([sys_lib_dlsearch_path_spec], [configure_time_dlsearch_path], [2],
+    [Detected run-time system search path for libraries])
+_LT_DECL([], [configure_time_lt_sys_library_path], [2],
+    [Explicit LT_SYS_LIBRARY_PATH set during ./configure time])
+])# _LT_SYS_DYNAMIC_LINKER
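For orientation (not part of the patch itself): the *_spec variables declared above are single-quoted templates that ltmain.sh later eval-expands when naming a shared library. A minimal sketch of the GNU/Linux case for a hypothetical library "libexample", with made-up version values (major=.2, versuffix=.2.1.0):

    libname=libexample release= shared_ext=.so major=.2 versuffix=.2.1.0
    eval echo "$library_names_spec"   # -> libexample.so.2.1.0 libexample.so.2 libexample.so
    eval echo "$soname_spec"          # -> libexample.so.2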
+
+
+# _LT_PATH_TOOL_PREFIX(TOOL)
+# --------------------------
+# find a file program that can recognize shared library
+AC_DEFUN([_LT_PATH_TOOL_PREFIX],
+[m4_require([_LT_DECL_EGREP])dnl
+AC_MSG_CHECKING([for $1])
+AC_CACHE_VAL(lt_cv_path_MAGIC_CMD,
+[case $MAGIC_CMD in
+[[\\/*] |  ?:[\\/]*])
+  lt_cv_path_MAGIC_CMD=$MAGIC_CMD # Let the user override the test with a path.
+  ;;
+*)
+  lt_save_MAGIC_CMD=$MAGIC_CMD
+  lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR
+dnl $ac_dummy forces splitting on constant user-supplied paths.
+dnl POSIX.2 word splitting is done only on the output of word expansions,
+dnl not every word.  This closes a longstanding sh security hole.
+  ac_dummy="m4_if([$2], , $PATH, [$2])"
+  for ac_dir in $ac_dummy; do
+    IFS=$lt_save_ifs
+    test -z "$ac_dir" && ac_dir=.
+    if test -f "$ac_dir/$1"; then
+      lt_cv_path_MAGIC_CMD=$ac_dir/"$1"
+      if test -n "$file_magic_test_file"; then
+       case $deplibs_check_method in
+       "file_magic "*)
+         file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
+         MAGIC_CMD=$lt_cv_path_MAGIC_CMD
+         if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
+           $EGREP "$file_magic_regex" > /dev/null; then
+           :
+         else
+           cat <<_LT_EOF 1>&2
+
+*** Warning: the command libtool uses to detect shared libraries,
+*** $file_magic_cmd, produces output that libtool cannot recognize.
+*** The result is that libtool may fail to recognize shared libraries
+*** as such.  This will affect the creation of libtool libraries that
+*** depend on shared libraries, but programs linked with such libtool
+*** libraries will work regardless of this problem.  Nevertheless, you
+*** may want to report the problem to your system manager and/or to
+*** address@hidden
+
+_LT_EOF
+         fi ;;
+       esac
+      fi
+      break
+    fi
+  done
+  IFS=$lt_save_ifs
+  MAGIC_CMD=$lt_save_MAGIC_CMD
+  ;;
+esac])
+MAGIC_CMD=$lt_cv_path_MAGIC_CMD
+if test -n "$MAGIC_CMD"; then
+  AC_MSG_RESULT($MAGIC_CMD)
+else
+  AC_MSG_RESULT(no)
+fi
+_LT_DECL([], [MAGIC_CMD], [0],
+        [Used to examine libraries when file_magic_cmd begins with "file"])dnl
+])# _LT_PATH_TOOL_PREFIX
+
+# Old name:
+AU_ALIAS([AC_PATH_TOOL_PREFIX], [_LT_PATH_TOOL_PREFIX])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_PATH_TOOL_PREFIX], [])
+
+
+# _LT_PATH_MAGIC
+# --------------
+# find a file program that can recognize a shared library
+m4_defun([_LT_PATH_MAGIC],
+[_LT_PATH_TOOL_PREFIX(${ac_tool_prefix}file, /usr/bin$PATH_SEPARATOR$PATH)
+if test -z "$lt_cv_path_MAGIC_CMD"; then
+  if test -n "$ac_tool_prefix"; then
+    _LT_PATH_TOOL_PREFIX(file, /usr/bin$PATH_SEPARATOR$PATH)
+  else
+    MAGIC_CMD=:
+  fi
+fi
+])# _LT_PATH_MAGIC
+
+
+# LT_PATH_LD
+# ----------
+# find the pathname to the GNU or non-GNU linker
+AC_DEFUN([LT_PATH_LD],
+[AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_PROG_ECHO_BACKSLASH])dnl
+
+AC_ARG_WITH([gnu-ld],
+    [AS_HELP_STRING([--with-gnu-ld],
+       [assume the C compiler uses GNU ld @<:@default=no@:>@])],
+    [test no = "$withval" || with_gnu_ld=yes],
+    [with_gnu_ld=no])dnl
+
+ac_prog=ld
+if test yes = "$GCC"; then
+  # Check if gcc -print-prog-name=ld gives a path.
+  AC_MSG_CHECKING([for ld used by $CC])
+  case $host in
+  *-*-mingw*)
+    # gcc leaves a trailing carriage return, which upsets mingw
+    ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;;
+  *)
+    ac_prog=`($CC -print-prog-name=ld) 2>&5` ;;
+  esac
+  case $ac_prog in
+    # Accept absolute paths.
+    [[\\/]]* | ?:[[\\/]]*)
+      re_direlt='/[[^/]][[^/]]*/\.\./'
+      # Canonicalize the pathname of ld
+      ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'`
+      while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do
+       ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"`
+      done
+      test -z "$LD" && LD=$ac_prog
+      ;;
+  "")
+    # If it fails, then pretend we aren't using GCC.
+    ac_prog=ld
+    ;;
+  *)
+    # If it is relative, then search for the first ld in PATH.
+    with_gnu_ld=unknown
+    ;;
+  esac
+elif test yes = "$with_gnu_ld"; then
+  AC_MSG_CHECKING([for GNU ld])
+else
+  AC_MSG_CHECKING([for non-GNU ld])
+fi
+AC_CACHE_VAL(lt_cv_path_LD,
+[if test -z "$LD"; then
+  lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR
+  for ac_dir in $PATH; do
+    IFS=$lt_save_ifs
+    test -z "$ac_dir" && ac_dir=.
+    if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then
+      lt_cv_path_LD=$ac_dir/$ac_prog
+      # Check to see if the program is GNU ld.  I'd rather use --version,
+      # but apparently some variants of GNU ld only accept -v.
+      # Break only if it was the GNU/non-GNU ld that we prefer.
+      case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in
+      *GNU* | *'with BFD'*)
+       test no != "$with_gnu_ld" && break
+       ;;
+      *)
+       test yes != "$with_gnu_ld" && break
+       ;;
+      esac
+    fi
+  done
+  IFS=$lt_save_ifs
+else
+  lt_cv_path_LD=$LD # Let the user override the test with a path.
+fi])
+LD=$lt_cv_path_LD
+if test -n "$LD"; then
+  AC_MSG_RESULT($LD)
+else
+  AC_MSG_RESULT(no)
+fi
+test -z "$LD" && AC_MSG_ERROR([no acceptable ld found in \$PATH])
+_LT_PATH_LD_GNU
+AC_SUBST([LD])
+
+_LT_TAGDECL([], [LD], [1], [The linker used to build libraries])
+])# LT_PATH_LD
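In normal use LT_PATH_LD is pulled in indirectly by LT_INIT, but it can also be invoked on its own. A hedged configure.ac sketch (the package name and notice text are made up):

    AC_INIT([example], [0.1])
    AC_PROG_CC
    LT_PATH_LD
    dnl LD is now set and AC_SUBSTed; --with-gnu-ld is honoured, and
    dnl _LT_PATH_LD_GNU records whether it is GNU ld in $with_gnu_ld.
    AC_MSG_NOTICE([linker: $LD (GNU ld: $with_gnu_ld)])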
+
+# Old names:
+AU_ALIAS([AM_PROG_LD], [LT_PATH_LD])
+AU_ALIAS([AC_PROG_LD], [LT_PATH_LD])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_PROG_LD], [])
+dnl AC_DEFUN([AC_PROG_LD], [])
+
+
+# _LT_PATH_LD_GNU
+# ---------------
+m4_defun([_LT_PATH_LD_GNU],
+[AC_CACHE_CHECK([if the linker ($LD) is GNU ld], lt_cv_prog_gnu_ld,
+[# I'd rather use --version here, but apparently some GNU lds only accept -v.
+case `$LD -v 2>&1 </dev/null` in
+*GNU* | *'with BFD'*)
+  lt_cv_prog_gnu_ld=yes
+  ;;
+*)
+  lt_cv_prog_gnu_ld=no
+  ;;
+esac])
+with_gnu_ld=$lt_cv_prog_gnu_ld
+])# _LT_PATH_LD_GNU
+
+
+# _LT_CMD_RELOAD
+# --------------
+# find reload flag for linker
+#   -- PORTME Some linkers may need a different reload flag.
+m4_defun([_LT_CMD_RELOAD],
+[AC_CACHE_CHECK([for $LD option to reload object files],
+  lt_cv_ld_reload_flag,
+  [lt_cv_ld_reload_flag='-r'])
+reload_flag=$lt_cv_ld_reload_flag
+case $reload_flag in
+"" | " "*) ;;
+*) reload_flag=" $reload_flag" ;;
+esac
+reload_cmds='$LD$reload_flag -o $output$reload_objs'
+case $host_os in
+  cygwin* | mingw* | pw32* | cegcc*)
+    if test yes != "$GCC"; then
+      reload_cmds=false
+    fi
+    ;;
+  darwin*)
+    if test yes = "$GCC"; then
+      reload_cmds='$LTCC $LTCFLAGS -nostdlib $wl-r -o $output$reload_objs'
+    else
+      reload_cmds='$LD$reload_flag -o $output$reload_objs'
+    fi
+    ;;
+esac
+_LT_TAGDECL([], [reload_flag], [1], [How to create reloadable object files])dnl
+_LT_TAGDECL([], [reload_cmds], [2])dnl
+])# _LT_CMD_RELOAD
+
+
+# _LT_PATH_DD
+# -----------
+# find a working dd
+m4_defun([_LT_PATH_DD],
+[AC_CACHE_CHECK([for a working dd], [ac_cv_path_lt_DD],
+[printf 0123456789abcdef0123456789abcdef >conftest.i
+cat conftest.i conftest.i >conftest2.i
+: ${lt_DD:=$DD}
+AC_PATH_PROGS_FEATURE_CHECK([lt_DD], [dd],
+[if "$ac_path_lt_DD" bs=32 count=1 <conftest2.i >conftest.out 2>/dev/null; then
+  cmp -s conftest.i conftest.out \
+  && ac_cv_path_lt_DD="$ac_path_lt_DD" ac_path_lt_DD_found=:
+fi])
+rm -f conftest.i conftest2.i conftest.out])
+])# _LT_PATH_DD
+
+
+# _LT_CMD_TRUNCATE
+# ----------------
+# find command to truncate a binary pipe
+m4_defun([_LT_CMD_TRUNCATE],
+[m4_require([_LT_PATH_DD])
+AC_CACHE_CHECK([how to truncate binary pipes], [lt_cv_truncate_bin],
+[printf 0123456789abcdef0123456789abcdef >conftest.i
+cat conftest.i conftest.i >conftest2.i
+lt_cv_truncate_bin=
+if "$ac_cv_path_lt_DD" bs=32 count=1 <conftest2.i >conftest.out 2>/dev/null; 
then
+  cmp -s conftest.i conftest.out \
+  && lt_cv_truncate_bin="$ac_cv_path_lt_DD bs=4096 count=1"
+fi
+rm -f conftest.i conftest2.i conftest.out
+test -z "$lt_cv_truncate_bin" && lt_cv_truncate_bin="$SED -e 4q"])
+_LT_DECL([lt_truncate_bin], [lt_cv_truncate_bin], [1],
+  [Command to truncate a binary pipe])
+])# _LT_CMD_TRUNCATE
+
+
+# _LT_CHECK_MAGIC_METHOD
+# ----------------------
+# how to check for library dependencies
+#  -- PORTME fill in with the dynamic library characteristics
+m4_defun([_LT_CHECK_MAGIC_METHOD],
+[m4_require([_LT_DECL_EGREP])
+m4_require([_LT_DECL_OBJDUMP])
+AC_CACHE_CHECK([how to recognize dependent libraries],
+lt_cv_deplibs_check_method,
+[lt_cv_file_magic_cmd='$MAGIC_CMD'
+lt_cv_file_magic_test_file=
+lt_cv_deplibs_check_method='unknown'
+# Need to set the preceding variable on all platforms that support
+# interlibrary dependencies.
+# 'none' -- dependencies not supported.
+# 'unknown' -- same as none, but documents that we really don't know.
+# 'pass_all' -- all dependencies passed with no checks.
+# 'test_compile' -- check by making test program.
+# 'file_magic [[regex]]' -- check by looking for files in library path
+# that responds to the $file_magic_cmd with a given extended regex.
+# If you have 'file' or equivalent on your system and you're not sure
+# whether 'pass_all' will *always* work, you probably want this one.
+
+case $host_os in
+aix[[4-9]]*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+beos*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+bsdi[[45]]*)
+  lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib)'
+  lt_cv_file_magic_cmd='/usr/bin/file -L'
+  lt_cv_file_magic_test_file=/shlib/libc.so
+  ;;
+
+cygwin*)
+  # func_win32_libid is a shell function defined in ltmain.sh
+  lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+  lt_cv_file_magic_cmd='func_win32_libid'
+  ;;
+
+mingw* | pw32*)
+  # Base MSYS/MinGW do not provide the 'file' command needed by
+  # func_win32_libid shell function, so use a weaker test based on 'objdump',
+  # unless we find 'file', for example because we are cross-compiling.
+  if ( file / ) >/dev/null 2>&1; then
+    lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
+    lt_cv_file_magic_cmd='func_win32_libid'
+  else
+    # Keep this pattern in sync with the one in func_win32_libid.
+    lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
+    lt_cv_file_magic_cmd='$OBJDUMP -f'
+  fi
+  ;;
+
+cegcc*)
+  # use the weaker test based on 'objdump'. See mingw*.
+  lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?'
+  lt_cv_file_magic_cmd='$OBJDUMP -f'
+  ;;
+
+darwin* | rhapsody*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+freebsd* | dragonfly*)
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+    case $host_cpu in
+    i*86 )
+      # Not sure whether the presence of OpenBSD here was a mistake.
+      # Let's accept both of them until this is cleared up.
+      lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[[3-9]]86 (compact )?demand paged shared library'
+      lt_cv_file_magic_cmd=/usr/bin/file
+      lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*`
+      ;;
+    esac
+  else
+    lt_cv_deplibs_check_method=pass_all
+  fi
+  ;;
+
+haiku*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+hpux10.20* | hpux11*)
+  lt_cv_file_magic_cmd=/usr/bin/file
+  case $host_cpu in
+  ia64*)
+    lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|ELF-[[0-9]][[0-9]]) shared object file - IA64'
+    lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so
+    ;;
+  hppa*64*)
+    [lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]']
+    lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl
+    ;;
+  *)
+    lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|PA-RISC[[0-9]]\.[[0-9]]) shared library'
+    lt_cv_file_magic_test_file=/usr/lib/libc.sl
+    ;;
+  esac
+  ;;
+
+interix[[3-9]]*)
+  # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here
+  lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|\.a)$'
+  ;;
+
+irix5* | irix6* | nonstopux*)
+  case $LD in
+  *-32|*"-32 ") libmagic=32-bit;;
+  *-n32|*"-n32 ") libmagic=N32;;
+  *-64|*"-64 ") libmagic=64-bit;;
+  *) libmagic=never-match;;
+  esac
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+# This must be glibc/ELF.
+linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+netbsd* | netbsdelf*-gnu)
+  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$'
+  else
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|_pic\.a)$'
+  fi
+  ;;
+
+newos6*)
+  lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (executable|dynamic lib)'
+  lt_cv_file_magic_cmd=/usr/bin/file
+  lt_cv_file_magic_test_file=/usr/lib/libnls.so
+  ;;
+
+*nto* | *qnx*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+openbsd* | bitrig*)
+  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|\.so|_pic\.a)$'
+  else
+    lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$'
+  fi
+  ;;
+
+osf3* | osf4* | osf5*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+rdos*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+solaris*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+
+sysv4 | sysv4.3*)
+  case $host_vendor in
+  motorola)
+    lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib) M[[0-9]][[0-9]]* Version [[0-9]]'
+    lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*`
+    ;;
+  ncr)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  sequent)
+    lt_cv_file_magic_cmd='/bin/file'
+    lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB (shared object|dynamic lib )'
+    ;;
+  sni)
+    lt_cv_file_magic_cmd='/bin/file'
+    lt_cv_deplibs_check_method="file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB dynamic lib"
+    lt_cv_file_magic_test_file=/lib/libc.so
+    ;;
+  siemens)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  pc)
+    lt_cv_deplibs_check_method=pass_all
+    ;;
+  esac
+  ;;
+
+tpf*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+os2*)
+  lt_cv_deplibs_check_method=pass_all
+  ;;
+esac
+])
+
+file_magic_glob=
+want_nocaseglob=no
+if test "$build" = "$host"; then
+  case $host_os in
+  mingw* | pw32*)
+    if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
+      want_nocaseglob=yes
+    else
+      file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[[\1]]\/[[\1]]\/g;/g"`
+    fi
+    ;;
+  esac
+fi
+
+file_magic_cmd=$lt_cv_file_magic_cmd
+deplibs_check_method=$lt_cv_deplibs_check_method
+test -z "$deplibs_check_method" && deplibs_check_method=unknown
+
+_LT_DECL([], [deplibs_check_method], [1],
+    [Method to check whether dependent libraries are shared objects])
+_LT_DECL([], [file_magic_cmd], [1],
+    [Command to use when deplibs_check_method = "file_magic"])
+_LT_DECL([], [file_magic_glob], [1],
+    [How to find potential files when deplibs_check_method = "file_magic"])
+_LT_DECL([], [want_nocaseglob], [1],
+    [Find potential files using nocaseglob when deplibs_check_method = "file_magic"])
+])# _LT_CHECK_MAGIC_METHOD
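At libtool run time a 'file_magic' method amounts to running $file_magic_cmd on each candidate dependency and grepping its output for the stored regex (with the m4 quoting stripped). A rough shell equivalent, reusing the bsdi values from the case statement above purely as an illustration:

    file_magic_cmd='/usr/bin/file -L'
    file_magic_regex='ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib)'
    if $file_magic_cmd /shlib/libc.so 2>/dev/null \
         | $EGREP "$file_magic_regex" >/dev/null; then
      echo "candidate looks like a real shared library"   # accepted as a deplib
    fi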
+
+
+# LT_PATH_NM
+# ----------
+# find the pathname to a BSD- or MS-compatible name lister
+AC_DEFUN([LT_PATH_NM],
+[AC_REQUIRE([AC_PROG_CC])dnl
+AC_CACHE_CHECK([for BSD- or MS-compatible name lister (nm)], lt_cv_path_NM,
+[if test -n "$NM"; then
+  # Let the user override the test.
+  lt_cv_path_NM=$NM
+else
+  lt_nm_to_check=${ac_tool_prefix}nm
+  if test -n "$ac_tool_prefix" && test "$build" = "$host"; then
+    lt_nm_to_check="$lt_nm_to_check nm"
+  fi
+  for lt_tmp_nm in $lt_nm_to_check; do
+    lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR
+    for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do
+      IFS=$lt_save_ifs
+      test -z "$ac_dir" && ac_dir=.
+      tmp_nm=$ac_dir/$lt_tmp_nm
+      if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext"; then
+       # Check to see if the nm accepts a BSD-compat flag.
+       # Adding the 'sed 1q' prevents false positives on HP-UX, which says:
+       #   nm: unknown option "B" ignored
+       # Tru64's nm complains that /dev/null is an invalid object file
+       # MSYS converts /dev/null to NUL, MinGW nm treats NUL as empty
+       case $build_os in
+       mingw*) lt_bad_file=conftest.nm/nofile ;;
+       *) lt_bad_file=/dev/null ;;
+       esac
+       case `"$tmp_nm" -B $lt_bad_file 2>&1 | sed '1q'` in
+       *$lt_bad_file* | *'Invalid file or object type'*)
+         lt_cv_path_NM="$tmp_nm -B"
+         break 2
+         ;;
+       *)
+         case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in
+         */dev/null*)
+           lt_cv_path_NM="$tmp_nm -p"
+           break 2
+           ;;
+         *)
+           lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but
+           continue # so that we can try to find one that supports BSD flags
+           ;;
+         esac
+         ;;
+       esac
+      fi
+    done
+    IFS=$lt_save_ifs
+  done
+  : ${lt_cv_path_NM=no}
+fi])
+if test no != "$lt_cv_path_NM"; then
+  NM=$lt_cv_path_NM
+else
+  # Didn't find any BSD compatible name lister, look for dumpbin.
+  if test -n "$DUMPBIN"; then :
+    # Let the user override the test.
+  else
+    AC_CHECK_TOOLS(DUMPBIN, [dumpbin "link -dump"], :)
+    case `$DUMPBIN -symbols -headers /dev/null 2>&1 | sed '1q'` in
+    *COFF*)
+      DUMPBIN="$DUMPBIN -symbols -headers"
+      ;;
+    *)
+      DUMPBIN=:
+      ;;
+    esac
+  fi
+  AC_SUBST([DUMPBIN])
+  if test : != "$DUMPBIN"; then
+    NM=$DUMPBIN
+  fi
+fi
+test -z "$NM" && NM=nm
+AC_SUBST([NM])
+_LT_DECL([], [NM], [1], [A BSD- or MS-compatible name lister])dnl
+
+AC_CACHE_CHECK([the name lister ($NM) interface], [lt_cv_nm_interface],
+  [lt_cv_nm_interface="BSD nm"
+  echo "int some_variable = 0;" > conftest.$ac_ext
+  (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&AS_MESSAGE_LOG_FD)
+  (eval "$ac_compile" 2>conftest.err)
+  cat conftest.err >&AS_MESSAGE_LOG_FD
+  (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&AS_MESSAGE_LOG_FD)
+  (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out)
+  cat conftest.err >&AS_MESSAGE_LOG_FD
+  (eval echo "\"\$as_me:$LINENO: output\"" >&AS_MESSAGE_LOG_FD)
+  cat conftest.out >&AS_MESSAGE_LOG_FD
+  if $GREP 'External.*some_variable' conftest.out > /dev/null; then
+    lt_cv_nm_interface="MS dumpbin"
+  fi
+  rm -f conftest*])
+])# LT_PATH_NM
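The cached interface check above just compiles a one-variable probe and greps the lister's output. Done by hand it looks roughly like this (hypothetical session; symbol codes vary by toolchain):

    echo 'int some_variable = 0;' > conftest.c
    cc -c conftest.c
    nm conftest.o
    # BSD-style output, e.g.:  0000000000000000 B some_variable
    # An MS dumpbin-style lister instead prints "... External | some_variable",
    # which is what the 'External.*some_variable' grep above keys on.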
+
+# Old names:
+AU_ALIAS([AM_PROG_NM], [LT_PATH_NM])
+AU_ALIAS([AC_PROG_NM], [LT_PATH_NM])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_PROG_NM], [])
+dnl AC_DEFUN([AC_PROG_NM], [])
+
+# _LT_CHECK_SHAREDLIB_FROM_LINKLIB
+# --------------------------------
+# how to determine the name of the shared library
+# associated with a specific link library.
+#  -- PORTME fill in with the dynamic library characteristics
+m4_defun([_LT_CHECK_SHAREDLIB_FROM_LINKLIB],
+[m4_require([_LT_DECL_EGREP])
+m4_require([_LT_DECL_OBJDUMP])
+m4_require([_LT_DECL_DLLTOOL])
+AC_CACHE_CHECK([how to associate runtime and link libraries],
+lt_cv_sharedlib_from_linklib_cmd,
+[lt_cv_sharedlib_from_linklib_cmd='unknown'
+
+case $host_os in
+cygwin* | mingw* | pw32* | cegcc*)
+  # two different shell functions defined in ltmain.sh;
+  # decide which one to use based on capabilities of $DLLTOOL
+  case `$DLLTOOL --help 2>&1` in
+  *--identify-strict*)
+    lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
+    ;;
+  *)
+    lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
+    ;;
+  esac
+  ;;
+*)
+  # fallback: assume linklib IS sharedlib
+  lt_cv_sharedlib_from_linklib_cmd=$ECHO
+  ;;
+esac
+])
+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
+
+_LT_DECL([], [sharedlib_from_linklib_cmd], [1],
+    [Command to associate shared and link libraries])
+])# _LT_CHECK_SHAREDLIB_FROM_LINKLIB
+
+
+# _LT_PATH_MANIFEST_TOOL
+# ----------------------
+# locate the manifest tool
+m4_defun([_LT_PATH_MANIFEST_TOOL],
+[AC_CHECK_TOOL(MANIFEST_TOOL, mt, :)
+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
+AC_CACHE_CHECK([if $MANIFEST_TOOL is a manifest tool], [lt_cv_path_mainfest_tool],
+  [lt_cv_path_mainfest_tool=no
+  echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&AS_MESSAGE_LOG_FD
+  $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
+  cat conftest.err >&AS_MESSAGE_LOG_FD
+  if $GREP 'Manifest Tool' conftest.out > /dev/null; then
+    lt_cv_path_mainfest_tool=yes
+  fi
+  rm -f conftest*])
+if test yes != "$lt_cv_path_mainfest_tool"; then
+  MANIFEST_TOOL=:
+fi
+_LT_DECL([], [MANIFEST_TOOL], [1], [Manifest tool])dnl
+])# _LT_PATH_MANIFEST_TOOL
+
+
+# _LT_DLL_DEF_P([FILE])
+# ---------------------
+# True iff FILE is a Windows DLL '.def' file.
+# Keep in sync with func_dll_def_p in the libtool script
+AC_DEFUN([_LT_DLL_DEF_P],
+[dnl
+  test DEF = "`$SED -n dnl
+    -e '\''s/^[[        ]]*//'\'' dnl Strip leading whitespace
+    -e '\''/^\(;.*\)*$/d'\'' dnl      Delete empty lines and comments
+    -e '\''s/^\(EXPORTS\|LIBRARY\)\([[  ]].*\)*$/DEF/p'\'' dnl
+    -e q dnl                          Only consider the first "real" line
+    $1`" dnl
+])# _LT_DLL_DEF_P
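_LT_DLL_DEF_P keys only on the first non-blank, non-comment line of the file. A hypothetical file it would classify as a .def:

    ; example.def -- made-up module definition
    LIBRARY example
    EXPORTS
      some_function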
+
+
+# LT_LIB_M
+# --------
+# check for math library
+AC_DEFUN([LT_LIB_M],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+LIBM=
+case $host in
+*-*-beos* | *-*-cegcc* | *-*-cygwin* | *-*-haiku* | *-*-pw32* | *-*-darwin*)
+  # These systems don't have libm, or don't need it
+  ;;
+*-ncr-sysv4.3*)
+  AC_CHECK_LIB(mw, _mwvalidcheckl, LIBM=-lmw)
+  AC_CHECK_LIB(m, cos, LIBM="$LIBM -lm")
+  ;;
+*)
+  AC_CHECK_LIB(m, cos, LIBM=-lm)
+  ;;
+esac
+AC_SUBST([LIBM])
+])# LT_LIB_M
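A hedged usage sketch (project and target names are made up): a package that needs the math library portably would call the macro from configure.ac and link against the substituted variable:

    dnl configure.ac
    LT_LIB_M
    # Makefile.am
    example_LDADD = $(LIBM)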
+
+# Old name:
+AU_ALIAS([AC_CHECK_LIBM], [LT_LIB_M])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_CHECK_LIBM], [])
+
+
+# _LT_COMPILER_NO_RTTI([TAGNAME])
+# -------------------------------
+m4_defun([_LT_COMPILER_NO_RTTI],
+[m4_require([_LT_TAG_COMPILER])dnl
+
+_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=
+
+if test yes = "$GCC"; then
+  case $cc_basename in
+  nvcc*)
+    _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -Xcompiler -fno-builtin' ;;
+  *)
+    _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' ;;
+  esac
+
+  _LT_COMPILER_OPTION([if $compiler supports -fno-rtti -fno-exceptions],
+    lt_cv_prog_compiler_rtti_exceptions,
+    [-fno-rtti -fno-exceptions], [],
+    [_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)="$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1) -fno-rtti -fno-exceptions"])
+fi
+_LT_TAGDECL([no_builtin_flag], [lt_prog_compiler_no_builtin_flag], [1],
+       [Compiler flag to turn off builtin functions])
+])# _LT_COMPILER_NO_RTTI
+
+
+# _LT_CMD_GLOBAL_SYMBOLS
+# ----------------------
+m4_defun([_LT_CMD_GLOBAL_SYMBOLS],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_PROG_CC])dnl
+AC_REQUIRE([AC_PROG_AWK])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+AC_REQUIRE([LT_PATH_LD])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+
+# Check for command to grab the raw symbol name followed by C symbol from nm.
+AC_MSG_CHECKING([command to parse $NM output from $compiler object])
+AC_CACHE_VAL([lt_cv_sys_global_symbol_pipe],
+[
+# These are sane defaults that work on at least a few old systems.
+# [They come from Ultrix.  What could be older than Ultrix?!! ;)]
+
+# Character class describing NM global symbol codes.
+symcode='[[BCDEGRST]]'
+
+# Regexp to match symbols that can be accessed directly from C.
+sympat='\([[_A-Za-z]][[_A-Za-z0-9]]*\)'
+
+# Define system-specific variables.
+case $host_os in
+aix*)
+  symcode='[[BCDT]]'
+  ;;
+cygwin* | mingw* | pw32* | cegcc*)
+  symcode='[[ABCDGISTW]]'
+  ;;
+hpux*)
+  if test ia64 = "$host_cpu"; then
+    symcode='[[ABCDEGRST]]'
+  fi
+  ;;
+irix* | nonstopux*)
+  symcode='[[BCDEGRST]]'
+  ;;
+osf*)
+  symcode='[[BCDEGQRST]]'
+  ;;
+solaris*)
+  symcode='[[BDRT]]'
+  ;;
+sco3.2v5*)
+  symcode='[[DT]]'
+  ;;
+sysv4.2uw2*)
+  symcode='[[DT]]'
+  ;;
+sysv5* | sco5v6* | unixware* | OpenUNIX*)
+  symcode='[[ABDT]]'
+  ;;
+sysv4)
+  symcode='[[DFNSTU]]'
+  ;;
+esac
+
+# If we're using GNU nm, then use its standard symbol codes.
+case `$NM -V 2>&1` in
+*GNU* | *'with BFD'*)
+  symcode='[[ABCDGIRSTW]]' ;;
+esac
+
+if test "$lt_cv_nm_interface" = "MS dumpbin"; then
+  # Gets list of data symbols to import.
+  lt_cv_sys_global_symbol_to_import="sed -n -e 's/^I .* \(.*\)$/\1/p'"
+  # Adjust the below global symbol transforms to fixup imported variables.
+  lt_cdecl_hook=" -e 's/^I .* \(.*\)$/extern __declspec(dllimport) char \1;/p'"
+  lt_c_name_hook=" -e 's/^I .* \(.*\)$/  {\"\1\", (void *) 0},/p'"
+  lt_c_name_lib_hook="\
+  -e 's/^I .* \(lib.*\)$/  {\"\1\", (void *) 0},/p'\
+  -e 's/^I .* \(.*\)$/  {\"lib\1\", (void *) 0},/p'"
+else
+  # Disable hooks by default.
+  lt_cv_sys_global_symbol_to_import=
+  lt_cdecl_hook=
+  lt_c_name_hook=
+  lt_c_name_lib_hook=
+fi
+
+# Transform an extracted symbol line into a proper C declaration.
+# Some systems (esp. on ia64) link data and code symbols differently,
+# so use this general approach.
+lt_cv_sys_global_symbol_to_cdecl="sed -n"\
+$lt_cdecl_hook\
+" -e 's/^T .* \(.*\)$/extern int \1();/p'"\
+" -e 's/^$symcode$symcode* .* \(.*\)$/extern char \1;/p'"
+
+# Transform an extracted symbol line into symbol name and symbol address
+lt_cv_sys_global_symbol_to_c_name_address="sed -n"\
+$lt_c_name_hook\
+" -e 's/^: \(.*\) .*$/  {\"\1\", (void *) 0},/p'"\
+" -e 's/^$symcode$symcode* .* \(.*\)$/  {\"\1\", (void *) \&\1},/p'"
+
+# Transform an extracted symbol line into symbol name with lib prefix and
+# symbol address.
+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n"\
+$lt_c_name_lib_hook\
+" -e 's/^: \(.*\) .*$/  {\"\1\", (void *) 0},/p'"\
+" -e 's/^$symcode$symcode* .* \(lib.*\)$/  {\"\1\", (void *) \&\1},/p'"\
+" -e 's/^$symcode$symcode* .* \(.*\)$/  {\"lib\1\", (void *) \&\1},/p'"
+
+# Handle CRLF in mingw tool chain
+opt_cr=
+case $build_os in
+mingw*)
+  opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp
+  ;;
+esac
+
+# Try without a prefix underscore, then with it.
+for ac_symprfx in "" "_"; do
+
+  # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol.
+  symxfrm="\\1 $ac_symprfx\\2 \\2"
+
+  # Write the raw and C identifiers.
+  if test "$lt_cv_nm_interface" = "MS dumpbin"; then
+    # Fake it for dumpbin and say T for any non-static function,
+    # D for any global variable and I for any imported variable.
+    # Also find C++ and __fastcall symbols from MSVC++,
+    # which start with @ or ?.
+    lt_cv_sys_global_symbol_pipe="$AWK ['"\
+"     {last_section=section; section=\$ 3};"\
+"     /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\
+"     /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\
+"     /^ *Symbol name *: /{split(\$ 0,sn,\":\"); si=substr(sn[2],2)};"\
+"     /^ *Type *: code/{print \"T\",si,substr(si,length(prfx))};"\
+"     /^ *Type *: data/{print \"I\",si,substr(si,length(prfx))};"\
+"     \$ 0!~/External *\|/{next};"\
+"     / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\
+"     {if(hide[section]) next};"\
+"     {f=\"D\"}; \$ 0~/\(\).*\|/{f=\"T\"};"\
+"     {split(\$ 0,a,/\||\r/); split(a[2],s)};"\
+"     s[1]~/^[@?]/{print f,s[1],s[1]; next};"\
+"     s[1]~prfx {split(s[1],t,\"@\"); print f,t[1],substr(t[1],length(prfx))}"\
+"     ' prfx=^$ac_symprfx]"
+  else
+    lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[    ]]\($symcode$symcode*\)[[       ]][[    ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
+  fi
+  lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
+
+  # Check to see that the pipe works correctly.
+  pipe_works=no
+
+  rm -f conftest*
+  cat > conftest.$ac_ext <<_LT_EOF
+#ifdef __cplusplus
+extern "C" {
+#endif
+char nm_test_var;
+void nm_test_func(void);
+void nm_test_func(void){}
+#ifdef __cplusplus
+}
+#endif
+int main(){nm_test_var='a';nm_test_func();return(0);}
+_LT_EOF
+
+  if AC_TRY_EVAL(ac_compile); then
+    # Now try to grab the symbols.
+    nlist=conftest.nm
+    $ECHO "$as_me:$LINENO: $NM conftest.$ac_objext | $lt_cv_sys_global_symbol_pipe > $nlist" >&AS_MESSAGE_LOG_FD
+    if eval "$NM" conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist 2>&AS_MESSAGE_LOG_FD && test -s "$nlist"; then
+      # Try sorting and uniquifying the output.
+      if sort "$nlist" | uniq > "$nlist"T; then
+       mv -f "$nlist"T "$nlist"
+      else
+       rm -f "$nlist"T
+      fi
+
+      # Make sure that we snagged all the symbols we need.
+      if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
+       if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
+         cat <<_LT_EOF > conftest.$ac_ext
+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests.  */
+#if defined _WIN32 || defined __CYGWIN__ || defined _WIN32_WCE
+/* DATA imports from DLLs on WIN32 can't be const, because runtime
+   relocations are performed -- see ld's documentation on pseudo-relocs.  */
+# define LT@&t@_DLSYM_CONST
+#elif defined __osf__
+/* This system does not cope well with relocations in const data.  */
+# define LT@&t@_DLSYM_CONST
+#else
+# define LT@&t@_DLSYM_CONST const
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+_LT_EOF
+         # Now generate the symbol file.
+         eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext'
+
+         cat <<_LT_EOF >> conftest.$ac_ext
+
+/* The mapping between symbol names and symbols.  */
+LT@&t@_DLSYM_CONST struct {
+  const char *name;
+  void       *address;
+}
+lt__PROGRAM__LTX_preloaded_symbols[[]] =
+{
+  { "@PROGRAM@", (void *) 0 },
+_LT_EOF
+         $SED "s/^$symcode$symcode* .* \(.*\)$/  {\"\1\", (void *) \&\1},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext
+         cat <<\_LT_EOF >> conftest.$ac_ext
+  {0, (void *) 0}
+};
+
+/* This works around a problem in FreeBSD linker */
+#ifdef FREEBSD_WORKAROUND
+static const void *lt_preloaded_setup() {
+  return lt__PROGRAM__LTX_preloaded_symbols;
+}
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+_LT_EOF
+         # Now try linking the two files.
+         mv conftest.$ac_objext conftstm.$ac_objext
+         lt_globsym_save_LIBS=$LIBS
+         lt_globsym_save_CFLAGS=$CFLAGS
+         LIBS=conftstm.$ac_objext
+         CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)"
+         if AC_TRY_EVAL(ac_link) && test -s conftest$ac_exeext; then
+           pipe_works=yes
+         fi
+         LIBS=$lt_globsym_save_LIBS
+         CFLAGS=$lt_globsym_save_CFLAGS
+       else
+         echo "cannot find nm_test_func in $nlist" >&AS_MESSAGE_LOG_FD
+       fi
+      else
+       echo "cannot find nm_test_var in $nlist" >&AS_MESSAGE_LOG_FD
+      fi
+    else
+      echo "cannot run $lt_cv_sys_global_symbol_pipe" >&AS_MESSAGE_LOG_FD
+    fi
+  else
+    echo "$progname: failed program was:" >&AS_MESSAGE_LOG_FD
+    cat conftest.$ac_ext >&5
+  fi
+  rm -rf conftest* conftst*
+
+  # Do not use the global_symbol_pipe unless it works.
+  if test yes = "$pipe_works"; then
+    break
+  else
+    lt_cv_sys_global_symbol_pipe=
+  fi
+done
+])
+if test -z "$lt_cv_sys_global_symbol_pipe"; then
+  lt_cv_sys_global_symbol_to_cdecl=
+fi
+if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then
+  AC_MSG_RESULT(failed)
+else
+  AC_MSG_RESULT(ok)
+fi
+
+# Response file support.
+if test "$lt_cv_nm_interface" = "MS dumpbin"; then
+  nm_file_list_spec='@'
+elif $NM --help 2>/dev/null | grep '[[@]]FILE' >/dev/null; then
+  nm_file_list_spec='@'
+fi
+
+_LT_DECL([global_symbol_pipe], [lt_cv_sys_global_symbol_pipe], [1],
+    [Take the output of nm and produce a listing of raw symbols and C names])
+_LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1],
+    [Transform the output of nm in a proper C declaration])
+_LT_DECL([global_symbol_to_import], [lt_cv_sys_global_symbol_to_import], [1],
+    [Transform the output of nm into a list of symbols to manually relocate])
+_LT_DECL([global_symbol_to_c_name_address],
+    [lt_cv_sys_global_symbol_to_c_name_address], [1],
+    [Transform the output of nm in a C name address pair])
+_LT_DECL([global_symbol_to_c_name_address_lib_prefix],
+    [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1],
+    [Transform the output of nm in a C name address pair when lib prefix is needed])
+_LT_DECL([nm_interface], [lt_cv_nm_interface], [1],
+    [The name lister interface])
+_LT_DECL([], [nm_file_list_spec], [1],
+    [Specify filename containing input files for $NM])
+]) # _LT_CMD_GLOBAL_SYMBOLS
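Concretely, the contract of the cached pipe is: "$NM conftest.o | $lt_cv_sys_global_symbol_pipe" emits "<code> <raw symbol> <C name>" lines, for example (hypothetical BSD-nm output for the probe compiled above):

    D nm_test_var nm_test_var
    T nm_test_func nm_test_func

global_symbol_to_cdecl then rewrites these to "extern char nm_test_var;" and "extern int nm_test_func();", and global_symbol_to_c_name_address to entries such as {"nm_test_var", (void *) &nm_test_var},.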
+
+
+# _LT_COMPILER_PIC([TAGNAME])
+# ---------------------------
+m4_defun([_LT_COMPILER_PIC],
+[m4_require([_LT_TAG_COMPILER])dnl
+_LT_TAGVAR(lt_prog_compiler_wl, $1)=
+_LT_TAGVAR(lt_prog_compiler_pic, $1)=
+_LT_TAGVAR(lt_prog_compiler_static, $1)=
+
+m4_if([$1], [CXX], [
+  # C++ specific cases for pic, static, wl, etc.
+  if test yes = "$GXX"; then
+    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+    _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+
+    case $host_os in
+    aix*)
+      # All AIX code is PIC.
+      if test ia64 = "$host_cpu"; then
+       # AIX 5 now supports IA64 processor
+       _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      fi
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+        ;;
+      m68k)
+            # FIXME: we need at least 68020 code to build shared libraries, but
+            # adding the '-m68020' flag to GCC prevents building anything better,
+            # like '-m68040'.
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4'
+        ;;
+      esac
+      ;;
+
+    beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
+      # PIC is the default for these OSes.
+      ;;
+    mingw* | cygwin* | os2* | pw32* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a dll (and should export symbols for example).
+      # Although the cygwin gcc ignores -fPIC, still need this for old-style
+      # (--disable-auto-import) libraries
+      m4_if([$1], [GCJ], [],
+       [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+      case $host_os in
+      os2*)
+       _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-static'
+       ;;
+      esac
+      ;;
+    darwin* | rhapsody*)
+      # PIC is the default on this platform
+      # Common symbols not allowed in MH_DYLIB files
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common'
+      ;;
+    *djgpp*)
+      # DJGPP does not support shared libraries at all
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+      ;;
+    haiku*)
+      # PIC is the default for Haiku.
+      # The "-static" flag exists, but is broken.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)=
+      ;;
+    interix[[3-9]]*)
+      # Interix 3.x gcc -fpic/-fPIC options generate broken code.
+      # Instead, we relocate shared libraries at runtime.
+      ;;
+    sysv4*MP*)
+      if test -d /usr/nec; then
+       _LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic
+      fi
+      ;;
+    hpux*)
+      # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
+      # PA HP-UX.  On IA64 HP-UX, PIC is the default but the pic flag
+      # sets the default TLS model and affects inlining.
+      case $host_cpu in
+      hppa*64*)
+       ;;
+      *)
+       _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+       ;;
+      esac
+      ;;
+    *qnx* | *nto*)
+      # QNX uses GNU C++, but need to define -shared option too, otherwise
+      # it will coredump.
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+      ;;
+    *)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+      ;;
+    esac
+  else
+    case $host_os in
+      aix[[4-9]]*)
+       # All AIX code is PIC.
+       if test ia64 = "$host_cpu"; then
+         # AIX 5 now supports IA64 processor
+         _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+       else
+         _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp'
+       fi
+       ;;
+      chorus*)
+       case $cc_basename in
+       cxch68*)
+         # Green Hills C++ Compiler
+         # _LT_TAGVAR(lt_prog_compiler_static, $1)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a"
+         ;;
+       esac
+       ;;
+      mingw* | cygwin* | os2* | pw32* | cegcc*)
+       # This hack is so that the source file can tell whether it is being
+       # built for inclusion in a dll (and should export symbols for example).
+       m4_if([$1], [GCJ], [],
+         [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+       ;;
+      dgux*)
+       case $cc_basename in
+         ec++*)
+           _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+           ;;
+         ghcx*)
+           # Green Hills C++ Compiler
+           _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+           ;;
+         *)
+           ;;
+       esac
+       ;;
+      freebsd* | dragonfly*)
+       # FreeBSD uses GNU C++
+       ;;
+      hpux9* | hpux10* | hpux11*)
+       case $cc_basename in
+         CC*)
+           _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+           _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-a ${wl}archive'
+           if test ia64 != "$host_cpu"; then
+             _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+           fi
+           ;;
+         aCC*)
+           _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+           _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-a ${wl}archive'
+           case $host_cpu in
+           hppa*64*|ia64*)
+             # +Z the default
+             ;;
+           *)
+             _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+             ;;
+           esac
+           ;;
+         *)
+           ;;
+       esac
+       ;;
+      interix*)
+       # This is c89, which is MS Visual C++ (no shared libs)
+       # Anyone wants to do a port?
+       ;;
+      irix5* | irix6* | nonstopux*)
+       case $cc_basename in
+         CC*)
+           _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+           _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+           # CC pic flag -KPIC is the default.
+           ;;
+         *)
+           ;;
+       esac
+       ;;
+      linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*)
+       case $cc_basename in
+         KCC*)
+           # KAI C++ Compiler
+           _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,'
+           _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+           ;;
+         ecpc* )
+           # old Intel C++ for x86_64, which still supported -KPIC.
+           _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+           _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+           _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+           ;;
+         icpc* )
+           # Intel C++, used to be incompatible with GCC.
+           # ICC 10 doesn't accept -KPIC any more.
+           _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+           _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+           _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+           ;;
+         pgCC* | pgcpp*)
+           # Portland Group C++ compiler
+           _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+           _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
+           _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+           ;;
+         cxx*)
+           # Compaq C++
+           # Make sure the PIC flag is empty.  It appears that all Alpha
+           # Linux and Compaq Tru64 Unix objects are PIC.
+           _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+           _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+           ;;
+         xlc* | xlC* | bgxl[[cC]]* | mpixl[[cC]]*)
+           # IBM XL 8.0, 9.0 on PPC and BlueGene
+           _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+           _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic'
+           _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink'
+           ;;
+         *)
+           case `$CC -V 2>&1 | sed 5q` in
+           *Sun\ C*)
+             # Sun C++ 5.9
+             _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+             _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+             _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+             ;;
+           esac
+           ;;
+       esac
+       ;;
+      lynxos*)
+       ;;
+      m88k*)
+       ;;
+      mvs*)
+       case $cc_basename in
+         cxx*)
+           _LT_TAGVAR(lt_prog_compiler_pic, $1)='-W c,exportall'
+           ;;
+         *)
+           ;;
+       esac
+       ;;
+      netbsd* | netbsdelf*-gnu)
+       ;;
+      *qnx* | *nto*)
+        # QNX uses GNU C++, but need to define -shared option too, otherwise
+        # it will coredump.
+        _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+        ;;
+      osf3* | osf4* | osf5*)
+       case $cc_basename in
+         KCC*)
+           _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,'
+           ;;
+         RCC*)
+           # Rational C++ 2.4.1
+           _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+           ;;
+         cxx*)
+           # Digital/Compaq C++
+           _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+           # Make sure the PIC flag is empty.  It appears that all Alpha
+           # Linux and Compaq Tru64 Unix objects are PIC.
+           _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+           _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+           ;;
+         *)
+           ;;
+       esac
+       ;;
+      psos*)
+       ;;
+      solaris*)
+       case $cc_basename in
+         CC* | sunCC*)
+           # Sun C++ 4.2, 5.x and Centerline C++
+           _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+           _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+           _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+           ;;
+         gcx*)
+           # Green Hills C++ Compiler
+           _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+           ;;
+         *)
+           ;;
+       esac
+       ;;
+      sunos4*)
+       case $cc_basename in
+         CC*)
+           # Sun C++ 4.x
+           _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+           _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+           ;;
+         lcc*)
+           # Lucid
+           _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+           ;;
+         *)
+           ;;
+       esac
+       ;;
+      sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
+       case $cc_basename in
+         CC*)
+           _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+           _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+           _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+           ;;
+       esac
+       ;;
+      tandem*)
+       case $cc_basename in
+         NCC*)
+           # NonStop-UX NCC 3.20
+           _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+           ;;
+         *)
+           ;;
+       esac
+       ;;
+      vxworks*)
+       ;;
+      *)
+       _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+       ;;
+    esac
+  fi
+],
+[
+  if test yes = "$GCC"; then
+    _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+    _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+
+    case $host_os in
+      aix*)
+      # All AIX code is PIC.
+      if test ia64 = "$host_cpu"; then
+       # AIX 5 now supports IA64 processor
+       _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      fi
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+        ;;
+      m68k)
+            # FIXME: we need at least 68020 code to build shared libraries, but
+            # adding the '-m68020' flag to GCC prevents building anything better,
+            # like '-m68040'.
+            _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 
-malways-restore-a4'
+        ;;
+      esac
+      ;;
+
+    beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
+      # PIC is the default for these OSes.
+      ;;
+
+    mingw* | cygwin* | pw32* | os2* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a dll (and should export symbols for example).
+      # Although the cygwin gcc ignores -fPIC, still need this for old-style
+      # (--disable-auto-import) libraries
+      m4_if([$1], [GCJ], [],
+       [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+      case $host_os in
+      os2*)
+       _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-static'
+       ;;
+      esac
+      ;;
+
+    darwin* | rhapsody*)
+      # PIC is the default on this platform
+      # Common symbols not allowed in MH_DYLIB files
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common'
+      ;;
+
+    haiku*)
+      # PIC is the default for Haiku.
+      # The "-static" flag exists, but is broken.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)=
+      ;;
+
+    hpux*)
+      # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
+      # PA HP-UX.  On IA64 HP-UX, PIC is the default but the pic flag
+      # sets the default TLS model and affects inlining.
+      case $host_cpu in
+      hppa*64*)
+       # +Z the default
+       ;;
+      *)
+       _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+       ;;
+      esac
+      ;;
+
+    interix[[3-9]]*)
+      # Interix 3.x gcc -fpic/-fPIC options generate broken code.
+      # Instead, we relocate shared libraries at runtime.
+      ;;
+
+    msdosdjgpp*)
+      # Just because we use GCC doesn't mean we suddenly get shared libraries
+      # on systems that don't support them.
+      _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+      enable_shared=no
+      ;;
+
+    *nto* | *qnx*)
+      # QNX uses GNU C++, but need to define -shared option too, otherwise
+      # it will coredump.
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec; then
+       _LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic
+      fi
+      ;;
+
+    *)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+      ;;
+    esac
+
+    case $cc_basename in
+    nvcc*) # Cuda Compiler Driver 2.2
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Xlinker '
+      if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then
+        _LT_TAGVAR(lt_prog_compiler_pic, $1)="-Xcompiler $_LT_TAGVAR(lt_prog_compiler_pic, $1)"
+      fi
+      ;;
+    esac
+  else
+    # PORTME Check for flag to pass linker flags through the system compiler.
+    case $host_os in
+    aix*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      if test ia64 = "$host_cpu"; then
+       # AIX 5 now supports IA64 processor
+       _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      else
+       _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp'
+      fi
+      ;;
+
+    darwin* | rhapsody*)
+      # PIC is the default on this platform
+      # Common symbols not allowed in MH_DYLIB files
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common'
+      case $cc_basename in
+      nagfor*)
+        # NAG Fortran compiler
+        _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,'
+        _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+        _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+        ;;
+      esac
+      ;;
+
+    mingw* | cygwin* | pw32* | os2* | cegcc*)
+      # This hack is so that the source file can tell whether it is being
+      # built for inclusion in a dll (and should export symbols for example).
+      m4_if([$1], [GCJ], [],
+       [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT'])
+      case $host_os in
+      os2*)
+       _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-static'
+       ;;
+      esac
+      ;;
+
+    hpux9* | hpux10* | hpux11*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but
+      # not for PA HP-UX.
+      case $host_cpu in
+      hppa*64*|ia64*)
+       # +Z the default
+       ;;
+      *)
+       _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z'
+       ;;
+      esac
+      # Is there a better lt_prog_compiler_static that works with the bundled CC?
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-a ${wl}archive'
+      ;;
+
+    irix5* | irix6* | nonstopux*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      # PIC (with -KPIC) is the default.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+      ;;
+
+    linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*)
+      case $cc_basename in
+      # old Intel for x86_64, which still supported -KPIC.
+      ecc*)
+       _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+       _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+       _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+        ;;
+      # icc used to be incompatible with GCC.
+      # ICC 10 doesn't accept -KPIC any more.
+      icc* | ifort*)
+       _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+       _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+       _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+        ;;
+      # Lahey Fortran 8.1.
+      lf95*)
+       _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+       _LT_TAGVAR(lt_prog_compiler_pic, $1)='--shared'
+       _LT_TAGVAR(lt_prog_compiler_static, $1)='--static'
+       ;;
+      nagfor*)
+       # NAG Fortran compiler
+       _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,'
+       _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+       _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+       ;;
+      tcc*)
+       # Fabrice Bellard et al's Tiny C Compiler
+       _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+       _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+       _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+       ;;
+      pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
+        # Portland Group compilers (*not* the Pentium gcc compiler,
+       # which looks to be a dead project)
+       _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+       _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
+       _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+        ;;
+      ccc*)
+        _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+        # All Alpha code is PIC.
+        _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+        ;;
+      xl* | bgxl* | bgf* | mpixl*)
+       # IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene
+       _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+       _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic'
+       _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink'
+       ;;
+      *)
+       case `$CC -V 2>&1 | sed 5q` in
+       *Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [[1-7]].* | *Sun*Fortran*\ 8.[[0-3]]*)
+         # Sun Fortran 8.3 passes all unrecognized flags to the linker
+         _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+         _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+         _LT_TAGVAR(lt_prog_compiler_wl, $1)=''
+         ;;
+       *Sun\ F* | *Sun*Fortran*)
+         _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+         _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+         _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+         ;;
+       *Sun\ C*)
+         # Sun C 5.9
+         _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+         _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+         _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+         ;;
+        *Intel*\ [[CF]]*Compiler*)
+         _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+         _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC'
+         _LT_TAGVAR(lt_prog_compiler_static, $1)='-static'
+         ;;
+       *Portland\ Group*)
+         _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+         _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic'
+         _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+         ;;
+       esac
+       ;;
+      esac
+      ;;
+
+    newsos6)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    *nto* | *qnx*)
+      # QNX uses GNU C++, but need to define -shared option too, otherwise
+      # it will coredump.
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared'
+      ;;
+
+    osf3* | osf4* | osf5*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      # All OSF/1 code is PIC.
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+      ;;
+
+    rdos*)
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared'
+      ;;
+
+    solaris*)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      case $cc_basename in
+      f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
+       _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ';;
+      *)
+       _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,';;
+      esac
+      ;;
+
+    sunos4*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld '
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    sysv4 | sysv4.2uw2* | sysv4.3*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec; then
+       _LT_TAGVAR(lt_prog_compiler_pic, $1)='-Kconform_pic'
+       _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      fi
+      ;;
+
+    sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    unicos*)
+      _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,'
+      _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+      ;;
+
+    uts4*)
+      _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic'
+      _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic'
+      ;;
+
+    *)
+      _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no
+      ;;
+    esac
+  fi
+])
+case $host_os in
+  # For platforms that do not support PIC, -DPIC is meaningless:
+  *djgpp*)
+    _LT_TAGVAR(lt_prog_compiler_pic, $1)=
+    ;;
+  *)
+    _LT_TAGVAR(lt_prog_compiler_pic, $1)="$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])"
+    ;;
+esac
+
+AC_CACHE_CHECK([for $compiler option to produce PIC],
+  [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)],
+  [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_prog_compiler_pic, $1)])
+_LT_TAGVAR(lt_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)
+
+#
+# Check to make sure the PIC flag actually works.
+#
+if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then
+  _LT_COMPILER_OPTION([if $compiler PIC flag $_LT_TAGVAR(lt_prog_compiler_pic, $1) works],
+    [_LT_TAGVAR(lt_cv_prog_compiler_pic_works, $1)],
+    [$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])], [],
+    [case $_LT_TAGVAR(lt_prog_compiler_pic, $1) in
+     "" | " "*) ;;
+     *) _LT_TAGVAR(lt_prog_compiler_pic, $1)=" $_LT_TAGVAR(lt_prog_compiler_pic, $1)" ;;
+     esac],
+    [_LT_TAGVAR(lt_prog_compiler_pic, $1)=
+     _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no])
+fi
+_LT_TAGDECL([pic_flag], [lt_prog_compiler_pic], [1],
+       [Additional compiler flags for building library objects])
+
+_LT_TAGDECL([wl], [lt_prog_compiler_wl], [1],
+       [How to pass a linker flag through the compiler])
+#
+# Check to make sure the static flag actually works.
+#
+wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1) eval lt_tmp_static_flag=\"$_LT_TAGVAR(lt_prog_compiler_static, $1)\"
+_LT_LINKER_OPTION([if $compiler static flag $lt_tmp_static_flag works],
+  _LT_TAGVAR(lt_cv_prog_compiler_static_works, $1),
+  $lt_tmp_static_flag,
+  [],
+  [_LT_TAGVAR(lt_prog_compiler_static, $1)=])
+_LT_TAGDECL([link_static_flag], [lt_prog_compiler_static], [1],
+       [Compiler flag to prevent dynamic linking])
+])# _LT_COMPILER_PIC
+
+
+# _LT_LINKER_SHLIBS([TAGNAME])
+# ----------------------------
+# See if the linker supports building shared libraries.
+m4_defun([_LT_LINKER_SHLIBS],
+[AC_REQUIRE([LT_PATH_LD])dnl
+AC_REQUIRE([LT_PATH_NM])dnl
+m4_require([_LT_PATH_MANIFEST_TOOL])dnl
+m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_DECL_SED])dnl
+m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl
+m4_require([_LT_TAG_COMPILER])dnl
+AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries])
+m4_if([$1], [CXX], [
+  _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+  _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
+  case $host_os in
+  aix[[4-9]]*)
+    # If we're using GNU nm, then we don't want the "-C" option.
+    # -C means demangle to GNU nm, but means don't demangle to AIX nm.
+    # Without the "-l" option, or with the "-B" option, AIX nm treats
+    # weak defined symbols like other global defined symbols, whereas
+    # GNU nm marks them as "W".
+    # While the 'weak' keyword is ignored in the Export File, we need
+    # it in the Import File for the 'aix-soname' feature, so we have
+    # to replace the "-B" option with "-P" for AIX nm.
+    if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+      _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols'
+    else
+      _LT_TAGVAR(export_symbols_cmds, $1)='`func_echo_all $NM | $SED -e '\''s/B\([[^B]]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && ([substr](\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols'
+    fi
+    ;;
+  pw32*)
+    _LT_TAGVAR(export_symbols_cmds, $1)=$ltdll_cmds
+    ;;
+  cygwin* | mingw* | cegcc*)
+    case $cc_basename in
+    cl*)
+      _LT_TAGVAR(exclude_expsyms, $1)='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*'
+      ;;
+    *)
+      _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
+      _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname']
+      ;;
+    esac
+    ;;
+  linux* | k*bsd*-gnu | gnu*)
+    _LT_TAGVAR(link_all_deplibs, $1)=no
+    ;;
+  *)
+    _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+    ;;
+  esac
+], [
+  runpath_var=
+  _LT_TAGVAR(allow_undefined_flag, $1)=
+  _LT_TAGVAR(always_export_symbols, $1)=no
+  _LT_TAGVAR(archive_cmds, $1)=
+  _LT_TAGVAR(archive_expsym_cmds, $1)=
+  _LT_TAGVAR(compiler_needs_object, $1)=no
+  _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+  _LT_TAGVAR(export_dynamic_flag_spec, $1)=
+  _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
+  _LT_TAGVAR(hardcode_automatic, $1)=no
+  _LT_TAGVAR(hardcode_direct, $1)=no
+  _LT_TAGVAR(hardcode_direct_absolute, $1)=no
+  _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+  _LT_TAGVAR(hardcode_libdir_separator, $1)=
+  _LT_TAGVAR(hardcode_minus_L, $1)=no
+  _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+  _LT_TAGVAR(inherit_rpath, $1)=no
+  _LT_TAGVAR(link_all_deplibs, $1)=unknown
+  _LT_TAGVAR(module_cmds, $1)=
+  _LT_TAGVAR(module_expsym_cmds, $1)=
+  _LT_TAGVAR(old_archive_from_new_cmds, $1)=
+  _LT_TAGVAR(old_archive_from_expsyms_cmds, $1)=
+  _LT_TAGVAR(thread_safe_flag_spec, $1)=
+  _LT_TAGVAR(whole_archive_flag_spec, $1)=
+  # include_expsyms should be a list of space-separated symbols to be *always*
+  # included in the symbol list
+  _LT_TAGVAR(include_expsyms, $1)=
+  # exclude_expsyms can be an extended regexp of symbols to exclude
+  # it will be wrapped by ' (' and ')$', so one must not match beginning or
+  # end of line.  Example: 'a|bc|.*d.*' will exclude the symbols 'a' and 'bc',
+  # as well as any symbol that contains 'd'.
+  _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*']
+  # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out
+  # platforms (ab)use it in PIC code, but their linkers get confused if
+  # the symbol is explicitly referenced.  Since portable code cannot
+  # rely on this symbol name, it's probably fine to never include it in
+  # preloaded symbol tables.
+  # Exclude shared library initialization/finalization symbols.
+dnl Note also adjust exclude_expsyms for C++ above.
+  extract_expsyms_cmds=
+
+  case $host_os in
+  cygwin* | mingw* | pw32* | cegcc*)
+    # FIXME: the MSVC++ port hasn't been tested in a loooong time
+    # When not using gcc, we currently assume that we are using
+    # Microsoft Visual C++.
+    if test yes != "$GCC"; then
+      with_gnu_ld=no
+    fi
+    ;;
+  interix*)
+    # we just hope/assume this is gcc and not c89 (= MSVC++)
+    with_gnu_ld=yes
+    ;;
+  openbsd* | bitrig*)
+    with_gnu_ld=no
+    ;;
+  linux* | k*bsd*-gnu | gnu*)
+    _LT_TAGVAR(link_all_deplibs, $1)=no
+    ;;
+  esac
+
+  _LT_TAGVAR(ld_shlibs, $1)=yes
+
+  # On some targets, GNU ld is compatible enough with the native linker
+  # that we're better off using the native interface for both.
+  lt_use_gnu_ld_interface=no
+  if test yes = "$with_gnu_ld"; then
+    case $host_os in
+      aix*)
+       # The AIX port of GNU ld has always aspired to compatibility
+       # with the native linker.  However, as the warning in the GNU ld
+       # block says, versions before 2.19.5* couldn't really create working
+       # shared libraries, regardless of the interface used.
+       case `$LD -v 2>&1` in
+         *\ \(GNU\ Binutils\)\ 2.19.5*) ;;
+         *\ \(GNU\ Binutils\)\ 2.[[2-9]]*) ;;
+         *\ \(GNU\ Binutils\)\ [[3-9]]*) ;;
+         *)
+           lt_use_gnu_ld_interface=yes
+           ;;
+       esac
+       ;;
+      *)
+       lt_use_gnu_ld_interface=yes
+       ;;
+    esac
+  fi
+
+  if test yes = "$lt_use_gnu_ld_interface"; then
+    # If archive_cmds runs LD, not CC, wlarc should be empty
+    wlarc='$wl'
+
+    # Set some defaults for GNU ld with shared library support. These
+    # are reset later if shared libraries are not supported. Putting them
+    # here allows them to be overridden if necessary.
+    runpath_var=LD_RUN_PATH
+    _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+    _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic'
+    # ancient GNU ld didn't support --whole-archive et. al.
+    if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then
+      _LT_TAGVAR(whole_archive_flag_spec, $1)=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive'
+    else
+      _LT_TAGVAR(whole_archive_flag_spec, $1)=
+    fi
+    supports_anon_versioning=no
+    case `$LD -v | $SED -e 's/([^)]\+)\s\+//' 2>&1` in
+      *GNU\ gold*) supports_anon_versioning=yes ;;
+      *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.10.*) ;; # catch versions < 2.11
+      *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ...
+      *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ...
+      *\ 2.11.*) ;; # other 2.11 versions
+      *) supports_anon_versioning=yes ;;
+    esac
+
+    # See if GNU ld supports shared libraries.
+    case $host_os in
+    aix[[3-9]]*)
+      # On AIX/PPC, the GNU linker is very broken
+      if test ia64 != "$host_cpu"; then
+       _LT_TAGVAR(ld_shlibs, $1)=no
+       cat <<_LT_EOF 1>&2
+
+*** Warning: the GNU linker, at least up to release 2.19, is reported
+*** to be unable to reliably create shared libraries on AIX.
+*** Therefore, libtool is disabling shared libraries support.  If you
+*** really care for shared libraries, you may want to install binutils
+*** 2.20 or above, or modify your PATH so that a non-GNU linker is found.
+*** You will then need to restart the configuration process.
+
+_LT_EOF
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+            _LT_TAGVAR(archive_expsym_cmds, $1)=''
+        ;;
+      m68k)
+            _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+            _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+            _LT_TAGVAR(hardcode_minus_L, $1)=yes
+        ;;
+      esac
+      ;;
+
+    beos*)
+      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+       _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+       # Joseph Beckenbach <address@hidden> says some releases of gcc
+       # support --undefined.  This deserves some investigation.  FIXME
+       _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+      else
+       _LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    cygwin* | mingw* | pw32* | cegcc*)
+      # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
+      # as there is no search path for DLLs.
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-all-symbols'
+      _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+      _LT_TAGVAR(always_export_symbols, $1)=no
+      _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+      _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols'
+      _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname']
+
+      if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+       # If the export-symbols file already is a .def file, use it as
+       # is; otherwise, prepend EXPORTS...
+       _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then
+          cp $export_symbols $output_objdir/$soname.def;
+        else
+          echo EXPORTS > $output_objdir/$soname.def;
+          cat $export_symbols >> $output_objdir/$soname.def;
+        fi~
+        $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+      else
+       _LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    haiku*)
+      _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      ;;
+
+    os2*)
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+      shrext_cmds=.dll
+      _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~
+       $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~
+       $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~
+       $ECHO EXPORTS >> $output_objdir/$libname.def~
+       emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~
+       $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~
+       emximp -o $lib $output_objdir/$libname.def'
+      _LT_TAGVAR(archive_expsym_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~
+       $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~
+       $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~
+       $ECHO EXPORTS >> $output_objdir/$libname.def~
+       prefix_cmds="$SED"~
+       if test EXPORTS = "`$SED 1q $export_symbols`"; then
+         prefix_cmds="$prefix_cmds -e 1d";
+       fi~
+       prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~
+       cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~
+       $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~
+       emximp -o $lib $output_objdir/$libname.def'
+      _LT_TAGVAR(old_archive_From_new_cmds, $1)='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def'
+      _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+      ;;
+
+    interix[[3-9]]*)
+      _LT_TAGVAR(hardcode_direct, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir'
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E'
+      # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
+      # Instead, shared libraries are loaded at an image base (0x10000000 by
+      # default) and relocated if they conflict, which is a slow very memory
+      # consuming and fragmenting process.  To avoid this, we pick a random,
+      # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
+      # time.  Moving up from 0x10000000 also allows more sbrk(2) space.
+      _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+      _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+      ;;
+
+    gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu)
+      tmp_diet=no
+      if test linux-dietlibc = "$host_os"; then
+       case $cc_basename in
+         diet\ *) tmp_diet=yes;;       # linux-dietlibc with static linking (!diet-dyn)
+       esac
+      fi
+      if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \
+        && test no = "$tmp_diet"
+      then
+       tmp_addflag=' $pic_flag'
+       tmp_sharedflag='-shared'
+       case $cc_basename,$host_cpu in
+        pgcc*)                         # Portland Group C compiler
+         _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive'
+         tmp_addflag=' $pic_flag'
+         ;;
+       pgf77* | pgf90* | pgf95* | pgfortran*)
+                                       # Portland Group f77 and f90 compilers
+         _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive'
+         tmp_addflag=' $pic_flag -Mnomain' ;;
+       ecc*,ia64* | icc*,ia64*)        # Intel C compiler on ia64
+         tmp_addflag=' -i_dynamic' ;;
+       efc*,ia64* | ifort*,ia64*)      # Intel Fortran compiler on ia64
+         tmp_addflag=' -i_dynamic -nofor_main' ;;
+       ifc* | ifort*)                  # Intel Fortran compiler
+         tmp_addflag=' -nofor_main' ;;
+       lf95*)                          # Lahey Fortran 8.1
+         _LT_TAGVAR(whole_archive_flag_spec, $1)=
+         tmp_sharedflag='--shared' ;;
+        nagfor*)                        # NAGFOR 5.3
+          tmp_sharedflag='-Wl,-shared' ;;
+       xl[[cC]]* | bgxl[[cC]]* | mpixl[[cC]]*) # IBM XL C 8.0 on PPC (deal with xlf below)
+         tmp_sharedflag='-qmkshrobj'
+         tmp_addflag= ;;
+       nvcc*)  # Cuda Compiler Driver 2.2
+         _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive'
+         _LT_TAGVAR(compiler_needs_object, $1)=yes
+         ;;
+       esac
+       case `$CC -V 2>&1 | sed 5q` in
+       *Sun\ C*)                       # Sun C 5.9
+         _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive'
+         _LT_TAGVAR(compiler_needs_object, $1)=yes
+         tmp_sharedflag='-G' ;;
+       *Sun\ F*)                       # Sun Fortran 8.3
+         tmp_sharedflag='-G' ;;
+       esac
+       _LT_TAGVAR(archive_cmds, $1)='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+
+        if test yes = "$supports_anon_versioning"; then
+          _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+            cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+            echo "local: *; };" >> $output_objdir/$libname.ver~
+            $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib'
+        fi
+
+       case $cc_basename in
+       tcc*)
+         _LT_TAGVAR(export_dynamic_flag_spec, $1)='-rdynamic'
+         ;;
+       xlf* | bgf* | bgxlf* | mpixlf*)
+         # IBM XL Fortran 10.1 on PPC cannot create shared libs itself
+         _LT_TAGVAR(whole_archive_flag_spec, $1)='--whole-archive$convenience --no-whole-archive'
+         _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+         _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
+         if test yes = "$supports_anon_versioning"; then
+           _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~
+              cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
+              echo "local: *; };" >> $output_objdir/$libname.ver~
+              $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
+         fi
+         ;;
+       esac
+      else
+        _LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    netbsd* | netbsdelf*-gnu)
+      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+       _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
+       wlarc=
+      else
+       _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+       _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib'
+      fi
+      ;;
+
+    solaris*)
+      if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then
+       _LT_TAGVAR(ld_shlibs, $1)=no
+       cat <<_LT_EOF 1>&2
+
+*** Warning: The releases 2.8.* of the GNU linker cannot reliably
+*** create shared libraries on Solaris systems.  Therefore, libtool
+*** is disabling shared libraries support.  We urge you to upgrade GNU
+*** binutils to release 2.9.1 or newer.  Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+      elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+       _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+       _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib'
+      else
+       _LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*)
+      case `$LD -v 2>&1` in
+        *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.1[[0-5]].*)
+       _LT_TAGVAR(ld_shlibs, $1)=no
+       cat <<_LT_EOF 1>&2
+
+*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 cannot
+*** reliably create shared libraries on SCO systems.  Therefore, libtool
+*** is disabling shared libraries support.  We urge you to upgrade GNU
+*** binutils to release 2.16.91.0.3 or newer.  Another option is to modify
+*** your PATH or compiler configuration so that the native linker is
+*** used, and then restart.
+
+_LT_EOF
+       ;;
+       *)
+         # For security reasons, it is highly recommended that you always
+         # use absolute paths for naming shared libraries, and exclude the
+         # DT_RUNPATH tag from executables and libraries.  But doing so
+         # requires that you compile everything twice, which is a pain.
+         if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+           _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+           _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+           _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib'
+         else
+           _LT_TAGVAR(ld_shlibs, $1)=no
+         fi
+       ;;
+      esac
+      ;;
+
+    sunos4*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+      wlarc=
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    *)
+      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+       _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+       _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib'
+      else
+       _LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+    esac
+
+    if test no = "$_LT_TAGVAR(ld_shlibs, $1)"; then
+      runpath_var=
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)=
+      _LT_TAGVAR(whole_archive_flag_spec, $1)=
+    fi
+  else
+    # PORTME fill in a description of your system's linker (not GNU ld)
+    case $host_os in
+    aix3*)
+      _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+      _LT_TAGVAR(always_export_symbols, $1)=yes
+      _LT_TAGVAR(archive_expsym_cmds, $1)='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname'
+      # Note: this linker hardcodes the directories in LIBPATH if there
+      # are no directories specified by -L.
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      if test yes = "$GCC" && test -z "$lt_prog_compiler_static"; then
+       # Neither direct hardcoding nor static linking is supported with a
+       # broken collect2.
+       _LT_TAGVAR(hardcode_direct, $1)=unsupported
+      fi
+      ;;
+
+    aix[[4-9]]*)
+      if test ia64 = "$host_cpu"; then
+       # On IA64, the linker does run time linking by default, so we don't
+       # have to do anything special.
+       aix_use_runtimelinking=no
+       exp_sym_flag='-Bexport'
+       no_entry_flag=
+      else
+       # If we're using GNU nm, then we don't want the "-C" option.
+       # -C means demangle to GNU nm, but means don't demangle to AIX nm.
+       # Without the "-l" option, or with the "-B" option, AIX nm treats
+       # weak defined symbols like other global defined symbols, whereas
+       # GNU nm marks them as "W".
+       # While the 'weak' keyword is ignored in the Export File, we need
+       # it in the Import File for the 'aix-soname' feature, so we have
+       # to replace the "-B" option with "-P" for AIX nm.
+       if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+         _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols'
+       else
+         _LT_TAGVAR(export_symbols_cmds, $1)='`func_echo_all $NM | $SED -e '\''s/B\([[^B]]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && ([substr](\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols'
+       fi
+       aix_use_runtimelinking=no
+
+       # Test if we are trying to use run time linking or normal
+       # AIX style linking. If -brtl is somewhere in LDFLAGS, we
+       # have runtime linking enabled, and use it for executables.
+       # For shared libraries, we enable/disable runtime linking
+       # depending on the kind of the shared library created -
+       # when "with_aix_soname,aix_use_runtimelinking" is:
+       # "aix,no"   lib.a(lib.so.V) shared, rtl:no,  for executables
+       # "aix,yes"  lib.so          shared, rtl:yes, for executables
+       #            lib.a           static archive
+       # "both,no"  lib.so.V(shr.o) shared, rtl:yes
+       #            lib.a(lib.so.V) shared, rtl:no,  for executables
+       # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables
+       #            lib.a(lib.so.V) shared, rtl:no
+       # "svr4,*"   lib.so.V(shr.o) shared, rtl:yes, for executables
+       #            lib.a           static archive
+       case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*)
+         for ld_flag in $LDFLAGS; do
+         if (test x-brtl = "x$ld_flag" || test x-Wl,-brtl = "x$ld_flag"); then
+           aix_use_runtimelinking=yes
+           break
+         fi
+         done
+         if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then
+           # With aix-soname=svr4, we create the lib.so.V shared archives only,
+           # so we don't have lib.a shared libs to link our executables.
+           # We have to force runtime linking in this case.
+           aix_use_runtimelinking=yes
+           LDFLAGS="$LDFLAGS -Wl,-brtl"
+         fi
+         ;;
+       esac
+
+       exp_sym_flag='-bexport'
+       no_entry_flag='-bnoentry'
+      fi
+
+      # When large executables or shared objects are built, AIX ld can
+      # have problems creating the table of contents.  If linking a library
+      # or program results in "error TOC overflow" add -mminimal-toc to
+      # CXXFLAGS/CFLAGS for g++/gcc.  In the cases where that is not
+      # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
+
+      _LT_TAGVAR(archive_cmds, $1)=''
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      _LT_TAGVAR(file_list_spec, $1)='$wl-f,'
+      case $with_aix_soname,$aix_use_runtimelinking in
+      aix,*) ;; # traditional, no import file
+      svr4,* | *,yes) # use import file
+       # The Import File defines what to hardcode.
+       _LT_TAGVAR(hardcode_direct, $1)=no
+       _LT_TAGVAR(hardcode_direct_absolute, $1)=no
+       ;;
+      esac
+
+      if test yes = "$GCC"; then
+       case $host_os in aix4.[[012]]|aix4.[[012]].*)
+       # We only want to do this on AIX 4.2 and lower, the check
+       # below for broken collect2 doesn't work under 4.3+
+         collect2name=`$CC -print-prog-name=collect2`
+         if test -f "$collect2name" &&
+          strings "$collect2name" | $GREP resolve_lib_name >/dev/null
+         then
+         # We have reworked collect2
+         :
+         else
+         # We have old collect2
+         _LT_TAGVAR(hardcode_direct, $1)=unsupported
+         # It fails to find uninstalled libraries when the uninstalled
+         # path is not listed in the libpath.  Setting hardcode_minus_L
+         # to unsupported forces relinking
+         _LT_TAGVAR(hardcode_minus_L, $1)=yes
+         _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+         _LT_TAGVAR(hardcode_libdir_separator, $1)=
+         fi
+         ;;
+       esac
+       shared_flag='-shared'
+       if test yes = "$aix_use_runtimelinking"; then
+         shared_flag="$shared_flag "'$wl-G'
+       fi
+       # Need to ensure runtime linking is disabled for the traditional
+       # shared library, or the linker may eventually find shared libraries
+       # /with/ Import File - we do not want to mix them.
+       shared_flag_aix='-shared'
+       shared_flag_svr4='-shared $wl-G'
+      else
+       # not using gcc
+       if test ia64 = "$host_cpu"; then
+       # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
+       # chokes on -Wl,-G. The following line is correct:
+         shared_flag='-G'
+       else
+         if test yes = "$aix_use_runtimelinking"; then
+           shared_flag='$wl-G'
+         else
+           shared_flag='$wl-bM:SRE'
+         fi
+         shared_flag_aix='$wl-bM:SRE'
+         shared_flag_svr4='$wl-G'
+       fi
+      fi
+
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-bexpall'
+      # It seems that -bexpall does not export symbols beginning with
+      # underscore (_), so it is better to generate a list of symbols to export.
+      _LT_TAGVAR(always_export_symbols, $1)=yes
+      if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then
+       # Warning - without using the other runtime loading flags (-brtl),
+       # -berok will link without error, but may produce a broken library.
+       _LT_TAGVAR(allow_undefined_flag, $1)='-berok'
+        # Determine the default libpath from the value encoded in an
+        # empty executable.
+        _LT_SYS_MODULE_PATH_AIX([$1])
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath"
+        _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag
+      else
+       if test ia64 = "$host_cpu"; then
+         _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R $libdir:/usr/lib:/lib'
+         _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs"
+         _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols"
+       else
+        # Determine the default libpath from the value encoded in an
+        # empty executable.
+        _LT_SYS_MODULE_PATH_AIX([$1])
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath"
+         # Warning - without using the other run time loading flags,
+         # -berok will link without error, but may produce a broken library.
+         _LT_TAGVAR(no_undefined_flag, $1)=' $wl-bernotok'
+         _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-berok'
+         if test yes = "$with_gnu_ld"; then
+           # We only use this code for GNU lds that support --whole-archive.
+           _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive$convenience $wl--no-whole-archive'
+         else
+           # Exported symbols can be pulled into shared objects from archives
+           _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience'
+         fi
+         _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+         _LT_TAGVAR(archive_expsym_cmds, $1)='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d'
+         # -brtl affects multiple linker settings, -berok does not and is overridden later
+         compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([[, ]]\\)%-berok\\1%g"`'
+         if test svr4 != "$with_aix_soname"; then
+           # This is similar to how AIX traditionally builds its shared libraries.
+           _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname'
+         fi
+         if test aix != "$with_aix_soname"; then
+           _LT_TAGVAR(archive_expsym_cmds, 
$1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_svr4' -o 
$output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs 
$wl-bnoentry 
'$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e 
$output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! 
$soname($shared_archive_member_spec.o)"; if test shr_64 = 
"$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 
3 [...]
+         else
+           # used by -dlpreopen to get the symbols
+           _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$MV  $output_objdir/$realname.d/$soname $output_objdir'
+         fi
+         _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$RM -r $output_objdir/$realname.d'
+       fi
+      fi
+      ;;
+
+    amigaos*)
+      case $host_cpu in
+      powerpc)
+            # see comment about AmigaOS4 .so support
+            _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+            _LT_TAGVAR(archive_expsym_cmds, $1)=''
+        ;;
+      m68k)
+            _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
+            _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+            _LT_TAGVAR(hardcode_minus_L, $1)=yes
+        ;;
+      esac
+      ;;
+
+    bsdi[[45]]*)
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)=-rdynamic
+      ;;
+
+    cygwin* | mingw* | pw32* | cegcc*)
+      # When not using gcc, we currently assume that we are using
+      # Microsoft Visual C++.
+      # hardcode_libdir_flag_spec is actually meaningless, as there is
+      # no search path for DLLs.
+      case $cc_basename in
+      cl*)
+       # Native MSVC
+       _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+       _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+       _LT_TAGVAR(always_export_symbols, $1)=yes
+       _LT_TAGVAR(file_list_spec, $1)='@'
+       # Tell ltmain to make .lib files, not .a files.
+       libext=lib
+       # Tell ltmain to make .dll files, not .so files.
+       shrext_cmds=.dll
+       # FIXME: Setting linknames here is a bad hack.
+       _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames='
+       _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then
+            cp "$export_symbols" "$output_objdir/$soname.def";
+            echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp";
+          else
+            $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp;
+          fi~
+          $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
+          linknames='
+       # The linker will not automatically build a static lib if we build a DLL.
+       # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+       _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+       _LT_TAGVAR(exclude_expsyms, $1)='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*'
+       _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1,DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols'
+       # Don't use ranlib
+       _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib'
+       _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~
+          lt_tool_outputfile="@TOOL_OUTPUT@"~
+          case $lt_outputfile in
+            *.exe|*.EXE) ;;
+            *)
+              lt_outputfile=$lt_outputfile.exe
+              lt_tool_outputfile=$lt_tool_outputfile.exe
+              ;;
+          esac~
+          if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then
+            $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
+            $RM "$lt_outputfile.manifest";
+          fi'
+       ;;
+      *)
+       # Assume MSVC wrapper
+       _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+       _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+       # Tell ltmain to make .lib files, not .a files.
+       libext=lib
+       # Tell ltmain to make .dll files, not .so files.
+       shrext_cmds=.dll
+       # FIXME: Setting linknames here is a bad hack.
+       _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
+       # The linker will automatically build a .lib file if we build a DLL.
+       _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+       # FIXME: Should let the user specify the lib program.
+       _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs'
+       _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+       ;;
+      esac
+      ;;
+
+    darwin* | rhapsody*)
+      _LT_DARWIN_LINKER_FEATURES($1)
+      ;;
+
+    dgux*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor
+    # support.  Future versions do this automatically, but an explicit c++rt0.o
+    # does not break anything, and helps significantly (at the cost of a little
+    # extra space).
+    freebsd2.2*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    # Unfortunately, older versions of FreeBSD 2 do not have this feature.
+    freebsd2.*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
+    freebsd* | dragonfly*)
+      _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    hpux9*)
+      if test yes = "$GCC"; then
+       _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib'
+      else
+       _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib'
+      fi
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+
+      # hardcode_minus_L: Not really in the search PATH,
+      # but as the default location of the library.
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E'
+      ;;
+
+    hpux10*)
+      if test yes,no = "$GCC,$with_gnu_ld"; then
+       _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+      else
+       _LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
+      fi
+      if test no = "$with_gnu_ld"; then
+       _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir'
+       _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+       _LT_TAGVAR(hardcode_direct, $1)=yes
+       _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+       _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E'
+       # hardcode_minus_L: Not really in the search PATH,
+       # but as the default location of the library.
+       _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      fi
+      ;;
+
+    hpux11*)
+      if test yes,no = "$GCC,$with_gnu_ld"; then
+       case $host_cpu in
+       hppa*64*)
+         _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags'
+         ;;
+       ia64*)
+         _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+         ;;
+       *)
+         _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
+         ;;
+       esac
+      else
+       case $host_cpu in
+       hppa*64*)
+         _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags'
+         ;;
+       ia64*)
+         _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
+         ;;
+       *)
+       m4_if($1, [], [
+         # Older versions of the 11.00 compiler do not understand -b yet
+         # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does)
+         _LT_LINKER_OPTION([if $CC understands -b],
+           _LT_TAGVAR(lt_cv_prog_compiler__b, $1), [-b],
+           [_LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags'],
+           [_LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'])],
+         [_LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags'])
+         ;;
+       esac
+      fi
+      if test no = "$with_gnu_ld"; then
+       _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir'
+       _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+       case $host_cpu in
+       hppa*64*|ia64*)
+         _LT_TAGVAR(hardcode_direct, $1)=no
+         _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+         ;;
+       *)
+         _LT_TAGVAR(hardcode_direct, $1)=yes
+         _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+         _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E'
+
+         # hardcode_minus_L: Not really in the search PATH,
+         # but as the default location of the library.
+         _LT_TAGVAR(hardcode_minus_L, $1)=yes
+         ;;
+       esac
+      fi
+      ;;
+
+    irix5* | irix6* | nonstopux*)
+      if test yes = "$GCC"; then
+       _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib'
+       # Try to use the -exported_symbol ld option, if it does not
+       # work, assume that -exports_file does not work either and
+       # implicitly export all symbols.
+       # This should be the same for all languages, so no per-tag cache variable.
+       AC_CACHE_CHECK([whether the $host_os linker accepts -exported_symbol],
+         [lt_cv_irix_exported_symbol],
+         [save_LDFLAGS=$LDFLAGS
+          LDFLAGS="$LDFLAGS -shared $wl-exported_symbol ${wl}foo $wl-update_registry $wl/dev/null"
+          AC_LINK_IFELSE(
+            [AC_LANG_SOURCE(
+               [AC_LANG_CASE([C], [[int foo (void) { return 0; }]],
+                             [C++], [[int foo (void) { return 0; }]],
+                             [Fortran 77], [[
+      subroutine foo
+      end]],
+                             [Fortran], [[
+      subroutine foo
+      end]])])],
+             [lt_cv_irix_exported_symbol=yes],
+             [lt_cv_irix_exported_symbol=no])
+           LDFLAGS=$save_LDFLAGS])
+       if test yes = "$lt_cv_irix_exported_symbol"; then
+          _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations $wl-exports_file $wl$export_symbols -o $lib'
+       fi
+       _LT_TAGVAR(link_all_deplibs, $1)=no
+      else
+       _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib'
+       _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -exports_file $export_symbols -o $lib'
+      fi
+      _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      _LT_TAGVAR(inherit_rpath, $1)=yes
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      ;;
+
+    linux*)
+      case $cc_basename in
+      tcc*)
+       # Fabrice Bellard et al's Tiny C Compiler
+       _LT_TAGVAR(ld_shlibs, $1)=yes
+       _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+       ;;
+      esac
+      ;;
+
+    netbsd* | netbsdelf*-gnu)
+      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+       _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'  # a.out
+      else
+       _LT_TAGVAR(archive_cmds, $1)='$LD -shared -o $lib $libobjs $deplibs $linker_flags'      # ELF
+      fi
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    newsos6)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    *nto* | *qnx*)
+      ;;
+
+    openbsd* | bitrig*)
+      if test -f /usr/libexec/ld.so; then
+       _LT_TAGVAR(hardcode_direct, $1)=yes
+       _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+       _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+       if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then
+         _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+         _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags $wl-retain-symbols-file,$export_symbols'
+         _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir'
+         _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E'
+       else
+         _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
+         _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir'
+       fi
+      else
+       _LT_TAGVAR(ld_shlibs, $1)=no
+      fi
+      ;;
+
+    os2*)
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+      shrext_cmds=.dll
+      _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~
+       $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~
+       $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~
+       $ECHO EXPORTS >> $output_objdir/$libname.def~
+       emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~
+       $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~
+       emximp -o $lib $output_objdir/$libname.def'
+      _LT_TAGVAR(archive_expsym_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~
+       $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~
+       $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~
+       $ECHO EXPORTS >> $output_objdir/$libname.def~
+       prefix_cmds="$SED"~
+       if test EXPORTS = "`$SED 1q $export_symbols`"; then
+         prefix_cmds="$prefix_cmds -e 1d";
+       fi~
+       prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~
+       cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~
+       $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~
+       emximp -o $lib $output_objdir/$libname.def'
+      _LT_TAGVAR(old_archive_From_new_cmds, $1)='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def'
+      _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+      ;;
+
+    osf3*)
+      if test yes = "$GCC"; then
+       _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*'
+       _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib'
+      else
+       _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+       _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib'
+      fi
+      _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      ;;
+
+    osf4* | osf5*)     # as osf3* with the addition of -msym flag
+      if test yes = "$GCC"; then
+       _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*'
+       _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $pic_flag $libobjs $deplibs $compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib'
+       _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+      else
+       _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+       _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib'
+       _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~
+          $CC -shared$allow_undefined_flag $wl-input $wl$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib~$RM $lib.exp'
+
+       # Both c and cxx compiler support -rpath directly
+       _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+      fi
+      _LT_TAGVAR(archive_cmds_need_lc, $1)='no'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+      ;;
+
+    solaris*)
+      _LT_TAGVAR(no_undefined_flag, $1)=' -z defs'
+      if test yes = "$GCC"; then
+       wlarc='$wl'
+       _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl-z ${wl}text $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags'
+       _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+          $CC -shared $pic_flag $wl-z ${wl}text $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+      else
+       case `$CC -V 2>&1` in
+       *"Compilers 5.0"*)
+         wlarc=''
+         _LT_TAGVAR(archive_cmds, $1)='$LD -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $linker_flags'
+         _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+            $LD -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp'
+         ;;
+       *)
+         wlarc='$wl'
+         _LT_TAGVAR(archive_cmds, $1)='$CC -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $compiler_flags'
+         _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
+            $CC -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
+         ;;
+       esac
+      fi
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      case $host_os in
+      solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+      *)
+       # The compiler driver will combine and reorder linker options,
+       # but understands '-z linker_flag'.  GCC discards it without '$wl',
+       # but is careful enough not to reorder.
+       # Supported since Solaris 2.6 (maybe 2.5.1?)
+       if test yes = "$GCC"; then
+         _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl-z ${wl}allextract$convenience $wl-z ${wl}defaultextract'
+       else
+         _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract'
+       fi
+       ;;
+      esac
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      ;;
+
+    sunos4*)
+      if test sequent = "$host_vendor"; then
+       # Use $CC to link under sequent, because it throws in some extra .o
+       # files that make .init and .fini sections work.
+       _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h $soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+       _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags'
+      fi
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_direct, $1)=yes
+      _LT_TAGVAR(hardcode_minus_L, $1)=yes
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    sysv4)
+      case $host_vendor in
+       sni)
+         _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+         _LT_TAGVAR(hardcode_direct, $1)=yes # is this really true???
+       ;;
+       siemens)
+         ## LD is ld it makes a PLAMLIB
+         ## CC just makes a GrossModule.
+         _LT_TAGVAR(archive_cmds, $1)='$LD -G -o $lib $libobjs $deplibs $linker_flags'
+         _LT_TAGVAR(reload_cmds, $1)='$CC -r -o $output$reload_objs'
+         _LT_TAGVAR(hardcode_direct, $1)=no
+        ;;
+       motorola)
+         _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+         _LT_TAGVAR(hardcode_direct, $1)=no #Motorola manual says yes, but my tests say they lie
+       ;;
+      esac
+      runpath_var='LD_RUN_PATH'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    sysv4.3*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='-Bexport'
+      ;;
+
+    sysv4*MP*)
+      if test -d /usr/nec; then
+       _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+       _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+       runpath_var=LD_RUN_PATH
+       hardcode_runpath_var=yes
+       _LT_TAGVAR(ld_shlibs, $1)=yes
+      fi
+      ;;
+
+    sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*)
+      _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text'
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      runpath_var='LD_RUN_PATH'
+
+      if test yes = "$GCC"; then
+       _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+       _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+       _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+       _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      fi
+      ;;
+
+    sysv5* | sco3.2v5* | sco5v6*)
+      # Note: We CANNOT use -z defs as we might desire, because we do not
+      # link with -lc, and that would cause any symbols used from libc to
+      # always be unresolved, which means just about no library would
+      # ever link correctly.  If we're not using GNU ld we use -z text
+      # though, which does catch some bad symbols but isn't as heavy-handed
+      # as -z defs.
+      _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text'
+      _LT_TAGVAR(allow_undefined_flag, $1)='$wl-z,nodefs'
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R,$libdir'
+      _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+      _LT_TAGVAR(link_all_deplibs, $1)=yes
+      _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-Bexport'
+      runpath_var='LD_RUN_PATH'
+
+      if test yes = "$GCC"; then
+       _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+       _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      else
+       _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+       _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
+      fi
+      ;;
+
+    uts4*)
+      _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
+      _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      ;;
+
+    *)
+      _LT_TAGVAR(ld_shlibs, $1)=no
+      ;;
+    esac
+
+    if test sni = "$host_vendor"; then
+      case $host in
+      sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*)
+       _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-Blargedynsym'
+       ;;
+      esac
+    fi
+  fi
+])
+AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)])
+test no = "$_LT_TAGVAR(ld_shlibs, $1)" && can_build_shared=no
+
+_LT_TAGVAR(with_gnu_ld, $1)=$with_gnu_ld
+
+_LT_DECL([], [libext], [0], [Old archive suffix (normally "a")])dnl
+_LT_DECL([], [shrext_cmds], [1], [Shared library suffix (normally ".so")])dnl
+_LT_DECL([], [extract_expsyms_cmds], [2],
+    [The commands to extract the exported symbol list from a shared archive])
+
+#
+# Do we need to explicitly link libc?
+#
+case "x$_LT_TAGVAR(archive_cmds_need_lc, $1)" in
+x|xyes)
+  # Assume -lc should be added
+  _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+
+  if test yes,yes = "$GCC,$enable_shared"; then
+    case $_LT_TAGVAR(archive_cmds, $1) in
+    *'~'*)
+      # FIXME: we may have to deal with multi-command sequences.
+      ;;
+    '$CC '*)
+      # Test whether the compiler implicitly links with -lc since on some
+      # systems, -lgcc has to come before -lc. If gcc already passes -lc
+      # to ld, don't add -lc before -lgcc.
+      AC_CACHE_CHECK([whether -lc should be explicitly linked in],
+       [lt_cv_]_LT_TAGVAR(archive_cmds_need_lc, $1),
+       [$RM conftest*
+       echo "$lt_simple_compile_test_code" > conftest.$ac_ext
+
+       if AC_TRY_EVAL(ac_compile) 2>conftest.err; then
+         soname=conftest
+         lib=conftest
+         libobjs=conftest.$ac_objext
+         deplibs=
+         wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1)
+         pic_flag=$_LT_TAGVAR(lt_prog_compiler_pic, $1)
+         compiler_flags=-v
+         linker_flags=-v
+         verstring=
+         output_objdir=.
+         libname=conftest
+         lt_save_allow_undefined_flag=$_LT_TAGVAR(allow_undefined_flag, $1)
+         _LT_TAGVAR(allow_undefined_flag, $1)=
+         if AC_TRY_EVAL(_LT_TAGVAR(archive_cmds, $1) 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1)
+         then
+           lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+         else
+           lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+         fi
+         _LT_TAGVAR(allow_undefined_flag, $1)=$lt_save_allow_undefined_flag
+       else
+         cat conftest.err 1>&5
+       fi
+       $RM conftest*
+       ])
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=$lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)
+      ;;
+    esac
+  fi
+  ;;
+esac
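
The probe above decides whether the generated libtool must add -lc itself: it compiles a trivial source, replays the recorded archive_cmds with verbose driver output, and greps that output for " -lc ". A minimal standalone sketch of the same idea, assuming only a POSIX shell and a cc driver on PATH (the conftest.c name and the messages are illustrative, not taken from this file):

    #!/bin/sh
    # Does "$CC -shared ... -v" already pass -lc to the linker?
    CC=${CC-cc}
    printf 'int some_variable = 0;\n' > conftest.c
    $CC -c conftest.c || exit 1
    if $CC -shared conftest.o -v -o conftest.so 2>&1 | grep ' -lc ' >/dev/null; then
      echo "driver already passes -lc: archive_cmds_need_lc=no"
    else
      echo "-lc must be added explicitly: archive_cmds_need_lc=yes"
    fi
    rm -f conftest.c conftest.o conftest.so
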
+
+_LT_TAGDECL([build_libtool_need_lc], [archive_cmds_need_lc], [0],
+    [Whether or not to add -lc for building shared libraries])
+_LT_TAGDECL([allow_libtool_libs_with_static_runtimes],
+    [enable_shared_with_static_runtimes], [0],
+    [Whether or not to disallow shared libs when runtime libs are static])
+_LT_TAGDECL([], [export_dynamic_flag_spec], [1],
+    [Compiler flag to allow reflexive dlopens])
+_LT_TAGDECL([], [whole_archive_flag_spec], [1],
+    [Compiler flag to generate shared objects directly from archives])
+_LT_TAGDECL([], [compiler_needs_object], [1],
+    [Whether the compiler copes with passing no objects directly])
+_LT_TAGDECL([], [old_archive_from_new_cmds], [2],
+    [Create an old-style archive from a shared archive])
+_LT_TAGDECL([], [old_archive_from_expsyms_cmds], [2],
+    [Create a temporary old-style archive to link instead of a shared archive])
+_LT_TAGDECL([], [archive_cmds], [2], [Commands used to build a shared archive])
+_LT_TAGDECL([], [archive_expsym_cmds], [2])
+_LT_TAGDECL([], [module_cmds], [2],
+    [Commands used to build a loadable module if different from building
+    a shared archive.])
+_LT_TAGDECL([], [module_expsym_cmds], [2])
+_LT_TAGDECL([], [with_gnu_ld], [1],
+    [Whether we are building with GNU ld or not])
+_LT_TAGDECL([], [allow_undefined_flag], [1],
+    [Flag that allows shared libraries with undefined symbols to be built])
+_LT_TAGDECL([], [no_undefined_flag], [1],
+    [Flag that enforces no undefined symbols])
+_LT_TAGDECL([], [hardcode_libdir_flag_spec], [1],
+    [Flag to hardcode $libdir into a binary during linking.
+    This must work even if $libdir does not exist])
+_LT_TAGDECL([], [hardcode_libdir_separator], [1],
+    [Whether we need a single "-rpath" flag with a separated argument])
+_LT_TAGDECL([], [hardcode_direct], [0],
+    [Set to "yes" if using DIR/libNAME$shared_ext during linking hardcodes
+    DIR into the resulting binary])
+_LT_TAGDECL([], [hardcode_direct_absolute], [0],
+    [Set to "yes" if using DIR/libNAME$shared_ext during linking hardcodes
+    DIR into the resulting binary and the resulting library dependency is
+    "absolute", i.e impossible to change by setting $shlibpath_var if the
+    library is relocated])
+_LT_TAGDECL([], [hardcode_minus_L], [0],
+    [Set to "yes" if using the -LDIR flag during linking hardcodes DIR
+    into the resulting binary])
+_LT_TAGDECL([], [hardcode_shlibpath_var], [0],
+    [Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
+    into the resulting binary])
+_LT_TAGDECL([], [hardcode_automatic], [0],
+    [Set to "yes" if building a shared library automatically hardcodes DIR
+    into the library and all subsequent libraries and executables linked
+    against it])
+_LT_TAGDECL([], [inherit_rpath], [0],
+    [Set to yes if linker adds runtime paths of dependent libraries
+    to runtime path list])
+_LT_TAGDECL([], [link_all_deplibs], [0],
+    [Whether libtool must link a program against all its dependency libraries])
+_LT_TAGDECL([], [always_export_symbols], [0],
+    [Set to "yes" if exported symbols are required])
+_LT_TAGDECL([], [export_symbols_cmds], [2],
+    [The commands to list exported symbols])
+_LT_TAGDECL([], [exclude_expsyms], [1],
+    [Symbols that should not be listed in the preloaded symbols])
+_LT_TAGDECL([], [include_expsyms], [1],
+    [Symbols that must always be exported])
+_LT_TAGDECL([], [prelink_cmds], [2],
+    [Commands necessary for linking programs (against libraries) with templates])
+_LT_TAGDECL([], [postlink_cmds], [2],
+    [Commands necessary for finishing linking programs])
+_LT_TAGDECL([], [file_list_spec], [1],
+    [Specify filename containing input files])
+dnl FIXME: Not yet implemented
+dnl _LT_TAGDECL([], [thread_safe_flag_spec], [1],
+dnl    [Compiler flag to generate thread safe objects])
+])# _LT_LINKER_SHLIBS
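
Everything _LT_LINKER_SHLIBS collects through _LT_DECL/_LT_TAGDECL is later serialized by _LT_CONFIG into the ./libtool script that configure writes into the build tree, so the per-platform choices made above can be inspected after the fact. A hedged sketch (the variable names follow the declarations above; exactly which ones appear depends on the tag and platform):

    #!/bin/sh
    # Run in a build tree after ./configure has generated ./libtool.
    # Show a few of the recorded link settings for this platform:
    grep -E '^(archive_cmds|archive_expsym_cmds|hardcode_libdir_flag_spec|allow_undefined_flag)=' libtool
    # The generated script can also report its own configuration:
    ./libtool --config | grep -E '^(archive_cmds|hardcode_libdir_flag_spec)='
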
+
+
+# _LT_LANG_C_CONFIG([TAG])
+# ------------------------
+# Ensure that the configuration variables for a C compiler are suitably
+# defined.  These variables are subsequently used by _LT_CONFIG to write
+# the compiler configuration to 'libtool'.
+m4_defun([_LT_LANG_C_CONFIG],
+[m4_require([_LT_DECL_EGREP])dnl
+lt_save_CC=$CC
+AC_LANG_PUSH(C)
+
+# Source file extension for C test sources.
+ac_ext=c
+
+# Object file extension for compiled C test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="int some_variable = 0;"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='int main(){return(0);}'
+
+_LT_TAG_COMPILER
+# Save the default compiler, since it gets overwritten when the other
+# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP.
+compiler_DEFAULT=$CC
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+## CAVEAT EMPTOR:
+## There is no encapsulation within the following macros, do not change
+## the running order or otherwise move them around unless you know exactly
+## what you are doing...
+if test -n "$compiler"; then
+  _LT_COMPILER_NO_RTTI($1)
+  _LT_COMPILER_PIC($1)
+  _LT_COMPILER_C_O($1)
+  _LT_COMPILER_FILE_LOCKS($1)
+  _LT_LINKER_SHLIBS($1)
+  _LT_SYS_DYNAMIC_LINKER($1)
+  _LT_LINKER_HARDCODE_LIBPATH($1)
+  LT_SYS_DLOPEN_SELF
+  _LT_CMD_STRIPLIB
+
+  # Report what library types will actually be built
+  AC_MSG_CHECKING([if libtool supports shared libraries])
+  AC_MSG_RESULT([$can_build_shared])
+
+  AC_MSG_CHECKING([whether to build shared libraries])
+  test no = "$can_build_shared" && enable_shared=no
+
+  # On AIX, shared libraries and static libraries use the same namespace, and
+  # are all built from PIC.
+  case $host_os in
+  aix3*)
+    test yes = "$enable_shared" && enable_static=no
+    if test -n "$RANLIB"; then
+      archive_cmds="$archive_cmds~\$RANLIB \$lib"
+      postinstall_cmds='$RANLIB $lib'
+    fi
+    ;;
+
+  aix[[4-9]]*)
+    if test ia64 != "$host_cpu"; then
+      case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in
+      yes,aix,yes) ;;                  # shared object as lib.so file only
+      yes,svr4,*) ;;                   # shared object as lib.so archive 
member only
+      yes,*) enable_static=no ;;       # shared object in lib.a archive as well
+      esac
+    fi
+    ;;
+  esac
+  AC_MSG_RESULT([$enable_shared])
+
+  AC_MSG_CHECKING([whether to build static libraries])
+  # Make sure either enable_shared or enable_static is yes.
+  test yes = "$enable_shared" || enable_static=yes
+  AC_MSG_RESULT([$enable_static])
+
+  _LT_CONFIG($1)
+fi
+AC_LANG_POP
+CC=$lt_save_CC
+])# _LT_LANG_C_CONFIG
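
These language-config macros only take effect once the vendored m4 files are pulled into a package's build system. The usual flow, sketched here under the assumption of a standard Autotools setup (this tree's own bootstrap script may differ in details), is:

    #!/bin/sh
    set -e
    libtoolize --copy --force       # install libtool.m4, ltoptions.m4, ltversion.m4, ...
    aclocal -I m4                   # gather the macros from m4/ into aclocal.m4
    autoconf                        # expand them into ./configure
    automake --add-missing --copy   # regenerate the Makefile.in files
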
+
+
+# _LT_LANG_CXX_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for a C++ compiler are suitably
+# defined.  These variables are subsequently used by _LT_CONFIG to write
+# the compiler configuration to 'libtool'.
+m4_defun([_LT_LANG_CXX_CONFIG],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+m4_require([_LT_DECL_EGREP])dnl
+m4_require([_LT_PATH_MANIFEST_TOOL])dnl
+if test -n "$CXX" && ( test no != "$CXX" &&
+    ( (test g++ = "$CXX" && `g++ -v >/dev/null 2>&1` ) ||
+    (test g++ != "$CXX"))); then
+  AC_PROG_CXXCPP
+else
+  _lt_caught_CXX_error=yes
+fi
+
+AC_LANG_PUSH(C++)
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(compiler_needs_object, $1)=no
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for C++ test sources.
+ac_ext=cpp
+
+# Object file extension for compiled C++ test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the CXX compiler isn't working.  Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test yes != "$_lt_caught_CXX_error"; then
+  # Code to be used in simple compile tests
+  lt_simple_compile_test_code="int some_variable = 0;"
+
+  # Code to be used in simple link tests
+  lt_simple_link_test_code='int main(int, char *[[]]) { return(0); }'
+
+  # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+  _LT_TAG_COMPILER
+
+  # save warnings/boilerplate of simple test code
+  _LT_COMPILER_BOILERPLATE
+  _LT_LINKER_BOILERPLATE
+
+  # Allow CC to be a program name with arguments.
+  lt_save_CC=$CC
+  lt_save_CFLAGS=$CFLAGS
+  lt_save_LD=$LD
+  lt_save_GCC=$GCC
+  GCC=$GXX
+  lt_save_with_gnu_ld=$with_gnu_ld
+  lt_save_path_LD=$lt_cv_path_LD
+  if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then
+    lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx
+  else
+    $as_unset lt_cv_prog_gnu_ld
+  fi
+  if test -n "${lt_cv_path_LDCXX+set}"; then
+    lt_cv_path_LD=$lt_cv_path_LDCXX
+  else
+    $as_unset lt_cv_path_LD
+  fi
+  test -z "${LDCXX+set}" || LD=$LDCXX
+  CC=${CXX-"c++"}
+  CFLAGS=$CXXFLAGS
+  compiler=$CC
+  _LT_TAGVAR(compiler, $1)=$CC
+  _LT_CC_BASENAME([$compiler])
+
+  if test -n "$compiler"; then
+    # We don't want -fno-exception when compiling C++ code, so set the
+    # no_builtin_flag separately
+    if test yes = "$GXX"; then
+      _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin'
+    else
+      _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=
+    fi
+
+    if test yes = "$GXX"; then
+      # Set up default GNU C++ configuration
+
+      LT_PATH_LD
+
+      # Check if GNU C++ uses GNU ld as the underlying linker, since the
+      # archiving commands below assume that GNU ld is being used.
+      if test yes = "$with_gnu_ld"; then
+        _LT_TAGVAR(archive_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib'
+        _LT_TAGVAR(archive_expsym_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib'
+
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+        _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic'
+
+        # If archive_cmds runs LD, not CC, wlarc should be empty
+        # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to
+        #     investigate it a little bit more. (MM)
+        wlarc='$wl'
+
+        # ancient GNU ld didn't support --whole-archive et al.
+        if eval "`$CC -print-prog-name=ld` --help 2>&1" |
+         $GREP 'no-whole-archive' > /dev/null; then
+          _LT_TAGVAR(whole_archive_flag_spec, $1)=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive'
+        else
+          _LT_TAGVAR(whole_archive_flag_spec, $1)=
+        fi
+      else
+        with_gnu_ld=no
+        wlarc=
+
+        # A generic and very simple default shared library creation
+        # command for GNU C++ for the case where it uses the native
+        # linker, instead of GNU ld.  If possible, this setting should be
+        # overridden to take advantage of the native linker features on
+        # the platform it is being used on.
+        _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib'
+      fi
+
+      # Commands to make compiler produce verbose output that lists
+      # what "hidden" libraries, object files and flags are used when
+      # linking a shared library.
+      output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP " \-L"'
+
+    else
+      GXX=no
+      with_gnu_ld=no
+      wlarc=
+    fi
+
+    # PORTME: fill in a description of your system's C++ link characteristics
+    AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries])
+    _LT_TAGVAR(ld_shlibs, $1)=yes
+    case $host_os in
+      aix3*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+      aix[[4-9]]*)
+        if test ia64 = "$host_cpu"; then
+          # On IA64, the linker does run time linking by default, so we don't
+          # have to do anything special.
+          aix_use_runtimelinking=no
+          exp_sym_flag='-Bexport'
+          no_entry_flag=
+        else
+          aix_use_runtimelinking=no
+
+          # Test if we are trying to use run time linking or normal
+          # AIX style linking. If -brtl is somewhere in LDFLAGS, we
+          # have runtime linking enabled, and use it for executables.
+          # For shared libraries, we enable/disable runtime linking
+          # depending on the kind of the shared library created -
+          # when "with_aix_soname,aix_use_runtimelinking" is:
+          # "aix,no"   lib.a(lib.so.V) shared, rtl:no,  for executables
+          # "aix,yes"  lib.so          shared, rtl:yes, for executables
+          #            lib.a           static archive
+          # "both,no"  lib.so.V(shr.o) shared, rtl:yes
+          #            lib.a(lib.so.V) shared, rtl:no,  for executables
+          # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables
+          #            lib.a(lib.so.V) shared, rtl:no
+          # "svr4,*"   lib.so.V(shr.o) shared, rtl:yes, for executables
+          #            lib.a           static archive
+          case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*)
+           for ld_flag in $LDFLAGS; do
+             case $ld_flag in
+             *-brtl*)
+               aix_use_runtimelinking=yes
+               break
+               ;;
+             esac
+           done
+           if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then
+             # With aix-soname=svr4, we create the lib.so.V shared archives only,
+             # so we don't have lib.a shared libs to link our executables.
+             # We have to force runtime linking in this case.
+             aix_use_runtimelinking=yes
+             LDFLAGS="$LDFLAGS -Wl,-brtl"
+           fi
+           ;;
+          esac
+
+          exp_sym_flag='-bexport'
+          no_entry_flag='-bnoentry'
+        fi
+
+        # When large executables or shared objects are built, AIX ld can
+        # have problems creating the table of contents.  If linking a library
+        # or program results in "error TOC overflow" add -mminimal-toc to
+        # CXXFLAGS/CFLAGS for g++/gcc.  In the cases where that is not
+        # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
+
+        _LT_TAGVAR(archive_cmds, $1)=''
+        _LT_TAGVAR(hardcode_direct, $1)=yes
+        _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+        _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+        _LT_TAGVAR(link_all_deplibs, $1)=yes
+        _LT_TAGVAR(file_list_spec, $1)='$wl-f,'
+        case $with_aix_soname,$aix_use_runtimelinking in
+        aix,*) ;;      # no import file
+        svr4,* | *,yes) # use import file
+          # The Import File defines what to hardcode.
+          _LT_TAGVAR(hardcode_direct, $1)=no
+          _LT_TAGVAR(hardcode_direct_absolute, $1)=no
+          ;;
+        esac
+
+        if test yes = "$GXX"; then
+          case $host_os in aix4.[[012]]|aix4.[[012]].*)
+          # We only want to do this on AIX 4.2 and lower, the check
+          # below for broken collect2 doesn't work under 4.3+
+         collect2name=`$CC -print-prog-name=collect2`
+         if test -f "$collect2name" &&
+            strings "$collect2name" | $GREP resolve_lib_name >/dev/null
+         then
+           # We have reworked collect2
+           :
+         else
+           # We have old collect2
+           _LT_TAGVAR(hardcode_direct, $1)=unsupported
+           # It fails to find uninstalled libraries when the uninstalled
+           # path is not listed in the libpath.  Setting hardcode_minus_L
+           # to unsupported forces relinking
+           _LT_TAGVAR(hardcode_minus_L, $1)=yes
+           _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+           _LT_TAGVAR(hardcode_libdir_separator, $1)=
+         fi
+          esac
+          shared_flag='-shared'
+         if test yes = "$aix_use_runtimelinking"; then
+           shared_flag=$shared_flag' $wl-G'
+         fi
+         # Need to ensure runtime linking is disabled for the traditional
+         # shared library, or the linker may eventually find shared libraries
+         # /with/ Import File - we do not want to mix them.
+         shared_flag_aix='-shared'
+         shared_flag_svr4='-shared $wl-G'
+        else
+          # not using gcc
+          if test ia64 = "$host_cpu"; then
+         # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
+         # chokes on -Wl,-G. The following line is correct:
+         shared_flag='-G'
+          else
+           if test yes = "$aix_use_runtimelinking"; then
+             shared_flag='$wl-G'
+           else
+             shared_flag='$wl-bM:SRE'
+           fi
+           shared_flag_aix='$wl-bM:SRE'
+           shared_flag_svr4='$wl-G'
+          fi
+        fi
+
+        _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-bexpall'
+        # It seems that -bexpall does not export symbols beginning with
+        # underscore (_), so it is better to generate a list of symbols to
+       # export.
+        _LT_TAGVAR(always_export_symbols, $1)=yes
+       if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then
+          # Warning - without using the other runtime loading flags (-brtl),
+          # -berok will link without error, but may produce a broken library.
+          # The "-G" linker flag allows undefined symbols.
+          _LT_TAGVAR(no_undefined_flag, $1)='-bernotok'
+          # Determine the default libpath from the value encoded in an empty
+          # executable.
+          _LT_SYS_MODULE_PATH_AIX([$1])
+          _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath"
+
+          _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag
+        else
+          if test ia64 = "$host_cpu"; then
+           _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R $libdir:/usr/lib:/lib'
+           _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs"
+           _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols"
+          else
+           # Determine the default libpath from the value encoded in an
+           # empty executable.
+           _LT_SYS_MODULE_PATH_AIX([$1])
+           _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath"
+           # Warning - without using the other run time loading flags,
+           # -berok will link without error, but may produce a broken library.
+           _LT_TAGVAR(no_undefined_flag, $1)=' $wl-bernotok'
+           _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-berok'
+           if test yes = "$with_gnu_ld"; then
+             # We only use this code for GNU lds that support --whole-archive.
+             _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive$convenience $wl--no-whole-archive'
+           else
+             # Exported symbols can be pulled into shared objects from archives
+             _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience'
+           fi
+           _LT_TAGVAR(archive_cmds_need_lc, $1)=yes
+           _LT_TAGVAR(archive_expsym_cmds, $1)='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d'
+           # -brtl affects multiple linker settings, -berok does not and is overridden later
+           compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([[, ]]\\)%-berok\\1%g"`'
+           if test svr4 != "$with_aix_soname"; then
+             # This is similar to how AIX traditionally builds its shared
+             # libraries. Need -bnortl late, we may have -brtl in LDFLAGS.
+             _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname'
+           fi
+           if test aix != "$with_aix_soname"; then
+             _LT_TAGVAR(archive_expsym_cmds, 
$1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_svr4' -o 
$output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs 
$wl-bnoentry 
'$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e 
$output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! 
$soname($shared_archive_member_spec.o)"; if test shr_64 = 
"$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 
[...]
+           else
+             # used by -dlpreopen to get the symbols
+             _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$MV $output_objdir/$realname.d/$soname $output_objdir'
+           fi
+           _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$RM -r $output_objdir/$realname.d'
+          fi
+        fi
+        ;;
+
+      beos*)
+       if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
+         _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+         # Joseph Beckenbach <address@hidden> says some releases of gcc
+         # support --undefined.  This deserves some investigation.  FIXME
+         _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+       else
+         _LT_TAGVAR(ld_shlibs, $1)=no
+       fi
+       ;;
+
+      chorus*)
+        case $cc_basename in
+          *)
+         # FIXME: insert proper C++ library support
+         _LT_TAGVAR(ld_shlibs, $1)=no
+         ;;
+        esac
+        ;;
+
+      cygwin* | mingw* | pw32* | cegcc*)
+       case $GXX,$cc_basename in
+       ,cl* | no,cl*)
+         # Native MSVC
+         # hardcode_libdir_flag_spec is actually meaningless, as there is
+         # no search path for DLLs.
+         _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' '
+         _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+         _LT_TAGVAR(always_export_symbols, $1)=yes
+         _LT_TAGVAR(file_list_spec, $1)='@'
+         # Tell ltmain to make .lib files, not .a files.
+         libext=lib
+         # Tell ltmain to make .dll files, not .so files.
+         shrext_cmds=.dll
+         # FIXME: Setting linknames here is a bad hack.
+         _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames='
+         _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then
+              cp "$export_symbols" "$output_objdir/$soname.def";
+              echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp";
+            else
+              $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp;
+            fi~
+            $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
+            linknames='
+         # The linker will not automatically build a static lib if we build a DLL.
+         # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true'
+         _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+         # Don't use ranlib
+         _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib'
+         _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~
+            lt_tool_outputfile="@TOOL_OUTPUT@"~
+            case $lt_outputfile in
+              *.exe|*.EXE) ;;
+              *)
+                lt_outputfile=$lt_outputfile.exe
+                lt_tool_outputfile=$lt_tool_outputfile.exe
+                ;;
+            esac~
+            func_to_tool_file "$lt_outputfile"~
+            if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then
+              $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
+              $RM "$lt_outputfile.manifest";
+            fi'
+         ;;
+       *)
+         # g++
+         # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless,
+         # as there is no search path for DLLs.
+         _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+         _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-all-symbols'
+         _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+         _LT_TAGVAR(always_export_symbols, $1)=no
+         _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+
+         if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
+           _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+           # If the export-symbols file already is a .def file, use it as
+           # is; otherwise, prepend EXPORTS...
+           _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then
+              cp $export_symbols $output_objdir/$soname.def;
+            else
+              echo EXPORTS > $output_objdir/$soname.def;
+              cat $export_symbols >> $output_objdir/$soname.def;
+            fi~
+            $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
+         else
+           _LT_TAGVAR(ld_shlibs, $1)=no
+         fi
+         ;;
+       esac
+       ;;
+      darwin* | rhapsody*)
+        _LT_DARWIN_LINKER_FEATURES($1)
+       ;;
+
+      os2*)
+       _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir'
+       _LT_TAGVAR(hardcode_minus_L, $1)=yes
+       _LT_TAGVAR(allow_undefined_flag, $1)=unsupported
+       shrext_cmds=.dll
+       _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~
+         $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~
+         $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~
+         $ECHO EXPORTS >> $output_objdir/$libname.def~
+         emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~
+         $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~
+         emximp -o $lib $output_objdir/$libname.def'
+       _LT_TAGVAR(archive_expsym_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~
+         $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~
+         $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~
+         $ECHO EXPORTS >> $output_objdir/$libname.def~
+         prefix_cmds="$SED"~
+         if test EXPORTS = "`$SED 1q $export_symbols`"; then
+           prefix_cmds="$prefix_cmds -e 1d";
+         fi~
+         prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~
+         cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~
+         $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~
+         emximp -o $lib $output_objdir/$libname.def'
+       _LT_TAGVAR(old_archive_From_new_cmds, $1)='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def'
+       _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes
+       ;;
+
+      dgux*)
+        case $cc_basename in
+          ec++*)
+           # FIXME: insert proper C++ library support
+           _LT_TAGVAR(ld_shlibs, $1)=no
+           ;;
+          ghcx*)
+           # Green Hills C++ Compiler
+           # FIXME: insert proper C++ library support
+           _LT_TAGVAR(ld_shlibs, $1)=no
+           ;;
+          *)
+           # FIXME: insert proper C++ library support
+           _LT_TAGVAR(ld_shlibs, $1)=no
+           ;;
+        esac
+        ;;
+
+      freebsd2.*)
+        # C++ shared libraries reported to be fairly broken before
+       # switch to ELF
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+
+      freebsd-elf*)
+        _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+        ;;
+
+      freebsd* | dragonfly*)
+        # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF
+        # conventions
+        _LT_TAGVAR(ld_shlibs, $1)=yes
+        ;;
+
+      haiku*)
+        _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+        _LT_TAGVAR(link_all_deplibs, $1)=yes
+        ;;
+
+      hpux9*)
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir'
+        _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+        _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E'
+        _LT_TAGVAR(hardcode_direct, $1)=yes
+        _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH,
+                                            # but as the default
+                                            # location of the library.
+
+        case $cc_basename in
+          CC*)
+            # FIXME: insert proper C++ library support
+            _LT_TAGVAR(ld_shlibs, $1)=no
+            ;;
+          aCC*)
+            _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -b $wl+b $wl$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib'
+            # Commands to make compiler produce verbose output that lists
+            # what "hidden" libraries, object files and flags are used when
+            # linking a shared library.
+            #
+            # There doesn't appear to be a way to prevent this compiler from
+            # explicitly linking system object files so we need to strip them
+            # from the output so that they don't get included in the library
+            # dependencies.
+            output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP " \-L"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+            ;;
+          *)
+            if test yes = "$GXX"; then
+              _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib'
+            else
+              # FIXME: insert proper C++ library support
+              _LT_TAGVAR(ld_shlibs, $1)=no
+            fi
+            ;;
+        esac
+        ;;
+
+      hpux10*|hpux11*)
+        if test no = "$with_gnu_ld"; then
+         _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir'
+         _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+          case $host_cpu in
+            hppa*64*|ia64*)
+              ;;
+            *)
+             _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E'
+              ;;
+          esac
+        fi
+        case $host_cpu in
+          hppa*64*|ia64*)
+            _LT_TAGVAR(hardcode_direct, $1)=no
+            _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+            ;;
+          *)
+            _LT_TAGVAR(hardcode_direct, $1)=yes
+            _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+            _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH,
+                                                # but as the default
+                                                # location of the library.
+            ;;
+        esac
+
+        case $cc_basename in
+          CC*)
+           # FIXME: insert proper C++ library support
+           _LT_TAGVAR(ld_shlibs, $1)=no
+           ;;
+          aCC*)
+           case $host_cpu in
+             hppa*64*)
+               _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+               ;;
+             ia64*)
+               _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+               ;;
+             *)
+               _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+               ;;
+           esac
+           # Commands to make compiler produce verbose output that lists
+           # what "hidden" libraries, object files and flags are used when
+           # linking a shared library.
+           #
+           # There doesn't appear to be a way to prevent this compiler from
+           # explicitly linking system object files so we need to strip them
+           # from the output so that they don't get included in the library
+           # dependencies.
+           output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP " \-L"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+           ;;
+          *)
+           if test yes = "$GXX"; then
+             if test no = "$with_gnu_ld"; then
+               case $host_cpu in
+                 hppa*64*)
+                   _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC $wl+h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+                   ;;
+                 ia64*)
+                   _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+                   ;;
+                 *)
+                   _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+                   ;;
+               esac
+             fi
+           else
+             # FIXME: insert proper C++ library support
+             _LT_TAGVAR(ld_shlibs, $1)=no
+           fi
+           ;;
+        esac
+        ;;
+
+      interix[[3-9]]*)
+       _LT_TAGVAR(hardcode_direct, $1)=no
+       _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+       _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir'
+       _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E'
+       # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
+       # Instead, shared libraries are loaded at an image base (0x10000000 by
+       # default) and relocated if they conflict, which is a slow very memory
+       # consuming and fragmenting process.  To avoid this, we pick a random,
+       # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
+       # time.  Moving up from 0x10000000 also allows more sbrk(2) space.
+       _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+       _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
+       ;;
+      irix5* | irix6*)
+        case $cc_basename in
+          CC*)
+           # SGI C++
+           _LT_TAGVAR(archive_cmds, $1)='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib'
+
+           # Archives containing C++ object files must be created using
+           # "CC -ar", where "CC" is the IRIX C++ compiler.  This is
+           # necessary to make sure instantiated templates are included
+           # in the archive.
+           _LT_TAGVAR(old_archive_cmds, $1)='$CC -ar -WR,-u -o $oldlib $oldobjs'
+           ;;
+          *)
+           if test yes = "$GXX"; then
+             if test no = "$with_gnu_ld"; then
+               _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib'
+             else
+               _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` -o $lib'
+             fi
+           fi
+           _LT_TAGVAR(link_all_deplibs, $1)=yes
+           ;;
+        esac
+        _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+        _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+        _LT_TAGVAR(inherit_rpath, $1)=yes
+        ;;
+
+      linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*)
+        case $cc_basename in
+          KCC*)
+           # Kuck and Associates, Inc. (KAI) C++ Compiler
+
+           # KCC will only create a shared library if the output file
+           # ends with ".so" (or ".sl" for HP-UX), so rename the library
+           # to its proper name (with version) after linking.
+           _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e 
'\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e 
"s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects 
$compiler_flags --soname $soname -o \$templib; mv \$templib $lib'
+           _LT_TAGVAR(archive_expsym_cmds, $1)='tempext=`echo $shared_ext | 
$SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED 
-e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs 
$postdep_objects $compiler_flags --soname $soname -o \$templib 
$wl-retain-symbols-file,$export_symbols; mv \$templib $lib'
+           # Commands to make compiler produce verbose output that lists
+           # what "hidden" libraries, object files and flags are used when
+           # linking a shared library.
+           #
+           # There doesn't appear to be a way to prevent this compiler from
+           # explicitly linking system object files so we need to strip them
+           # from the output so that they don't get included in the library
+           # dependencies.
+           output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext 
-o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; 
list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; 
*.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"'
+
+           _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir'
+           _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic'
+
+           # Archives containing C++ object files must be created using
+           # "CC -Bstatic", where "CC" is the KAI C++ compiler.
+           _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs'
+           ;;
+         icpc* | ecpc* )
+           # Intel C++
+           with_gnu_ld=yes
+           # Versions 8.0 and above of icpc choke on multiply defined symbols
+           # if we add $predep_objects and $postdep_objects; versions 7.1 and
+           # earlier, however, do not add the objects themselves.
+           case `$CC -V 2>&1` in
+             *"Version 7."*)
+               _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects 
$libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o 
$lib'
+               _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared 
$predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname 
$wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib'
+               ;;
+             *)  # Version 8.0 or newer
+               tmp_idyn=
+               case $host_cpu in
+                 ia64*) tmp_idyn=' -i_dynamic';;
+               esac
+               _LT_TAGVAR(archive_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs 
$deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
+               _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared'"$tmp_idyn"' 
$libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file 
$wl$export_symbols -o $lib'
+               ;;
+           esac
+           _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+           _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir'
+           _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic'
+           _LT_TAGVAR(whole_archive_flag_spec, 
$1)='$wl--whole-archive$convenience $wl--no-whole-archive'
+           ;;
+          pgCC* | pgcpp*)
+            # Portland Group C++ compiler
+           case `$CC -V` in
+           *pgCC\ [[1-5]].* | *pgcpp\ [[1-5]].*)
+             _LT_TAGVAR(prelink_cmds, $1)='tpldir=Template.dir~
+               rm -rf $tpldir~
+               $CC --prelink_objects --instantiation_dir $tpldir $objs 
$libobjs $compile_deplibs~
+               compile_command="$compile_command `find $tpldir -name \*.o | 
sort | $NL2SP`"'
+             _LT_TAGVAR(old_archive_cmds, $1)='tpldir=Template.dir~
+                rm -rf $tpldir~
+                $CC --prelink_objects --instantiation_dir $tpldir 
$oldobjs$old_deplibs~
+                $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name 
\*.o | sort | $NL2SP`~
+                $RANLIB $oldlib'
+             _LT_TAGVAR(archive_cmds, $1)='tpldir=Template.dir~
+                rm -rf $tpldir~
+                $CC --prelink_objects --instantiation_dir $tpldir 
$predep_objects $libobjs $deplibs $convenience $postdep_objects~
+                $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find 
$tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags $wl-soname 
$wl$soname -o $lib'
+             _LT_TAGVAR(archive_expsym_cmds, $1)='tpldir=Template.dir~
+                rm -rf $tpldir~
+                $CC --prelink_objects --instantiation_dir $tpldir 
$predep_objects $libobjs $deplibs $convenience $postdep_objects~
+                $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find 
$tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags $wl-soname 
$wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib'
+             ;;
+           *) # Version 6 and above use weak symbols
+             _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag 
$predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname 
$wl$soname -o $lib'
+             _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag 
$predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname 
$wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib'
+             ;;
+           esac
+
+           _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl--rpath $wl$libdir'
+           _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic'
+           _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for 
conv in $convenience\"\"; do test  -n \"$conv\" && 
new_convenience=\"$new_convenience,$conv\"; done; func_echo_all 
\"$new_convenience\"` $wl--no-whole-archive'
+            ;;
+         cxx*)
+           # Compaq C++
+           _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs 
$deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib'
+           _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects 
$libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname  -o 
$lib $wl-retain-symbols-file $wl$export_symbols'
+
+           runpath_var=LD_RUN_PATH
+           _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+           _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+           # Commands to make compiler produce verbose output that lists
+           # what "hidden" libraries, object files and flags are used when
+           # linking a shared library.
+           #
+           # There doesn't appear to be a way to prevent this compiler from
+           # explicitly linking system object files so we need to strip them
+           # from the output so that they don't get included in the library
+           # dependencies.
+           output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v 
conftest.$objext 2>&1 | $GREP "ld"`; templist=`func_echo_all "$templist" | $SED 
"s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list= ; for z in $templist; do case $z in 
conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; 
done; func_echo_all "X$list" | $Xsed'
+           ;;
+         xl* | mpixl* | bgxl*)
+           # IBM XL 8.0 on PPC, with GNU ld
+           _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+           _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic'
+           _LT_TAGVAR(archive_cmds, $1)='$CC -qmkshrobj $libobjs $deplibs 
$compiler_flags $wl-soname $wl$soname -o $lib'
+           if test yes = "$supports_anon_versioning"; then
+             _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > 
$output_objdir/$libname.ver~
+                cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> 
$output_objdir/$libname.ver~
+                echo "local: *; };" >> $output_objdir/$libname.ver~
+                $CC -qmkshrobj $libobjs $deplibs $compiler_flags $wl-soname 
$wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib'
+           fi
+           ;;
+         *)
+           case `$CC -V 2>&1 | sed 5q` in
+           *Sun\ C*)
+             # Sun C++ 5.9
+             _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs'
+             _LT_TAGVAR(archive_cmds, $1)='$CC -G$allow_undefined_flag 
-h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects 
$compiler_flags'
+             _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G$allow_undefined_flag 
-h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects 
$compiler_flags $wl-retain-symbols-file $wl$export_symbols'
+             _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+             _LT_TAGVAR(whole_archive_flag_spec, 
$1)='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do test 
-z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all 
\"$new_convenience\"` $wl--no-whole-archive'
+             _LT_TAGVAR(compiler_needs_object, $1)=yes
+
+             # Not sure whether something based on
+             # $CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1
+             # would be better.
+             output_verbose_link_cmd='func_echo_all'
+
+             # Archives containing C++ object files must be created using
+             # "CC -xar", where "CC" is the Sun C++ compiler.  This is
+             # necessary to make sure instantiated templates are included
+             # in the archive.
+             _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs'
+             ;;
+           esac
+           ;;
+       esac
+       ;;
+
+      lynxos*)
+        # FIXME: insert proper C++ library support
+       _LT_TAGVAR(ld_shlibs, $1)=no
+       ;;
+
+      m88k*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+       ;;
+
+      mvs*)
+        case $cc_basename in
+          cxx*)
+           # FIXME: insert proper C++ library support
+           _LT_TAGVAR(ld_shlibs, $1)=no
+           ;;
+         *)
+           # FIXME: insert proper C++ library support
+           _LT_TAGVAR(ld_shlibs, $1)=no
+           ;;
+       esac
+       ;;
+
+      netbsd*)
+        if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
+         _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable  -o $lib 
$predep_objects $libobjs $deplibs $postdep_objects $linker_flags'
+         wlarc=
+         _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+         _LT_TAGVAR(hardcode_direct, $1)=yes
+         _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+       fi
+       # Work around some broken pre-1.5 toolchains
+       output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | 
$GREP conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"'
+       ;;
+
+      *nto* | *qnx*)
+        _LT_TAGVAR(ld_shlibs, $1)=yes
+       ;;
+
+      openbsd* | bitrig*)
+       if test -f /usr/libexec/ld.so; then
+         _LT_TAGVAR(hardcode_direct, $1)=yes
+         _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+         _LT_TAGVAR(hardcode_direct_absolute, $1)=yes
+         _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects 
$libobjs $deplibs $postdep_objects $compiler_flags -o $lib'
+         _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir'
+         if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`"; then
+           _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag 
$predep_objects $libobjs $deplibs $postdep_objects $compiler_flags 
$wl-retain-symbols-file,$export_symbols -o $lib'
+           _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E'
+           _LT_TAGVAR(whole_archive_flag_spec, 
$1)=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive'
+         fi
+         output_verbose_link_cmd=func_echo_all
+       else
+         _LT_TAGVAR(ld_shlibs, $1)=no
+       fi
+       ;;
+
+      osf3* | osf4* | osf5*)
+        case $cc_basename in
+          KCC*)
+           # Kuck and Associates, Inc. (KAI) C++ Compiler
+
+           # KCC will only create a shared library if the output file
+           # ends with ".so" (or ".sl" for HP-UX), so rename the library
+           # to its proper name (with version) after linking.
+           _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e 
'\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e 
"s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects 
$compiler_flags --soname $soname -o \$templib; mv \$templib $lib'
+
+           _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir'
+           _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+           # Archives containing C++ object files must be created using
+           # the KAI C++ compiler.
+           case $host in
+             osf3*) _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib 
$oldobjs' ;;
+             *) _LT_TAGVAR(old_archive_cmds, $1)='$CC -o $oldlib $oldobjs' ;;
+           esac
+           ;;
+          RCC*)
+           # Rational C++ 2.4.1
+           # FIXME: insert proper C++ library support
+           _LT_TAGVAR(ld_shlibs, $1)=no
+           ;;
+          cxx*)
+           case $host in
+             osf3*)
+               _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved 
$wl\*'
+               _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag 
$predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname 
$soname `test -n "$verstring" && func_echo_all "$wl-set_version $verstring"` 
-update_registry $output_objdir/so_locations -o $lib'
+               _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+               ;;
+             *)
+               _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*'
+               _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag 
$predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym 
-soname $soname `test -n "$verstring" && func_echo_all "-set_version 
$verstring"` -update_registry $output_objdir/so_locations -o $lib'
+               _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat 
$export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~
+                  echo "-hidden">> $lib.exp~
+                  $CC -shared$allow_undefined_flag $predep_objects $libobjs 
$deplibs $postdep_objects $compiler_flags -msym -soname $soname $wl-input 
$wl$lib.exp  `test -n "$verstring" && $ECHO "-set_version $verstring"` 
-update_registry $output_objdir/so_locations -o $lib~
+                  $RM $lib.exp'
+               _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir'
+               ;;
+           esac
+
+           _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+           # Commands to make compiler produce verbose output that lists
+           # what "hidden" libraries, object files and flags are used when
+           # linking a shared library.
+           #
+           # There doesn't appear to be a way to prevent this compiler from
+           # explicitly linking system object files so we need to strip them
+           # from the output so that they don't get included in the library
+           # dependencies.
+           output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v 
conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`func_echo_all 
"$templist" | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list= ; for z in 
$templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) 
list="$list $z";;esac; done; func_echo_all "$list"'
+           ;;
+         *)
+           if test yes,no = "$GXX,$with_gnu_ld"; then
+             _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved 
$wl\*'
+             case $host in
+               osf3*)
+                 _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib 
$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects 
$compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all 
"$wl-set_version $wl$verstring"` $wl-update_registry 
$wl$output_objdir/so_locations -o $lib'
+                 ;;
+               *)
+                 _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib 
$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects 
$compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && 
func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry 
$wl$output_objdir/so_locations -o $lib'
+                 ;;
+             esac
+
+             _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir'
+             _LT_TAGVAR(hardcode_libdir_separator, $1)=:
+
+             # Commands to make compiler produce verbose output that lists
+             # what "hidden" libraries, object files and flags are used when
+             # linking a shared library.
+             output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 
2>&1 | $GREP -v "^Configured with:" | $GREP " \-L"'
+
+           else
+             # FIXME: insert proper C++ library support
+             _LT_TAGVAR(ld_shlibs, $1)=no
+           fi
+           ;;
+        esac
+        ;;
+
+      psos*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+
+      sunos4*)
+        case $cc_basename in
+          CC*)
+           # Sun C++ 4.x
+           # FIXME: insert proper C++ library support
+           _LT_TAGVAR(ld_shlibs, $1)=no
+           ;;
+          lcc*)
+           # Lucid
+           # FIXME: insert proper C++ library support
+           _LT_TAGVAR(ld_shlibs, $1)=no
+           ;;
+          *)
+           # FIXME: insert proper C++ library support
+           _LT_TAGVAR(ld_shlibs, $1)=no
+           ;;
+        esac
+        ;;
+
+      solaris*)
+        case $cc_basename in
+          CC* | sunCC*)
+           # Sun C++ 4.2, 5.x and Centerline C++
+            _LT_TAGVAR(archive_cmds_need_lc,$1)=yes
+           _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs'
+           _LT_TAGVAR(archive_cmds, $1)='$CC -G$allow_undefined_flag -h$soname 
-o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags'
+           _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > 
$lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: 
*; };" >> $lib.exp~
+              $CC -G$allow_undefined_flag $wl-M $wl$lib.exp -h$soname -o $lib 
$predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+           _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir'
+           _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+           case $host_os in
+             solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+             *)
+               # The compiler driver will combine and reorder linker options,
+               # but understands '-z linker_flag'.
+               # Supported since Solaris 2.6 (maybe 2.5.1?)
+               _LT_TAGVAR(whole_archive_flag_spec, $1)='-z 
allextract$convenience -z defaultextract'
+               ;;
+           esac
+           _LT_TAGVAR(link_all_deplibs, $1)=yes
+
+           output_verbose_link_cmd='func_echo_all'
+
+           # Archives containing C++ object files must be created using
+           # "CC -xar", where "CC" is the Sun C++ compiler.  This is
+           # necessary to make sure instantiated templates are included
+           # in the archive.
+           _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs'
+           ;;
+          gcx*)
+           # Green Hills C++ Compiler
+           _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs 
$deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib'
+
+           # The C++ compiler must be used to create the archive.
+           _LT_TAGVAR(old_archive_cmds, $1)='$CC $LDFLAGS -archive -o $oldlib 
$oldobjs'
+           ;;
+          *)
+           # GNU C++ compiler with Solaris linker
+           if test yes,no = "$GXX,$with_gnu_ld"; then
+             _LT_TAGVAR(no_undefined_flag, $1)=' $wl-z ${wl}defs'
+             if $CC --version | $GREP -v '^2\.7' > /dev/null; then
+               _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib 
$predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h 
$wl$soname -o $lib'
+               _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > 
$lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: 
*; };" >> $lib.exp~
+                  $CC -shared $pic_flag -nostdlib $wl-M $wl$lib.exp $wl-h 
$wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects 
$compiler_flags~$RM $lib.exp'
+
+               # Commands to make compiler produce verbose output that lists
+               # what "hidden" libraries, object files and flags are used when
+               # linking a shared library.
+               output_verbose_link_cmd='$CC -shared $CFLAGS -v 
conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP " \-L"'
+             else
+               # g++ 2.7 appears to require '-G' NOT '-shared' on this
+               # platform.
+               _LT_TAGVAR(archive_cmds, $1)='$CC -G -nostdlib $predep_objects 
$libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib'
+               _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > 
$lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: 
*; };" >> $lib.exp~
+                  $CC -G -nostdlib $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib 
$predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp'
+
+               # Commands to make compiler produce verbose output that lists
+               # what "hidden" libraries, object files and flags are used when
+               # linking a shared library.
+               output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 
2>&1 | $GREP -v "^Configured with:" | $GREP " \-L"'
+             fi
+
+             _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R $wl$libdir'
+             case $host_os in
+               solaris2.[[0-5]] | solaris2.[[0-5]].*) ;;
+               *)
+                 _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl-z 
${wl}allextract$convenience $wl-z ${wl}defaultextract'
+                 ;;
+             esac
+           fi
+           ;;
+        esac
+        ;;
+
+    sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | 
sco3.2v5.0.[[024]]*)
+      _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text'
+      _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+      _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+      runpath_var='LD_RUN_PATH'
+
+      case $cc_basename in
+        CC*)
+         _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs 
$deplibs $compiler_flags'
+         _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G 
$wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs 
$compiler_flags'
+         ;;
+       *)
+         _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib 
$libobjs $deplibs $compiler_flags'
+         _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared 
$wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs 
$compiler_flags'
+         ;;
+      esac
+      ;;
+
+      sysv5* | sco3.2v5* | sco5v6*)
+       # Note: We CANNOT use -z defs as we might desire, because we do not
+       # link with -lc, and that would cause any symbols used from libc to
+       # always be unresolved, which means just about no library would
+       # ever link correctly.  If we're not using GNU ld we use -z text
+       # though, which does catch some bad symbols but isn't as heavy-handed
+       # as -z defs.
+       _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text'
+       _LT_TAGVAR(allow_undefined_flag, $1)='$wl-z,nodefs'
+       _LT_TAGVAR(archive_cmds_need_lc, $1)=no
+       _LT_TAGVAR(hardcode_shlibpath_var, $1)=no
+       _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R,$libdir'
+       _LT_TAGVAR(hardcode_libdir_separator, $1)=':'
+       _LT_TAGVAR(link_all_deplibs, $1)=yes
+       _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-Bexport'
+       runpath_var='LD_RUN_PATH'
+
+       case $cc_basename in
+          CC*)
+           _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs 
$deplibs $compiler_flags'
+           _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G 
$wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs 
$compiler_flags'
+           _LT_TAGVAR(old_archive_cmds, $1)='$CC -Tprelink_objects $oldobjs~
+              '"$_LT_TAGVAR(old_archive_cmds, $1)"
+           _LT_TAGVAR(reload_cmds, $1)='$CC -Tprelink_objects $reload_objs~
+              '"$_LT_TAGVAR(reload_cmds, $1)"
+           ;;
+         *)
+           _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib 
$libobjs $deplibs $compiler_flags'
+           _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared 
$wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs 
$compiler_flags'
+           ;;
+       esac
+      ;;
+
+      tandem*)
+        case $cc_basename in
+          NCC*)
+           # NonStop-UX NCC 3.20
+           # FIXME: insert proper C++ library support
+           _LT_TAGVAR(ld_shlibs, $1)=no
+           ;;
+          *)
+           # FIXME: insert proper C++ library support
+           _LT_TAGVAR(ld_shlibs, $1)=no
+           ;;
+        esac
+        ;;
+
+      vxworks*)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+
+      *)
+        # FIXME: insert proper C++ library support
+        _LT_TAGVAR(ld_shlibs, $1)=no
+        ;;
+    esac
+
+    AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)])
+    test no = "$_LT_TAGVAR(ld_shlibs, $1)" && can_build_shared=no
+
+    _LT_TAGVAR(GCC, $1)=$GXX
+    _LT_TAGVAR(LD, $1)=$LD
+
+    ## CAVEAT EMPTOR:
+    ## There is no encapsulation within the following macros, do not change
+    ## the running order or otherwise move them around unless you know exactly
+    ## what you are doing...
+    _LT_SYS_HIDDEN_LIBDEPS($1)
+    _LT_COMPILER_PIC($1)
+    _LT_COMPILER_C_O($1)
+    _LT_COMPILER_FILE_LOCKS($1)
+    _LT_LINKER_SHLIBS($1)
+    _LT_SYS_DYNAMIC_LINKER($1)
+    _LT_LINKER_HARDCODE_LIBPATH($1)
+
+    _LT_CONFIG($1)
+  fi # test -n "$compiler"
+
+  CC=$lt_save_CC
+  CFLAGS=$lt_save_CFLAGS
+  LDCXX=$LD
+  LD=$lt_save_LD
+  GCC=$lt_save_GCC
+  with_gnu_ld=$lt_save_with_gnu_ld
+  lt_cv_path_LDCXX=$lt_cv_path_LD
+  lt_cv_path_LD=$lt_save_path_LD
+  lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld
+  lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld
+fi # test yes != "$_lt_caught_CXX_error"
+
+AC_LANG_POP
+])# _LT_LANG_CXX_CONFIG
+
+
+# _LT_FUNC_STRIPNAME_CNF
+# ----------------------
+# func_stripname_cnf prefix suffix name
+# strip PREFIX and SUFFIX off of NAME.
+# PREFIX and SUFFIX must not contain globbing or regex special
+# characters, hashes, or percent signs, but SUFFIX may contain a leading
+# dot (in which case that matches only a dot).
+#
+# This function is identical to the (non-XSI) version of func_stripname,
+# except this one can be used by m4 code that may be executed by configure,
+# rather than the libtool script.
+m4_defun([_LT_FUNC_STRIPNAME_CNF],[dnl
+AC_REQUIRE([_LT_DECL_SED])
+AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])
+func_stripname_cnf ()
+{
+  case @S|@2 in
+  .*) func_stripname_result=`$ECHO "@S|@3" | $SED "s%^@S|@1%%; 
s%\\\\@S|@2\$%%"`;;
+  *)  func_stripname_result=`$ECHO "@S|@3" | $SED "s%^@S|@1%%; s%@S|@2\$%%"`;;
+  esac
+} # func_stripname_cnf
+])# _LT_FUNC_STRIPNAME_CNF
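# [Illustrative sketch (editorial; not part of the committed file): once
#  configure expands the @S|@N placeholders to $1/$2/$3, the helper above
#  behaves like libtool's own func_stripname, e.g. in a POSIX shell:
#    func_stripname_cnf 'lib' '.la' 'libfoo.la'
#    echo "$func_stripname_result"     # prints: foo
#  'libfoo.la' is a hypothetical argument used only for illustration.]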
+
+
+# _LT_SYS_HIDDEN_LIBDEPS([TAGNAME])
+# ---------------------------------
+# Figure out "hidden" library dependencies from verbose
+# compiler output when linking a shared library.
+# Parse the compiler output and extract the necessary
+# objects, libraries and library flags.
+m4_defun([_LT_SYS_HIDDEN_LIBDEPS],
+[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
+AC_REQUIRE([_LT_FUNC_STRIPNAME_CNF])dnl
+# Dependencies to place before and after the object being linked:
+_LT_TAGVAR(predep_objects, $1)=
+_LT_TAGVAR(postdep_objects, $1)=
+_LT_TAGVAR(predeps, $1)=
+_LT_TAGVAR(postdeps, $1)=
+_LT_TAGVAR(compiler_lib_search_path, $1)=
+
+dnl we can't use the lt_simple_compile_test_code here,
+dnl because it contains code intended for an executable,
+dnl not a library.  It's possible we should let each
+dnl tag define a new lt_????_link_test_code variable,
+dnl but it's only used here...
+m4_if([$1], [], [cat > conftest.$ac_ext <<_LT_EOF
+int a;
+void foo (void) { a = 0; }
+_LT_EOF
+], [$1], [CXX], [cat > conftest.$ac_ext <<_LT_EOF
+class Foo
+{
+public:
+  Foo (void) { a = 0; }
+private:
+  int a;
+};
+_LT_EOF
+], [$1], [F77], [cat > conftest.$ac_ext <<_LT_EOF
+      subroutine foo
+      implicit none
+      integer*4 a
+      a=0
+      return
+      end
+_LT_EOF
+], [$1], [FC], [cat > conftest.$ac_ext <<_LT_EOF
+      subroutine foo
+      implicit none
+      integer a
+      a=0
+      return
+      end
+_LT_EOF
+], [$1], [GCJ], [cat > conftest.$ac_ext <<_LT_EOF
+public class foo {
+  private int a;
+  public void bar (void) {
+    a = 0;
+  }
+};
+_LT_EOF
+], [$1], [GO], [cat > conftest.$ac_ext <<_LT_EOF
+package foo
+func foo() {
+}
+_LT_EOF
+])
+
+_lt_libdeps_save_CFLAGS=$CFLAGS
+case "$CC $CFLAGS " in #(
+*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;;
+*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;;
+*\ -fuse-linker-plugin*\ *) CFLAGS="$CFLAGS -fno-use-linker-plugin" ;;
+esac
+
+dnl Parse the compiler output and extract the necessary
+dnl objects, libraries and library flags.
+if AC_TRY_EVAL(ac_compile); then
+  # Parse the compiler output and extract the necessary
+  # objects, libraries and library flags.
+
+  # Sentinel used to keep track of whether or not we are before
+  # the conftest object file.
+  pre_test_object_deps_done=no
+
+  for p in `eval "$output_verbose_link_cmd"`; do
+    case $prev$p in
+
+    -L* | -R* | -l*)
+       # Some compilers place a space between "-{L,R}" and the path.
+       # Remove the space.
+       if test x-L = "$p" ||
+          test x-R = "$p"; then
+        prev=$p
+        continue
+       fi
+
+       # Expand the sysroot to ease extracting the directories later.
+       if test -z "$prev"; then
+         case $p in
+         -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; 
p=$func_stripname_result ;;
+         -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; 
p=$func_stripname_result ;;
+         -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; 
p=$func_stripname_result ;;
+         esac
+       fi
+       case $p in
+       =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result 
;;
+       esac
+       if test no = "$pre_test_object_deps_done"; then
+        case $prev in
+        -L | -R)
+          # Internal compiler library paths should come after those
+          # provided by the user.  The postdeps already come after the
+          # user-supplied libs, so there is no need to process them.
+          if test -z "$_LT_TAGVAR(compiler_lib_search_path, $1)"; then
+            _LT_TAGVAR(compiler_lib_search_path, $1)=$prev$p
+          else
+            _LT_TAGVAR(compiler_lib_search_path, 
$1)="${_LT_TAGVAR(compiler_lib_search_path, $1)} $prev$p"
+          fi
+          ;;
+        # The "-l" case would never come before the object being
+        # linked, so don't bother handling this case.
+        esac
+       else
+        if test -z "$_LT_TAGVAR(postdeps, $1)"; then
+          _LT_TAGVAR(postdeps, $1)=$prev$p
+        else
+          _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} $prev$p"
+        fi
+       fi
+       prev=
+       ;;
+
+    *.lto.$objext) ;; # Ignore GCC LTO objects
+    *.$objext)
+       # This assumes that the test object file only shows up
+       # once in the compiler output.
+       if test "$p" = "conftest.$objext"; then
+        pre_test_object_deps_done=yes
+        continue
+       fi
+
+       if test no = "$pre_test_object_deps_done"; then
+        if test -z "$_LT_TAGVAR(predep_objects, $1)"; then
+          _LT_TAGVAR(predep_objects, $1)=$p
+        else
+          _LT_TAGVAR(predep_objects, $1)="$_LT_TAGVAR(predep_objects, $1) $p"
+        fi
+       else
+        if test -z "$_LT_TAGVAR(postdep_objects, $1)"; then
+          _LT_TAGVAR(postdep_objects, $1)=$p
+        else
+          _LT_TAGVAR(postdep_objects, $1)="$_LT_TAGVAR(postdep_objects, $1) $p"
+        fi
+       fi
+       ;;
+
+    *) ;; # Ignore the rest.
+
+    esac
+  done
+
+  # Clean up.
+  rm -f a.out a.exe
+else
+  echo "libtool.m4: error: problem compiling $1 test program"
+fi
+
+$RM -f conftest.$objext
+CFLAGS=$_lt_libdeps_save_CFLAGS
+
+# PORTME: override above test on systems where it is broken
+m4_if([$1], [CXX],
+[case $host_os in
+interix[[3-9]]*)
+  # Interix 3.5 installs completely hosed .la files for C++, so rather than
+  # hack all around it, let's just trust "g++" to DTRT.
+  _LT_TAGVAR(predep_objects,$1)=
+  _LT_TAGVAR(postdep_objects,$1)=
+  _LT_TAGVAR(postdeps,$1)=
+  ;;
+esac
+])
+
+case " $_LT_TAGVAR(postdeps, $1) " in
+*" -lc "*) _LT_TAGVAR(archive_cmds_need_lc, $1)=no ;;
+esac
+ _LT_TAGVAR(compiler_lib_search_dirs, $1)=
+if test -n "${_LT_TAGVAR(compiler_lib_search_path, $1)}"; then
+ _LT_TAGVAR(compiler_lib_search_dirs, $1)=`echo " 
${_LT_TAGVAR(compiler_lib_search_path, $1)}" | $SED -e 's! -L! !g' -e 's!^ !!'`
+fi
+_LT_TAGDECL([], [compiler_lib_search_dirs], [1],
+    [The directories searched by this compiler when creating a shared library])
+_LT_TAGDECL([], [predep_objects], [1],
+    [Dependencies to place before and after the objects being linked to
+    create a shared library])
+_LT_TAGDECL([], [postdep_objects], [1])
+_LT_TAGDECL([], [predeps], [1])
+_LT_TAGDECL([], [postdeps], [1])
+_LT_TAGDECL([], [compiler_lib_search_path], [1],
+    [The library search path used internally by the compiler when linking
+    a shared library])
+])# _LT_SYS_HIDDEN_LIBDEPS
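# [Illustrative sketch (editorial; not part of the committed file): as an
#  assumed example of what the parse above yields for a typical verbose g++
#  link line, -L paths printed before conftest.o (e.g. -L/usr/lib/gcc/...)
#  are collected into compiler_lib_search_path, while libraries printed after
#  it (e.g. -lstdc++ -lm -lgcc_s -lc) end up in postdeps; the concrete values
#  depend entirely on the toolchain.]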
+
+
+# _LT_LANG_F77_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for a Fortran 77 compiler are
+# suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to 'libtool'.
+m4_defun([_LT_LANG_F77_CONFIG],
+[AC_LANG_PUSH(Fortran 77)
+if test -z "$F77" || test no = "$F77"; then
+  _lt_disable_F77=yes
+fi
+
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for f77 test sources.
+ac_ext=f
+
+# Object file extension for compiled f77 test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the F77 compiler isn't working.  Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test yes != "$_lt_disable_F77"; then
+  # Code to be used in simple compile tests
+  lt_simple_compile_test_code="\
+      subroutine t
+      return
+      end
+"
+
+  # Code to be used in simple link tests
+  lt_simple_link_test_code="\
+      program t
+      end
+"
+
+  # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+  _LT_TAG_COMPILER
+
+  # save warnings/boilerplate of simple test code
+  _LT_COMPILER_BOILERPLATE
+  _LT_LINKER_BOILERPLATE
+
+  # Allow CC to be a program name with arguments.
+  lt_save_CC=$CC
+  lt_save_GCC=$GCC
+  lt_save_CFLAGS=$CFLAGS
+  CC=${F77-"f77"}
+  CFLAGS=$FFLAGS
+  compiler=$CC
+  _LT_TAGVAR(compiler, $1)=$CC
+  _LT_CC_BASENAME([$compiler])
+  GCC=$G77
+  if test -n "$compiler"; then
+    AC_MSG_CHECKING([if libtool supports shared libraries])
+    AC_MSG_RESULT([$can_build_shared])
+
+    AC_MSG_CHECKING([whether to build shared libraries])
+    test no = "$can_build_shared" && enable_shared=no
+
+    # On AIX, shared libraries and static libraries use the same namespace, and
+    # are all built from PIC.
+    case $host_os in
+      aix3*)
+        test yes = "$enable_shared" && enable_static=no
+        if test -n "$RANLIB"; then
+          archive_cmds="$archive_cmds~\$RANLIB \$lib"
+          postinstall_cmds='$RANLIB $lib'
+        fi
+        ;;
+      aix[[4-9]]*)
+       if test ia64 != "$host_cpu"; then
+         case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in
+         yes,aix,yes) ;;               # shared object as lib.so file only
+         yes,svr4,*) ;;                # shared object as lib.so archive 
member only
+         yes,*) enable_static=no ;;    # shared object in lib.a archive as well
+         esac
+       fi
+        ;;
+    esac
+    AC_MSG_RESULT([$enable_shared])
+
+    AC_MSG_CHECKING([whether to build static libraries])
+    # Make sure either enable_shared or enable_static is yes.
+    test yes = "$enable_shared" || enable_static=yes
+    AC_MSG_RESULT([$enable_static])
+
+    _LT_TAGVAR(GCC, $1)=$G77
+    _LT_TAGVAR(LD, $1)=$LD
+
+    ## CAVEAT EMPTOR:
+    ## There is no encapsulation within the following macros, do not change
+    ## the running order or otherwise move them around unless you know exactly
+    ## what you are doing...
+    _LT_COMPILER_PIC($1)
+    _LT_COMPILER_C_O($1)
+    _LT_COMPILER_FILE_LOCKS($1)
+    _LT_LINKER_SHLIBS($1)
+    _LT_SYS_DYNAMIC_LINKER($1)
+    _LT_LINKER_HARDCODE_LIBPATH($1)
+
+    _LT_CONFIG($1)
+  fi # test -n "$compiler"
+
+  GCC=$lt_save_GCC
+  CC=$lt_save_CC
+  CFLAGS=$lt_save_CFLAGS
+fi # test yes != "$_lt_disable_F77"
+
+AC_LANG_POP
+])# _LT_LANG_F77_CONFIG
+
+
+# _LT_LANG_FC_CONFIG([TAG])
+# -------------------------
+# Ensure that the configuration variables for a Fortran compiler are
+# suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to 'libtool'.
+m4_defun([_LT_LANG_FC_CONFIG],
+[AC_LANG_PUSH(Fortran)
+
+if test -z "$FC" || test no = "$FC"; then
+  _lt_disable_FC=yes
+fi
+
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+_LT_TAGVAR(allow_undefined_flag, $1)=
+_LT_TAGVAR(always_export_symbols, $1)=no
+_LT_TAGVAR(archive_expsym_cmds, $1)=
+_LT_TAGVAR(export_dynamic_flag_spec, $1)=
+_LT_TAGVAR(hardcode_direct, $1)=no
+_LT_TAGVAR(hardcode_direct_absolute, $1)=no
+_LT_TAGVAR(hardcode_libdir_flag_spec, $1)=
+_LT_TAGVAR(hardcode_libdir_separator, $1)=
+_LT_TAGVAR(hardcode_minus_L, $1)=no
+_LT_TAGVAR(hardcode_automatic, $1)=no
+_LT_TAGVAR(inherit_rpath, $1)=no
+_LT_TAGVAR(module_cmds, $1)=
+_LT_TAGVAR(module_expsym_cmds, $1)=
+_LT_TAGVAR(link_all_deplibs, $1)=unknown
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+_LT_TAGVAR(no_undefined_flag, $1)=
+_LT_TAGVAR(whole_archive_flag_spec, $1)=
+_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no
+
+# Source file extension for fc test sources.
+ac_ext=${ac_fc_srcext-f}
+
+# Object file extension for compiled fc test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# No sense in running all these tests if we already determined that
+# the FC compiler isn't working.  Some variables (like enable_shared)
+# are currently assumed to apply to all compilers on this platform,
+# and will be corrupted by setting them based on a non-working compiler.
+if test yes != "$_lt_disable_FC"; then
+  # Code to be used in simple compile tests
+  lt_simple_compile_test_code="\
+      subroutine t
+      return
+      end
+"
+
+  # Code to be used in simple link tests
+  lt_simple_link_test_code="\
+      program t
+      end
+"
+
+  # ltmain only uses $CC for tagged configurations so make sure $CC is set.
+  _LT_TAG_COMPILER
+
+  # save warnings/boilerplate of simple test code
+  _LT_COMPILER_BOILERPLATE
+  _LT_LINKER_BOILERPLATE
+
+  # Allow CC to be a program name with arguments.
+  lt_save_CC=$CC
+  lt_save_GCC=$GCC
+  lt_save_CFLAGS=$CFLAGS
+  CC=${FC-"f95"}
+  CFLAGS=$FCFLAGS
+  compiler=$CC
+  GCC=$ac_cv_fc_compiler_gnu
+
+  _LT_TAGVAR(compiler, $1)=$CC
+  _LT_CC_BASENAME([$compiler])
+
+  if test -n "$compiler"; then
+    AC_MSG_CHECKING([if libtool supports shared libraries])
+    AC_MSG_RESULT([$can_build_shared])
+
+    AC_MSG_CHECKING([whether to build shared libraries])
+    test no = "$can_build_shared" && enable_shared=no
+
+    # On AIX, shared libraries and static libraries use the same namespace, and
+    # are all built from PIC.
+    case $host_os in
+      aix3*)
+        test yes = "$enable_shared" && enable_static=no
+        if test -n "$RANLIB"; then
+          archive_cmds="$archive_cmds~\$RANLIB \$lib"
+          postinstall_cmds='$RANLIB $lib'
+        fi
+        ;;
+      aix[[4-9]]*)
+       if test ia64 != "$host_cpu"; then
+         case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in
+         yes,aix,yes) ;;               # shared object as lib.so file only
+         yes,svr4,*) ;;                # shared object as lib.so archive 
member only
+         yes,*) enable_static=no ;;    # shared object in lib.a archive as well
+         esac
+       fi
+        ;;
+    esac
+    AC_MSG_RESULT([$enable_shared])
+
+    AC_MSG_CHECKING([whether to build static libraries])
+    # Make sure either enable_shared or enable_static is yes.
+    test yes = "$enable_shared" || enable_static=yes
+    AC_MSG_RESULT([$enable_static])
+
+    _LT_TAGVAR(GCC, $1)=$ac_cv_fc_compiler_gnu
+    _LT_TAGVAR(LD, $1)=$LD
+
+    ## CAVEAT EMPTOR:
+    ## There is no encapsulation within the following macros, do not change
+    ## the running order or otherwise move them around unless you know exactly
+    ## what you are doing...
+    _LT_SYS_HIDDEN_LIBDEPS($1)
+    _LT_COMPILER_PIC($1)
+    _LT_COMPILER_C_O($1)
+    _LT_COMPILER_FILE_LOCKS($1)
+    _LT_LINKER_SHLIBS($1)
+    _LT_SYS_DYNAMIC_LINKER($1)
+    _LT_LINKER_HARDCODE_LIBPATH($1)
+
+    _LT_CONFIG($1)
+  fi # test -n "$compiler"
+
+  GCC=$lt_save_GCC
+  CC=$lt_save_CC
+  CFLAGS=$lt_save_CFLAGS
+fi # test yes != "$_lt_disable_FC"
+
+AC_LANG_POP
+])# _LT_LANG_FC_CONFIG
+
+
+# _LT_LANG_GCJ_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for the GNU Java Compiler compiler
+# are suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to 'libtool'.
+m4_defun([_LT_LANG_GCJ_CONFIG],
+[AC_REQUIRE([LT_PROG_GCJ])dnl
+AC_LANG_SAVE
+
+# Source file extension for Java test sources.
+ac_ext=java
+
+# Object file extension for compiled Java test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="class foo {}"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='public class conftest { public static void 
main(String[[]] argv) {}; }'
+
+# ltmain only uses $CC for tagged configurations so make sure $CC is set.
+_LT_TAG_COMPILER
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Allow CC to be a program name with arguments.
+lt_save_CC=$CC
+lt_save_CFLAGS=$CFLAGS
+lt_save_GCC=$GCC
+GCC=yes
+CC=${GCJ-"gcj"}
+CFLAGS=$GCJFLAGS
+compiler=$CC
+_LT_TAGVAR(compiler, $1)=$CC
+_LT_TAGVAR(LD, $1)=$LD
+_LT_CC_BASENAME([$compiler])
+
+# GCJ did not exist back when GCC did not implicitly link libc in.
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+
+## CAVEAT EMPTOR:
+## There is no encapsulation within the following macros, do not change
+## the running order or otherwise move them around unless you know exactly
+## what you are doing...
+if test -n "$compiler"; then
+  _LT_COMPILER_NO_RTTI($1)
+  _LT_COMPILER_PIC($1)
+  _LT_COMPILER_C_O($1)
+  _LT_COMPILER_FILE_LOCKS($1)
+  _LT_LINKER_SHLIBS($1)
+  _LT_LINKER_HARDCODE_LIBPATH($1)
+
+  _LT_CONFIG($1)
+fi
+
+AC_LANG_RESTORE
+
+GCC=$lt_save_GCC
+CC=$lt_save_CC
+CFLAGS=$lt_save_CFLAGS
+])# _LT_LANG_GCJ_CONFIG
+
+
+# _LT_LANG_GO_CONFIG([TAG])
+# --------------------------
+# Ensure that the configuration variables for the GNU Go compiler
+# are suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to 'libtool'.
+m4_defun([_LT_LANG_GO_CONFIG],
+[AC_REQUIRE([LT_PROG_GO])dnl
+AC_LANG_SAVE
+
+# Source file extension for Go test sources.
+ac_ext=go
+
+# Object file extension for compiled Go test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code="package main; func main() { }"
+
+# Code to be used in simple link tests
+lt_simple_link_test_code='package main; func main() { }'
+
+# ltmain only uses $CC for tagged configurations so make sure $CC is set.
+_LT_TAG_COMPILER
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Allow CC to be a program name with arguments.
+lt_save_CC=$CC
+lt_save_CFLAGS=$CFLAGS
+lt_save_GCC=$GCC
+GCC=yes
+CC=${GOC-"gccgo"}
+CFLAGS=$GOFLAGS
+compiler=$CC
+_LT_TAGVAR(compiler, $1)=$CC
+_LT_TAGVAR(LD, $1)=$LD
+_LT_CC_BASENAME([$compiler])
+
+# Go did not exist back when GCC did not implicitly link libc in.
+_LT_TAGVAR(archive_cmds_need_lc, $1)=no
+
+_LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds
+_LT_TAGVAR(reload_flag, $1)=$reload_flag
+_LT_TAGVAR(reload_cmds, $1)=$reload_cmds
+
+## CAVEAT EMPTOR:
+## There is no encapsulation within the following macros, do not change
+## the running order or otherwise move them around unless you know exactly
+## what you are doing...
+if test -n "$compiler"; then
+  _LT_COMPILER_NO_RTTI($1)
+  _LT_COMPILER_PIC($1)
+  _LT_COMPILER_C_O($1)
+  _LT_COMPILER_FILE_LOCKS($1)
+  _LT_LINKER_SHLIBS($1)
+  _LT_LINKER_HARDCODE_LIBPATH($1)
+
+  _LT_CONFIG($1)
+fi
+
+AC_LANG_RESTORE
+
+GCC=$lt_save_GCC
+CC=$lt_save_CC
+CFLAGS=$lt_save_CFLAGS
+])# _LT_LANG_GO_CONFIG
+
+
+# _LT_LANG_RC_CONFIG([TAG])
+# -------------------------
+# Ensure that the configuration variables for the Windows resource compiler
+# are suitably defined.  These variables are subsequently used by _LT_CONFIG
+# to write the compiler configuration to 'libtool'.
+m4_defun([_LT_LANG_RC_CONFIG],
+[AC_REQUIRE([LT_PROG_RC])dnl
+AC_LANG_SAVE
+
+# Source file extension for RC test sources.
+ac_ext=rc
+
+# Object file extension for compiled RC test sources.
+objext=o
+_LT_TAGVAR(objext, $1)=$objext
+
+# Code to be used in simple compile tests
+lt_simple_compile_test_code='sample MENU { MENUITEM "&Soup", 100, CHECKED }'
+
+# Code to be used in simple link tests
+lt_simple_link_test_code=$lt_simple_compile_test_code
+
+# ltmain only uses $CC for tagged configurations so make sure $CC is set.
+_LT_TAG_COMPILER
+
+# save warnings/boilerplate of simple test code
+_LT_COMPILER_BOILERPLATE
+_LT_LINKER_BOILERPLATE
+
+# Allow CC to be a program name with arguments.
+lt_save_CC=$CC
+lt_save_CFLAGS=$CFLAGS
+lt_save_GCC=$GCC
+GCC=
+CC=${RC-"windres"}
+CFLAGS=
+compiler=$CC
+_LT_TAGVAR(compiler, $1)=$CC
+_LT_CC_BASENAME([$compiler])
+_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes
+
+if test -n "$compiler"; then
+  :
+  _LT_CONFIG($1)
+fi
+
+GCC=$lt_save_GCC
+AC_LANG_RESTORE
+CC=$lt_save_CC
+CFLAGS=$lt_save_CFLAGS
+])# _LT_LANG_RC_CONFIG
+
+
+# LT_PROG_GCJ
+# -----------
+AC_DEFUN([LT_PROG_GCJ],
+[m4_ifdef([AC_PROG_GCJ], [AC_PROG_GCJ],
+  [m4_ifdef([A][M_PROG_GCJ], [A][M_PROG_GCJ],
+    [AC_CHECK_TOOL(GCJ, gcj,)
+      test set = "${GCJFLAGS+set}" || GCJFLAGS="-g -O2"
+      AC_SUBST(GCJFLAGS)])])[]dnl
+])
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_GCJ], [LT_PROG_GCJ])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_GCJ], [])
+
+
+# LT_PROG_GO
+# ----------
+AC_DEFUN([LT_PROG_GO],
+[AC_CHECK_TOOL(GOC, gccgo,)
+])
+
+
+# LT_PROG_RC
+# ----------
+AC_DEFUN([LT_PROG_RC],
+[AC_CHECK_TOOL(RC, windres,)
+])
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_RC], [LT_PROG_RC])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_RC], [])
+
+
+# _LT_DECL_EGREP
+# --------------
+# If we don't have a new enough Autoconf to choose the best grep
+# available, choose the first one in the user's PATH.
+m4_defun([_LT_DECL_EGREP],
+[AC_REQUIRE([AC_PROG_EGREP])dnl
+AC_REQUIRE([AC_PROG_FGREP])dnl
+test -z "$GREP" && GREP=grep
+_LT_DECL([], [GREP], [1], [A grep program that handles long lines])
+_LT_DECL([], [EGREP], [1], [An ERE matcher])
+_LT_DECL([], [FGREP], [1], [A literal string matcher])
+dnl Non-bleeding-edge autoconf doesn't subst GREP, so do it here too
+AC_SUBST([GREP])
+])
+
+
+# _LT_DECL_OBJDUMP
+# ----------------
+# If we don't have a new enough Autoconf to choose the best objdump
+# available, choose the first one in the user's PATH.
+m4_defun([_LT_DECL_OBJDUMP],
+[AC_CHECK_TOOL(OBJDUMP, objdump, false)
+test -z "$OBJDUMP" && OBJDUMP=objdump
+_LT_DECL([], [OBJDUMP], [1], [An object symbol dumper])
+AC_SUBST([OBJDUMP])
+])
+
+# _LT_DECL_DLLTOOL
+# ----------------
+# Ensure DLLTOOL variable is set.
+m4_defun([_LT_DECL_DLLTOOL],
+[AC_CHECK_TOOL(DLLTOOL, dlltool, false)
+test -z "$DLLTOOL" && DLLTOOL=dlltool
+_LT_DECL([], [DLLTOOL], [1], [DLL creation program])
+AC_SUBST([DLLTOOL])
+])
+
+# _LT_DECL_SED
+# ------------
+# Check for a fully-functional sed program that truncates
+# as few characters as possible.  Prefer GNU sed if found.
+m4_defun([_LT_DECL_SED],
+[AC_PROG_SED
+test -z "$SED" && SED=sed
+Xsed="$SED -e 1s/^X//"
+_LT_DECL([], [SED], [1], [A sed program that does not truncate output])
+_LT_DECL([], [Xsed], ["\$SED -e 1s/^X//"],
+    [Sed that helps us avoid accidentally triggering echo(1) options like -n])
+])# _LT_DECL_SED
+
+m4_ifndef([AC_PROG_SED], [
+############################################################
+# NOTE: This macro has been submitted for inclusion into   #
+#  GNU Autoconf as AC_PROG_SED.  When it is available in   #
+#  a released version of Autoconf we should remove this    #
+#  macro and use it instead.                               #
+############################################################
+
+m4_defun([AC_PROG_SED],
+[AC_MSG_CHECKING([for a sed that does not truncate output])
+AC_CACHE_VAL(lt_cv_path_SED,
+[# Loop through the user's path and test for sed and gsed.
+# Then use that list of sed programs to test for truncation.
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  for lt_ac_prog in sed gsed; do
+    for ac_exec_ext in '' $ac_executable_extensions; do
+      if $as_executable_p "$as_dir/$lt_ac_prog$ac_exec_ext"; then
+        lt_ac_sed_list="$lt_ac_sed_list $as_dir/$lt_ac_prog$ac_exec_ext"
+      fi
+    done
+  done
+done
+IFS=$as_save_IFS
+lt_ac_max=0
+lt_ac_count=0
+# Add /usr/xpg4/bin/sed as it is typically found on Solaris
+# along with /bin/sed that truncates output.
+for lt_ac_sed in $lt_ac_sed_list /usr/xpg4/bin/sed; do
+  test ! -f "$lt_ac_sed" && continue
+  cat /dev/null > conftest.in
+  lt_ac_count=0
+  echo $ECHO_N "0123456789$ECHO_C" >conftest.in
+  # Check for GNU sed and select it if it is found.
+  if "$lt_ac_sed" --version 2>&1 < /dev/null | grep 'GNU' > /dev/null; then
+    lt_cv_path_SED=$lt_ac_sed
+    break
+  fi
+  while true; do
+    cat conftest.in conftest.in >conftest.tmp
+    mv conftest.tmp conftest.in
+    cp conftest.in conftest.nl
+    echo >>conftest.nl
+    $lt_ac_sed -e 's/a$//' < conftest.nl >conftest.out || break
+    cmp -s conftest.out conftest.nl || break
+    # 10000 chars as input seems more than enough
+    test 10 -lt "$lt_ac_count" && break
+    lt_ac_count=`expr $lt_ac_count + 1`
+    if test "$lt_ac_count" -gt "$lt_ac_max"; then
+      lt_ac_max=$lt_ac_count
+      lt_cv_path_SED=$lt_ac_sed
+    fi
+  done
+done
+])
+SED=$lt_cv_path_SED
+AC_SUBST([SED])
+AC_MSG_RESULT([$SED])
+])#AC_PROG_SED
+])#m4_ifndef
+
+# Old name:
+AU_ALIAS([LT_AC_PROG_SED], [AC_PROG_SED])
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([LT_AC_PROG_SED], [])
+
+
+# _LT_CHECK_SHELL_FEATURES
+# ------------------------
+# Find out whether the shell is Bourne or XSI compatible,
+# or has some other useful features.
+m4_defun([_LT_CHECK_SHELL_FEATURES],
+[if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then
+  lt_unset=unset
+else
+  lt_unset=false
+fi
+_LT_DECL([], [lt_unset], [0], [whether the shell understands "unset"])dnl
+
+# test EBCDIC or ASCII
+case `echo X|tr X '\101'` in
+ A) # ASCII based system
+    # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
+  lt_SP2NL='tr \040 \012'
+  lt_NL2SP='tr \015\012 \040\040'
+  ;;
+ *) # EBCDIC based system
+  lt_SP2NL='tr \100 \n'
+  lt_NL2SP='tr \r\n \100\100'
+  ;;
+esac
+_LT_DECL([SP2NL], [lt_SP2NL], [1], [turn spaces into newlines])dnl
+_LT_DECL([NL2SP], [lt_NL2SP], [1], [turn newlines into spaces])dnl
+])# _LT_CHECK_SHELL_FEATURES
+
+
+# _LT_PATH_CONVERSION_FUNCTIONS
+# -----------------------------
+# Determine what file name conversion functions should be used by
+# func_to_host_file (and, implicitly, by func_to_host_path).  These are needed
+# for certain cross-compile configurations and native mingw.
+m4_defun([_LT_PATH_CONVERSION_FUNCTIONS],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+AC_MSG_CHECKING([how to convert $build file names to $host format])
+AC_CACHE_VAL(lt_cv_to_host_file_cmd,
+[case $host in
+  *-*-mingw* )
+    case $build in
+      *-*-mingw* ) # actually msys
+        lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
+        ;;
+      *-*-cygwin* )
+        lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
+        ;;
+      * ) # otherwise, assume *nix
+        lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
+        ;;
+    esac
+    ;;
+  *-*-cygwin* )
+    case $build in
+      *-*-mingw* ) # actually msys
+        lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
+        ;;
+      *-*-cygwin* )
+        lt_cv_to_host_file_cmd=func_convert_file_noop
+        ;;
+      * ) # otherwise, assume *nix
+        lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
+        ;;
+    esac
+    ;;
+  * ) # unhandled hosts (and "normal" native builds)
+    lt_cv_to_host_file_cmd=func_convert_file_noop
+    ;;
+esac
+])
+to_host_file_cmd=$lt_cv_to_host_file_cmd
+AC_MSG_RESULT([$lt_cv_to_host_file_cmd])
+_LT_DECL([to_host_file_cmd], [lt_cv_to_host_file_cmd],
+         [0], [convert $build file names to $host format])dnl
+
+AC_MSG_CHECKING([how to convert $build file names to toolchain format])
+AC_CACHE_VAL(lt_cv_to_tool_file_cmd,
+[# Assume ordinary cross tools, or a native build.
+lt_cv_to_tool_file_cmd=func_convert_file_noop
+case $host in
+  *-*-mingw* )
+    case $build in
+      *-*-mingw* ) # actually msys
+        lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
+        ;;
+    esac
+    ;;
+esac
+])
+to_tool_file_cmd=$lt_cv_to_tool_file_cmd
+AC_MSG_RESULT([$lt_cv_to_tool_file_cmd])
+_LT_DECL([to_tool_file_cmd], [lt_cv_to_tool_file_cmd],
+         [0], [convert $build files to toolchain format])dnl
+])# _LT_PATH_CONVERSION_FUNCTIONS
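# [Illustrative sketch (editorial; not part of the committed file): for
#  example, building on Cygwin for a mingw host selects
#  func_convert_file_cygwin_to_w32, which maps a build-side path such as
#  /cygdrive/c/src/foo.def to the host/toolchain form C:/src/foo.def; the
#  path shown is hypothetical.]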
diff --git a/m4/ltoptions.m4 b/m4/ltoptions.m4
new file mode 100644
index 0000000..94b0829
--- /dev/null
+++ b/m4/ltoptions.m4
@@ -0,0 +1,437 @@
+# Helper functions for option handling.                    -*- Autoconf -*-
+#
+#   Copyright (C) 2004-2005, 2007-2009, 2011-2015 Free Software
+#   Foundation, Inc.
+#   Written by Gary V. Vaughan, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 8 ltoptions.m4
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])])
+
+
+# _LT_MANGLE_OPTION(MACRO-NAME, OPTION-NAME)
+# ------------------------------------------
+m4_define([_LT_MANGLE_OPTION],
+[[_LT_OPTION_]m4_bpatsubst($1__$2, [[^a-zA-Z0-9_]], [_])])
+
+
+# _LT_SET_OPTION(MACRO-NAME, OPTION-NAME)
+# ---------------------------------------
+# Set option OPTION-NAME for macro MACRO-NAME, and if there is a
+# matching handler defined, dispatch to it.  Other OPTION-NAMEs are
+# saved as a flag.
+m4_define([_LT_SET_OPTION],
+[m4_define(_LT_MANGLE_OPTION([$1], [$2]))dnl
+m4_ifdef(_LT_MANGLE_DEFUN([$1], [$2]),
+        _LT_MANGLE_DEFUN([$1], [$2]),
+    [m4_warning([Unknown $1 option '$2'])])[]dnl
+])
+
+
+# _LT_IF_OPTION(MACRO-NAME, OPTION-NAME, IF-SET, [IF-NOT-SET])
+# ------------------------------------------------------------
+# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise.
+m4_define([_LT_IF_OPTION],
+[m4_ifdef(_LT_MANGLE_OPTION([$1], [$2]), [$3], [$4])])
+
+
+# _LT_UNLESS_OPTIONS(MACRO-NAME, OPTION-LIST, IF-NOT-SET)
+# -------------------------------------------------------
+# Execute IF-NOT-SET unless all options in OPTION-LIST for MACRO-NAME
+# are set.
+m4_define([_LT_UNLESS_OPTIONS],
+[m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
+           [m4_ifdef(_LT_MANGLE_OPTION([$1], _LT_Option),
+                     [m4_define([$0_found])])])[]dnl
+m4_ifdef([$0_found], [m4_undefine([$0_found])], [$3
+])[]dnl
+])
+
+
+# _LT_SET_OPTIONS(MACRO-NAME, OPTION-LIST)
+# ----------------------------------------
+# OPTION-LIST is a space-separated list of Libtool options associated
+# with MACRO-NAME.  If any OPTION has a matching handler declared with
+# LT_OPTION_DEFINE, dispatch to that macro; otherwise complain about
+# the unknown option and exit.
+m4_defun([_LT_SET_OPTIONS],
+[# Set options
+m4_foreach([_LT_Option], m4_split(m4_normalize([$2])),
+    [_LT_SET_OPTION([$1], _LT_Option)])
+
+m4_if([$1],[LT_INIT],[
+  dnl
+  dnl Simply set some default values (i.e off) if boolean options were not
+  dnl specified:
+  _LT_UNLESS_OPTIONS([LT_INIT], [dlopen], [enable_dlopen=no
+  ])
+  _LT_UNLESS_OPTIONS([LT_INIT], [win32-dll], [enable_win32_dll=no
+  ])
+  dnl
+  dnl If no reference was made to various pairs of opposing options, then
+  dnl we run the default mode handler for the pair.  For example, if neither
+  dnl 'shared' nor 'disable-shared' was passed, we enable building of shared
+  dnl archives by default:
+  _LT_UNLESS_OPTIONS([LT_INIT], [shared disable-shared], [_LT_ENABLE_SHARED])
+  _LT_UNLESS_OPTIONS([LT_INIT], [static disable-static], [_LT_ENABLE_STATIC])
+  _LT_UNLESS_OPTIONS([LT_INIT], [pic-only no-pic], [_LT_WITH_PIC])
+  _LT_UNLESS_OPTIONS([LT_INIT], [fast-install disable-fast-install],
+                  [_LT_ENABLE_FAST_INSTALL])
+  _LT_UNLESS_OPTIONS([LT_INIT], [aix-soname=aix aix-soname=both aix-soname=svr4],
+                  [_LT_WITH_AIX_SONAME([aix])])
+  ])
+])# _LT_SET_OPTIONS
+
+
+## --------------------------------- ##
+## Macros to handle LT_INIT options. ##
+## --------------------------------- ##
+
+# _LT_MANGLE_DEFUN(MACRO-NAME, OPTION-NAME)
+# -----------------------------------------
+m4_define([_LT_MANGLE_DEFUN],
+[[_LT_OPTION_DEFUN_]m4_bpatsubst(m4_toupper([$1__$2]), [[^A-Z0-9_]], [_])])
+
+
+# LT_OPTION_DEFINE(MACRO-NAME, OPTION-NAME, CODE)
+# -----------------------------------------------
+m4_define([LT_OPTION_DEFINE],
+[m4_define(_LT_MANGLE_DEFUN([$1], [$2]), [$3])[]dnl
+])# LT_OPTION_DEFINE
+
+
+# dlopen
+# ------
+LT_OPTION_DEFINE([LT_INIT], [dlopen], [enable_dlopen=yes
+])
+
+AU_DEFUN([AC_LIBTOOL_DLOPEN],
+[_LT_SET_OPTION([LT_INIT], [dlopen])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the 'dlopen' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_DLOPEN], [])
+
+
+# win32-dll
+# ---------
+# Declare package support for building win32 dll's.
+LT_OPTION_DEFINE([LT_INIT], [win32-dll],
+[enable_win32_dll=yes
+
+case $host in
+*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-cegcc*)
+  AC_CHECK_TOOL(AS, as, false)
+  AC_CHECK_TOOL(DLLTOOL, dlltool, false)
+  AC_CHECK_TOOL(OBJDUMP, objdump, false)
+  ;;
+esac
+
+test -z "$AS" && AS=as
+_LT_DECL([], [AS],      [1], [Assembler program])dnl
+
+test -z "$DLLTOOL" && DLLTOOL=dlltool
+_LT_DECL([], [DLLTOOL], [1], [DLL creation program])dnl
+
+test -z "$OBJDUMP" && OBJDUMP=objdump
+_LT_DECL([], [OBJDUMP], [1], [Object dumper program])dnl
+])# win32-dll
+
+AU_DEFUN([AC_LIBTOOL_WIN32_DLL],
+[AC_REQUIRE([AC_CANONICAL_HOST])dnl
+_LT_SET_OPTION([LT_INIT], [win32-dll])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the 'win32-dll' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_WIN32_DLL], [])
+
+
+# _LT_ENABLE_SHARED([DEFAULT])
+# ----------------------------
+# implement the --enable-shared flag, and supports the 'shared' and
+# 'disable-shared' LT_INIT options.
+# DEFAULT is either 'yes' or 'no'.  If omitted, it defaults to 'yes'.
+m4_define([_LT_ENABLE_SHARED],
+[m4_define([_LT_ENABLE_SHARED_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([shared],
+    [AS_HELP_STRING([--enable-shared@<:@=PKGS@:>@],
+       [build shared libraries @<:@default=]_LT_ENABLE_SHARED_DEFAULT[@:>@])],
+    [p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_shared=yes ;;
+    no) enable_shared=no ;;
+    *)
+      enable_shared=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR,
+      for pkg in $enableval; do
+       IFS=$lt_save_ifs
+       if test "X$pkg" = "X$p"; then
+         enable_shared=yes
+       fi
+      done
+      IFS=$lt_save_ifs
+      ;;
+    esac],
+    [enable_shared=]_LT_ENABLE_SHARED_DEFAULT)
+
+    _LT_DECL([build_libtool_libs], [enable_shared], [0],
+       [Whether or not to build shared libraries])
+])# _LT_ENABLE_SHARED
+
+LT_OPTION_DEFINE([LT_INIT], [shared], [_LT_ENABLE_SHARED([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-shared], [_LT_ENABLE_SHARED([no])])
+
+# Old names:
+AC_DEFUN([AC_ENABLE_SHARED],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[shared])
+])
+
+AC_DEFUN([AC_DISABLE_SHARED],
+[_LT_SET_OPTION([LT_INIT], [disable-shared])
+])
+
+AU_DEFUN([AM_ENABLE_SHARED], [AC_ENABLE_SHARED($@)])
+AU_DEFUN([AM_DISABLE_SHARED], [AC_DISABLE_SHARED($@)])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_ENABLE_SHARED], [])
+dnl AC_DEFUN([AM_DISABLE_SHARED], [])
+
+
+
+# _LT_ENABLE_STATIC([DEFAULT])
+# ----------------------------
+# implement the --enable-static flag, and support the 'static' and
+# 'disable-static' LT_INIT options.
+# DEFAULT is either 'yes' or 'no'.  If omitted, it defaults to 'yes'.
+m4_define([_LT_ENABLE_STATIC],
+[m4_define([_LT_ENABLE_STATIC_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([static],
+    [AS_HELP_STRING([--enable-static@<:@=PKGS@:>@],
+       [build static libraries @<:@default=]_LT_ENABLE_STATIC_DEFAULT[@:>@])],
+    [p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_static=yes ;;
+    no) enable_static=no ;;
+    *)
+     enable_static=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR,
+      for pkg in $enableval; do
+       IFS=$lt_save_ifs
+       if test "X$pkg" = "X$p"; then
+         enable_static=yes
+       fi
+      done
+      IFS=$lt_save_ifs
+      ;;
+    esac],
+    [enable_static=]_LT_ENABLE_STATIC_DEFAULT)
+
+    _LT_DECL([build_old_libs], [enable_static], [0],
+       [Whether or not to build static libraries])
+])# _LT_ENABLE_STATIC
+
+LT_OPTION_DEFINE([LT_INIT], [static], [_LT_ENABLE_STATIC([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-static], [_LT_ENABLE_STATIC([no])])
+
+# Old names:
+AC_DEFUN([AC_ENABLE_STATIC],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[static])
+])
+
+AC_DEFUN([AC_DISABLE_STATIC],
+[_LT_SET_OPTION([LT_INIT], [disable-static])
+])
+
+AU_DEFUN([AM_ENABLE_STATIC], [AC_ENABLE_STATIC($@)])
+AU_DEFUN([AM_DISABLE_STATIC], [AC_DISABLE_STATIC($@)])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AM_ENABLE_STATIC], [])
+dnl AC_DEFUN([AM_DISABLE_STATIC], [])
+
+
+
+# _LT_ENABLE_FAST_INSTALL([DEFAULT])
+# ----------------------------------
+# implement the --enable-fast-install flag, and support the 'fast-install'
+# and 'disable-fast-install' LT_INIT options.
+# DEFAULT is either 'yes' or 'no'.  If omitted, it defaults to 'yes'.
+m4_define([_LT_ENABLE_FAST_INSTALL],
+[m4_define([_LT_ENABLE_FAST_INSTALL_DEFAULT], [m4_if($1, no, no, yes)])dnl
+AC_ARG_ENABLE([fast-install],
+    [AS_HELP_STRING([--enable-fast-install@<:@=PKGS@:>@],
+    [optimize for fast installation @<:@default=]_LT_ENABLE_FAST_INSTALL_DEFAULT[@:>@])],
+    [p=${PACKAGE-default}
+    case $enableval in
+    yes) enable_fast_install=yes ;;
+    no) enable_fast_install=no ;;
+    *)
+      enable_fast_install=no
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR,
+      for pkg in $enableval; do
+       IFS=$lt_save_ifs
+       if test "X$pkg" = "X$p"; then
+         enable_fast_install=yes
+       fi
+      done
+      IFS=$lt_save_ifs
+      ;;
+    esac],
+    [enable_fast_install=]_LT_ENABLE_FAST_INSTALL_DEFAULT)
+
+_LT_DECL([fast_install], [enable_fast_install], [0],
+        [Whether or not to optimize for fast installation])dnl
+])# _LT_ENABLE_FAST_INSTALL
+
+LT_OPTION_DEFINE([LT_INIT], [fast-install], [_LT_ENABLE_FAST_INSTALL([yes])])
+LT_OPTION_DEFINE([LT_INIT], [disable-fast-install], [_LT_ENABLE_FAST_INSTALL([no])])
+
+# Old names:
+AU_DEFUN([AC_ENABLE_FAST_INSTALL],
+[_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[fast-install])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you put
+the 'fast-install' option into LT_INIT's first parameter.])
+])
+
+AU_DEFUN([AC_DISABLE_FAST_INSTALL],
+[_LT_SET_OPTION([LT_INIT], [disable-fast-install])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you put
+the 'disable-fast-install' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_ENABLE_FAST_INSTALL], [])
+dnl AC_DEFUN([AM_DISABLE_FAST_INSTALL], [])
+
+
+# _LT_WITH_AIX_SONAME([DEFAULT])
+# ----------------------------------
+# implement the --with-aix-soname flag, and support the `aix-soname=aix'
+# and `aix-soname=both' and `aix-soname=svr4' LT_INIT options. DEFAULT
+# is either `aix', `both' or `svr4'.  If omitted, it defaults to `aix'.
+m4_define([_LT_WITH_AIX_SONAME],
+[m4_define([_LT_WITH_AIX_SONAME_DEFAULT], [m4_if($1, svr4, svr4, m4_if($1, both, both, aix))])dnl
+shared_archive_member_spec=
+case $host,$enable_shared in
+power*-*-aix[[5-9]]*,yes)
+  AC_MSG_CHECKING([which variant of shared library versioning to provide])
+  AC_ARG_WITH([aix-soname],
+    [AS_HELP_STRING([--with-aix-soname=aix|svr4|both],
+      [shared library versioning (aka "SONAME") variant to provide on AIX, @<:@default=]_LT_WITH_AIX_SONAME_DEFAULT[@:>@.])],
+    [case $withval in
+    aix|svr4|both)
+      ;;
+    *)
+      AC_MSG_ERROR([Unknown argument to --with-aix-soname])
+      ;;
+    esac
+    lt_cv_with_aix_soname=$with_aix_soname],
+    [AC_CACHE_VAL([lt_cv_with_aix_soname],
+      [lt_cv_with_aix_soname=]_LT_WITH_AIX_SONAME_DEFAULT)
+    with_aix_soname=$lt_cv_with_aix_soname])
+  AC_MSG_RESULT([$with_aix_soname])
+  if test aix != "$with_aix_soname"; then
+    # For the AIX way of multilib, we name the shared archive member
+    # based on the bitwidth used, traditionally 'shr.o' or 'shr_64.o',
+    # and 'shr.imp' or 'shr_64.imp', respectively, for the Import File.
+    # Even when GNU compilers ignore OBJECT_MODE but need '-maix64' flag,
+    # the AIX toolchain works better with OBJECT_MODE set (default 32).
+    if test 64 = "${OBJECT_MODE-32}"; then
+      shared_archive_member_spec=shr_64
+    else
+      shared_archive_member_spec=shr
+    fi
+  fi
+  ;;
+*)
+  with_aix_soname=aix
+  ;;
+esac
+
+_LT_DECL([], [shared_archive_member_spec], [0],
+    [Shared archive member basename, for filename based shared library versioning on AIX])dnl
+])# _LT_WITH_AIX_SONAME
+
+LT_OPTION_DEFINE([LT_INIT], [aix-soname=aix], [_LT_WITH_AIX_SONAME([aix])])
+LT_OPTION_DEFINE([LT_INIT], [aix-soname=both], [_LT_WITH_AIX_SONAME([both])])
+LT_OPTION_DEFINE([LT_INIT], [aix-soname=svr4], [_LT_WITH_AIX_SONAME([svr4])])
+
+
+# _LT_WITH_PIC([MODE])
+# --------------------
+# implement the --with-pic flag, and support the 'pic-only' and 'no-pic'
+# LT_INIT options.
+# MODE is either 'yes' or 'no'.  If omitted, it defaults to 'both'.
+m4_define([_LT_WITH_PIC],
+[AC_ARG_WITH([pic],
+    [AS_HELP_STRING([--with-pic@<:@=PKGS@:>@],
+       [try to use only PIC/non-PIC objects @<:@default=use both@:>@])],
+    [lt_p=${PACKAGE-default}
+    case $withval in
+    yes|no) pic_mode=$withval ;;
+    *)
+      pic_mode=default
+      # Look at the argument we got.  We use all the common list separators.
+      lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR,
+      for lt_pkg in $withval; do
+       IFS=$lt_save_ifs
+       if test "X$lt_pkg" = "X$lt_p"; then
+         pic_mode=yes
+       fi
+      done
+      IFS=$lt_save_ifs
+      ;;
+    esac],
+    [pic_mode=m4_default([$1], [default])])
+
+_LT_DECL([], [pic_mode], [0], [What type of objects to build])dnl
+])# _LT_WITH_PIC
+
+LT_OPTION_DEFINE([LT_INIT], [pic-only], [_LT_WITH_PIC([yes])])
+LT_OPTION_DEFINE([LT_INIT], [no-pic], [_LT_WITH_PIC([no])])
+
+# Old name:
+AU_DEFUN([AC_LIBTOOL_PICMODE],
+[_LT_SET_OPTION([LT_INIT], [pic-only])
+AC_DIAGNOSE([obsolete],
+[$0: Remove this warning and the call to _LT_SET_OPTION when you
+put the 'pic-only' option into LT_INIT's first parameter.])
+])
+
+dnl aclocal-1.4 backwards compatibility:
+dnl AC_DEFUN([AC_LIBTOOL_PICMODE], [])
+
+## ----------------- ##
+## LTDL_INIT Options ##
+## ----------------- ##
+
+m4_define([_LTDL_MODE], [])
+LT_OPTION_DEFINE([LTDL_INIT], [nonrecursive],
+                [m4_define([_LTDL_MODE], [nonrecursive])])
+LT_OPTION_DEFINE([LTDL_INIT], [recursive],
+                [m4_define([_LTDL_MODE], [recursive])])
+LT_OPTION_DEFINE([LTDL_INIT], [subproject],
+                [m4_define([_LTDL_MODE], [subproject])])
+
+m4_define([_LTDL_TYPE], [])
+LT_OPTION_DEFINE([LTDL_INIT], [installable],
+                [m4_define([_LTDL_TYPE], [installable])])
+LT_OPTION_DEFINE([LTDL_INIT], [convenience],
+                [m4_define([_LTDL_TYPE], [convenience])])
diff --git a/m4/ltsugar.m4 b/m4/ltsugar.m4
new file mode 100644
index 0000000..48bc934
--- /dev/null
+++ b/m4/ltsugar.m4
@@ -0,0 +1,124 @@
+# ltsugar.m4 -- libtool m4 base layer.                         -*-Autoconf-*-
+#
+# Copyright (C) 2004-2005, 2007-2008, 2011-2015 Free Software
+# Foundation, Inc.
+# Written by Gary V. Vaughan, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 6 ltsugar.m4
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTSUGAR_VERSION], [m4_if([0.1])])
+
+
+# lt_join(SEP, ARG1, [ARG2...])
+# -----------------------------
+# Produce ARG1SEPARG2...SEPARGn, omitting [] arguments and their
+# associated separator.
+# Needed until we can rely on m4_join from Autoconf 2.62, since all earlier
+# versions in m4sugar had bugs.
+m4_define([lt_join],
+[m4_if([$#], [1], [],
+       [$#], [2], [[$2]],
+       [m4_if([$2], [], [], [[$2]_])$0([$1], m4_shift(m4_shift($@)))])])
+m4_define([_lt_join],
+[m4_if([$#$2], [2], [],
+       [m4_if([$2], [], [], [[$1$2]])$0([$1], m4_shift(m4_shift($@)))])])
+
+
+# lt_car(LIST)
+# lt_cdr(LIST)
+# ------------
+# Manipulate m4 lists.
+# These macros are necessary as long as will still need to support
+# Autoconf-2.59, which quotes differently.
+m4_define([lt_car], [[$1]])
+m4_define([lt_cdr],
+[m4_if([$#], 0, [m4_fatal([$0: cannot be called without arguments])],
+       [$#], 1, [],
+       [m4_dquote(m4_shift($@))])])
+m4_define([lt_unquote], $1)
+
+
+# lt_append(MACRO-NAME, STRING, [SEPARATOR])
+# ------------------------------------------
+# Redefine MACRO-NAME to hold its former content plus 'SEPARATOR''STRING'.
+# Note that neither SEPARATOR nor STRING are expanded; they are appended
+# to MACRO-NAME as is (leaving the expansion for when MACRO-NAME is invoked).
+# No SEPARATOR is output if MACRO-NAME was previously undefined (different
+# than defined and empty).
+#
+# This macro is needed until we can rely on Autoconf 2.62, since earlier
+# versions of m4sugar mistakenly expanded SEPARATOR but not STRING.
+m4_define([lt_append],
+[m4_define([$1],
+          m4_ifdef([$1], [m4_defn([$1])[$3]])[$2])])
+
+
+
+# lt_combine(SEP, PREFIX-LIST, INFIX, SUFFIX1, [SUFFIX2...])
+# ----------------------------------------------------------
+# Produce a SEP delimited list of all paired combinations of elements of
+# PREFIX-LIST with SUFFIX1 through SUFFIXn.  Each element of the list
+# has the form PREFIXmINFIXSUFFIXn.
+# Needed until we can rely on m4_combine added in Autoconf 2.62.
+m4_define([lt_combine],
+[m4_if(m4_eval([$# > 3]), [1],
+       [m4_pushdef([_Lt_sep], [m4_define([_Lt_sep], m4_defn([lt_car]))])]]dnl
+[[m4_foreach([_Lt_prefix], [$2],
+            [m4_foreach([_Lt_suffix],
+               ]m4_dquote(m4_dquote(m4_shift(m4_shift(m4_shift($@)))))[,
+       [_Lt_sep([$1])[]m4_defn([_Lt_prefix])[$3]m4_defn([_Lt_suffix])])])])])
+
+
+# lt_if_append_uniq(MACRO-NAME, VARNAME, [SEPARATOR], [UNIQ], [NOT-UNIQ])
+# -----------------------------------------------------------------------
+# Iff MACRO-NAME does not yet contain VARNAME, then append it (delimited
+# by SEPARATOR if supplied) and expand UNIQ, else NOT-UNIQ.
+m4_define([lt_if_append_uniq],
+[m4_ifdef([$1],
+         [m4_if(m4_index([$3]m4_defn([$1])[$3], [$3$2$3]), [-1],
+                [lt_append([$1], [$2], [$3])$4],
+                [$5])],
+         [lt_append([$1], [$2], [$3])$4])])
+
+
+# lt_dict_add(DICT, KEY, VALUE)
+# -----------------------------
+m4_define([lt_dict_add],
+[m4_define([$1($2)], [$3])])
+
+
+# lt_dict_add_subkey(DICT, KEY, SUBKEY, VALUE)
+# --------------------------------------------
+m4_define([lt_dict_add_subkey],
+[m4_define([$1($2:$3)], [$4])])
+
+
+# lt_dict_fetch(DICT, KEY, [SUBKEY])
+# ----------------------------------
+m4_define([lt_dict_fetch],
+[m4_ifval([$3],
+       m4_ifdef([$1($2:$3)], [m4_defn([$1($2:$3)])]),
+    m4_ifdef([$1($2)], [m4_defn([$1($2)])]))])
+
+
+# lt_if_dict_fetch(DICT, KEY, [SUBKEY], VALUE, IF-TRUE, [IF-FALSE])
+# -----------------------------------------------------------------
+m4_define([lt_if_dict_fetch],
+[m4_if(lt_dict_fetch([$1], [$2], [$3]), [$4],
+       [$5],
+    [$6])])
+
+
+# lt_dict_filter(DICT, [SUBKEY], VALUE, [SEPARATOR], KEY, [...])
+# --------------------------------------------------------------
+m4_define([lt_dict_filter],
+[m4_if([$5], [], [],
+  [lt_join(m4_quote(m4_default([$4], [[, ]])),
+           lt_unquote(m4_split(m4_normalize(m4_foreach(_Lt_key, lt_car([m4_shiftn(4, $@)]),
+                     [lt_if_dict_fetch([$1], _Lt_key, [$2], [$3], [_Lt_key ])])))))])[]dnl
+])
diff --git a/m4/ltversion.m4 b/m4/ltversion.m4
new file mode 100644
index 0000000..fa04b52
--- /dev/null
+++ b/m4/ltversion.m4
@@ -0,0 +1,23 @@
+# ltversion.m4 -- version numbers                      -*- Autoconf -*-
+#
+#   Copyright (C) 2004, 2011-2015 Free Software Foundation, Inc.
+#   Written by Scott James Remnant, 2004
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# @configure_input@
+
+# serial 4179 ltversion.m4
+# This file is part of GNU Libtool
+
+m4_define([LT_PACKAGE_VERSION], [2.4.6])
+m4_define([LT_PACKAGE_REVISION], [2.4.6])
+
+AC_DEFUN([LTVERSION_VERSION],
+[macro_version='2.4.6'
+macro_revision='2.4.6'
+_LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?])
+_LT_DECL(, macro_revision, 0)
+])
diff --git a/m4/lt~obsolete.m4 b/m4/lt~obsolete.m4
new file mode 100644
index 0000000..c6b26f8
--- /dev/null
+++ b/m4/lt~obsolete.m4
@@ -0,0 +1,99 @@
+# lt~obsolete.m4 -- aclocal satisfying obsolete definitions.    -*-Autoconf-*-
+#
+#   Copyright (C) 2004-2005, 2007, 2009, 2011-2015 Free Software
+#   Foundation, Inc.
+#   Written by Scott James Remnant, 2004.
+#
+# This file is free software; the Free Software Foundation gives
+# unlimited permission to copy and/or distribute it, with or without
+# modifications, as long as this notice is preserved.
+
+# serial 5 lt~obsolete.m4
+
+# These exist entirely to fool aclocal when bootstrapping libtool.
+#
+# In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN),
+# which have later been changed to m4_define as they aren't part of the
+# exported API, or moved to Autoconf or Automake where they belong.
+#
+# The trouble is, aclocal is a bit thick.  It'll see the old AC_DEFUN
+# in /usr/share/aclocal/libtool.m4 and remember it, then when it sees us
+# using a macro with the same name in our local m4/libtool.m4 it'll
+# pull the old libtool.m4 in (it doesn't see our shiny new m4_define
+# and doesn't know about Autoconf macros at all.)
+#
+# So we provide this file, which has a silly filename so it's always
+# included after everything else.  This provides aclocal with the
+# AC_DEFUNs it wants, but when m4 processes it, it doesn't do anything
+# because those macros already exist, or will be overwritten later.
+# We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6.
+#
+# Anytime we withdraw an AC_DEFUN or AU_DEFUN, remember to add it here.
+# Yes, that means every name once taken will need to remain here until
+# we give up compatibility with versions before 1.7, at which point
+# we need to keep only those names which we still refer to.
+
+# This is to help aclocal find these macros, as it can't see m4_define.
+AC_DEFUN([LTOBSOLETE_VERSION], [m4_if([1])])
+
+m4_ifndef([AC_LIBTOOL_LINKER_OPTION],  [AC_DEFUN([AC_LIBTOOL_LINKER_OPTION])])
+m4_ifndef([AC_PROG_EGREP],             [AC_DEFUN([AC_PROG_EGREP])])
+m4_ifndef([_LT_AC_PROG_ECHO_BACKSLASH],        [AC_DEFUN([_LT_AC_PROG_ECHO_BACKSLASH])])
+m4_ifndef([_LT_AC_SHELL_INIT],         [AC_DEFUN([_LT_AC_SHELL_INIT])])
+m4_ifndef([_LT_AC_SYS_LIBPATH_AIX],    [AC_DEFUN([_LT_AC_SYS_LIBPATH_AIX])])
+m4_ifndef([_LT_PROG_LTMAIN],           [AC_DEFUN([_LT_PROG_LTMAIN])])
+m4_ifndef([_LT_AC_TAGVAR],             [AC_DEFUN([_LT_AC_TAGVAR])])
+m4_ifndef([AC_LTDL_ENABLE_INSTALL],    [AC_DEFUN([AC_LTDL_ENABLE_INSTALL])])
+m4_ifndef([AC_LTDL_PREOPEN],           [AC_DEFUN([AC_LTDL_PREOPEN])])
+m4_ifndef([_LT_AC_SYS_COMPILER],       [AC_DEFUN([_LT_AC_SYS_COMPILER])])
+m4_ifndef([_LT_AC_LOCK],               [AC_DEFUN([_LT_AC_LOCK])])
+m4_ifndef([AC_LIBTOOL_SYS_OLD_ARCHIVE],        [AC_DEFUN([AC_LIBTOOL_SYS_OLD_ARCHIVE])])
+m4_ifndef([_LT_AC_TRY_DLOPEN_SELF],    [AC_DEFUN([_LT_AC_TRY_DLOPEN_SELF])])
+m4_ifndef([AC_LIBTOOL_PROG_CC_C_O],    [AC_DEFUN([AC_LIBTOOL_PROG_CC_C_O])])
+m4_ifndef([AC_LIBTOOL_SYS_HARD_LINK_LOCKS], [AC_DEFUN([AC_LIBTOOL_SYS_HARD_LINK_LOCKS])])
+m4_ifndef([AC_LIBTOOL_OBJDIR],         [AC_DEFUN([AC_LIBTOOL_OBJDIR])])
+m4_ifndef([AC_LTDL_OBJDIR],            [AC_DEFUN([AC_LTDL_OBJDIR])])
+m4_ifndef([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH], [AC_DEFUN([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH])])
+m4_ifndef([AC_LIBTOOL_SYS_LIB_STRIP],  [AC_DEFUN([AC_LIBTOOL_SYS_LIB_STRIP])])
+m4_ifndef([AC_PATH_MAGIC],             [AC_DEFUN([AC_PATH_MAGIC])])
+m4_ifndef([AC_PROG_LD_GNU],            [AC_DEFUN([AC_PROG_LD_GNU])])
+m4_ifndef([AC_PROG_LD_RELOAD_FLAG],    [AC_DEFUN([AC_PROG_LD_RELOAD_FLAG])])
+m4_ifndef([AC_DEPLIBS_CHECK_METHOD],   [AC_DEFUN([AC_DEPLIBS_CHECK_METHOD])])
+m4_ifndef([AC_LIBTOOL_PROG_COMPILER_NO_RTTI], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_NO_RTTI])])
+m4_ifndef([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE], [AC_DEFUN([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE])])
+m4_ifndef([AC_LIBTOOL_PROG_COMPILER_PIC], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_PIC])])
+m4_ifndef([AC_LIBTOOL_PROG_LD_SHLIBS], [AC_DEFUN([AC_LIBTOOL_PROG_LD_SHLIBS])])
+m4_ifndef([AC_LIBTOOL_POSTDEP_PREDEP], [AC_DEFUN([AC_LIBTOOL_POSTDEP_PREDEP])])
+m4_ifndef([LT_AC_PROG_EGREP],          [AC_DEFUN([LT_AC_PROG_EGREP])])
+m4_ifndef([LT_AC_PROG_SED],            [AC_DEFUN([LT_AC_PROG_SED])])
+m4_ifndef([_LT_CC_BASENAME],           [AC_DEFUN([_LT_CC_BASENAME])])
+m4_ifndef([_LT_COMPILER_BOILERPLATE],  [AC_DEFUN([_LT_COMPILER_BOILERPLATE])])
+m4_ifndef([_LT_LINKER_BOILERPLATE],    [AC_DEFUN([_LT_LINKER_BOILERPLATE])])
+m4_ifndef([_AC_PROG_LIBTOOL],          [AC_DEFUN([_AC_PROG_LIBTOOL])])
+m4_ifndef([AC_LIBTOOL_SETUP],          [AC_DEFUN([AC_LIBTOOL_SETUP])])
+m4_ifndef([_LT_AC_CHECK_DLFCN],                [AC_DEFUN([_LT_AC_CHECK_DLFCN])])
+m4_ifndef([AC_LIBTOOL_SYS_DYNAMIC_LINKER],     [AC_DEFUN([AC_LIBTOOL_SYS_DYNAMIC_LINKER])])
+m4_ifndef([_LT_AC_TAGCONFIG],          [AC_DEFUN([_LT_AC_TAGCONFIG])])
+m4_ifndef([AC_DISABLE_FAST_INSTALL],   [AC_DEFUN([AC_DISABLE_FAST_INSTALL])])
+m4_ifndef([_LT_AC_LANG_CXX],           [AC_DEFUN([_LT_AC_LANG_CXX])])
+m4_ifndef([_LT_AC_LANG_F77],           [AC_DEFUN([_LT_AC_LANG_F77])])
+m4_ifndef([_LT_AC_LANG_GCJ],           [AC_DEFUN([_LT_AC_LANG_GCJ])])
+m4_ifndef([AC_LIBTOOL_LANG_C_CONFIG],  [AC_DEFUN([AC_LIBTOOL_LANG_C_CONFIG])])
+m4_ifndef([_LT_AC_LANG_C_CONFIG],      [AC_DEFUN([_LT_AC_LANG_C_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_CXX_CONFIG],        [AC_DEFUN([AC_LIBTOOL_LANG_CXX_CONFIG])])
+m4_ifndef([_LT_AC_LANG_CXX_CONFIG],    [AC_DEFUN([_LT_AC_LANG_CXX_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_F77_CONFIG],        [AC_DEFUN([AC_LIBTOOL_LANG_F77_CONFIG])])
+m4_ifndef([_LT_AC_LANG_F77_CONFIG],    [AC_DEFUN([_LT_AC_LANG_F77_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_GCJ_CONFIG],        [AC_DEFUN([AC_LIBTOOL_LANG_GCJ_CONFIG])])
+m4_ifndef([_LT_AC_LANG_GCJ_CONFIG],    [AC_DEFUN([_LT_AC_LANG_GCJ_CONFIG])])
+m4_ifndef([AC_LIBTOOL_LANG_RC_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_RC_CONFIG])])
+m4_ifndef([_LT_AC_LANG_RC_CONFIG],     [AC_DEFUN([_LT_AC_LANG_RC_CONFIG])])
+m4_ifndef([AC_LIBTOOL_CONFIG],         [AC_DEFUN([AC_LIBTOOL_CONFIG])])
+m4_ifndef([_LT_AC_FILE_LTDLL_C],       [AC_DEFUN([_LT_AC_FILE_LTDLL_C])])
+m4_ifndef([_LT_REQUIRED_DARWIN_CHECKS],        [AC_DEFUN([_LT_REQUIRED_DARWIN_CHECKS])])
+m4_ifndef([_LT_AC_PROG_CXXCPP],                [AC_DEFUN([_LT_AC_PROG_CXXCPP])])
+m4_ifndef([_LT_PREPARE_SED_QUOTE_VARS],        [AC_DEFUN([_LT_PREPARE_SED_QUOTE_VARS])])
+m4_ifndef([_LT_PROG_ECHO_BACKSLASH],   [AC_DEFUN([_LT_PROG_ECHO_BACKSLASH])])
+m4_ifndef([_LT_PROG_F77],              [AC_DEFUN([_LT_PROG_F77])])
+m4_ifndef([_LT_PROG_FC],               [AC_DEFUN([_LT_PROG_FC])])
+m4_ifndef([_LT_PROG_CXX],              [AC_DEFUN([_LT_PROG_CXX])])
diff --git a/src/Makefile.am b/src/Makefile.am
new file mode 100644
index 0000000..56f5cf4
--- /dev/null
+++ b/src/Makefile.am
@@ -0,0 +1,3 @@
+# This Makefile is in the public domain
+AM_CPPFLAGS = -I$(top_srcdir)/src/include
+SUBDIRS = include backup-db backup lib
diff --git a/src/backup-db/Makefile.am b/src/backup-db/Makefile.am
new file mode 100644
index 0000000..6d6538d
--- /dev/null
+++ b/src/backup-db/Makefile.am
@@ -0,0 +1,26 @@
+# This Makefile.am is in the public domain
+AM_CPPFLAGS = -I$(top_srcdir)/src/include
+
+plugindir = $(libdir)/taler
+
+if HAVE_POSTGRESQL
+if HAVE_GNUNETPQ
+plugin_LTLIBRARIES = \
+  libtaler_plugin_merchantdb_postgres.la
+endif
+endif
+
+if USE_COVERAGE
+  AM_CFLAGS = --coverage -O0
+  XLIB = -lgcov
+endif
+
+libtaler_plugin_merchantdb_postgres_la_SOURCES = \
+  plugin_anastasis_postgres.c
+libtaler_plugin_merchantdb_postgres_la_LIBADD = \
+  $(LTLIBINTL)
+libtaler_plugin_merchantdb_postgres_la_LDFLAGS = \
+  $(TALER_PLUGIN_LDFLAGS) \
+  -lgnunetpq \
+  -lpq \
+  -lgnunetutil $(XLIB)
diff --git a/src/backup-db/plugin_anastasis_postgres.c b/src/backup-db/plugin_anastasis_postgres.c
new file mode 100644
index 0000000..6d2d585
--- /dev/null
+++ b/src/backup-db/plugin_anastasis_postgres.c
@@ -0,0 +1,2758 @@
+/*
+  This file is part of TALER
+  (C) 2014--2019 Taler Systems SA
+
+  TALER is free software; you can redistribute it and/or modify it under the
+  terms of the GNU Lesser General Public License as published by the Free Software
+  Foundation; either version 3, or (at your option) any later version.
+
+  TALER is distributed in the hope that it will be useful, but WITHOUT ANY
+  WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+  A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License along with
+  TALER; see the file COPYING.  If not, see <http://www.gnu.org/licenses/>
+*/
+/**
+ * @file backup-db/plugin_anastasis_postgres.c
+ * @brief database helper functions for postgres used by anastasis
+ * @author Sree Harsha Totakura <address@hidden>
+ * @author Christian Grothoff
+ * @author Marcello Stanisci
+ */
+#include "platform.h"
+#include <gnunet/gnunet_util_lib.h>
+#include <gnunet/gnunet_pq_lib.h>
+#include <taler/taler_util.h>
+#include <taler/taler_pq_lib.h>
+#include <taler/taler_json_lib.h>
+#include "taler_merchantdb_plugin.h"
+
+/**
+ * How often do we re-try if we run into a DB serialization error?
+ */
+#define MAX_RETRIES 3
+
+
+/**
+ * Type of the "cls" argument given to each of the functions in
+ * our API.
+ */
+struct PostgresClosure
+{
+
+  /**
+   * Postgres connection handle.
+   */
+  PGconn *conn;
+
+  /**
+   * Underlying configuration.
+   */
+  const struct GNUNET_CONFIGURATION_Handle *cfg;
+
+  /**
+   * Name of the currently active transaction, NULL if none is active.
+   */
+  const char *transaction_name;
+
+};
+
+
+/**
+ * Drop anastasis tables.
+ *
+ * @param cls closure, our `struct PostgresClosure`
+ * @return #GNUNET_OK upon success; #GNUNET_SYSERR upon failure
+ */
+static int
+postgres_drop_tables (void *cls)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_ExecuteStatement es[] = {
+    GNUNET_PQ_make_try_execute ("DROP TABLE IF EXISTS anastasis_foo CASCADE;"),
+    GNUNET_PQ_EXECUTE_STATEMENT_END
+  };
+
+  return GNUNET_PQ_exec_statements (pg->conn,
+                                    es);
+}
+
+
+/**
+ * Initialize anastasis tables.
+ *
+ * @param cls closure, our `struct PostgresClosure`
+ * @return #GNUNET_OK upon success; #GNUNET_SYSERR upon failure
+ */
+static int
+postgres_initialize (void *cls)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_ExecuteStatement es[] = {
+    /* Orders created by the frontend, not signed or given a nonce yet.
+       The contract terms will change (nonce will be added) when moved to the
+       contract terms table */
+    GNUNET_PQ_make_execute ("CREATE TABLE IF NOT EXISTS anastasis_foo ("
+                            "order_id VARCHAR NOT NULL"
+                            ",PRIMARY KEY (order_id)"
+                            ");"),
+    GNUNET_PQ_EXECUTE_STATEMENT_END
+  };
+  struct GNUNET_PQ_PreparedStatement ps[] = {
+    GNUNET_PQ_make_prepare ("insert_foo",
+                            "INSERT INTO anastasis_foo"
+                            "(order_id) VALUES "
+                            "($1)",
+                            1),
+    GNUNET_PQ_PREPARED_STATEMENT_END
+  };
+
+  if (GNUNET_OK !=
+      GNUNET_PQ_exec_statements (pg->conn,
+                                 es))
+  {
+    GNUNET_break (0);
+    return GNUNET_SYSERR;
+  }
+  if (GNUNET_OK !=
+      GNUNET_PQ_prepare_statements (pg->conn,
+                                    ps))
+  {
+    GNUNET_break (0);
+    return GNUNET_SYSERR;
+  }
+  return GNUNET_OK;
+}
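
(Editor's note: a minimal usage sketch, not part of this commit.  It shows how the "insert_foo" statement prepared above would be executed via the GNUNET_PQ helpers, which is the pattern the remaining plugin functions follow; the function name is a placeholder.)

static enum GNUNET_DB_QueryStatus
postgres_insert_foo (void *cls,
                     const char *order_id)
{
  struct PostgresClosure *pg = cls;
  struct GNUNET_PQ_QueryParam params[] = {
    GNUNET_PQ_query_param_string (order_id),
    GNUNET_PQ_query_param_end
  };

  /* Executes the "insert_foo" statement prepared in postgres_initialize() */
  return GNUNET_PQ_eval_prepared_non_select (pg->conn,
                                             "insert_foo",
                                             params);
}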
+
+
+/**
+ * Check that the database connection is still up.
+ *
+ * @param pg connection to check
+ */
+static void
+check_connection (struct PostgresClosure *pg)
+{
+  if (CONNECTION_BAD != PQstatus (pg->conn))
+    return;
+  PQfinish (pg->conn);
+  pg->conn = GNUNET_PQ_connect_with_cfg (pg->cfg,
+                                         "merchantdb-postgres");
+  GNUNET_break (NULL != pg->conn);
+  GNUNET_break (GNUNET_OK ==
+               postgres_initialize (pg));
+}
+
+
+/**
+ * Do a pre-flight check that we are not in an uncommitted transaction.
+ * If we are, try to commit the previous transaction and output a warning.
+ * Does not return anything, as we will continue regardless of the outcome.
+ *
+ * @param cls the `struct PostgresClosure` with the plugin-specific state
+ */
+static void
+postgres_preflight (void *cls)
+{
+  struct PostgresClosure *pg = cls;
+  PGresult *result;
+  ExecStatusType status;
+
+  if (NULL == pg->transaction_name)
+    return; /* all good */
+  result = PQexec (pg->conn,
+                   "COMMIT");
+  status = PQresultStatus (result);
+  if (PGRES_COMMAND_OK == status)
+  {
+    GNUNET_log (GNUNET_ERROR_TYPE_ERROR,
+                "BUG: Preflight check committed transaction `%s'!\n",
+                pg->transaction_name);
+  }
+  else
+  {
+    GNUNET_log (GNUNET_ERROR_TYPE_ERROR,
+                "BUG: Preflight check failed to commit transaction `%s'!\n",
+                pg->transaction_name);
+  }
+  pg->transaction_name = NULL;
+  PQclear (result);
+}
+
+
+/**
+ * Start a transaction.
+ *
+ * @param cls the `struct PostgresClosure` with the plugin-specific state
+ * @param name unique name identifying the transaction (for debugging),
+ *             must point to a constant
+ * @return #GNUNET_OK on success
+ */
+static int
+postgres_start (void *cls,
+                const char *name)
+{
+  struct PostgresClosure *pg = cls;
+  PGresult *result;
+  ExecStatusType ex;
+
+  check_connection (pg);
+  postgres_preflight (pg);
+  GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
+              "Starting merchant DB transaction\n");
+  result = PQexec (pg->conn,
+                   "START TRANSACTION ISOLATION LEVEL SERIALIZABLE");
+  if (PGRES_COMMAND_OK !=
+      (ex = PQresultStatus (result)))
+  {
+    TALER_LOG_ERROR ("Failed to start transaction (%s): %s\n",
+                     PQresStatus (ex),
+                     PQerrorMessage (pg->conn));
+    GNUNET_break (0);
+    PQclear (result);
+    return GNUNET_SYSERR;
+  }
+  PQclear (result);
+  pg->transaction_name = name;
+  return GNUNET_OK;
+}
+
+
+/**
+ * Roll back the current transaction of a database connection.
+ *
+ * @param cls the `struct PostgresClosure` with the plugin-specific state
+ */
+static void
+postgres_rollback (void *cls)
+{
+  struct PostgresClosure *pg = cls;
+  PGresult *result;
+
+  GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
+              "Rolling back merchant DB transaction\n");
+  result = PQexec (pg->conn,
+                   "ROLLBACK");
+  GNUNET_break (PGRES_COMMAND_OK ==
+                PQresultStatus (result));
+  PQclear (result);
+  pg->transaction_name = NULL;
+}
+
+
+/**
+ * Commit the current transaction of a database connection.
+ *
+ * @param cls the `struct PostgresClosure` with the plugin-specific state
+ * @return transaction status code
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_commit (void *cls)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_end
+  };
+
+  GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
+              "Committing merchant DB transaction\n");
+  pg->transaction_name = NULL;
+  return GNUNET_PQ_eval_prepared_non_select (pg->conn,
+                                             "end_transaction",
+                                             params);
+}
+
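
(Editor's note: an illustrative sketch, not part of this commit, of how MAX_RETRIES and the postgres_start()/postgres_commit()/postgres_rollback() helpers above are meant to combine: under SERIALIZABLE isolation a soft error means the whole transaction should be retried.  run_my_queries() is a placeholder stub, and the sketch assumes the "end_transaction" statement used by postgres_commit() has been prepared.)

/* Placeholder for whatever prepared statements the transaction executes. */
static enum GNUNET_DB_QueryStatus
run_my_queries (struct PostgresClosure *pg)
{
  (void) pg;
  return GNUNET_DB_STATUS_SUCCESS_NO_RESULTS;
}

static enum GNUNET_DB_QueryStatus
run_with_retries (struct PostgresClosure *pg)
{
  for (unsigned int i = 0; i < MAX_RETRIES; i++)
  {
    enum GNUNET_DB_QueryStatus qs;

    if (GNUNET_OK !=
        postgres_start (pg,
                        "example transaction"))
      return GNUNET_DB_STATUS_HARD_ERROR;
    qs = run_my_queries (pg);
    if (GNUNET_DB_STATUS_HARD_ERROR == qs)
    {
      postgres_rollback (pg);
      return qs;
    }
    if (GNUNET_DB_STATUS_SOFT_ERROR == qs)
    {
      postgres_rollback (pg);
      continue;  /* serialization failure: retry from the start */
    }
    qs = postgres_commit (pg);
    if (GNUNET_DB_STATUS_SOFT_ERROR != qs)
      return qs;  /* committed, or hit a hard error */
    /* the commit itself hit a serialization conflict: retry */
  }
  return GNUNET_DB_STATUS_SOFT_ERROR;
}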
+
+/**
+ * Retrieve proposal data given the hash of the proposal data.
+ *
+ * @param cls closure
+ * @param[out] contract_terms where to store the retrieved proposal data
+ * @param h_contract_terms hash of the proposal data used to perform the lookup
+ * @param merchant_pub merchant's public key identifying the instance
+ * @return transaction status
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_find_contract_terms_from_hash (void *cls,
+                                        json_t **contract_terms,
+                                        const struct GNUNET_HashCode *h_contract_terms,
+                                        const struct TALER_MerchantPublicKeyP *merchant_pub)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_auto_from_type (h_contract_terms),
+    GNUNET_PQ_query_param_auto_from_type (merchant_pub),
+    GNUNET_PQ_query_param_end
+  };
+  struct GNUNET_PQ_ResultSpec rs[] = {
+    TALER_PQ_result_spec_json ("contract_terms",
+                               contract_terms),
+    GNUNET_PQ_result_spec_end
+  };
+
+  check_connection (pg);
+  return GNUNET_PQ_eval_prepared_singleton_select (pg->conn,
+                                                   "find_contract_terms_from_hash",
+                                                   params,
+                                                   rs);
+}
+
+
+/**
+ * Retrieve proposal data of a paid proposal given the hash of the proposal data.
+ *
+ * @param cls closure
+ * @param[out] contract_terms where to store the retrieved proposal data
+ * @param h_contract_terms hash of the proposal data used to perform the lookup
+ * @param merchant_pub merchant's public key identifying the instance
+ * @return transaction status
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_find_paid_contract_terms_from_hash (void *cls,
+                                             json_t **contract_terms,
+                                             const struct GNUNET_HashCode *h_contract_terms,
+                                             const struct TALER_MerchantPublicKeyP *merchant_pub)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_auto_from_type (h_contract_terms),
+    GNUNET_PQ_query_param_auto_from_type (merchant_pub),
+    GNUNET_PQ_query_param_end
+  };
+  struct GNUNET_PQ_ResultSpec rs[] = {
+    TALER_PQ_result_spec_json ("contract_terms",
+                               contract_terms),
+    GNUNET_PQ_result_spec_end
+  };
+
+  /* no preflight check here, runs in its own transaction from
+     caller (in /pay case) */
+  check_connection (pg);
+  return GNUNET_PQ_eval_prepared_singleton_select (pg->conn,
+                                                   "find_paid_contract_terms_from_hash",
+                                                   params,
+                                                   rs);
+}
+
+
+/**
+ * Retrieve proposal data given its order id.  Ignores whether the
+ * proposal has been paid or not.
+ *
+ * @param cls closure
+ * @param[out] contract_terms where to store the retrieved contract terms
+ * @param[out] last_session_id where to store the last session ID used for the payment
+ * @param order_id order id used to perform the lookup
+ * @param merchant_pub merchant's public key identifying the instance
+ * @return transaction status
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_find_contract_terms (void *cls,
+                              json_t **contract_terms,
+                              char **last_session_id,
+                              const char *order_id,
+                              const struct TALER_MerchantPublicKeyP *merchant_pub)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_string (order_id),
+    GNUNET_PQ_query_param_auto_from_type (merchant_pub),
+    GNUNET_PQ_query_param_end
+  };
+  struct GNUNET_PQ_ResultSpec rs[] = {
+    TALER_PQ_result_spec_json ("contract_terms",
+                               contract_terms),
+    GNUNET_PQ_result_spec_string ("last_session_id",
+                                  last_session_id),
+    GNUNET_PQ_result_spec_end
+  };
+
+  *contract_terms = NULL;
+  GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
+              "Finding contract term, order_id: '%s', merchant_pub: '%s'.\n",
+              order_id,
+              TALER_B2S (merchant_pub));
+  check_connection (pg);
+  return GNUNET_PQ_eval_prepared_singleton_select (pg->conn,
+                                                   "find_contract_terms",
+                                                   params,
+                                                   rs);
+}
+
+
+/**
+ * Retrieve order given its order id and the instance's merchant public key.
+ *
+ * @param cls closure
+ * @param[out] contract_terms where to store the retrieved contract terms
+ * @param order_id order id used to perform the lookup
+ * @param merchant_pub merchant public key that identifies the instance
+ * @return transaction status
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_find_order (void *cls,
+                     json_t **contract_terms,
+                     const char *order_id,
+                     const struct TALER_MerchantPublicKeyP *merchant_pub)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_string (order_id),
+    GNUNET_PQ_query_param_auto_from_type (merchant_pub),
+    GNUNET_PQ_query_param_end
+  };
+  struct GNUNET_PQ_ResultSpec rs[] = {
+    TALER_PQ_result_spec_json ("contract_terms",
+                               contract_terms),
+    GNUNET_PQ_result_spec_end
+  };
+
+  *contract_terms = NULL;
+  GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
+              "Finding contract term, order_id: '%s', merchant_pub: '%s'.\n",
+              order_id,
+              TALER_B2S (merchant_pub));
+  check_connection (pg);
+  return GNUNET_PQ_eval_prepared_singleton_select (pg->conn,
+                                                   "find_order",
+                                                   params,
+                                                   rs);
+}
+
+
+/**
+ * Insert proposal data and its hashcode into db
+ *
+ * @param cls closure
+ * @param order_id identifier of the proposal being stored
+ * @param merchant_pub merchant's public key
+ * @param timestamp timestamp of this proposal data
+ * @param contract_terms proposal data to store
+ * @return transaction status
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_insert_contract_terms (void *cls,
+                               const char *order_id,
+                               const struct TALER_MerchantPublicKeyP *merchant_pub,
+                               struct GNUNET_TIME_Absolute timestamp,
+                               const json_t *contract_terms)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_HashCode h_contract_terms;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_string (order_id),
+    GNUNET_PQ_query_param_auto_from_type (merchant_pub),
+    GNUNET_PQ_query_param_absolute_time (&timestamp),
+    TALER_PQ_query_param_json (contract_terms),
+    GNUNET_PQ_query_param_auto_from_type (&h_contract_terms),
+    GNUNET_PQ_query_param_end
+  };
+
+  if (GNUNET_OK !=
+      TALER_JSON_hash (contract_terms,
+                      &h_contract_terms))
+  {
+    GNUNET_break (0);
+    return GNUNET_DB_STATUS_HARD_ERROR;
+  }
+  GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
+              "inserting contract terms: order_id: %s, merchant_pub: %s, 
h_contract_terms: %s.\n",
+              order_id,
+              TALER_B2S (merchant_pub),
+              GNUNET_h2s (&h_contract_terms));
+  check_connection (pg);
+  return GNUNET_PQ_eval_prepared_non_select (pg->conn,
+                                             "insert_contract_terms",
+                                             params);
+}
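
(Editor's note: an illustrative sketch, not part of this commit.  It shows that the TALER_JSON_hash() value computed here is the same key that postgres_find_contract_terms_from_hash() above expects, assuming the corresponding prepared statements exist; all parameters are assumed to be supplied by the caller.)

static void
example_contract_terms_round_trip (struct PostgresClosure *pg,
                                   const char *order_id,
                                   const struct TALER_MerchantPublicKeyP *merchant_pub,
                                   const json_t *contract_terms)
{
  struct GNUNET_HashCode h;
  json_t *found;

  if ( (GNUNET_OK ==
        TALER_JSON_hash (contract_terms,
                         &h)) &&
       (0 < postgres_insert_contract_terms (pg,
                                            order_id,
                                            merchant_pub,
                                            GNUNET_TIME_absolute_get (),
                                            contract_terms)) &&
       (0 < postgres_find_contract_terms_from_hash (pg,
                                                    &found,
                                                    &h,
                                                    merchant_pub)) )
  {
    /* 'found' now holds the stored contract terms */
    json_decref (found);
  }
}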
+
+
+/**
+ * Insert order into the DB.
+ *
+ * @param cls closure
+ * @param order_id identifier of the proposal being stored
+ * @param merchant_pub merchant's public key
+ * @param timestamp timestamp of this proposal data
+ * @param contract_terms proposal data to store
+ * @return transaction status
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_insert_order (void *cls,
+                       const char *order_id,
+                       const struct TALER_MerchantPublicKeyP *merchant_pub,
+                       struct GNUNET_TIME_Absolute timestamp,
+                       const json_t *contract_terms)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_string (order_id),
+    GNUNET_PQ_query_param_auto_from_type (merchant_pub),
+    GNUNET_PQ_query_param_absolute_time (&timestamp),
+    TALER_PQ_query_param_json (contract_terms),
+    GNUNET_PQ_query_param_end
+  };
+
+  GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
+              "inserting order: order_id: %s, merchant_pub: %s.\n",
+              order_id,
+              TALER_B2S (merchant_pub));
+  check_connection (pg);
+  return GNUNET_PQ_eval_prepared_non_select (pg->conn,
+                                             "insert_order",
+                                             params);
+}
+
+
+/**
+ * Mark contract terms as paid.  Needed by /history as only paid
+ * contracts must be shown.
+ *
+ * NOTE: we can't get the list of (paid) contracts from the
+ * transactions table because it lacks the contract_terms plain JSON.  In
+ * fact, the protocol doesn't allow storing contract_terms in the
+ * transactions table, as the /pay handler doesn't receive this data (only
+ * /proposal does).
+ *
+ * @param cls closure
+ * @param h_contract_terms hash of the contract that is now paid
+ * @param merchant_pub merchant's public key
+ * @param last_session_id session id used for the payment, NULL
+ *        if payment was not session-bound
+ * @return transaction status
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_mark_proposal_paid (void *cls,
+                             const struct GNUNET_HashCode *h_contract_terms,
+                             const struct TALER_MerchantPublicKeyP *merchant_pub,
+                             const char *last_session_id)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_auto_from_type (h_contract_terms),
+    GNUNET_PQ_query_param_auto_from_type (merchant_pub),
+    GNUNET_PQ_query_param_string ((last_session_id == NULL) ? "" : last_session_id),
+    GNUNET_PQ_query_param_end
+  };
+
+  TALER_LOG_DEBUG ("Marking proposal paid, h_contract_terms: '%s',"
+                   " merchant_pub: '%s'\n",
+                   GNUNET_h2s (h_contract_terms),
+                   TALER_B2S (merchant_pub));
+  return GNUNET_PQ_eval_prepared_non_select (pg->conn,
+                                             "mark_proposal_paid",
+                                             params);
+}
+
+
+/**
+ * Insert payment confirmation from the exchange into the database.
+ *
+ * @param cls closure
+ * @param h_contract_terms hash of the contract terms associated with this deposit
+ * @param merchant_pub merchant's public key
+ * @param coin_pub public key of the coin
+ * @param exchange_url URL of the exchange handling this deposit
+ * @param amount_with_fee amount the exchange will deposit for this coin
+ * @param deposit_fee fee the exchange will charge for this coin
+ * @param refund_fee fee the exchange will charge for refunding this coin
+ * @param wire_fee wire fee charged by the exchange
+ * @param signkey_pub public key used by the exchange for @a exchange_proof
+ * @param exchange_proof proof from exchange that coin was accepted
+ * @return transaction status
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_store_deposit (void *cls,
+                        const struct GNUNET_HashCode *h_contract_terms,
+                        const struct TALER_MerchantPublicKeyP *merchant_pub,
+                        const struct TALER_CoinSpendPublicKeyP *coin_pub,
+                        const char *exchange_url,
+                        const struct TALER_Amount *amount_with_fee,
+                        const struct TALER_Amount *deposit_fee,
+                        const struct TALER_Amount *refund_fee,
+                        const struct TALER_Amount *wire_fee,
+                        const struct TALER_ExchangePublicKeyP *signkey_pub,
+                        const json_t *exchange_proof)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_auto_from_type (h_contract_terms),
+    GNUNET_PQ_query_param_auto_from_type (merchant_pub),
+    GNUNET_PQ_query_param_auto_from_type (coin_pub),
+    GNUNET_PQ_query_param_string (exchange_url),
+    TALER_PQ_query_param_amount (amount_with_fee),
+    TALER_PQ_query_param_amount (deposit_fee),
+    TALER_PQ_query_param_amount (refund_fee),
+    TALER_PQ_query_param_amount (wire_fee),
+    GNUNET_PQ_query_param_auto_from_type (signkey_pub),
+    TALER_PQ_query_param_json (exchange_proof),
+    GNUNET_PQ_query_param_end
+  };
+
+  GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
+              "Storing payment for h_contract_terms `%s', coin_pub: `%s', 
amount_with_fee: %s\n",
+              GNUNET_h2s (h_contract_terms),
+              TALER_B2S (coin_pub),
+              TALER_amount2s (amount_with_fee));
+  GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
+              "Merchant pub is `%s'\n",
+              TALER_B2S (merchant_pub));
+  check_connection (pg);
+  return GNUNET_PQ_eval_prepared_non_select (pg->conn,
+                                             "insert_deposit",
+                                             params);
+}
+
+
+/**
+ * Insert mapping of @a coin_pub and @a h_contract_terms to
+ * corresponding @a wtid.
+ *
+ * @param cls closure
+ * @param h_contract_terms hashcode of the proposal data paid by @a coin_pub
+ * @param coin_pub public key of the coin
+ * @param wtid identifier of the wire transfer in which the exchange
+ *             sent us the money for the coin deposit
+ * @return transaction status
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_store_coin_to_transfer (void *cls,
+                                 const struct GNUNET_HashCode *h_contract_terms,
+                                 const struct TALER_CoinSpendPublicKeyP *coin_pub,
+                                 const struct TALER_WireTransferIdentifierRawP *wtid)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_auto_from_type (h_contract_terms),
+    GNUNET_PQ_query_param_auto_from_type (coin_pub),
+    GNUNET_PQ_query_param_auto_from_type (wtid),
+    GNUNET_PQ_query_param_end
+  };
+
+  check_connection (pg);
+  return GNUNET_PQ_eval_prepared_non_select (pg->conn,
+                                             "insert_transfer",
+                                             params);
+}
+
+
+/**
+ * Insert wire transfer confirmation from the exchange into the database.
+ *
+ * @param cls closure
+ * @param exchange_url URL of the exchange
+ * @param wtid identifier of the wire transfer
+ * @param execution_time when was @a wtid executed
+ * @param signkey_pub public key used by the exchange for @a exchange_proof
+ * @param exchange_proof proof from exchange about what the deposit was for
+ * @return transaction status
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_store_transfer_to_proof (void *cls,
+                                  const char *exchange_url,
+                                  const struct TALER_WireTransferIdentifierRawP *wtid,
+                                  struct GNUNET_TIME_Absolute execution_time,
+                                  const struct TALER_ExchangePublicKeyP *signkey_pub,
+                                  const json_t *exchange_proof)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_string (exchange_url),
+    GNUNET_PQ_query_param_auto_from_type (wtid),
+    GNUNET_PQ_query_param_absolute_time (&execution_time),
+    GNUNET_PQ_query_param_auto_from_type (signkey_pub),
+    TALER_PQ_query_param_json (exchange_proof),
+    GNUNET_PQ_query_param_end
+  };
+
+  check_connection (pg);
+  return GNUNET_PQ_eval_prepared_non_select (pg->conn,
+                                             "insert_proof",
+                                             params);
+}
+
+
+/**
+ * Look up a proposal, using the same signature as the
+ * /history db methods.
+ *
+ * @param cls db plugin handle
+ * @param order_id order id used to search for the proposal data
+ * @param merchant_pub public key of the merchant using this method
+ * @param cb the callback
+ * @param cb_cls closure to pass to the callback
+ * @return transaction status
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_find_contract_terms_history (void *cls,
+                                      const char *order_id,
+                                      const struct TALER_MerchantPublicKeyP *merchant_pub,
+                                      TALER_MERCHANTDB_ProposalDataCallback cb,
+                                      void *cb_cls)
+{
+  struct PostgresClosure *pg = cls;
+  json_t *contract_terms;
+  enum GNUNET_DB_QueryStatus qs;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_string (order_id),
+    GNUNET_PQ_query_param_auto_from_type (merchant_pub),
+    GNUNET_PQ_query_param_end
+  };
+  struct GNUNET_PQ_ResultSpec rs[] = {
+    TALER_PQ_result_spec_json ("contract_terms",
+                               &contract_terms),
+    GNUNET_PQ_result_spec_end
+  };
+
+  qs = GNUNET_PQ_eval_prepared_singleton_select (pg->conn,
+                                                 "find_contract_terms_history",
+                                                 params,
+                                                 rs);
+  if (qs <= 0)
+    return qs;
+  if (NULL != cb)
+    cb (cb_cls,
+        order_id,
+        0,
+        contract_terms);
+  GNUNET_PQ_cleanup_result (rs);
+  return qs;
+}
+
+
+/**
+ * Closure for #find_contracts_cb().
+ */
+struct FindContractsContext
+{
+  /**
+   * Function to call on each result.
+   */
+  TALER_MERCHANTDB_ProposalDataCallback cb;
+
+  /**
+   * Closure for @e cb.
+   */
+  void *cb_cls;
+
+  /**
+   * Transaction status code to set.
+   */
+  enum GNUNET_DB_QueryStatus qs;
+};
+
+
+/**
+ * Function to be called with the results of a SELECT statement
+ * that has returned @a num_results results.
+ *
+ * @param cls of type `struct FindContractsContext *`
+ * @param result the postgres result
+ * @param num_results the number of results in @a result
+ */
+static void
+find_contracts_cb (void *cls,
+                   PGresult *result,
+                   unsigned int num_results)
+{
+  struct FindContractsContext *fcctx = cls;
+
+  for (unsigned int i = 0; i < num_results; i++)
+  {
+    char *order_id;
+    json_t *contract_terms;
+    uint64_t row_id;
+    struct GNUNET_PQ_ResultSpec rs[] = {
+      GNUNET_PQ_result_spec_string ("order_id",
+                                    &order_id),
+      TALER_PQ_result_spec_json ("contract_terms",
+                                 &contract_terms),
+      GNUNET_PQ_result_spec_uint64 ("row_id",
+                                    &row_id),
+      GNUNET_PQ_result_spec_end
+    };
+
+    if (GNUNET_OK !=
+        GNUNET_PQ_extract_result (result,
+                                  rs,
+                                  i))
+    {
+      GNUNET_break (0);
+      fcctx->qs = GNUNET_DB_STATUS_HARD_ERROR;
+      return;
+    }
+    fcctx->qs = i + 1;
+    fcctx->cb (fcctx->cb_cls,
+               order_id,
+               row_id,
+               contract_terms);
+    GNUNET_PQ_cleanup_result (rs);
+  }
+}
+
+
+/**
+ * Return proposals whose timestamps are older than `date`.
+ * Among those proposals, only the records between the
+ * start-th and the (start-nrows)-th are returned.  The rows
+ * are sorted with the youngest first.
+ *
+ * @param cls our plugin handle.
+ * @param date only results older than this date are returned.
+ * @param merchant_pub instance's public key; only rows related to this
+ * instance are returned.
+ * @param start only rows with serial id less than start are returned.
+ * In other words, you lower `start` to get older records. The typical
+ * usage is to first call `find_contract_terms_by_date`, so that you get
+ * the `nrows` youngest records. The oldest of those records tells you
+ * from which timestamp and `start` you can query the DB to get
+ * even older records, and so on. Alternatively, you can always use
+ * the same timestamp and just go further back in history by tuning `start`.
+ * @param nrows only nrows rows are returned.
+ * @param past if set to #GNUNET_YES, retrieves rows older than `date`.
+ * @param ascending if GNUNET_YES, results will be sorted in chronological order.
+ * This is typically used to show live updates on the merchant's back-office
+ * Web interface.
+ * @param cb function to call with transaction data, can be NULL.
+ * @param cb_cls closure for @a cb
+ * @return transaction status
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_find_contract_terms_by_date_and_range (void *cls,
+                                                struct GNUNET_TIME_Absolute date,
+                                                const struct TALER_MerchantPublicKeyP *merchant_pub,
+                                                uint64_t start,
+                                                uint64_t nrows,
+                                                int past,
+                                                unsigned int ascending,
+                                                
TALER_MERCHANTDB_ProposalDataCallback cb,
+                                                void *cb_cls)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_absolute_time (&date),
+    GNUNET_PQ_query_param_auto_from_type (merchant_pub),
+    GNUNET_PQ_query_param_uint64 (&start),
+    GNUNET_PQ_query_param_uint64 (&nrows),
+    GNUNET_PQ_query_param_end
+  };
+  const char *stmt;
+  enum GNUNET_DB_QueryStatus qs;
+  struct FindContractsContext fcctx = {
+    .cb = cb,
+    .cb_cls = cb_cls
+  };
+
+  GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
+              "DB serving /history with date %s\n",
+              GNUNET_STRINGS_absolute_time_to_string (date));
+  stmt =
+    (GNUNET_YES == past)
+    ? ( (GNUNET_YES == ascending)
+        ? "find_contract_terms_by_date_and_range_past_asc"
+        : "find_contract_terms_by_date_and_range_past" )
+    : ( (GNUNET_YES == ascending)
+        ? "find_contract_terms_by_date_and_range_asc"
+        : "find_contract_terms_by_date_and_range" );
+  check_connection (pg);
+  qs = GNUNET_PQ_eval_prepared_multi_select (pg->conn,
+                                             stmt,
+                                             params,
+                                             &find_contracts_cb,
+                                             &fcctx);
+  if (0 >= qs)
+    return qs;
+  return fcctx.qs;
+}
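+
+
+/*
+ * Illustrative paging sketch (hypothetical caller code, not part of this
+ * plugin): first fetch the `nrows` youngest records with
+ * find_contract_terms_by_date(), let the callback remember the smallest
+ * row_id it saw (called `oldest_row_id` here), and then page further back
+ * by passing that row_id as `start`:
+ *
+ *   qs = postgres_find_contract_terms_by_date_and_range (pg,
+ *                                                         date,
+ *                                                         &merchant_pub,
+ *                                                         oldest_row_id,
+ *                                                         20,
+ *                                                         GNUNET_YES,
+ *                                                         GNUNET_NO,
+ *                                                         &proposal_cb,
+ *                                                         proposal_cb_cls);
+ */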
+
+
+/**
+ * Closure for #find_tip_authorizations_cb().
+ */
+struct GetAuthorizedTipAmountContext
+{
+  /**
+   * Total authorized amount.
+   */
+  struct TALER_Amount authorized_amount;
+
+  /**
+   * Transaction status code to set.
+   */
+  enum GNUNET_DB_QueryStatus qs;
+
+};
+
+
+/**
+ * Function to be called with the results of a SELECT statement
+ * that has returned @a num_results results.
+ *
+ * @param cls of type `struct GetAuthorizedTipAmountContext *`
+ * @param result the postgres result
+ * @param num_results the number of results in @a result
+ */
+static void
+find_tip_authorizations_cb (void *cls,
+                            PGresult *result,
+                            unsigned int num_results)
+{
+  struct GetAuthorizedTipAmountContext *ctx = cls;
+  unsigned int i;
+
+  for (i = 0; i < num_results; i++)
+  {
+    struct TALER_Amount amount;
+    char *just;
+    struct GNUNET_HashCode h;
+    struct GNUNET_PQ_ResultSpec rs[] = {
+      GNUNET_PQ_result_spec_string ("justification",
+                                    &just),
+      GNUNET_PQ_result_spec_auto_from_type ("tip_id",
+                                            &h),
+      TALER_PQ_result_spec_amount ("amount",
+                                    &amount),
+      GNUNET_PQ_result_spec_end
+    };
+
+    if (GNUNET_OK !=
+        GNUNET_PQ_extract_result (result,
+                                  rs,
+                                  i))
+    {
+      GNUNET_break (0);
+      ctx->qs = GNUNET_DB_STATUS_HARD_ERROR;
+      return;
+    }
+
+    if (0 == i)
+    {
+      ctx->authorized_amount = amount;
+    }
+    else
+    {
+      if (GNUNET_OK !=
+          TALER_amount_add (&ctx->authorized_amount,
+                            &ctx->authorized_amount,
+                            &amount))
+      {
+        GNUNET_break (0);
+        GNUNET_PQ_cleanup_result (rs);
+        ctx->qs = GNUNET_DB_STATUS_HARD_ERROR;
+        return;
+      }
+    }
+    GNUNET_PQ_cleanup_result (rs);
+  }
+
+  if (0 == i)
+  {
+    ctx->qs = GNUNET_DB_STATUS_SUCCESS_NO_RESULTS;
+  }
+  else
+  {
+    /* one aggregated result */
+    ctx->qs = GNUNET_DB_STATUS_SUCCESS_ONE_RESULT;
+  }
+}
+
+
+/**
+ * Get the total amount of authorized tips for a tipping reserve.
+ *
+ * @param cls closure, typically a connection to the db
+ * @param reserve_priv which reserve to check
+ * @param[out] authorized_amount amount we've authorized so far for tips
+ * @return transaction status, usually
+ *      #GNUNET_DB_STATUS_SUCCESS_ONE_RESULT for success
+ *      #GNUNET_DB_STATUS_SUCCESS_NO_RESULTS if the reserve_priv
+ *      does not identify a known tipping reserve
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_get_authorized_tip_amount (void *cls,
+                                    const struct TALER_ReservePrivateKeyP *reserve_priv,
+                                    struct TALER_Amount *authorized_amount)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_auto_from_type (reserve_priv),
+    GNUNET_PQ_query_param_end
+  };
+  enum GNUNET_DB_QueryStatus qs;
+  struct GetAuthorizedTipAmountContext ctx = { 0 };
+
+  check_connection (pg);
+  qs = GNUNET_PQ_eval_prepared_multi_select (pg->conn,
+                                             "find_tip_authorizations",
+                                             params,
+                                             &find_tip_authorizations_cb,
+                                             &ctx);
+  if (0 >= qs)
+    return qs;
+  *authorized_amount = ctx.authorized_amount;
+  return ctx.qs;
+}
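+
+
+/*
+ * Usage sketch (hypothetical caller; `balance` and `remaining` are
+ * assumed to exist): the total returned here can be subtracted from the
+ * reserve balance to obtain the remaining tipping budget, e.g.
+ *
+ *   if (GNUNET_DB_STATUS_SUCCESS_ONE_RESULT ==
+ *       postgres_get_authorized_tip_amount (pg,
+ *                                           &reserve_priv,
+ *                                           &authorized))
+ *     GNUNET_break (GNUNET_SYSERR !=
+ *                   TALER_amount_subtract (&remaining,
+ *                                          &balance,
+ *                                          &authorized));
+ */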
+
+
+/**
+ * Return proposals whose timestamps are older than `date`.
+ * The rows are sorted with the youngest first.
+ *
+ * @param cls our plugin handle.
+ * @param date only results older than this date are returned.
+ * @param merchant_pub instance's public key; only rows related to this
+ * instance are returned.
+ * @param nrows at most nrows rows are returned.
+ * @param cb function to call with transaction data, can be NULL.
+ * @param cb_cls closure for @a cb
+ * @return transaction status
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_find_contract_terms_by_date (void *cls,
+                                      struct GNUNET_TIME_Absolute date,
+                                      const struct TALER_MerchantPublicKeyP *merchant_pub,
+                                      uint64_t nrows,
+                                      TALER_MERCHANTDB_ProposalDataCallback cb,
+                                      void *cb_cls)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_absolute_time (&date),
+    GNUNET_PQ_query_param_auto_from_type (merchant_pub),
+    GNUNET_PQ_query_param_uint64 (&nrows),
+    GNUNET_PQ_query_param_end
+  };
+  enum GNUNET_DB_QueryStatus qs;
+  struct FindContractsContext fcctx = {
+    .cb = cb,
+    .cb_cls = cb_cls
+  };
+
+  check_connection (pg);
+  qs = GNUNET_PQ_eval_prepared_multi_select (pg->conn,
+                                             "find_contract_terms_by_date",
+                                             params,
+                                             &find_contracts_cb,
+                                             &fcctx);
+  if (0 >= qs)
+    return qs;
+  return fcctx.qs;
+}
+
+
+/**
+ * Closure for #find_payments_cb().
+ */
+struct FindPaymentsContext
+{
+  /**
+   * Function to call with results.
+   */
+  TALER_MERCHANTDB_CoinDepositCallback cb;
+
+  /**
+   * Closure for @e cb.
+   */
+  void *cb_cls;
+
+  /**
+   * Contract term hash used for the search.
+   */
+  const struct GNUNET_HashCode *h_contract_terms;
+
+  /**
+   * Transaction status (set).
+   */
+  enum GNUNET_DB_QueryStatus qs;
+};
+
+
+/**
+ * Function to be called with the results of a SELECT statement
+ * that has returned @a num_results results.
+ *
+ * @param cls of type `struct FindPaymentsContext *`
+ * @param result the postgres result
+ * @param num_results the number of results in @a result
+ */
+static void
+find_payments_cb (void *cls,
+                  PGresult *result,
+                  unsigned int num_results)
+{
+  struct FindPaymentsContext *fpc = cls;
+
+  for (unsigned int i=0;i<num_results;i++)
+  {
+    struct TALER_CoinSpendPublicKeyP coin_pub;
+    struct TALER_Amount amount_with_fee;
+    struct TALER_Amount deposit_fee;
+    struct TALER_Amount refund_fee;
+    struct TALER_Amount wire_fee;
+    json_t *exchange_proof;
+    char *exchange_url;
+    struct GNUNET_PQ_ResultSpec rs[] = {
+      GNUNET_PQ_result_spec_auto_from_type ("coin_pub",
+                                            &coin_pub),
+      GNUNET_PQ_result_spec_string ("exchange_url",
+                                    &exchange_url),
+      TALER_PQ_result_spec_amount ("amount_with_fee",
+                                   &amount_with_fee),
+      TALER_PQ_result_spec_amount ("deposit_fee",
+                                   &deposit_fee),
+      TALER_PQ_result_spec_amount ("refund_fee",
+                                   &refund_fee),
+      TALER_PQ_result_spec_amount ("wire_fee",
+                                   &wire_fee),
+      TALER_PQ_result_spec_json ("exchange_proof",
+                                 &exchange_proof),
+      GNUNET_PQ_result_spec_end
+    };
+
+    if (GNUNET_OK !=
+        GNUNET_PQ_extract_result (result,
+                                  rs,
+                                  i))
+    {
+      GNUNET_break (0);
+      fpc->qs = GNUNET_DB_STATUS_HARD_ERROR;
+      return;
+    }
+    fpc->qs = i + 1;
+    fpc->cb (fpc->cb_cls,
+             fpc->h_contract_terms,
+             &coin_pub,
+             exchange_url,
+             &amount_with_fee,
+             &deposit_fee,
+             &refund_fee,
+             &wire_fee,
+             exchange_proof);
+    GNUNET_PQ_cleanup_result (rs);
+  }
+}
+
+
+/**
+ * Lookup information about coin payments by proposal data hash
+ * (and @a merchant_pub)
+ *
+ * @param cls closure
+ * @param h_contract_terms key for the search
+ * @param merchant_pub merchant's public key
+ * @param cb function to call with payment data
+ * @param cb_cls closure for @a cb
+ * @return transaction status
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_find_payments (void *cls,
+                        const struct GNUNET_HashCode *h_contract_terms,
+                        const struct TALER_MerchantPublicKeyP *merchant_pub,
+                        TALER_MERCHANTDB_CoinDepositCallback cb,
+                        void *cb_cls)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_auto_from_type (h_contract_terms),
+    GNUNET_PQ_query_param_auto_from_type (merchant_pub),
+    GNUNET_PQ_query_param_end
+  };
+  struct FindPaymentsContext fpc = {
+    .h_contract_terms = h_contract_terms,
+    .cb = cb,
+    .cb_cls = cb_cls
+  };
+  enum GNUNET_DB_QueryStatus qs;
+
+  /* no preflight check here, run in its own transaction by the
+     caller! */
+  GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
+              "Finding payment for h_contract_terms '%s'\n",
+              GNUNET_h2s (h_contract_terms));
+  check_connection (pg);
+  qs = GNUNET_PQ_eval_prepared_multi_select (pg->conn,
+                                             "find_deposits",
+                                             params,
+                                             &find_payments_cb,
+                                             &fpc);
+  if (qs <= 0)
+    return qs;
+  return fpc.qs;
+}
+
+
+/**
+ * Closure for #find_payments_by_coin_cb().
+ */
+struct FindPaymentsByCoinContext
+{
+  /**
+   * Function to call with results.
+   */
+  TALER_MERCHANTDB_CoinDepositCallback cb;
+
+  /**
+   * Closure for @e cb.
+   */
+  void *cb_cls;
+
+  /**
+   * Coin we are looking for.
+   */
+  const struct TALER_CoinSpendPublicKeyP *coin_pub;
+
+  /**
+   * Hash of the contract we are looking for.
+   */
+  const struct GNUNET_HashCode *h_contract_terms;
+
+  /**
+   * Transaction status (set).
+   */
+  enum GNUNET_DB_QueryStatus qs;
+};
+
+
+/**
+ * Function to be called with the results of a SELECT statement
+ * that has returned @a num_results results.
+ *
+ * @param cls of type `struct FindPaymentsByCoinContext *`
+ * @param result the postgres result
+ * @param num_results the number of results in @a result
+ */
+static void
+find_payments_by_coin_cb (void *cls,
+                          PGresult *result,
+                          unsigned int num_results)
+{
+  struct FindPaymentsByCoinContext *fpc = cls;
+
+  for (unsigned int i=0;i<num_results;i++)
+  {
+    struct TALER_Amount amount_with_fee;
+    struct TALER_Amount deposit_fee;
+    struct TALER_Amount refund_fee;
+    struct TALER_Amount wire_fee;
+    char *exchange_url;
+    json_t *exchange_proof;
+    struct GNUNET_PQ_ResultSpec rs[] = {
+      TALER_PQ_result_spec_amount ("amount_with_fee",
+                                   &amount_with_fee),
+      TALER_PQ_result_spec_amount ("deposit_fee",
+                                   &deposit_fee),
+      TALER_PQ_result_spec_amount ("refund_fee",
+                                   &refund_fee),
+      TALER_PQ_result_spec_amount ("wire_fee",
+                                   &wire_fee),
+      GNUNET_PQ_result_spec_string ("exchange_url",
+                                   &exchange_url),
+      TALER_PQ_result_spec_json ("exchange_proof",
+                                 &exchange_proof),
+      GNUNET_PQ_result_spec_end
+    };
+
+    if (GNUNET_OK !=
+        GNUNET_PQ_extract_result (result,
+                                  rs,
+                                  i))
+    {
+      GNUNET_break (0);
+      fpc->qs = GNUNET_DB_STATUS_HARD_ERROR;
+      return;
+    }
+    fpc->qs = i + 1;
+    fpc->cb (fpc->cb_cls,
+             fpc->h_contract_terms,
+             fpc->coin_pub,
+             exchange_url,
+             &amount_with_fee,
+             &deposit_fee,
+             &refund_fee,
+             &wire_fee,
+             exchange_proof);
+    GNUNET_PQ_cleanup_result (rs);
+  }
+}
+
+
+/**
+ * Retrieve information about a deposited coin.
+ *
+ * @param cls closure
+ * @param h_contract_terms hashcode of the proposal data paid by @a coin_pub
+ * @param merchant_pub merchant's public key.
+ * @param coin_pub coin's public key used for the search
+ * @param cb function to call with payment data
+ * @param cb_cls closure for @a cb
+ * @return transaction status
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_find_payments_by_hash_and_coin (void *cls,
+                                         const struct GNUNET_HashCode *h_contract_terms,
+                                         const struct TALER_MerchantPublicKeyP *merchant_pub,
+                                         const struct TALER_CoinSpendPublicKeyP *coin_pub,
+                                         TALER_MERCHANTDB_CoinDepositCallback cb,
+                                         void *cb_cls)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_auto_from_type (h_contract_terms),
+    GNUNET_PQ_query_param_auto_from_type (merchant_pub),
+    GNUNET_PQ_query_param_auto_from_type (coin_pub),
+    GNUNET_PQ_query_param_end
+  };
+  struct FindPaymentsByCoinContext fpc = {
+    .cb = cb,
+    .cb_cls = cb_cls,
+    .h_contract_terms = h_contract_terms,
+    .coin_pub = coin_pub
+  };
+  enum GNUNET_DB_QueryStatus qs;
+
+  check_connection (pg);
+  qs = GNUNET_PQ_eval_prepared_multi_select (pg->conn,
+                                             "find_deposits_by_hash_and_coin",
+                                             params,
+                                             &find_payments_by_coin_cb,
+                                             &fpc);
+  if (0 >= qs)
+    return qs;
+  return fpc.qs;
+}
+
+
+/**
+ * Closure for #find_transfers_cb().
+ */
+struct FindTransfersContext
+{
+  /**
+   * Function to call on results.
+   */
+  TALER_MERCHANTDB_TransferCallback cb;
+
+  /**
+   * Closure for @e cb.
+   */
+  void *cb_cls;
+
+  /**
+   * Hash of the contract we are looking up.
+   */
+  const struct GNUNET_HashCode *h_contract_terms;
+
+  /**
+   * Transaction status (set).
+   */
+  enum GNUNET_DB_QueryStatus qs;
+};
+
+
+/**
+ * Function to be called with the results of a SELECT statement
+ * that has returned @a num_results results.
+ *
+ * @param cls of type `struct FindTransfersContext *`
+ * @param result the postgres result
+ * @param num_results the number of results in @a result
+ */
+static void
+find_transfers_cb (void *cls,
+                   PGresult *result,
+                   unsigned int num_results)
+{
+  struct FindTransfersContext *ftc = cls;
+
+  for (unsigned int i=0;i<num_results;i++)
+  {
+    struct TALER_CoinSpendPublicKeyP coin_pub;
+    struct TALER_WireTransferIdentifierRawP wtid;
+    struct GNUNET_TIME_Absolute execution_time;
+    json_t *proof;
+
+    struct GNUNET_PQ_ResultSpec rs[] = {
+      GNUNET_PQ_result_spec_auto_from_type ("coin_pub",
+                                            &coin_pub),
+      GNUNET_PQ_result_spec_auto_from_type ("wtid",
+                                            &wtid),
+      GNUNET_PQ_result_spec_absolute_time ("execution_time",
+                                           &execution_time),
+      TALER_PQ_result_spec_json ("proof",
+                                 &proof),
+      GNUNET_PQ_result_spec_end
+    };
+
+    if (GNUNET_OK !=
+        GNUNET_PQ_extract_result (result,
+                                  rs,
+                                  i))
+    {
+      GNUNET_break (0);
+      ftc->qs = GNUNET_DB_STATUS_HARD_ERROR;
+      return;
+    }
+    ftc->qs = i + 1;
+    ftc->cb (ftc->cb_cls,
+             ftc->h_contract_terms,
+             &coin_pub,
+             &wtid,
+             execution_time,
+             proof);
+    GNUNET_PQ_cleanup_result (rs);
+  }
+}
+
+
+/**
+ * Lookup information about a transfer by @a h_contract_terms.  Note
+ * that in theory there could be multiple wire transfers for a
+ * single @a h_contract_terms, as the transaction may have involved
+ * multiple coins and the coins may be spread over different wire
+ * transfers.
+ *
+ * @param cls closure
+ * @param h_contract_terms key for the search
+ * @param cb function to call with transfer data
+ * @param cb_cls closure for @a cb
+ * @return transaction status
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_find_transfers_by_hash (void *cls,
+                                 const struct GNUNET_HashCode *h_contract_terms,
+                                 TALER_MERCHANTDB_TransferCallback cb,
+                                 void *cb_cls)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_auto_from_type (h_contract_terms),
+    GNUNET_PQ_query_param_end
+  };
+  struct FindTransfersContext ftc = {
+    .h_contract_terms = h_contract_terms,
+    .cb = cb,
+    .cb_cls = cb_cls
+  };
+  enum GNUNET_DB_QueryStatus qs;
+
+  check_connection (pg);
+  qs = GNUNET_PQ_eval_prepared_multi_select (pg->conn,
+                                             "find_transfers_by_hash",
+                                             params,
+                                             &find_transfers_cb,
+                                             &ftc);
+  if (0 >= qs)
+    return qs;
+  return ftc.qs;
+}
+
+
+/**
+ * Closure for #find_deposits_cb().
+ */
+struct FindDepositsContext
+{
+
+  /**
+   * Function to call for each result.
+   */
+  TALER_MERCHANTDB_CoinDepositCallback cb;
+
+  /**
+   * Closure for @e cb.
+   */
+  void *cb_cls;
+
+  /**
+   * Transaction status (set).
+   */
+  enum GNUNET_DB_QueryStatus qs;
+};
+
+
+/**
+ * Function to be called with the results of a SELECT statement
+ * that has returned @a num_results results.
+ *
+ * @param cls of type `struct FindDepositsContext *`
+ * @param result the postgres result
+ * @param num_results the number of results in @a result
+ */
+static void
+find_deposits_cb (void *cls,
+                  PGresult *result,
+                  unsigned int num_results)
+{
+  struct FindDepositsContext *fdc = cls;
+
+  for (unsigned int i=0;i<num_results;i++)
+  {
+    struct GNUNET_HashCode h_contract_terms;
+    struct TALER_CoinSpendPublicKeyP coin_pub;
+    struct TALER_Amount amount_with_fee;
+    struct TALER_Amount deposit_fee;
+    struct TALER_Amount refund_fee;
+    struct TALER_Amount wire_fee;
+    char *exchange_url;
+    json_t *exchange_proof;
+    struct GNUNET_PQ_ResultSpec rs[] = {
+      GNUNET_PQ_result_spec_auto_from_type ("h_contract_terms",
+                                            &h_contract_terms),
+      GNUNET_PQ_result_spec_auto_from_type ("coin_pub",
+                                            &coin_pub),
+      TALER_PQ_result_spec_amount ("amount_with_fee",
+                                   &amount_with_fee),
+      TALER_PQ_result_spec_amount ("deposit_fee",
+                                   &deposit_fee),
+      TALER_PQ_result_spec_amount ("refund_fee",
+                                   &refund_fee),
+      TALER_PQ_result_spec_amount ("wire_fee",
+                                   &wire_fee),
+      GNUNET_PQ_result_spec_string ("exchange_url",
+                                   &exchange_url),
+      TALER_PQ_result_spec_json ("exchange_proof",
+                                 &exchange_proof),
+      GNUNET_PQ_result_spec_end
+    };
+
+    if (GNUNET_OK !=
+        GNUNET_PQ_extract_result (result,
+                                  rs,
+                                  i))
+    {
+      GNUNET_break (0);
+      fdc->qs = GNUNET_DB_STATUS_HARD_ERROR;
+      return;
+    }
+    fdc->qs = i + 1;
+    fdc->cb (fdc->cb_cls,
+             &h_contract_terms,
+             &coin_pub,
+             exchange_url,
+             &amount_with_fee,
+             &deposit_fee,
+             &refund_fee,
+             &wire_fee,
+             exchange_proof);
+    GNUNET_PQ_cleanup_result (rs);
+  }
+}
+
+
+/**
+ * Lookup information about coin deposits by @a wtid.
+ *
+ * @param cls closure
+ * @param wtid wire transfer identifier to find matching transactions for
+ * @param cb function to call with payment data
+ * @param cb_cls closure for @a cb
+ * @return transaction status
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_find_deposits_by_wtid (void *cls,
+                                const struct TALER_WireTransferIdentifierRawP *wtid,
+                                TALER_MERCHANTDB_CoinDepositCallback cb,
+                                void *cb_cls)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_auto_from_type (wtid),
+    GNUNET_PQ_query_param_end
+  };
+  struct FindDepositsContext fdc = {
+    .cb = cb,
+    .cb_cls = cb_cls
+  };
+  enum GNUNET_DB_QueryStatus qs;
+
+  check_connection (pg);
+  qs = GNUNET_PQ_eval_prepared_multi_select (pg->conn,
+                                             "find_deposits_by_wtid",
+                                             params,
+                                             &find_deposits_cb,
+                                             &fdc);
+  if (0 >= qs)
+    return qs;
+  return fdc.qs;
+}
+
+
+/**
+ * Closure for #get_refunds_cb().
+ */
+struct GetRefundsContext
+{
+  /**
+   * Function to call for each refund.
+   */
+  TALER_MERCHANTDB_RefundCallback rc;
+
+  /**
+   * Closure for @e rc.
+   */
+  void *rc_cls;
+
+  /**
+   * Transaction result.
+   */
+  enum GNUNET_DB_QueryStatus qs;
+};
+
+
+/**
+ * Function to be called with the results of a SELECT statement
+ * that has returned @a num_results results.
+ *
+ * @param cls of type `struct GetRefundsContext *`
+ * @param result the postgres result
+ * @param num_results the number of results in @a result
+ */
+static void
+get_refunds_cb (void *cls,
+                PGresult *result,
+                unsigned int num_results)
+{
+  struct GetRefundsContext *grc = cls;
+
+  for (unsigned int i=0;i<num_results;i++)
+  {
+    struct TALER_CoinSpendPublicKeyP coin_pub;
+    uint64_t rtransaction_id;
+    struct TALER_Amount refund_amount;
+    struct TALER_Amount refund_fee;
+    char *reason;
+    struct GNUNET_PQ_ResultSpec rs[] = {
+      GNUNET_PQ_result_spec_auto_from_type ("coin_pub",
+                                            &coin_pub),
+      GNUNET_PQ_result_spec_uint64 ("rtransaction_id",
+                                    &rtransaction_id),
+      TALER_PQ_result_spec_amount ("refund_amount",
+                                   &refund_amount),
+      TALER_PQ_result_spec_amount ("refund_fee",
+                                   &refund_fee),
+      GNUNET_PQ_result_spec_string ("reason",
+                                    &reason),
+      GNUNET_PQ_result_spec_end
+    };
+
+    if (GNUNET_OK !=
+        GNUNET_PQ_extract_result (result,
+                                  rs,
+                                  i))
+    {
+      GNUNET_break (0);
+      grc->qs = GNUNET_DB_STATUS_HARD_ERROR;
+      return;
+    }
+    grc->qs = i + 1;
+    grc->rc (grc->rc_cls,
+             &coin_pub,
+             rtransaction_id,
+             reason,
+             &refund_amount,
+             &refund_fee);
+    GNUNET_PQ_cleanup_result (rs);
+  }
+}
+
+
+/**
+ * Obtain refunds associated with a contract.
+ *
+ * @param cls closure, typically a connection to the db
+ * @param merchant_pub public key of the merchant instance
+ * @param h_contract_terms hash code of the contract
+ * @param rc function to call for each coin on which there is a refund
+ * @param rc_cls closure for @a rc
+ * @return transaction status
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_get_refunds_from_contract_terms_hash (void *cls,
+                                               const struct TALER_MerchantPublicKeyP *merchant_pub,
+                                               const struct GNUNET_HashCode *h_contract_terms,
+                                               TALER_MERCHANTDB_RefundCallback rc,
+                                               void *rc_cls)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_auto_from_type (merchant_pub),
+    GNUNET_PQ_query_param_auto_from_type (h_contract_terms),
+    GNUNET_PQ_query_param_end
+  };
+  struct GetRefundsContext grc = {
+    .rc = rc,
+    .rc_cls = rc_cls
+  };
+  enum GNUNET_DB_QueryStatus qs;
+
+  /* no preflight check here, run in transaction by caller! */
+  TALER_LOG_DEBUG ("Looking for refund %s + %s\n",
+                   GNUNET_h2s (h_contract_terms),
+                   TALER_B2S (merchant_pub));
+  check_connection (pg);
+  qs = GNUNET_PQ_eval_prepared_multi_select (pg->conn,
+                                             "find_refunds_from_contract_terms_hash",
+                                             params,
+                                             &get_refunds_cb,
+                                             &grc);
+  if (0 >= qs)
+    return qs;
+  return grc.qs;
+}
+
+
+/**
+ * Insert a refund row into merchant_refunds.  Not meant to be exported
+ * in the db API.
+ *
+ * @param cls closure, typically a connection to the db
+ * @param merchant_pub merchant instance public key
+ * @param h_contract_terms hashcode of the contract related to this refund
+ * @param coin_pub public key of the coin giving the (part of) refund
+ * @param reason human-readable explanation behind the refund
+ * @param refund how much this coin is refunding
+ * @param refund_fee refund fee for this coin
+ */
+static enum GNUNET_DB_QueryStatus
+insert_refund (void *cls,
+               const struct TALER_MerchantPublicKeyP *merchant_pub,
+               const struct GNUNET_HashCode *h_contract_terms,
+               const struct TALER_CoinSpendPublicKeyP *coin_pub,
+               const char *reason,
+               const struct TALER_Amount *refund,
+               const struct TALER_Amount *refund_fee)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_auto_from_type (merchant_pub),
+    GNUNET_PQ_query_param_auto_from_type (h_contract_terms),
+    GNUNET_PQ_query_param_auto_from_type (coin_pub),
+    GNUNET_PQ_query_param_string (reason),
+    TALER_PQ_query_param_amount (refund),
+    TALER_PQ_query_param_amount (refund_fee),
+    GNUNET_PQ_query_param_end
+  };
+
+  TALER_LOG_DEBUG ("Inserting refund %s + %s\n",
+                   GNUNET_h2s (h_contract_terms),
+                   TALER_B2S (merchant_pub));
+
+  check_connection (pg);
+  return GNUNET_PQ_eval_prepared_non_select (pg->conn,
+                                             "insert_refund",
+                                             params);
+}
+
+
+/**
+ * Store information about wire fees charged by an exchange,
+ * including signature (so we have proof).
+ *
+ * @param cls closure
+ * @param exchange_pub public key of the exchange
+ * @param h_wire_method hash of wire method
+ * @param wire_fee wire fee charged
+ * @param closing_fee closing fee charged (irrelevant for us,
+ *              but needed to check signature)
+ * @param start_date start of fee being used
+ * @param end_date end of fee being used
+ * @param exchange_sig signature of exchange over fee structure
+ * @return transaction status code
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_store_wire_fee_by_exchange (void *cls,
+                                     const struct TALER_MasterPublicKeyP *exchange_pub,
+                                     const struct GNUNET_HashCode *h_wire_method,
+                                     const struct TALER_Amount *wire_fee,
+                                     const struct TALER_Amount *closing_fee,
+                                     struct GNUNET_TIME_Absolute start_date,
+                                     struct GNUNET_TIME_Absolute end_date,
+                                     const struct TALER_MasterSignatureP *exchange_sig)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_auto_from_type (exchange_pub),
+    GNUNET_PQ_query_param_auto_from_type (h_wire_method),
+    TALER_PQ_query_param_amount (wire_fee),
+    TALER_PQ_query_param_amount (closing_fee),
+    GNUNET_PQ_query_param_absolute_time (&start_date),
+    GNUNET_PQ_query_param_absolute_time (&end_date),
+    GNUNET_PQ_query_param_auto_from_type (exchange_sig),
+    GNUNET_PQ_query_param_end
+  };
+
+  /* no preflight check here, run in its own transaction by the caller */
+  check_connection (pg);
+  GNUNET_log (GNUNET_ERROR_TYPE_INFO,
+              "Storing wire fee for %s starting at %s of %s\n",
+              TALER_B2S (exchange_pub),
+              GNUNET_STRINGS_absolute_time_to_string (start_date),
+              TALER_amount2s (wire_fee));
+  return GNUNET_PQ_eval_prepared_non_select (pg->conn,
+                                             "insert_wire_fee",
+                                             params);
+}
+
+
+/**
+ * Obtain information about wire fees charged by an exchange,
+ * including signature (so we have proof).
+ *
+ * @param cls closure
+ * @param exchange_pub public key of the exchange
+ * @param h_wire_method hash of wire method
+ * @param contract_date date of the contract to use for the lookup
+ * @param[out] wire_fee wire fee charged
+ * @param[out] closing_fee closing fee charged (irrelevant for us,
+ *              but needed to check signature)
+ * @param[out] start_date start of fee being used
+ * @param[out] end_date end of fee being used
+ * @param[out] exchange_sig signature of exchange over fee structure
+ * @return transaction status code
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_lookup_wire_fee (void *cls,
+                          const struct TALER_MasterPublicKeyP *exchange_pub,
+                          const struct GNUNET_HashCode *h_wire_method,
+                          struct GNUNET_TIME_Absolute contract_date,
+                          struct TALER_Amount *wire_fee,
+                          struct TALER_Amount *closing_fee,
+                          struct GNUNET_TIME_Absolute *start_date,
+                          struct GNUNET_TIME_Absolute *end_date,
+                          struct TALER_MasterSignatureP *exchange_sig)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_auto_from_type (exchange_pub),
+    GNUNET_PQ_query_param_auto_from_type (h_wire_method),
+    GNUNET_PQ_query_param_absolute_time (&contract_date),
+    GNUNET_PQ_query_param_end
+  };
+  struct GNUNET_PQ_ResultSpec rs[] = {
+    TALER_PQ_result_spec_amount ("wire_fee",
+                                 wire_fee),
+    TALER_PQ_result_spec_amount ("closing_fee",
+                                 closing_fee),
+    GNUNET_PQ_result_spec_absolute_time ("start_date",
+                                         start_date),
+    GNUNET_PQ_result_spec_absolute_time ("end_date",
+                                         end_date),
+    GNUNET_PQ_result_spec_auto_from_type ("exchange_sig",
+                                          exchange_sig),
+    GNUNET_PQ_result_spec_end
+  };
+
+  check_connection (pg);
+  return GNUNET_PQ_eval_prepared_singleton_select (pg->conn,
+                                                   "lookup_wire_fee",
+                                                   params,
+                                                   rs);
+}
+
+
+/**
+ * Closure for #process_refund_cb.
+ */
+struct FindRefundContext
+{
+  /**
+   * Updated to reflect total amount refunded so far.
+   */
+  struct TALER_Amount refunded_amount;
+
+  /**
+   * Set to #GNUNET_SYSERR on hard errors.
+   */
+  int err;
+};
+
+
+/**
+ * Function to be called with the results of a SELECT statement
+ * that has returned @a num_results results.
+ *
+ * @param cls closure, our `struct FindRefundContext`
+ * @param result the postgres result
+ * @param num_results the number of results in @a result
+ */
+static void
+process_refund_cb (void *cls,
+                   PGresult *result,
+                   unsigned int num_results)
+{
+  struct FindRefundContext *ictx = cls;
+
+  for (unsigned int i=0; i<num_results; i++)
+  {
+    /* Sum up existing refunds */
+    struct TALER_Amount acc;
+    struct GNUNET_PQ_ResultSpec rs[] = {
+      TALER_PQ_result_spec_amount ("refund_amount",
+                                   &acc),
+      GNUNET_PQ_result_spec_end
+    };
+
+    if (GNUNET_OK !=
+        GNUNET_PQ_extract_result (result,
+                                  rs,
+                                  i))
+    {
+      GNUNET_break (0);
+      ictx->err = GNUNET_SYSERR;
+      return;
+    }
+    if (GNUNET_SYSERR ==
+        TALER_amount_add (&ictx->refunded_amount,
+                          &ictx->refunded_amount,
+                          &acc))
+    {
+      GNUNET_break (0);
+      ictx->err = GNUNET_SYSERR;
+      return;
+    }
+    GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
+                "Found refund of %s\n",
+                TALER_amount2s (&acc));
+  }
+}
+
+
+/**
+ * Closure for #process_deposits_cb.
+ */
+struct InsertRefundContext
+{
+  /**
+   * Used to provide a connection to the db
+   */
+  struct PostgresClosure *pg;
+
+  /**
+   * Amount to which increase the refund for this contract
+   */
+  const struct TALER_Amount *refund;
+
+  /**
+   * Merchant instance public key
+   */
+  const struct TALER_MerchantPublicKeyP *merchant_pub;
+
+  /**
+   * Hash code representing the contract
+   */
+  const struct GNUNET_HashCode *h_contract_terms;
+
+  /**
+   * Human-readable reason behind this refund
+   */
+  const char *reason;
+
+  /**
+   * Transaction status code.
+   */
+  enum GNUNET_DB_QueryStatus qs;
+};
+
+
+/**
+ * Function to be called with the results of a SELECT statement
+ * that has returned @a num_results results.
+ *
+ * @param cls closure, our `struct InsertRefundContext`
+ * @param result the postgres result
+ * @param num_results the number of results in @a result
+ */
+static void
+process_deposits_for_refund_cb (void *cls,
+                                PGresult *result,
+                                unsigned int num_results)
+{
+  struct InsertRefundContext *ctx = cls;
+  struct TALER_Amount current_refund;
+  struct TALER_Amount deposit_refund[GNUNET_NZL(num_results)];
+  struct TALER_CoinSpendPublicKeyP deposit_coin_pubs[GNUNET_NZL(num_results)];
+  struct TALER_Amount deposit_amount_with_fee[GNUNET_NZL(num_results)];
+  struct TALER_Amount deposit_refund_fee[GNUNET_NZL(num_results)];
+
+  GNUNET_assert (GNUNET_OK ==
+                 TALER_amount_get_zero (ctx->refund->currency,
+                                        &current_refund));
+
+  /* Pass 1:  Collect amount of existing refunds into current_refund.
+   * Also store existing refunded amount for each deposit in deposit_refund. */
+  for (unsigned int i=0; i<num_results; i++)
+  {
+    struct TALER_CoinSpendPublicKeyP coin_pub;
+    struct TALER_Amount amount_with_fee;
+    struct TALER_Amount refund_fee;
+    struct GNUNET_PQ_ResultSpec rs[] = {
+      GNUNET_PQ_result_spec_auto_from_type ("coin_pub",
+                                            &coin_pub),
+      TALER_PQ_result_spec_amount ("amount_with_fee",
+                                   &amount_with_fee),
+      TALER_PQ_result_spec_amount ("refund_fee",
+                                   &refund_fee),
+      GNUNET_PQ_result_spec_end
+    };
+    struct FindRefundContext ictx;
+    enum GNUNET_DB_QueryStatus ires;
+    struct GNUNET_PQ_QueryParam params[] = {
+      GNUNET_PQ_query_param_auto_from_type (&coin_pub),
+      GNUNET_PQ_query_param_end
+    };
+
+    if (GNUNET_OK !=
+        GNUNET_PQ_extract_result (result,
+                                  rs,
+                                  i))
+    {
+      GNUNET_break (0);
+      ctx->qs = GNUNET_DB_STATUS_HARD_ERROR;
+      return;
+    }
+    GNUNET_assert (GNUNET_OK ==
+                   TALER_amount_get_zero (ctx->refund->currency,
+                                          &ictx.refunded_amount));
+    ictx.err = GNUNET_OK; /* no error so far */
+    ires = GNUNET_PQ_eval_prepared_multi_select (ctx->pg->conn,
+                                                 "find_refunds",
+                                                 params,
+                                                 &process_refund_cb,
+                                                 &ictx);
+    if ( (GNUNET_OK != ictx.err) ||
+         (GNUNET_DB_STATUS_HARD_ERROR == ires) )
+    {
+      GNUNET_break (0);
+      ctx->qs = GNUNET_DB_STATUS_HARD_ERROR;
+      return;
+    }
+    if (GNUNET_DB_STATUS_SOFT_ERROR == ires)
+    {
+      ctx->qs = GNUNET_DB_STATUS_SOFT_ERROR;
+      return;
+    }
+    deposit_refund[i] = ictx.refunded_amount;
+    deposit_amount_with_fee[i] = amount_with_fee;
+    deposit_coin_pubs[i] = coin_pub;
+    deposit_refund_fee[i] = refund_fee;
+    if (GNUNET_SYSERR ==
+        TALER_amount_add (&current_refund,
+                          &current_refund,
+                          &ictx.refunded_amount))
+    {
+      GNUNET_break (0);
+      ctx->qs = GNUNET_DB_STATUS_HARD_ERROR;
+      return;
+    }
+    GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
+                "Existing refund for coin %s is %s\n",
+                TALER_B2S (&coin_pub),
+                TALER_amount2s (&ictx.refunded_amount));
+  }
+
+  GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
+              "Total existing refund is %s\n",
+              TALER_amount2s (&current_refund));
+
+  /* stop immediately if we are 'done' === amount already
+   * refunded.  */
+  if (0 >= TALER_amount_cmp (ctx->refund,
+                             &current_refund))
+  {
+    ctx->qs = GNUNET_DB_STATUS_SUCCESS_NO_RESULTS;
+    return;
+  }
+
+  /* Pass 2: Try to increase the current refund until it matches the desired refund */
+  for (unsigned int i=0;i<num_results; i++)
+  {
+    const struct TALER_Amount *increment;
+    struct TALER_Amount left;
+    struct TALER_Amount remaining_refund;
+
+    /* How much of the coin is left after the existing refunds? */
+    if (GNUNET_SYSERR ==
+        TALER_amount_subtract (&left,
+                               &deposit_amount_with_fee[i],
+                               &deposit_refund[i]))
+    {
+      GNUNET_break (0);
+      ctx->qs = GNUNET_DB_STATUS_HARD_ERROR;
+      return;
+    }
+
+    if ( (0 == left.value) &&
+         (0 == left.fraction) )
+    {
+      /* coin was fully refunded, move to next coin */
+      GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
+                  "Coin %s fully refunded, moving to next coin\n",
+                  TALER_B2S (&deposit_coin_pubs[i]));
+      continue;
+    }
+
+    /* How much of the refund is still to be paid back? */
+    if (GNUNET_SYSERR ==
+        TALER_amount_subtract (&remaining_refund,
+                               ctx->refund,
+                               &current_refund))
+    {
+      GNUNET_break (0);
+      ctx->qs = GNUNET_DB_STATUS_HARD_ERROR;
+      return;
+    }
+
+    /* By how much will we increase the refund for this coin? */
+    if (0 >= TALER_amount_cmp (&remaining_refund,
+                               &left))
+    {
+      /* remaining_refund <= left */
+      increment = &remaining_refund;
+    }
+    else
+    {
+      increment = &left;
+    }
+
+    if (GNUNET_SYSERR ==
+        TALER_amount_add (&current_refund,
+                          &current_refund,
+                          increment))
+    {
+      GNUNET_break (0);
+      ctx->qs = GNUNET_DB_STATUS_HARD_ERROR;
+      return;
+    }
+
+    /* actually run the refund */
+    GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
+                "Coin %s deposit amount is %s\n",
+                TALER_B2S (&deposit_coin_pubs[i]),
+                TALER_amount2s (&deposit_amount_with_fee[i]));
+    GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
+                "Coin %s refund will be incremented by %s\n",
+                TALER_B2S (&deposit_coin_pubs[i]),
+                TALER_amount2s (increment));
+    {
+      enum GNUNET_DB_QueryStatus qs;
+
+      if (GNUNET_DB_STATUS_SUCCESS_ONE_RESULT !=
+          (qs = insert_refund (ctx->pg,
+                               ctx->merchant_pub,
+                               ctx->h_contract_terms,
+                               &deposit_coin_pubs[i],
+                               ctx->reason,
+                               increment,
+                               &deposit_refund_fee[i])))
+      {
+        GNUNET_break (GNUNET_DB_STATUS_SOFT_ERROR == qs);
+        ctx->qs = qs;
+        return;
+      }
+    }
+
+    /* stop immediately if we are done */
+    if (0 == TALER_amount_cmp (ctx->refund,
+                               &current_refund))
+      return;
+  }
+
+  /**
+   * We end up here if not all of the refund has been covered.
+   * This should not happen, as the business should never issue a
+   * refund bigger than the contract's actual price, but we cannot
+   * rely on the frontend being correct.
+   */
+  GNUNET_log (GNUNET_ERROR_TYPE_ERROR,
+              "The refund of %s is bigger than the order's value\n",
+              TALER_amount2s (ctx->refund));
+
+  ctx->qs = GNUNET_DB_STATUS_HARD_ERROR;
+}
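+
+
+/*
+ * Worked example (illustrative only): suppose the contract was paid with
+ * two coins of 5 EUR each and 3 EUR were already refunded on the first
+ * coin.  Asking to refund 9 EUR in total makes pass 1 compute
+ * current_refund = 3 EUR; pass 2 then adds 2 EUR on the first coin
+ * (exhausting it) and 4 EUR on the second coin, after which
+ * current_refund equals the requested 9 EUR and the loop stops.
+ */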
+
+
+/**
+ * Function called when some backoffice staff decides to award or
+ * increase the refund on an existing contract.  This function
+ * MUST be called from within a transaction scope set up by the
+ * caller as it executes multiple SQL statements (NT).
+ *
+ * @param cls closure
+ * @param h_contract_terms hash of the contract terms to refund
+ * @param merchant_pub merchant's instance public key
+ * @param refund maximum refund to return to the customer for this contract
+ * @param reason 0-terminated UTF-8 string giving the reason why the customer
+ *               got a refund (free form, business-specific)
+ * @return transaction status
+ *         #GNUNET_DB_STATUS_SUCCESS_ONE_RESULT if the refund is accepted
+ *         #GNUNET_DB_STATUS_SUCCESS_NO_RESULTS if the refund cannot be issued: this can happen for two
+ *               reasons: the issued refund is not greater than the previous refund,
+ *               or the coins don't have enough value left to pay for this refund.
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_increase_refund_for_contract_NT (void *cls,
+                                          const struct GNUNET_HashCode *h_contract_terms,
+                                          const struct TALER_MerchantPublicKeyP *merchant_pub,
+                                          const struct TALER_Amount *refund,
+                                          const char *reason)
+{
+  struct PostgresClosure *pg = cls;
+  struct InsertRefundContext ctx;
+  enum GNUNET_DB_QueryStatus qs;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_auto_from_type (h_contract_terms),
+    GNUNET_PQ_query_param_auto_from_type (merchant_pub),
+    GNUNET_PQ_query_param_end
+  };
+
+  GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
+              "Asked to refund %s on contract %s\n",
+              TALER_amount2s (refund),
+              GNUNET_h2s (h_contract_terms));
+  ctx.pg = pg;
+  ctx.qs = GNUNET_DB_STATUS_SUCCESS_ONE_RESULT;
+  ctx.refund = refund;
+  ctx.reason = reason;
+  ctx.h_contract_terms = h_contract_terms;
+  ctx.merchant_pub = merchant_pub;
+  qs = GNUNET_PQ_eval_prepared_multi_select (pg->conn,
+                                             "find_deposits",
+                                             params,
+                                             &process_deposits_for_refund_cb,
+                                             &ctx);
+  switch (qs)
+  {
+  case GNUNET_DB_STATUS_SUCCESS_NO_RESULTS:
+    GNUNET_log (GNUNET_ERROR_TYPE_ERROR,
+                "Unknown contract: %s (merchant_pub: %s), no refund possible\n",
+                GNUNET_h2s (h_contract_terms),
+                TALER_B2S (merchant_pub));
+    return qs;
+  case GNUNET_DB_STATUS_SOFT_ERROR:
+  case GNUNET_DB_STATUS_HARD_ERROR:
+    return qs;
+  default:
+    /* Got one or more deposits */
+    return ctx.qs;
+  }
+}
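+
+
+/*
+ * Transaction-scope sketch (hypothetical caller): the NT suffix means the
+ * caller provides the transaction, e.g.
+ *
+ *   if (GNUNET_OK == postgres_start (pg, "increase refund"))
+ *   {
+ *     qs = postgres_increase_refund_for_contract_NT (pg,
+ *                                                    &h_contract_terms,
+ *                                                    &merchant_pub,
+ *                                                    &refund,
+ *                                                    "customer complaint");
+ *     if (0 <= qs)
+ *       qs = postgres_commit (pg);
+ *     else
+ *       postgres_rollback (pg);
+ *   }
+ *
+ * A production caller would additionally retry the whole transaction on
+ * #GNUNET_DB_STATUS_SOFT_ERROR.
+ */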
+
+
+/**
+ * Lookup proof information about a wire transfer.
+ *
+ * @param cls closure
+ * @param exchange_url from which exchange are we looking for proof
+ * @param wtid wire transfer identifier for the search
+ * @param cb function to call with proof data
+ * @param cb_cls closure for @a cb
+ * @return transaction status
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_find_proof_by_wtid (void *cls,
+                             const char *exchange_url,
+                             const struct TALER_WireTransferIdentifierRawP *wtid,
+                             TALER_MERCHANTDB_ProofCallback cb,
+                             void *cb_cls)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_auto_from_type (wtid),
+    GNUNET_PQ_query_param_string (exchange_url),
+    GNUNET_PQ_query_param_end
+  };
+  json_t *proof;
+  struct GNUNET_PQ_ResultSpec rs[] = {
+    TALER_PQ_result_spec_json ("proof",
+                               &proof),
+    GNUNET_PQ_result_spec_end
+  };
+  enum GNUNET_DB_QueryStatus qs;
+
+  check_connection (pg);
+  qs = GNUNET_PQ_eval_prepared_singleton_select (pg->conn,
+                                                 "find_proof_by_wtid",
+                                                 params,
+                                                 rs);
+  if (GNUNET_DB_STATUS_SUCCESS_ONE_RESULT == qs)
+  {
+    cb (cb_cls,
+        proof);
+    GNUNET_PQ_cleanup_result (rs);
+  }
+  return qs;
+}
+
+
+/**
+ * Add @a credit to a reserve to be used for tipping.  Note that
+ * this function does not actually perform any wire transfers to
+ * credit the reserve, it merely tells the merchant backend that
+ * a reserve was topped up.  This has to happen before tips can be
+ * authorized.
+ *
+ * @param cls closure, typically a connection to the db
+ * @param reserve_priv which reserve is topped up or created
+ * @param credit_uuid unique identifier for the credit operation
+ * @param credit how much money was added to the reserve
+ * @param expiration when does the reserve expire?
+ * @return transaction status, usually
+ *      #GNUNET_DB_STATUS_SUCCESS_ONE_RESULT for success
+ *      #GNUNET_DB_STATUS_SUCCESS_NO_RESULTS if @a credit_uuid already known
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_enable_tip_reserve_TR (void *cls,
+                                const struct TALER_ReservePrivateKeyP *reserve_priv,
+                                const struct GNUNET_HashCode *credit_uuid,
+                                const struct TALER_Amount *credit,
+                                struct GNUNET_TIME_Absolute expiration)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_TIME_Absolute old_expiration;
+  struct TALER_Amount old_balance;
+  enum GNUNET_DB_QueryStatus qs;
+  struct GNUNET_TIME_Absolute new_expiration;
+  struct TALER_Amount new_balance;
+  unsigned int retries;
+
+  retries = 0;
+  check_connection (pg);
+ RETRY:
+  if (MAX_RETRIES < ++retries)
+    return GNUNET_DB_STATUS_SOFT_ERROR;
+  if (GNUNET_OK !=
+      postgres_start (pg,
+                      "enable tip reserve"))
+  {
+    GNUNET_break (0);
+    return GNUNET_DB_STATUS_HARD_ERROR;
+  }
+
+  /* ensure that credit_uuid is new/unique */
+  {
+    struct GNUNET_PQ_QueryParam params[] = {
+      GNUNET_PQ_query_param_auto_from_type (credit_uuid),
+      GNUNET_PQ_query_param_auto_from_type (reserve_priv),
+      GNUNET_PQ_query_param_end
+    };
+
+    struct GNUNET_PQ_ResultSpec rs[] = {
+      GNUNET_PQ_result_spec_end
+    };
+    qs = GNUNET_PQ_eval_prepared_singleton_select (pg->conn,
+                                                   "lookup_tip_credit_uuid",
+                                                   params,
+                                                   rs);
+    if (0 > qs)
+    {
+      GNUNET_break (GNUNET_DB_STATUS_SOFT_ERROR == qs);
+      postgres_rollback (pg);
+      if (GNUNET_DB_STATUS_SOFT_ERROR == qs)
+        goto RETRY;
+      return qs;
+    }
+    if (GNUNET_DB_STATUS_SUCCESS_NO_RESULTS != qs)
+    {
+      /* UUID already exists, we are done! */
+      postgres_rollback (pg);
+      return GNUNET_DB_STATUS_SUCCESS_NO_RESULTS;
+    }
+  }
+
+  {
+    struct GNUNET_TIME_Absolute now;
+    struct GNUNET_PQ_QueryParam params[] = {
+      GNUNET_PQ_query_param_auto_from_type (reserve_priv),
+      GNUNET_PQ_query_param_auto_from_type (credit_uuid),
+      GNUNET_PQ_query_param_absolute_time (&now),
+      TALER_PQ_query_param_amount (credit),
+      GNUNET_PQ_query_param_end
+    };
+
+    now = GNUNET_TIME_absolute_get ();
+    (void) GNUNET_TIME_round_abs (&now);
+    qs = GNUNET_PQ_eval_prepared_non_select (pg->conn,
+                                             "insert_tip_credit_uuid",
+                                             params);
+    if (0 > qs)
+    {
+      GNUNET_break (GNUNET_DB_STATUS_SOFT_ERROR == qs);
+      postgres_rollback (pg);
+      if (GNUNET_DB_STATUS_SOFT_ERROR == qs)
+        goto RETRY;
+      return qs;
+    }
+  }
+
+  /* Obtain existing reserve balance */
+  {
+    struct GNUNET_PQ_QueryParam params[] = {
+      GNUNET_PQ_query_param_auto_from_type (reserve_priv),
+      GNUNET_PQ_query_param_end
+    };
+    struct GNUNET_PQ_ResultSpec rs[] = {
+      GNUNET_PQ_result_spec_absolute_time ("expiration",
+                                           &old_expiration),
+      TALER_PQ_result_spec_amount ("balance",
+                                   &old_balance),
+      GNUNET_PQ_result_spec_end
+    };
+
+    qs = GNUNET_PQ_eval_prepared_singleton_select (pg->conn,
+                                                   "lookup_tip_reserve_balance",
+                                                   params,
+                                                   rs);
+  }
+  if (0 > qs)
+  {
+    GNUNET_break (GNUNET_DB_STATUS_SOFT_ERROR == qs);
+    postgres_rollback (pg);
+    if (GNUNET_DB_STATUS_SOFT_ERROR == qs)
+      goto RETRY;
+    return qs;
+  }
+  if ( (GNUNET_DB_STATUS_SUCCESS_ONE_RESULT == qs) &&
+       (GNUNET_TIME_absolute_get_remaining (old_expiration).rel_value_us > 0) )
+  {
+    new_expiration = GNUNET_TIME_absolute_max (old_expiration,
+                                               expiration);
+    if (GNUNET_OK !=
+        TALER_amount_add (&new_balance,
+                          credit,
+                          &old_balance))
+    {
+      GNUNET_break (0);
+      postgres_rollback (pg);
+      return GNUNET_DB_STATUS_HARD_ERROR;
+    }
+  }
+  else
+  {
+    if (GNUNET_DB_STATUS_SUCCESS_ONE_RESULT == qs)
+    {
+      GNUNET_log (GNUNET_ERROR_TYPE_INFO,
+                  "Old reserve balance of %s had expired at %s, not carrying it over!\n",
+                  TALER_amount2s (&old_balance),
+                  GNUNET_STRINGS_absolute_time_to_string (old_expiration));
+    }
+    new_expiration = expiration;
+    new_balance = *credit;
+  }
+
+  {
+    struct GNUNET_PQ_QueryParam params[] = {
+      GNUNET_PQ_query_param_auto_from_type (reserve_priv),
+      GNUNET_PQ_query_param_absolute_time (&new_expiration),
+      TALER_PQ_query_param_amount (&new_balance),
+      GNUNET_PQ_query_param_end
+    };
+    const char *stmt;
+
+    stmt = (GNUNET_DB_STATUS_SUCCESS_ONE_RESULT == qs)
+      ? "update_tip_reserve_balance"
+      : "insert_tip_reserve_balance";
+    qs = GNUNET_PQ_eval_prepared_non_select (pg->conn,
+                                             stmt,
+                                             params);
+    if (0 > qs)
+    {
+      GNUNET_break (GNUNET_DB_STATUS_SOFT_ERROR == qs);
+      postgres_rollback (pg);
+      if (GNUNET_DB_STATUS_SOFT_ERROR == qs)
+        goto RETRY;
+      return qs;
+    }
+  }
+  qs = postgres_commit (pg);
+  if (GNUNET_DB_STATUS_SUCCESS_NO_RESULTS == qs)
+    return GNUNET_DB_STATUS_SUCCESS_ONE_RESULT;
+  GNUNET_break (GNUNET_DB_STATUS_SOFT_ERROR == qs);
+  if (GNUNET_DB_STATUS_SOFT_ERROR == qs)
+    goto RETRY;
+  return qs;
+}
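+
+
+/*
+ * Note on the _TR convention as used above: unlike
+ * postgres_increase_refund_for_contract_NT, this function runs its own
+ * transaction and, via the RETRY label, restarts it up to MAX_RETRIES
+ * times when a soft (serialization) error is reported, so it is
+ * presumably meant to be called outside of any caller-side transaction.
+ */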
+
+
+/**
+ * Authorize a tip over @a amount from reserve @a reserve_priv.  Remember
+ * the authorization under @a tip_id for later, together with the
+ * @a justification.
+ *
+ * @param cls closure, typically a connection to the db
+ * @param justification why was the tip approved
+ * @param amount how high is the tip (with fees)
+ * @param reserve_priv which reserve is debited
+ * @param exchange_url which exchange manages the tip
+ * @param[out] expiration set to when the tip expires
+ * @param[out] tip_id set to the unique ID for the tip
+ * @return taler error code
+ *      #TALER_EC_TIP_AUTHORIZE_RESERVE_EXPIRED if the reserve is known but has expired
+ *      #TALER_EC_TIP_AUTHORIZE_RESERVE_UNKNOWN if the reserve is not known
+ *      #TALER_EC_TIP_AUTHORIZE_INSUFFICIENT_FUNDS if the reserve has insufficient funds left
+ *      #TALER_EC_TIP_AUTHORIZE_DB_HARD_ERROR on hard DB errors
+ *      #TALER_EC_TIP_AUTHORIZE_DB_SOFT_ERROR on soft DB errors (client should retry)
+ *      #TALER_EC_NONE upon success
+ */
+static enum TALER_ErrorCode
+postgres_authorize_tip_TR (void *cls,
+                           const char *justification,
+                           const struct TALER_Amount *amount,
+                           const struct TALER_ReservePrivateKeyP *reserve_priv,
+                           const char *exchange_url,
+                           struct GNUNET_TIME_Absolute *expiration,
+                           struct GNUNET_HashCode *tip_id)
+{
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_auto_from_type (reserve_priv),
+    GNUNET_PQ_query_param_end
+  };
+  struct GNUNET_TIME_Absolute old_expiration;
+  struct TALER_Amount old_balance;
+  struct GNUNET_PQ_ResultSpec rs[] = {
+    GNUNET_PQ_result_spec_absolute_time ("expiration",
+                                         &old_expiration),
+    TALER_PQ_result_spec_amount ("balance",
+                                 &old_balance),
+    GNUNET_PQ_result_spec_end
+  };
+  enum GNUNET_DB_QueryStatus qs;
+  struct TALER_Amount new_balance;
+  unsigned int retries;
+
+  retries = 0;
+  check_connection (pg);
+ RETRY:
+  if (MAX_RETRIES < ++retries)
+    return TALER_EC_TIP_AUTHORIZE_DB_SOFT_ERROR;
+  if (GNUNET_OK !=
+      postgres_start (pg,
+                      "authorize tip"))
+  {
+    GNUNET_break (0);
+    return TALER_EC_TIP_AUTHORIZE_DB_HARD_ERROR;
+  }
+  qs = GNUNET_PQ_eval_prepared_singleton_select (pg->conn,
+                                                "lookup_tip_reserve_balance",
+                                                params,
+                                                rs);
+  if (0 >= qs)
+  {
+    /* reserve unknown */
+    postgres_rollback (pg);
+    if (GNUNET_DB_STATUS_SOFT_ERROR == qs)
+      goto RETRY;
+    if (GNUNET_DB_STATUS_SUCCESS_NO_RESULTS == qs)
+      return TALER_EC_TIP_AUTHORIZE_RESERVE_UNKNOWN;
+    return TALER_EC_TIP_AUTHORIZE_DB_HARD_ERROR;
+  }
+  if (0 == GNUNET_TIME_absolute_get_remaining (old_expiration).rel_value_us)
+  {
+    /* reserve expired, can't be used */
+    postgres_rollback (pg);
+    return TALER_EC_TIP_AUTHORIZE_RESERVE_EXPIRED;
+  }
+  if (GNUNET_SYSERR ==
+      TALER_amount_subtract (&new_balance,
+                             &old_balance,
+                             amount))
+  {
+    /* insufficient funds left in reserve */
+    postgres_rollback (pg);
+    return TALER_EC_TIP_AUTHORIZE_INSUFFICIENT_FUNDS;
+  }
+  /* Update reserve balance */
+  {
+    struct GNUNET_PQ_QueryParam params[] = {
+      GNUNET_PQ_query_param_auto_from_type (reserve_priv),
+      GNUNET_PQ_query_param_absolute_time (&old_expiration),
+      TALER_PQ_query_param_amount (&new_balance),
+      GNUNET_PQ_query_param_end
+    };
+
+    qs = GNUNET_PQ_eval_prepared_non_select (pg->conn,
+                                             "update_tip_reserve_balance",
+                                             params);
+    if (0 > qs)
+    {
+      postgres_rollback (pg);
+      if (GNUNET_DB_STATUS_SOFT_ERROR == qs)
+        goto RETRY;
+      return TALER_EC_TIP_AUTHORIZE_DB_HARD_ERROR;
+    }
+  }
+  /* Generate and store tip ID */
+  *expiration = old_expiration;
+  GNUNET_CRYPTO_hash_create_random (GNUNET_CRYPTO_QUALITY_STRONG,
+                                    tip_id);
+  {
+    struct GNUNET_TIME_Absolute now = GNUNET_TIME_absolute_get ();
+    struct GNUNET_PQ_QueryParam params[] = {
+      GNUNET_PQ_query_param_auto_from_type (reserve_priv),
+      GNUNET_PQ_query_param_auto_from_type (tip_id),
+      GNUNET_PQ_query_param_string (exchange_url),
+      GNUNET_PQ_query_param_string (justification),
+      GNUNET_PQ_query_param_absolute_time (&now),
+      TALER_PQ_query_param_amount (amount), /* overall amount */
+      TALER_PQ_query_param_amount (amount), /* amount left */
+      GNUNET_PQ_query_param_end
+    };
+
+    (void) GNUNET_TIME_round_abs (&now);
+    qs = GNUNET_PQ_eval_prepared_non_select (pg->conn,
+                                             "insert_tip_justification",
+                                             params);
+    if (0 > qs)
+    {
+      postgres_rollback (pg);
+      if (GNUNET_DB_STATUS_SOFT_ERROR == qs)
+        goto RETRY;
+      return TALER_EC_TIP_AUTHORIZE_DB_HARD_ERROR;
+    }
+  }
+  qs = postgres_commit (pg);
+  if (0 <= qs)
+    return TALER_EC_NONE; /* success! */
+  if (GNUNET_DB_STATUS_SOFT_ERROR == qs)
+    goto RETRY;
+  return TALER_EC_TIP_AUTHORIZE_DB_HARD_ERROR;
+}
+
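+/*
+ * Illustrative caller sketch (not part of this commit).  It assumes the
+ * plugin exposes the function above through a corresponding struct member
+ * (called "authorize_tip_TR" here for illustration), that "amount" and
+ * "reserve_priv" were obtained elsewhere, and that report_error() is a
+ * hypothetical helper:
+ *
+ *   struct GNUNET_TIME_Absolute expiration;
+ *   struct GNUNET_HashCode tip_id;
+ *   enum TALER_ErrorCode ec;
+ *
+ *   ec = plugin->authorize_tip_TR (plugin->cls,
+ *                                  "bug bounty",
+ *                                  &amount,
+ *                                  &reserve_priv,
+ *                                  "https://exchange.example/",
+ *                                  &expiration,
+ *                                  &tip_id);
+ *   if (TALER_EC_NONE != ec)
+ *     report_error (ec);
+ */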
+
+/**
+ * Find out tip authorization details associated with @a tip_id
+ *
+ * @param cls closure, typically a connection to the db
+ * @param tip_id the unique ID for the tip
+ * @param[out] exchange_url set to the URL of the exchange (unless NULL)
+ * @param[out] amount set to the authorized amount (unless NULL)
+ * @param[out] timestamp set to the timestamp of the tip authorization (unless NULL)
+ * @return transaction status, usually
+ *      #GNUNET_DB_STATUS_SUCCESS_ONE_RESULT for success
+ *      #GNUNET_DB_STATUS_SUCCESS_NO_RESULTS if @a tip_id is unknown
+ */
+static enum GNUNET_DB_QueryStatus
+postgres_lookup_tip_by_id (void *cls,
+                           const struct GNUNET_HashCode *tip_id,
+                           char **exchange_url,
+                           struct TALER_Amount *amount,
+                           struct GNUNET_TIME_Absolute *timestamp)
+{
+  char *res_exchange_url;
+  struct TALER_Amount res_amount;
+  struct GNUNET_TIME_Absolute res_timestamp;
+
+  struct PostgresClosure *pg = cls;
+  struct GNUNET_PQ_QueryParam params[] = {
+    GNUNET_PQ_query_param_auto_from_type (tip_id),
+    GNUNET_PQ_query_param_end
+  };
+  struct GNUNET_PQ_ResultSpec rs[] = {
+    GNUNET_PQ_result_spec_string ("exchange_url",
+                                  &res_exchange_url),
+    GNUNET_PQ_result_spec_absolute_time ("timestamp",
+                                         &res_timestamp),
+    TALER_PQ_result_spec_amount ("amount",
+                                 &res_amount),
+    GNUNET_PQ_result_spec_end
+  };
+  enum GNUNET_DB_QueryStatus qs;
+
+  qs = GNUNET_PQ_eval_prepared_singleton_select (pg->conn,
+                                                 "find_tip_by_id",
+                                                 params,
+                                                 rs);
+  if (0 >= qs)
+  {
+    if (NULL != exchange_url)
+      *exchange_url = NULL;
+    return qs;
+  }
+  if (NULL != exchange_url)
+    *exchange_url = strdup (res_exchange_url);
+  if (NULL != amount)
+    *amount = res_amount;
+  if (NULL != timestamp)
+    *timestamp = res_timestamp;
+  GNUNET_PQ_cleanup_result (rs);
+  return qs;
+}
+
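+/*
+ * Note: on success the string returned via @a exchange_url above is
+ * allocated with strdup() and must be freed by the caller; the temporary
+ * copies held by the result spec are released via GNUNET_PQ_cleanup_result().
+ */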
+
+
+/**
+ * Initialize Postgres database subsystem.
+ *
+ * @param cls a configuration instance
+ * @return NULL on error, otherwise a `struct TALER_MERCHANTDB_Plugin`
+ */
+void *
+libtaler_plugin_merchantdb_postgres_init (void *cls)
+{
+  struct GNUNET_CONFIGURATION_Handle *cfg = cls;
+  struct PostgresClosure *pg;
+  struct TALER_MERCHANTDB_Plugin *plugin;
+  const char *ec;
+
+  pg = GNUNET_new (struct PostgresClosure);
+  ec = getenv ("TALER_MERCHANTDB_POSTGRES_CONFIG");
+  if (NULL != ec)
+  {
+    GNUNET_CONFIGURATION_set_value_string (cfg,
+                                           "merchantdb-postgres",
+                                           "CONFIG",
+                                           ec);
+  }
+  else
+  {
+    if (GNUNET_OK !=
+        GNUNET_CONFIGURATION_have_value (cfg,
+                                         "merchantdb-postgres",
+                                         "CONFIG"))
+    {
+      GNUNET_log_config_missing (GNUNET_ERROR_TYPE_ERROR,
+                                 "merchantdb-postgres",
+                                 "CONFIG");
+      return NULL;
+    }
+  }
+  pg->cfg = cfg;
+  pg->conn = GNUNET_PQ_connect_with_cfg (cfg,
+                                         "merchantdb-postgres");
+  if (NULL == pg->conn)
+  {
+    GNUNET_free (pg);
+    return NULL;
+  }
+  plugin = GNUNET_new (struct TALER_MERCHANTDB_Plugin);
+  plugin->cls = pg;
+  plugin->drop_tables = &postgres_drop_tables;
+  plugin->initialize = &postgres_initialize;
+  plugin->start = &postgres_start;
+  plugin->commit = &postgres_commit;
+  plugin->preflight = &postgres_preflight;
+  plugin->rollback = &postgres_rollback;
+
+  return plugin;
+}
+
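+/*
+ * Illustrative use (not part of this commit): the entry point above is
+ * normally resolved through GNUnet's plugin loader rather than called
+ * directly.  A minimal sketch, assuming the shared object keeps the
+ * "libtaler_plugin_merchantdb_postgres" name implied by the symbol above:
+ *
+ *   struct TALER_MERCHANTDB_Plugin *db;
+ *
+ *   db = GNUNET_PLUGIN_load ("libtaler_plugin_merchantdb_postgres",
+ *                            (void *) cfg);
+ *   if (NULL == db)
+ *     return GNUNET_SYSERR;
+ */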
+
+/**
+ * Shutdown Postgres database subsystem.
+ *
+ * @param cls a `struct TALER_MERCHANTDB_Plugin`
+ * @return NULL (always)
+ */
+void *
+libtaler_plugin_merchantdb_postgres_done (void *cls)
+{
+  struct TALER_MERCHANTDB_Plugin *plugin = cls;
+  struct PostgresClosure *pg = plugin->cls;
+
+  PQfinish (pg->conn);
+  GNUNET_free (pg);
+  GNUNET_free (plugin);
+  return NULL;
+}
+
+/* end of plugin_merchantdb_postgres.c */
diff --git a/src/backup/Makefile.am b/src/backup/Makefile.am
new file mode 100644
index 0000000..77716a4
--- /dev/null
+++ b/src/backup/Makefile.am
@@ -0,0 +1,26 @@
+# This Makefile.am is in the public domain
+AM_CPPFLAGS = -I$(top_srcdir)/src/include -I$(top_srcdir)/src/backend-lib/
+
+pkgcfgdir = $(prefix)/share/taler/config.d/
+
+pkgcfg_DATA = \
+  anastasis.conf
+
+EXTRA_DIST = \
+  $(pkgcfg_DATA)
+
+bin_PROGRAMS = \
+  anastasis-httpd
+
+anastasis_httpd_SOURCES = \
+  anastasis-httpd.c anastasis-httpd.h \
+  anastasis-httpd_parsing.c anastasis-httpd_parsing.h \
+  anastasis-httpd_responses.c anastasis-httpd_responses.h \
+  anastasis-httpd_mhd.c anastasis-httpd_mhd.h
+
+anastasis_httpd_LDADD = \
+  -lmicrohttpd \
+  -ljansson \
+  -lgnunetcurl \
+  -lgnunetjson \
+  -lgnunetutil
diff --git a/src/backup/Makefile.in b/src/backup/Makefile.in
new file mode 100644
index 0000000..f7b710f
--- /dev/null
+++ b/src/backup/Makefile.in
@@ -0,0 +1,869 @@
+# Makefile.in generated by automake 1.16.1 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994-2018 Free Software Foundation, Inc.
+
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+
+
+VPATH = @srcdir@
+am__is_gnu_make = { \
+  if test -z '$(MAKELEVEL)'; then \
+    false; \
+  elif test -n '$(MAKE_HOST)'; then \
+    true; \
+  elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
+    true; \
+  else \
+    false; \
+  fi; \
+}
+am__make_running_with_option = \
+  case $${target_option-} in \
+      ?) ;; \
+      *) echo "am__make_running_with_option: internal error: invalid" \
+              "target option '$${target_option-}' specified" >&2; \
+         exit 1;; \
+  esac; \
+  has_opt=no; \
+  sane_makeflags=$$MAKEFLAGS; \
+  if $(am__is_gnu_make); then \
+    sane_makeflags=$$MFLAGS; \
+  else \
+    case $$MAKEFLAGS in \
+      *\\[\ \  ]*) \
+        bs=\\; \
+        sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
+          | sed "s/$$bs$$bs[$$bs $$bs  ]*//g"`;; \
+    esac; \
+  fi; \
+  skip_next=no; \
+  strip_trailopt () \
+  { \
+    flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
+  }; \
+  for flg in $$sane_makeflags; do \
+    test $$skip_next = yes && { skip_next=no; continue; }; \
+    case $$flg in \
+      *=*|--*) continue;; \
+        -*I) strip_trailopt 'I'; skip_next=yes;; \
+      -*I?*) strip_trailopt 'I';; \
+        -*O) strip_trailopt 'O'; skip_next=yes;; \
+      -*O?*) strip_trailopt 'O';; \
+        -*l) strip_trailopt 'l'; skip_next=yes;; \
+      -*l?*) strip_trailopt 'l';; \
+      -[dEDm]) skip_next=yes;; \
+      -[JT]) skip_next=yes;; \
+    esac; \
+    case $$flg in \
+      *$$target_option*) has_opt=yes; break;; \
+    esac; \
+  done; \
+  test $$has_opt = yes
+am__make_dryrun = (target_option=n; $(am__make_running_with_option))
+am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
+pkgdatadir = $(datadir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkglibexecdir = $(libexecdir)/@PACKAGE@
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+bin_PROGRAMS = taler-merchant-httpd$(EXEEXT)
+subdir = src/backend
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_lib_postgresql.m4 \
+       $(top_srcdir)/m4/ax_prog_doxygen.m4 \
+       $(top_srcdir)/m4/libcurl.m4 $(top_srcdir)/m4/libgnurl.m4 \
+       $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \
+       $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \
+       $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+       $(ACLOCAL_M4)
+DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON)
+mkinstalldirs = $(install_sh) -d
+CONFIG_HEADER = $(top_builddir)/taler_merchant_config.h
+CONFIG_CLEAN_FILES =
+CONFIG_CLEAN_VPATH_FILES =
+am__installdirs = "$(DESTDIR)$(bindir)" "$(DESTDIR)$(pkgcfgdir)"
+PROGRAMS = $(bin_PROGRAMS)
+am_taler_merchant_httpd_OBJECTS = taler-merchant-httpd.$(OBJEXT) \
+       taler-merchant-httpd_parsing.$(OBJEXT) \
+       taler-merchant-httpd_responses.$(OBJEXT) \
+       taler-merchant-httpd_mhd.$(OBJEXT) \
+       taler-merchant-httpd_auditors.$(OBJEXT) \
+       taler-merchant-httpd_exchanges.$(OBJEXT) \
+       taler-merchant-httpd_proposal.$(OBJEXT) \
+       taler-merchant-httpd_pay.$(OBJEXT) \
+       taler-merchant-httpd_history.$(OBJEXT) \
+       taler-merchant-httpd_tip-authorize.$(OBJEXT) \
+       taler-merchant-httpd_tip-pickup.$(OBJEXT) \
+       taler-merchant-httpd_tip-query.$(OBJEXT) \
+       taler-merchant-httpd_tip-reserve-helper.$(OBJEXT) \
+       taler-merchant-httpd_track-transaction.$(OBJEXT) \
+       taler-merchant-httpd_track-transfer.$(OBJEXT) \
+       taler-merchant-httpd_refund.$(OBJEXT) \
+       taler-merchant-httpd_check-payment.$(OBJEXT) \
+       taler-merchant-httpd_trigger-pay.$(OBJEXT)
+taler_merchant_httpd_OBJECTS = $(am_taler_merchant_httpd_OBJECTS)
+taler_merchant_httpd_DEPENDENCIES =  \
+       $(top_builddir)/src/backenddb/libtalermerchantdb.la
+AM_V_lt = $(am__v_lt_@AM_V@)
+am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@)
+am__v_lt_0 = --silent
+am__v_lt_1 = 
+AM_V_P = $(am__v_P_@AM_V@)
+am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
+am__v_P_0 = false
+am__v_P_1 = :
+AM_V_GEN = $(am__v_GEN_@AM_V@)
+am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
+am__v_GEN_0 = @echo "  GEN     " $@;
+am__v_GEN_1 = 
+AM_V_at = $(am__v_at_@AM_V@)
+am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
+am__v_at_0 = @
+am__v_at_1 = 
+DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir)
+depcomp = $(SHELL) $(top_srcdir)/depcomp
+am__maybe_remake_depfiles = depfiles
+am__depfiles_remade = ./$(DEPDIR)/taler-merchant-httpd.Po \
+       ./$(DEPDIR)/taler-merchant-httpd_auditors.Po \
+       ./$(DEPDIR)/taler-merchant-httpd_check-payment.Po \
+       ./$(DEPDIR)/taler-merchant-httpd_exchanges.Po \
+       ./$(DEPDIR)/taler-merchant-httpd_history.Po \
+       ./$(DEPDIR)/taler-merchant-httpd_mhd.Po \
+       ./$(DEPDIR)/taler-merchant-httpd_parsing.Po \
+       ./$(DEPDIR)/taler-merchant-httpd_pay.Po \
+       ./$(DEPDIR)/taler-merchant-httpd_proposal.Po \
+       ./$(DEPDIR)/taler-merchant-httpd_refund.Po \
+       ./$(DEPDIR)/taler-merchant-httpd_responses.Po \
+       ./$(DEPDIR)/taler-merchant-httpd_tip-authorize.Po \
+       ./$(DEPDIR)/taler-merchant-httpd_tip-pickup.Po \
+       ./$(DEPDIR)/taler-merchant-httpd_tip-query.Po \
+       ./$(DEPDIR)/taler-merchant-httpd_tip-reserve-helper.Po \
+       ./$(DEPDIR)/taler-merchant-httpd_track-transaction.Po \
+       ./$(DEPDIR)/taler-merchant-httpd_track-transfer.Po \
+       ./$(DEPDIR)/taler-merchant-httpd_trigger-pay.Po
+am__mv = mv -f
+COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
+       $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
+       $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \
+       $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
+       $(AM_CFLAGS) $(CFLAGS)
+AM_V_CC = $(am__v_CC_@AM_V@)
+am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@)
+am__v_CC_0 = @echo "  CC      " $@;
+am__v_CC_1 = 
+CCLD = $(CC)
+LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
+       $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
+       $(AM_LDFLAGS) $(LDFLAGS) -o $@
+AM_V_CCLD = $(am__v_CCLD_@AM_V@)
+am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@)
+am__v_CCLD_0 = @echo "  CCLD    " $@;
+am__v_CCLD_1 = 
+SOURCES = $(taler_merchant_httpd_SOURCES)
+DIST_SOURCES = $(taler_merchant_httpd_SOURCES)
+am__can_run_installinfo = \
+  case $$AM_UPDATE_INFO_DIR in \
+    n|no|NO) false;; \
+    *) (install-info --version) >/dev/null 2>&1;; \
+  esac
+am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
+am__vpath_adj = case $$p in \
+    $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
+    *) f=$$p;; \
+  esac;
+am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
+am__install_max = 40
+am__nobase_strip_setup = \
+  srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
+am__nobase_strip = \
+  for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
+am__nobase_list = $(am__nobase_strip_setup); \
+  for p in $$list; do echo "$$p $$p"; done | \
+  sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
+  $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
+    if (++n[$$2] == $(am__install_max)) \
+      { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
+    END { for (dir in files) print dir, files[dir] }'
+am__base_list = \
+  sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
+  sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
+am__uninstall_files_from_dir = { \
+  test -z "$$files" \
+    || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
+    || { echo " ( cd '$$dir' && rm -f" $$files ")"; \
+         $(am__cd) "$$dir" && rm -f $$files; }; \
+  }
+DATA = $(pkgcfg_DATA)
+am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
+# Read a list of newline-separated strings from the standard input,
+# and print each of them once, without duplicates.  Input order is
+# *not* preserved.
+am__uniquify_input = $(AWK) '\
+  BEGIN { nonempty = 0; } \
+  { items[$$0] = 1; nonempty = 1; } \
+  END { if (nonempty) { for (i in items) print i; }; } \
+'
+# Make sure the list of sources is unique.  This is necessary because,
+# e.g., the same source file might be shared among _SOURCES variables
+# for different programs/libraries.
+am__define_uniq_tagged_files = \
+  list='$(am__tagged_files)'; \
+  unique=`for i in $$list; do \
+    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+  done | $(am__uniquify_input)`
+ETAGS = etags
+CTAGS = ctags
+am__DIST_COMMON = $(srcdir)/Makefile.in $(top_srcdir)/depcomp
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+ACLOCAL = @ACLOCAL@
+AMTAR = @AMTAR@
+AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@
+AR = @AR@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+DLLTOOL = @DLLTOOL@
+DOXYGEN_PAPER_SIZE = @DOXYGEN_PAPER_SIZE@
+DSYMUTIL = @DSYMUTIL@
+DUMPBIN = @DUMPBIN@
+DX_CONFIG = @DX_CONFIG@
+DX_CONFIG2 = @DX_CONFIG2@
+DX_CONFIG3 = @DX_CONFIG3@
+DX_CONFIG4 = @DX_CONFIG4@
+DX_DOCDIR = @DX_DOCDIR@
+DX_DOCDIR2 = @DX_DOCDIR2@
+DX_DOCDIR3 = @DX_DOCDIR3@
+DX_DOCDIR4 = @DX_DOCDIR4@
+DX_DOT = @DX_DOT@
+DX_DOXYGEN = @DX_DOXYGEN@
+DX_DVIPS = @DX_DVIPS@
+DX_EGREP = @DX_EGREP@
+DX_ENV = @DX_ENV@
+DX_FLAG_chi = @DX_FLAG_chi@
+DX_FLAG_chm = @DX_FLAG_chm@
+DX_FLAG_doc = @DX_FLAG_doc@
+DX_FLAG_dot = @DX_FLAG_dot@
+DX_FLAG_html = @DX_FLAG_html@
+DX_FLAG_man = @DX_FLAG_man@
+DX_FLAG_pdf = @DX_FLAG_pdf@
+DX_FLAG_ps = @DX_FLAG_ps@
+DX_FLAG_rtf = @DX_FLAG_rtf@
+DX_FLAG_xml = @DX_FLAG_xml@
+DX_HHC = @DX_HHC@
+DX_LATEX = @DX_LATEX@
+DX_MAKEINDEX = @DX_MAKEINDEX@
+DX_PDFLATEX = @DX_PDFLATEX@
+DX_PERL = @DX_PERL@
+DX_PROJECT = @DX_PROJECT@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+FGREP = @FGREP@
+GREP = @GREP@
+INSTALL = @INSTALL@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JANSSON_CFLAGS = @JANSSON_CFLAGS@
+JANSSON_LIBS = @JANSSON_LIBS@
+LD = @LD@
+LDFLAGS = @LDFLAGS@
+LIBCURL = @LIBCURL@
+LIBCURL_CPPFLAGS = @LIBCURL_CPPFLAGS@
+LIBGCRYPT_CFLAGS = @LIBGCRYPT_CFLAGS@
+LIBGCRYPT_CONFIG = @LIBGCRYPT_CONFIG@
+LIBGCRYPT_LIBS = @LIBGCRYPT_LIBS@
+LIBGNURL = @LIBGNURL@
+LIBGNURL_CPPFLAGS = @LIBGNURL_CPPFLAGS@
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LIPO = @LIPO@
+LN_S = @LN_S@
+LTLIBOBJS = @LTLIBOBJS@
+LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@
+MAKEINFO = @MAKEINFO@
+MANIFEST_TOOL = @MANIFEST_TOOL@
+MKDIR_P = @MKDIR_P@
+NM = @NM@
+NMEDIT = @NMEDIT@
+OBJDUMP = @OBJDUMP@
+OBJEXT = @OBJEXT@
+OTOOL = @OTOOL@
+OTOOL64 = @OTOOL64@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_URL = @PACKAGE_URL@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+PG_CONFIG = @PG_CONFIG@
+PKG_CONFIG = @PKG_CONFIG@
+PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@
+PKG_CONFIG_PATH = @PKG_CONFIG_PATH@
+POSTGRESQL_CPPFLAGS = @POSTGRESQL_CPPFLAGS@
+POSTGRESQL_LDFLAGS = @POSTGRESQL_LDFLAGS@
+POSTGRESQL_VERSION = @POSTGRESQL_VERSION@
+RANLIB = @RANLIB@
+SED = @SED@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+STRIP = @STRIP@
+VERSION = @VERSION@
+_libcurl_config = @_libcurl_config@
+_libgnurl_config = @_libgnurl_config@
+abs_builddir = @abs_builddir@
+abs_srcdir = @abs_srcdir@
+abs_top_builddir = @abs_top_builddir@
+abs_top_srcdir = @abs_top_srcdir@
+ac_ct_AR = @ac_ct_AR@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+builddir = @builddir@
+datadir = @datadir@
+datarootdir = @datarootdir@
+docdir = @docdir@
+dvidir = @dvidir@
+exec_prefix = @exec_prefix@
+gitcommand = @gitcommand@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+htmldir = @htmldir@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localedir = @localedir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+oldincludedir = @oldincludedir@
+pdfdir = @pdfdir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+psdir = @psdir@
+runstatedir = @runstatedir@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+srcdir = @srcdir@
+sysconfdir = @sysconfdir@
+target_alias = @target_alias@
+top_build_prefix = @top_build_prefix@
+top_builddir = @top_builddir@
+top_srcdir = @top_srcdir@
+tsc = @tsc@
+
+# This Makefile.am is in the public domain
+AM_CPPFLAGS = -I$(top_srcdir)/src/include -I$(top_srcdir)/src/backend-lib/
+pkgcfgdir = $(prefix)/share/taler/config.d/
+pkgcfg_DATA = \
+  merchant.conf
+
+EXTRA_DIST = \
+  $(pkgcfg_DATA)
+
+taler_merchant_httpd_SOURCES = \
+  taler-merchant-httpd.c taler-merchant-httpd.h \
+  taler-merchant-httpd_parsing.c taler-merchant-httpd_parsing.h \
+  taler-merchant-httpd_responses.c taler-merchant-httpd_responses.h \
+  taler-merchant-httpd_mhd.c taler-merchant-httpd_mhd.h \
+  taler-merchant-httpd_auditors.c taler-merchant-httpd_auditors.h \
+  taler-merchant-httpd_exchanges.c taler-merchant-httpd_exchanges.h \
+  taler-merchant-httpd_proposal.c taler-merchant-httpd_proposal.h \
+  taler-merchant-httpd_pay.c taler-merchant-httpd_pay.h \
+  taler-merchant-httpd_history.c taler-merchant-httpd_history.h \
+  taler-merchant-httpd_tip-authorize.c taler-merchant-httpd_tip-authorize.h \
+  taler-merchant-httpd_tip-pickup.c taler-merchant-httpd_tip-pickup.h \
+  taler-merchant-httpd_tip-query.c taler-merchant-httpd_tip-query.h \
+  taler-merchant-httpd_tip-reserve-helper.c taler-merchant-httpd_tip-reserve-helper.h \
+  taler-merchant-httpd_track-transaction.c taler-merchant-httpd_track-transaction.h \
+  taler-merchant-httpd_track-transfer.c taler-merchant-httpd_track-transfer.h \
+  taler-merchant-httpd_refund.c taler-merchant-httpd_refund.h \
+  taler-merchant-httpd_check-payment.c taler-merchant-httpd_check-payment.h \
+  taler-merchant-httpd_trigger-pay.c taler-merchant-httpd_trigger-pay.h
+
+taler_merchant_httpd_LDADD = \
+  $(top_builddir)/src/backenddb/libtalermerchantdb.la \
+  -ltalerexchange \
+  -ltalerwire \
+  -ltalerjson \
+  -ltalerutil \
+  -ltalerpq \
+  -lmicrohttpd \
+  -ljansson \
+  -lgnunetcurl \
+  -lgnunetjson \
+  -lgnunetutil
+
+all: all-am
+
+.SUFFIXES:
+.SUFFIXES: .c .lo .o .obj
+$(srcdir)/Makefile.in:  $(srcdir)/Makefile.am  $(am__configure_deps)
+       @for dep in $?; do \
+         case '$(am__configure_deps)' in \
+           *$$dep*) \
+             ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
+               && { if test -f $@; then exit 0; else break; fi; }; \
+             exit 1;; \
+         esac; \
+       done; \
+       echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu src/backend/Makefile'; \
+       $(am__cd) $(top_srcdir) && \
+         $(AUTOMAKE) --gnu src/backend/Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+       @case '$?' in \
+         *config.status*) \
+           cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
+         *) \
+           echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles)'; \
+           cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles);; \
+       esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+       cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+$(top_srcdir)/configure:  $(am__configure_deps)
+       cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(ACLOCAL_M4):  $(am__aclocal_m4_deps)
+       cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(am__aclocal_m4_deps):
+install-binPROGRAMS: $(bin_PROGRAMS)
+       @$(NORMAL_INSTALL)
+       @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \
+       if test -n "$$list"; then \
+         echo " $(MKDIR_P) '$(DESTDIR)$(bindir)'"; \
+         $(MKDIR_P) "$(DESTDIR)$(bindir)" || exit 1; \
+       fi; \
+       for p in $$list; do echo "$$p $$p"; done | \
+       sed 's/$(EXEEXT)$$//' | \
+       while read p p1; do if test -f $$p \
+        || test -f $$p1 \
+         ; then echo "$$p"; echo "$$p"; else :; fi; \
+       done | \
+       sed -e 'p;s,.*/,,;n;h' \
+           -e 's|.*|.|' \
+           -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \
+       sed 'N;N;N;s,\n, ,g' | \
+       $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \
+         { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \
+           if ($$2 == $$4) files[d] = files[d] " " $$1; \
+           else { print "f", $$3 "/" $$4, $$1; } } \
+         END { for (d in files) print "f", d, files[d] }' | \
+       while read type dir files; do \
+           if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \
+           test -z "$$files" || { \
+           echo " $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) 
$(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files 
'$(DESTDIR)$(bindir)$$dir'"; \
+           $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) 
$(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files 
"$(DESTDIR)$(bindir)$$dir" || exit $$?; \
+           } \
+       ; done
+
+uninstall-binPROGRAMS:
+       @$(NORMAL_UNINSTALL)
+       @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \
+       files=`for p in $$list; do echo "$$p"; done | \
+         sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \
+             -e 's/$$/$(EXEEXT)/' \
+       `; \
+       test -n "$$list" || exit 0; \
+       echo " ( cd '$(DESTDIR)$(bindir)' && rm -f" $$files ")"; \
+       cd "$(DESTDIR)$(bindir)" && rm -f $$files
+
+clean-binPROGRAMS:
+       @list='$(bin_PROGRAMS)'; test -n "$$list" || exit 0; \
+       echo " rm -f" $$list; \
+       rm -f $$list || exit $$?; \
+       test -n "$(EXEEXT)" || exit 0; \
+       list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
+       echo " rm -f" $$list; \
+       rm -f $$list
+
+taler-merchant-httpd$(EXEEXT): $(taler_merchant_httpd_OBJECTS) $(taler_merchant_httpd_DEPENDENCIES) $(EXTRA_taler_merchant_httpd_DEPENDENCIES)
+       @rm -f taler-merchant-httpd$(EXEEXT)
+       $(AM_V_CCLD)$(LINK) $(taler_merchant_httpd_OBJECTS) $(taler_merchant_httpd_LDADD) $(LIBS)
+
+mostlyclean-compile:
+       -rm -f *.$(OBJEXT)
+
+distclean-compile:
+       -rm -f *.tab.c
+
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/taler-merchant-httpd.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/taler-merchant-httpd_auditors.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/taler-merchant-httpd_check-payment.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/taler-merchant-httpd_exchanges.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/taler-merchant-httpd_history.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/taler-merchant-httpd_mhd.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/taler-merchant-httpd_parsing.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/taler-merchant-httpd_pay.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/taler-merchant-httpd_proposal.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/taler-merchant-httpd_refund.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/taler-merchant-httpd_responses.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/taler-merchant-httpd_tip-authorize.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/taler-merchant-httpd_tip-pickup.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/taler-merchant-httpd_tip-query.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/taler-merchant-httpd_tip-reserve-helper.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/taler-merchant-httpd_track-transaction.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/taler-merchant-httpd_track-transfer.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/taler-merchant-httpd_trigger-pay.Po@am__quote@ # am--include-marker
+
+$(am__depfiles_remade):
+       @$(MKDIR_P) $(@D)
+       @echo '# dummy' >$@-t && $(am__mv) $@-t $@
+
+am--depfiles: $(am__depfiles_remade)
+
+.c.o:
+@am__fastdepCC_TRUE@   $(AM_V_CC)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.o$$||'`;\
+@am__fastdepCC_TRUE@   $(COMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ $< &&\
+@am__fastdepCC_TRUE@   $(am__mv) $$depbase.Tpo $$depbase.Po
+@AMDEP_TRUE@@am__fastdepCC_FALSE@      $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@      DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@  $(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ $<
+
+.c.obj:
+@am__fastdepCC_TRUE@   $(AM_V_CC)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.obj$$||'`;\
+@am__fastdepCC_TRUE@   $(COMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ `$(CYGPATH_W) '$<'` &&\
+@am__fastdepCC_TRUE@   $(am__mv) $$depbase.Tpo $$depbase.Po
+@AMDEP_TRUE@@am__fastdepCC_FALSE@      $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@      DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@  $(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
+
+.c.lo:
+@am__fastdepCC_TRUE@   $(AM_V_CC)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.lo$$||'`;\
+@am__fastdepCC_TRUE@   $(LTCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ $< &&\
+@am__fastdepCC_TRUE@   $(am__mv) $$depbase.Tpo $$depbase.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@      $(AM_V_CC)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@      DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@  $(AM_V_CC@am__nodep@)$(LTCOMPILE) -c -o $@ $<
+
+mostlyclean-libtool:
+       -rm -f *.lo
+
+clean-libtool:
+       -rm -rf .libs _libs
+install-pkgcfgDATA: $(pkgcfg_DATA)
+       @$(NORMAL_INSTALL)
+       @list='$(pkgcfg_DATA)'; test -n "$(pkgcfgdir)" || list=; \
+       if test -n "$$list"; then \
+         echo " $(MKDIR_P) '$(DESTDIR)$(pkgcfgdir)'"; \
+         $(MKDIR_P) "$(DESTDIR)$(pkgcfgdir)" || exit 1; \
+       fi; \
+       for p in $$list; do \
+         if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+         echo "$$d$$p"; \
+       done | $(am__base_list) | \
+       while read files; do \
+         echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pkgcfgdir)'"; \
+         $(INSTALL_DATA) $$files "$(DESTDIR)$(pkgcfgdir)" || exit $$?; \
+       done
+
+uninstall-pkgcfgDATA:
+       @$(NORMAL_UNINSTALL)
+       @list='$(pkgcfg_DATA)'; test -n "$(pkgcfgdir)" || list=; \
+       files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
+       dir='$(DESTDIR)$(pkgcfgdir)'; $(am__uninstall_files_from_dir)
+
+ID: $(am__tagged_files)
+       $(am__define_uniq_tagged_files); mkid -fID $$unique
+tags: tags-am
+TAGS: tags
+
+tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
+       set x; \
+       here=`pwd`; \
+       $(am__define_uniq_tagged_files); \
+       shift; \
+       if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
+         test -n "$$unique" || unique=$$empty_fix; \
+         if test $$# -gt 0; then \
+           $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+             "$$@" $$unique; \
+         else \
+           $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+             $$unique; \
+         fi; \
+       fi
+ctags: ctags-am
+
+CTAGS: ctags
+ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
+       $(am__define_uniq_tagged_files); \
+       test -z "$(CTAGS_ARGS)$$unique" \
+         || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
+            $$unique
+
+GTAGS:
+       here=`$(am__cd) $(top_builddir) && pwd` \
+         && $(am__cd) $(top_srcdir) \
+         && gtags -i $(GTAGS_ARGS) "$$here"
+cscopelist: cscopelist-am
+
+cscopelist-am: $(am__tagged_files)
+       list='$(am__tagged_files)'; \
+       case "$(srcdir)" in \
+         [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
+         *) sdir=$(subdir)/$(srcdir) ;; \
+       esac; \
+       for i in $$list; do \
+         if test -f "$$i"; then \
+           echo "$(subdir)/$$i"; \
+         else \
+           echo "$$sdir/$$i"; \
+         fi; \
+       done >> $(top_builddir)/cscope.files
+
+distclean-tags:
+       -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
+
+distdir: $(BUILT_SOURCES)
+       $(MAKE) $(AM_MAKEFLAGS) distdir-am
+
+distdir-am: $(DISTFILES)
+       @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+       topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+       list='$(DISTFILES)'; \
+         dist_files=`for file in $$list; do echo $$file; done | \
+         sed -e "s|^$$srcdirstrip/||;t" \
+             -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
+       case $$dist_files in \
+         */*) $(MKDIR_P) `echo "$$dist_files" | \
+                          sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
+                          sort -u` ;; \
+       esac; \
+       for file in $$dist_files; do \
+         if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+         if test -d $$d/$$file; then \
+           dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
+           if test -d "$(distdir)/$$file"; then \
+             find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx 
{} \;; \
+           fi; \
+           if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+             cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
+             find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx 
{} \;; \
+           fi; \
+           cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
+         else \
+           test -f "$(distdir)/$$file" \
+           || cp -p $$d/$$file "$(distdir)/$$file" \
+           || exit 1; \
+         fi; \
+       done
+check-am: all-am
+check: check-am
+all-am: Makefile $(PROGRAMS) $(DATA)
+installdirs:
+       for dir in "$(DESTDIR)$(bindir)" "$(DESTDIR)$(pkgcfgdir)"; do \
+         test -z "$$dir" || $(MKDIR_P) "$$dir"; \
+       done
+install: install-am
+install-exec: install-exec-am
+install-data: install-data-am
+uninstall: uninstall-am
+
+install-am: all-am
+       @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-am
+install-strip:
+       if test -z '$(STRIP)'; then \
+         $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+           install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+             install; \
+       else \
+         $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+           install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+           "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
+       fi
+mostlyclean-generic:
+
+clean-generic:
+
+distclean-generic:
+       -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+       -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
+
+maintainer-clean-generic:
+       @echo "This command is intended for maintainers to use"
+       @echo "it deletes files that may require special tools to rebuild."
+clean: clean-am
+
+clean-am: clean-binPROGRAMS clean-generic clean-libtool mostlyclean-am
+
+distclean: distclean-am
+               -rm -f ./$(DEPDIR)/taler-merchant-httpd.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_auditors.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_check-payment.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_exchanges.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_history.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_mhd.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_parsing.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_pay.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_proposal.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_refund.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_responses.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_tip-authorize.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_tip-pickup.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_tip-query.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_tip-reserve-helper.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_track-transaction.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_track-transfer.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_trigger-pay.Po
+       -rm -f Makefile
+distclean-am: clean-am distclean-compile distclean-generic \
+       distclean-tags
+
+dvi: dvi-am
+
+dvi-am:
+
+html: html-am
+
+html-am:
+
+info: info-am
+
+info-am:
+
+install-data-am: install-pkgcfgDATA
+
+install-dvi: install-dvi-am
+
+install-dvi-am:
+
+install-exec-am: install-binPROGRAMS
+
+install-html: install-html-am
+
+install-html-am:
+
+install-info: install-info-am
+
+install-info-am:
+
+install-man:
+
+install-pdf: install-pdf-am
+
+install-pdf-am:
+
+install-ps: install-ps-am
+
+install-ps-am:
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-am
+               -rm -f ./$(DEPDIR)/taler-merchant-httpd.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_auditors.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_check-payment.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_exchanges.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_history.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_mhd.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_parsing.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_pay.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_proposal.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_refund.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_responses.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_tip-authorize.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_tip-pickup.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_tip-query.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_tip-reserve-helper.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_track-transaction.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_track-transfer.Po
+       -rm -f ./$(DEPDIR)/taler-merchant-httpd_trigger-pay.Po
+       -rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-am
+
+mostlyclean-am: mostlyclean-compile mostlyclean-generic \
+       mostlyclean-libtool
+
+pdf: pdf-am
+
+pdf-am:
+
+ps: ps-am
+
+ps-am:
+
+uninstall-am: uninstall-binPROGRAMS uninstall-pkgcfgDATA
+
+.MAKE: install-am install-strip
+
+.PHONY: CTAGS GTAGS TAGS all all-am am--depfiles check check-am clean \
+       clean-binPROGRAMS clean-generic clean-libtool cscopelist-am \
+       ctags ctags-am distclean distclean-compile distclean-generic \
+       distclean-libtool distclean-tags distdir dvi dvi-am html \
+       html-am info info-am install install-am install-binPROGRAMS \
+       install-data install-data-am install-dvi install-dvi-am \
+       install-exec install-exec-am install-html install-html-am \
+       install-info install-info-am install-man install-pdf \
+       install-pdf-am install-pkgcfgDATA install-ps install-ps-am \
+       install-strip installcheck installcheck-am installdirs \
+       maintainer-clean maintainer-clean-generic mostlyclean \
+       mostlyclean-compile mostlyclean-generic mostlyclean-libtool \
+       pdf pdf-am ps ps-am tags tags-am uninstall uninstall-am \
+       uninstall-binPROGRAMS uninstall-pkgcfgDATA
+
+.PRECIOUS: Makefile
+
+
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/src/backup/anastasis-httpd.c b/src/backup/anastasis-httpd.c
new file mode 100644
index 0000000..0f9d0f7
--- /dev/null
+++ b/src/backup/anastasis-httpd.c
@@ -0,0 +1,705 @@
+/*
+  This file is part of TALER
+  (C) 2019 Taler Systems SA
+
+  TALER is free software; you can redistribute it and/or modify it under the
+  terms of the GNU Affero General Public License as published by the Free Software
+  Foundation; either version 3, or (at your option) any later version.
+
+  TALER is distributed in the hope that it will be useful, but WITHOUT ANY
+  WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+  A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License along with
+  TALER; see the file COPYING.  If not, see <http://www.gnu.org/licenses/>
+*/
+/**
+ * @file backup/anastasis-httpd.c
+ * @brief HTTP serving layer intended to provide basic backup operations
+ * @author Christian Grothoff
+ */
+#include "platform.h"
+#include <microhttpd.h>
+#include <gnunet/gnunet_util_lib.h>
+#include "taler-merchant-httpd_responses.h"
+#include "taler-merchant-httpd.h"
+#include "taler-merchant-httpd_parsing.h"
+#include "taler-merchant-httpd_mhd.h"
+
+/**
+ * Backlog for listen operation on unix-domain sockets.
+ */
+#define UNIX_BACKLOG 500
+
+/**
+ * The port we are running on
+ */
+static long long unsigned port;
+
+/**
+ * Should a "Connection: close" header be added to each HTTP response?
+ */
+int TMH_merchant_connection_close;
+
+/**
+ * Task running the HTTP server.
+ */
+static struct GNUNET_SCHEDULER_Task *mhd_task;
+
+/**
+ * Global return code
+ */
+static int result;
+
+/**
+ * The MHD Daemon
+ */
+static struct MHD_Daemon *mhd;
+
+/**
+ * Path for the unix domain-socket
+ * to run the daemon on.
+ */
+static char *serve_unixpath;
+
+/**
+ * File mode for unix-domain socket.
+ */
+static mode_t unixpath_mode;
+
+
+/**
+ * Return GNUNET_YES if given a valid correlation ID and
+ * GNUNET_NO otherwise.
+ *
+ * @returns GNUNET_YES iff given a valid correlation ID
+ */
+static int
+is_valid_correlation_id (const char *correlation_id)
+{
+  if (strlen (correlation_id) >= 64)
+    return GNUNET_NO;
+  for (int i = 0; i < strlen (correlation_id); i++)
+    if (!(isalnum (correlation_id[i]) || correlation_id[i] == '-'))
+      return GNUNET_NO;
+  return GNUNET_YES;
+}
+
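+/*
+ * Per the check above, "abc-123" would be accepted as a correlation ID,
+ * while anything containing whitespace, other punctuation, or 64 or more
+ * characters is rejected.
+ */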
+
+/**
+ * A client has requested the given url using the given method
+ * (#MHD_HTTP_METHOD_GET, #MHD_HTTP_METHOD_PUT,
+ * #MHD_HTTP_METHOD_DELETE, #MHD_HTTP_METHOD_POST, etc).  The callback
+ * must call MHD callbacks to provide content to give back to the
+ * client and return an HTTP status code (i.e. #MHD_HTTP_OK,
+ * #MHD_HTTP_NOT_FOUND, etc.).
+ *
+ * @param cls argument given together with the function
+ *        pointer when the handler was registered with MHD
+ * @param url the requested url
+ * @param method the HTTP method used (#MHD_HTTP_METHOD_GET,
+ *        #MHD_HTTP_METHOD_PUT, etc.)
+ * @param version the HTTP version string (i.e.
+ *        #MHD_HTTP_VERSION_1_1)
+ * @param upload_data the data being uploaded (excluding HEADERS,
+ *        for a POST that fits into memory and that is encoded
+ *        with a supported encoding, the POST data will NOT be
+ *        given in upload_data and is instead available as
+ *        part of #MHD_get_connection_values; very large POST
+ *        data *will* be made available incrementally in
+ *        @a upload_data)
+ * @param upload_data_size set initially to the size of the
+ *        @a upload_data provided; the method must update this
+ *        value to the number of bytes NOT processed;
+ * @param con_cls pointer that the callback can set to some
+ *        address and that will be preserved by MHD for future
+ *        calls for this request; since the access handler may
+ *        be called many times (i.e., for a PUT/POST operation
+ *        with plenty of upload data) this allows the application
+ *        to easily associate some request-specific state.
+ *        If necessary, this state can be cleaned up in the
+ *        global #MHD_RequestCompletedCallback (which
+ *        can be set with the #MHD_OPTION_NOTIFY_COMPLETED).
+ *        Initially, `*con_cls` will be NULL.
+ * @return #MHD_YES if the connection was handled successfully,
+ *         #MHD_NO if the socket must be closed due to a serious
+ *         error while handling the request
+ */
+static int
+url_handler (void *cls,
+             struct MHD_Connection *connection,
+             const char *url,
+             const char *method,
+             const char *version,
+             const char *upload_data,
+             size_t *upload_data_size,
+             void **con_cls)
+{
+  static struct TMH_RequestHandler handlers[] =
+    {
+      /* Landing page, tell humans to go away. */
+      { "/", MHD_HTTP_METHOD_GET, "text/plain",
+        "Hello, I'm Anastasis. This HTTP server is not for humans.\n", 0,
+        &TMH_MHD_handler_static_response, MHD_HTTP_OK },
+      { "/agpl", MHD_HTTP_METHOD_GET, "text/plain",
+        NULL, 0,
+        &TMH_MHD_handler_agpl_redirect, MHD_HTTP_FOUND },
+      {NULL, NULL, NULL, NULL, 0, 0 }
+    };
+  static struct TMH_RequestHandler h404 =
+    {
+      "", NULL, "text/html",
+      "<html><title>404: not found</title></html>", 0,
+      &TMH_MHD_handler_static_response, MHD_HTTP_NOT_FOUND
+    };
+
+  struct TM_HandlerContext *hc;
+  struct GNUNET_AsyncScopeId aid;
+  const char *correlation_id = NULL;
+
+  hc = *con_cls;
+
+  if (NULL == hc)
+  {
+    GNUNET_async_scope_fresh (&aid);
+    /* We only read the correlation ID on the first callback for every client */
+    correlation_id = MHD_lookup_connection_value (connection,
+                                                  MHD_HEADER_KIND,
+                                                  "Anastasis-Correlation-Id");
+    if ((NULL != correlation_id) &&
+        (GNUNET_YES != is_valid_correlation_id (correlation_id)))
+    {
+        GNUNET_log (GNUNET_ERROR_TYPE_WARNING, "illegal incoming correlation ID\n");
+        correlation_id = NULL;
+    }
+  }
+  else
+  {
+    aid = hc->async_scope_id;
+  }
+
+  GNUNET_SCHEDULER_begin_async_scope (&aid);
+
+  if (NULL != correlation_id)
+    GNUNET_log (GNUNET_ERROR_TYPE_INFO,
+                "Handling request for (%s) URL '%s', correlation_id=%s\n",
+                method,
+                url,
+                correlation_id);
+  else
+    GNUNET_log (GNUNET_ERROR_TYPE_INFO,
+                "Handling request (%s) for URL '%s'\n",
+                method,
+                url);
+
+  for (unsigned int i=0;NULL != handlers[i].url;i++)
+  {
+    struct TMH_RequestHandler *rh = &handlers[i];
+
+    if ( (0 == strcasecmp (url,
+                           rh->url)) &&
+         ( (NULL == rh->method) ||
+           (0 == strcasecmp (method,
+                             rh->method)) ) )
+    {
+      int ret;
+
+      ret = rh->handler (rh,
+                        connection,
+                        con_cls,
+                        upload_data,
+                        upload_data_size);
+      hc = *con_cls;
+      if (NULL != hc)
+      {
+        hc->rh = rh;
+        /* Store the async context ID, so we can restore it if
+         * we get another callback for this request. */
+        hc->async_scope_id = aid;
+      }
+      return ret;
+    }
+  }
+  return TMH_MHD_handler_static_response (&h404,
+                                          connection,
+                                          con_cls,
+                                          upload_data,
+                                          upload_data_size);
+}
+
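+/*
+ * Note: additional endpoints are wired in by adding entries to the
+ * handlers[] table in url_handler() above; any request that matches no
+ * entry falls through to the static 404 response.
+ */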
+
+/**
+ * Shutdown task (magically invoked when the application is being
+ * quit)
+ *
+ * @param cls NULL
+ */
+static void
+do_shutdown (void *cls)
+{
+  if (NULL != mhd_task)
+  {
+    GNUNET_SCHEDULER_cancel (mhd_task);
+    mhd_task = NULL;
+  }
+  if (NULL != mhd)
+  {
+    MHD_stop_daemon (mhd);
+    mhd = NULL;
+  }
+}
+
+
+/**
+ * Function called whenever MHD is done with a request.  If the
+ * request was a POST, we may have stored a `struct Buffer *` in the
+ * @a con_cls that might still need to be cleaned up.  Call the
+ * respective function to free the memory.
+ *
+ * @param cls client-defined closure
+ * @param connection connection handle
+ * @param con_cls value as set by the last call to
+ *        the #MHD_AccessHandlerCallback
+ * @param toe reason for request termination
+ * @see #MHD_OPTION_NOTIFY_COMPLETED
+ * @ingroup request
+ */
+static void
+handle_mhd_completion_callback (void *cls,
+                                struct MHD_Connection *connection,
+                                void **con_cls,
+                                enum MHD_RequestTerminationCode toe)
+{
+  struct TM_HandlerContext *hc = *con_cls;
+
+  if (NULL == hc)
+    return;
+  GNUNET_log (GNUNET_ERROR_TYPE_INFO,
+              "Finished handling request for `%s' with status %d\n",
+              hc->rh->url,
+              (int) toe);
+  hc->cc (hc);
+  *con_cls = NULL;
+}
+
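+/*
+ * Note: hc->cc is the per-request cleanup callback installed by whichever
+ * handler allocated the context; it is expected to free all state reachable
+ * from *hc before the connection is forgotten.
+ */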
+
+/**
+ * Function that queries MHD's select sets and
+ * starts the task waiting for them.
+ */
+static struct GNUNET_SCHEDULER_Task *
+prepare_daemon (void);
+
+
+/**
+ * Set if we should immediately #MHD_run again.
+ */
+static int triggered;
+
+
+/**
+ * Call MHD to process pending requests and then go back
+ * and schedule the next run.
+ *
+ * @param cls the `struct MHD_Daemon` of the HTTP server to run
+ */
+static void
+run_daemon (void *cls)
+{
+  mhd_task = NULL;
+  do {
+    triggered = 0;
+    GNUNET_assert (MHD_YES == MHD_run (mhd));
+  } while (0 != triggered);
+  mhd_task = prepare_daemon ();
+}
+
+
+/**
+ * Kick MHD to run now, to be called after MHD_resume_connection().
+ * Basically, we need to explicitly resume MHD's event loop whenever
+ * we made progress serving a request.  This function re-schedules
+ * the task processing MHD's activities to run immediately.
+ */
+void
+TMH_trigger_daemon ()
+{
+  if (NULL != mhd_task)
+  {
+    GNUNET_SCHEDULER_cancel (mhd_task);
+    mhd_task = NULL;
+    run_daemon (NULL);
+  }
+  else
+  {
+    triggered = 1;
+  }
+}
+
+
+/**
+ * Function that queries MHD's select sets and
+ * starts the task waiting for them.
+ */
+static struct GNUNET_SCHEDULER_Task *
+prepare_daemon ()
+{
+  struct GNUNET_SCHEDULER_Task * ret;
+  fd_set rs;
+  fd_set ws;
+  fd_set es;
+  struct GNUNET_NETWORK_FDSet *wrs;
+  struct GNUNET_NETWORK_FDSet *wws;
+  int max;
+  MHD_UNSIGNED_LONG_LONG timeout;
+  int haveto;
+  struct GNUNET_TIME_Relative tv;
+
+  FD_ZERO (&rs);
+  FD_ZERO (&ws);
+  FD_ZERO (&es);
+  wrs = GNUNET_NETWORK_fdset_create ();
+  wws = GNUNET_NETWORK_fdset_create ();
+  max = -1;
+  GNUNET_assert (MHD_YES ==
+                 MHD_get_fdset (mhd,
+                                &rs,
+                                &ws,
+                                &es,
+                                &max));
+  haveto = MHD_get_timeout (mhd, &timeout);
+  if (haveto == MHD_YES)
+    tv.rel_value_us = (uint64_t) timeout * 1000LL;
+  else
+    tv = GNUNET_TIME_UNIT_FOREVER_REL;
+  GNUNET_NETWORK_fdset_copy_native (wrs, &rs, max + 1);
+  GNUNET_NETWORK_fdset_copy_native (wws, &ws, max + 1);
+  GNUNET_log (GNUNET_ERROR_TYPE_DEBUG,
+              "Adding run_daemon select task\n");
+  ret = GNUNET_SCHEDULER_add_select (GNUNET_SCHEDULER_PRIORITY_HIGH,
+                                    tv,
+                                    wrs,
+                                    wws,
+                                    &run_daemon,
+                                    NULL);
+  GNUNET_NETWORK_fdset_destroy (wrs);
+  GNUNET_NETWORK_fdset_destroy (wws);
+  return ret;
+}
+
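+/*
+ * Note: prepare_daemon() bridges MHD's select()-style event loop and the
+ * GNUnet scheduler: it copies MHD's fd_sets into GNUNET_NETWORK_FDSets,
+ * converts MHD's millisecond timeout into a GNUNET_TIME_Relative, and
+ * schedules run_daemon() to fire once any of those sockets becomes ready
+ * or the timeout expires.
+ */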
+
+/**
+ * Main function that will be run by the scheduler.
+ *
+ * @param cls closure
+ * @param args remaining command-line arguments
+ * @param cfgfile name of the configuration file used (for saving, can be
+ *        NULL!)
+ * @param config configuration
+ */
+static void
+run (void *cls,
+     char *const *args,
+     const char *cfgfile,
+     const struct GNUNET_CONFIGURATION_Handle *config)
+{
+  int fh;
+
+  GNUNET_log (GNUNET_ERROR_TYPE_INFO,
+              "Starting taler-merchant-httpd\n");
+
+  result = GNUNET_SYSERR;
+  GNUNET_SCHEDULER_add_shutdown (&do_shutdown,
+                                 NULL);
+  GNUNET_assert (GNUNET_OK ==
+                 GNUNET_log_setup ("taler-merchant-httpd",
+                                   "WARNING",
+                                   NULL));
+  {
+    const char *choices[] = {"tcp",
+                             "unix",
+                             NULL};
+
+    const char *serve_type;
+
+    if (GNUNET_OK !=
+        GNUNET_CONFIGURATION_get_value_choice (config,
+                                               "merchant",
+                                               "serve",
+                                               choices,
+                                               &serve_type))
+    {
+      GNUNET_log_config_invalid (GNUNET_ERROR_TYPE_ERROR,
+                                 "merchant",
+                                 "serve",
+                                 "serve type required");
+      GNUNET_SCHEDULER_shutdown ();
+      return;
+    }
+
+    if (0 == strcmp (serve_type, "unix"))
+    {
+      struct sockaddr_un *un;
+      char *mode;
+      struct GNUNET_NETWORK_Handle *nh;
+
+      if (GNUNET_OK !=
+          GNUNET_CONFIGURATION_get_value_filename (config,
+                                                   "merchant",
+                                                   "unixpath",
+                                                   &serve_unixpath))
+      {
+        GNUNET_log_config_invalid (GNUNET_ERROR_TYPE_ERROR,
+                                   "merchant",
+                                   "unixpath",
+                                   "unixpath required");
+        GNUNET_SCHEDULER_shutdown ();
+        return;
+      }
+
+      if (strlen (serve_unixpath) >= sizeof (un->sun_path))
+      {
+        fprintf (stderr,
+                 "Invalid configuration: unix path too long\n");
+        GNUNET_SCHEDULER_shutdown ();
+        return;
+      }
+
+      if (GNUNET_OK !=
+          GNUNET_CONFIGURATION_get_value_string (config,
+                                                 "merchant",
+                                                 "UNIXPATH_MODE",
+                                                 &mode))
+      {
+        GNUNET_log_config_missing (GNUNET_ERROR_TYPE_ERROR,
+                                   "merchant",
+                                   "UNIXPATH_MODE");
+        GNUNET_SCHEDULER_shutdown ();
+        return;
+      }
+      errno = 0;
+      unixpath_mode = (mode_t) strtoul (mode, NULL, 8);
+      if (0 != errno)
+      {
+        GNUNET_log_config_invalid (GNUNET_ERROR_TYPE_ERROR,
+                                   "merchant",
+                                   "UNIXPATH_MODE",
+                                   "must be octal number");
+        GNUNET_free (mode);
+        GNUNET_SCHEDULER_shutdown ();
+        return;
+      }
+      GNUNET_free (mode);
+
+      GNUNET_log (GNUNET_ERROR_TYPE_INFO,
+                  "Creating listen socket '%s' with mode %o\n",
+                  serve_unixpath, unixpath_mode);
+
+      if (GNUNET_OK != GNUNET_DISK_directory_create_for_file (serve_unixpath))
+      {
+        GNUNET_log_strerror_file (GNUNET_ERROR_TYPE_ERROR,
+                                  "mkdir",
+                                  serve_unixpath);
+      }
+
+      un = GNUNET_new (struct sockaddr_un);
+      un->sun_family = AF_UNIX;
+      strncpy (un->sun_path,
+               serve_unixpath,
+               sizeof (un->sun_path) - 1);
+
+      GNUNET_NETWORK_unix_precheck (un);
+
+      if (NULL == (nh = GNUNET_NETWORK_socket_create (AF_UNIX,
+                                                      SOCK_STREAM,
+                                                      0)))
+      {
+        GNUNET_log_strerror (GNUNET_ERROR_TYPE_ERROR,
+                             "socket(AF_UNIX)");
+        GNUNET_SCHEDULER_shutdown ();
+        return;
+      }
+      if (GNUNET_OK !=
+          GNUNET_NETWORK_socket_bind (nh,
+                                      (void *) un,
+                                      sizeof (struct sockaddr_un)))
+      {
+        GNUNET_log_strerror (GNUNET_ERROR_TYPE_ERROR,
+                             "bind(AF_UNIX)");
+        GNUNET_SCHEDULER_shutdown ();
+        return;
+      }
+      if (GNUNET_OK !=
+          GNUNET_NETWORK_socket_listen (nh,
+                                        UNIX_BACKLOG))
+      {
+        GNUNET_log_strerror (GNUNET_ERROR_TYPE_ERROR,
+                             "listen(AF_UNIX)");
+        GNUNET_SCHEDULER_shutdown ();
+        return;
+      }
+
+      fh = GNUNET_NETWORK_get_fd (nh);
+      GNUNET_NETWORK_socket_free_memory_only_ (nh);
+      if (0 != chmod (serve_unixpath,
+                      unixpath_mode))
+      {
+        GNUNET_log_strerror (GNUNET_ERROR_TYPE_ERROR,
+                             "chmod");
+        GNUNET_SCHEDULER_shutdown ();
+        return;
+      }
+      port = 0;
+    }
+    else if (0 == strcmp (serve_type, "tcp"))
+    {
+      char *bind_to;
+
+      if (GNUNET_SYSERR ==
+          GNUNET_CONFIGURATION_get_value_number (config,
+                                                 "merchant",
+                                                 "PORT",
+                                                 &port))
+      {
+        GNUNET_log_config_missing (GNUNET_ERROR_TYPE_ERROR,
+                                   "merchant",
+                                   "PORT");
+        GNUNET_SCHEDULER_shutdown ();
+        return;
+      }
+      if (GNUNET_OK ==
+          GNUNET_CONFIGURATION_get_value_string (config,
+                                                 "merchant",
+                                                 "BIND_TO",
+                                                 &bind_to))
+      {
+        char port_str[6];
+        struct addrinfo hints;
+        struct addrinfo *res;
+        int ec;
+        struct GNUNET_NETWORK_Handle *nh;
+
+        GNUNET_snprintf (port_str,
+                         sizeof (port_str),
+                         "%u",
+                         (uint16_t) port);
+        memset (&hints, 0, sizeof (hints));
+        hints.ai_family = AF_UNSPEC;
+        hints.ai_socktype = SOCK_STREAM;
+        hints.ai_protocol = IPPROTO_TCP;
+        hints.ai_flags = AI_PASSIVE
+#ifdef AI_IDN
+          | AI_IDN
+#endif
+          ;
+        if (0 !=
+            (ec = getaddrinfo (bind_to,
+                               port_str,
+                               &hints,
+                               &res)))
+        {
+          GNUNET_log (GNUNET_ERROR_TYPE_ERROR,
+                      "Failed to resolve BIND_TO address `%s': %s\n",
+                      bind_to,
+                      gai_strerror (ec));
+          GNUNET_free (bind_to);
+          GNUNET_SCHEDULER_shutdown ();
+          return;
+        }
+        GNUNET_free (bind_to);
+
+        if (NULL == (nh = GNUNET_NETWORK_socket_create (res->ai_family,
+                                                        res->ai_socktype,
+                                                        res->ai_protocol)))
+        {
+          GNUNET_log_strerror (GNUNET_ERROR_TYPE_ERROR,
+                               "socket");
+          freeaddrinfo (res);
+          GNUNET_SCHEDULER_shutdown ();
+          return;
+        }
+        if (GNUNET_OK !=
+            GNUNET_NETWORK_socket_bind (nh,
+                                        res->ai_addr,
+                                        res->ai_addrlen))
+        {
+          GNUNET_log_strerror (GNUNET_ERROR_TYPE_ERROR,
+                               "bind");
+          freeaddrinfo (res);
+          GNUNET_SCHEDULER_shutdown ();
+          return;
+        }
+        freeaddrinfo (res);
+        if (GNUNET_OK !=
+            GNUNET_NETWORK_socket_listen (nh,
+                                          UNIX_BACKLOG))
+        {
+          GNUNET_log_strerror (GNUNET_ERROR_TYPE_ERROR,
+                               "listen");
+          GNUNET_SCHEDULER_shutdown ();
+          return;
+        }
+        fh = GNUNET_NETWORK_get_fd (nh);
+        GNUNET_NETWORK_socket_free_memory_only_ (nh);
+      }
+      else
+      {
+        fh = -1;
+      }
+    }
+    else
+    {
+      // not reached
+      GNUNET_assert (0);
+    }
+  }
+  mhd = MHD_start_daemon (MHD_USE_SUSPEND_RESUME | MHD_USE_DUAL_STACK,
+                          port,
+                          NULL, NULL,
+                          &url_handler, NULL,
+                          MHD_OPTION_LISTEN_SOCKET, fh,
+                          MHD_OPTION_NOTIFY_COMPLETED, &handle_mhd_completion_callback, NULL,
+                          MHD_OPTION_CONNECTION_TIMEOUT, (unsigned int) 10 /* 10s */,
+                          MHD_OPTION_END);
+  if (NULL == mhd)
+  {
+    GNUNET_log (GNUNET_ERROR_TYPE_ERROR,
+                "Failed to launch HTTP service, exiting.\n");
+    GNUNET_SCHEDULER_shutdown ();
+    return;
+  }
+  result = GNUNET_OK;
+  mhd_task = prepare_daemon ();
+}
+
+
+/**
+ * The main function of the anastasis-httpd service.
+ *
+ * @param argc number of arguments from the command line
+ * @param argv command line arguments
+ * @return 0 ok, 1 on error
+ */
+int
+main (int argc,
+      char *const *argv)
+{
+  struct GNUNET_GETOPT_CommandLineOption options[] = {
+    GNUNET_GETOPT_option_flag ('C',
+                               "connection-close",
+                               "force HTTP connections to be closed after each request",
+                               &TMH_merchant_connection_close),
+
+    GNUNET_GETOPT_OPTION_END
+  };
+
+  if (GNUNET_OK !=
+      GNUNET_PROGRAM_run (argc, argv,
+                          "anastasis-httpd",
+                          "Anastasis HTTP interface",
+                          options, &run, NULL))
+    return 3;
+  return (GNUNET_OK == result) ? 0 : 1;
+}
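For reference, the exit status follows the code above: 3 if GNUNET_PROGRAM_run() itself fails (e.g. bad command-line options), 1 if run() gave up before the HTTP daemon was launched (result stays GNUNET_SYSERR), and 0 after a clean shutdown. A typical invocation, assuming the standard GNUnet '-c' option for selecting a configuration file, would look roughly like:

  $ anastasis-httpd -c anastasis.conf -C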
diff --git a/src/backup/anastasis-httpd.h b/src/backup/anastasis-httpd.h
new file mode 100644
index 0000000..f53a806
--- /dev/null
+++ b/src/backup/anastasis-httpd.h
@@ -0,0 +1,142 @@
+/*
+  This file is part of TALER
+  Copyright (C) 2019 Taler Systems SA
+
+  TALER is free software; you can redistribute it and/or modify it under the
+  terms of the GNU General Public License as published by the Free Software
+  Foundation; either version 3, or (at your option) any later version.
+
+  TALER is distributed in the hope that it will be useful, but WITHOUT ANY
+  WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+  A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License along with
+  TALER; see the file COPYING.  If not, see <http://www.gnu.org/licenses/>
+*/
+/**
+ * @file anastasis/anastasis-httpd.h
+ * @brief HTTP serving layer
+ * @author Christian Grothoff
+ */
+#ifndef ANASTASIS_HTTPD_H
+#define ANASTASIS_HTTPD_H
+
+#include "platform.h"
+#include <microhttpd.h>
+
+/**
+ * @brief Struct describing an URL and the handler for it.
+ */
+struct TMH_RequestHandler
+{
+
+  /**
+   * URL the handler is for.
+   */
+  const char *url;
+
+  /**
+   * Method the handler is for, NULL for "all".
+   */
+  const char *method;
+
+  /**
+   * Mime type to use in reply (hint, can be NULL).
+   */
+  const char *mime_type;
+
+  /**
+   * Raw data for the @e handler
+   */
+  const void *data;
+
+  /**
+   * Number of bytes in @e data, 0 for 0-terminated.
+   */
+  size_t data_size;
+
+  /**
+   * Function to call to handle the request.
+   *
+   * @param rh this struct
+   * @param mime_type the @e mime_type for the reply (hint, can be NULL)
+   * @param connection the MHD connection to handle
+   * @param[in,out] connection_cls the connection's closure (can be updated)
+   * @param upload_data upload data
+   * @param[in,out] upload_data_size number of bytes (left) in @a upload_data
+   * @return MHD result code
+   */
+  int (*handler)(struct TMH_RequestHandler *rh,
+                 struct MHD_Connection *connection,
+                 void **connection_cls,
+                 const char *upload_data,
+                 size_t *upload_data_size);
+
+  /**
+   * Default response code.
+   */
+  int response_code;
+};
+
+
+/**
+ * Each MHD response handler that sets the "connection_cls" to a
+ * non-NULL value must use a struct that has this struct as its first
+ * member.  This struct contains a single callback, which will be
+ * invoked to clean up the memory when the connection is completed.
+ */
+struct TM_HandlerContext;
+
+/**
+ * Signature of a function used to clean up the context
+ * we keep in the "connection_cls" of MHD when handling
+ * a request.
+ *
+ * @param hc header of the context to clean up.
+ */
+typedef void
+(*TM_ContextCleanup)(struct TM_HandlerContext *hc);
+
+
+/**
+ * Each MHD response handler that sets the "connection_cls" to a
+ * non-NULL value must use a struct that has this struct as its first
+ * member.  This struct contains a single callback, which will be
+ * invoked to clean up the memory when the connection is completed.
+ */
+struct TM_HandlerContext
+{
+
+  /**
+   * Function to execute the handler-specific cleanup of the
+   * (typically larger) context.
+   */
+  TM_ContextCleanup cc;
+
+  /**
+   * Which request handler is handling this request?
+   */
+  const struct TMH_RequestHandler *rh;
+
+  /**
+   * Asynchronous request context id.
+   */
+  struct GNUNET_AsyncScopeId async_scope_id;
+};
+
+
+/**
+ * Should a "Connection: close" header be added to each HTTP response?
+ */
+extern int TMH_merchant_connection_close;
+
+/**
+ * Kick MHD to run now, to be called after MHD_resume_connection().
+ * Basically, we need to explicitly resume MHD's event loop whenever
+ * we made progress serving a request.  This function re-schedules
+ * the task processing MHD's activities to run immediately.
+ */
+void
+TMH_trigger_daemon (void);
+
+#endif
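To illustrate the contract described above, a minimal sketch (not part of this commit) of a handler-specific context embedding struct TM_HandlerContext as its first member, together with its cleanup callback; all names are illustrative:

/* Hedged sketch only; names are illustrative. */
struct UploadContext
{
  /* Must come first, so generic code can cast the connection_cls. */
  struct TM_HandlerContext hc;

  /* Handler-specific state, e.g. the parsed request body. */
  json_t *body;
};

static void
upload_context_cleanup (struct TM_HandlerContext *hc)
{
  struct UploadContext *uc = (struct UploadContext *) hc;

  if (NULL != uc->body)
    json_decref (uc->body);
  GNUNET_free (uc);
}

/* On the first call into the handler, the context would be set up as:
 *
 *   struct UploadContext *uc = GNUNET_new (struct UploadContext);
 *
 *   uc->hc.cc = &upload_context_cleanup;
 *   uc->hc.rh = rh;
 *   *connection_cls = uc;
 */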
diff --git a/src/backup/anastasis-httpd_mhd.c b/src/backup/anastasis-httpd_mhd.c
new file mode 100644
index 0000000..3ee093e
--- /dev/null
+++ b/src/backup/anastasis-httpd_mhd.c
@@ -0,0 +1,156 @@
+/*
+  This file is part of TALER
+  Copyright (C) 2014, 2015, 2016 GNUnet e.V. and INRIA
+
+  TALER is free software; you can redistribute it and/or modify it under the
+  terms of the GNU Affero General Public License as published by the Free Software
+  Foundation; either version 3, or (at your option) any later version.
+
+  TALER is distributed in the hope that it will be useful, but WITHOUT ANY
+  WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+  A PARTICULAR PURPOSE.  See the GNU Affero General Public License for more details.
+
+  You should have received a copy of the GNU Affero General Public License along with
+  TALER; see the file COPYING.  If not, see <http://www.gnu.org/licenses/>
+*/
+/**
+ * @file anastasis-httpd_mhd.c
+ * @brief helpers for MHD interaction; these are TMH_MHD_handler_ functions
+ *        that generate simple MHD replies that do not require any real operations
+ *        to be performed (error handling, static pages, etc.)
+ * @author Florian Dold
+ * @author Benedikt Mueller
+ * @author Christian Grothoff
+ */
+#include "platform.h"
+#include <jansson.h>
+#include "taler-merchant-httpd_mhd.h"
+#include "taler-merchant-httpd_responses.h"
+
+
+/**
+ * Function to call to handle the request by sending
+ * back static data from the @a rh.
+ *
+ * @param rh context of the handler
+ * @param connection the MHD connection to handle
+ * @param[in,out] connection_cls the connection's closure (can be updated)
+ * @param upload_data upload data
+ * @param[in,out] upload_data_size number of bytes (left) in @a upload_data
+ * @return MHD result code
+ */
+int
+TMH_MHD_handler_static_response (struct TMH_RequestHandler *rh,
+                                 struct MHD_Connection *connection,
+                                 void **connection_cls,
+                                 const char *upload_data,
+                                 size_t *upload_data_size)
+{
+  struct MHD_Response *response;
+  int ret;
+
+  if (0 == rh->data_size)
+    rh->data_size = strlen ((const char *) rh->data);
+  response = MHD_create_response_from_buffer (rh->data_size,
+                                              (void *) rh->data,
+                                              MHD_RESPMEM_PERSISTENT);
+  if (NULL == response)
+  {
+    GNUNET_break (0);
+    return MHD_NO;
+  }
+  TMH_RESPONSE_add_global_headers (response);
+  if (NULL != rh->mime_type)
+    (void) MHD_add_response_header (response,
+                                    MHD_HTTP_HEADER_CONTENT_TYPE,
+                                    rh->mime_type);
+  ret = MHD_queue_response (connection,
+                            rh->response_code,
+                            response);
+  MHD_destroy_response (response);
+  return ret;
+}
+
+
+/**
+ * Function to call to handle the request by sending
+ * back a redirect to the AGPL source code.
+ *
+ * @param rh context of the handler
+ * @param connection the MHD connection to handle
+ * @param[in,out] connection_cls the connection's closure (can be updated)
+ * @param upload_data upload data
+ * @param[in,out] upload_data_size number of bytes (left) in @a upload_data
+ * @return MHD result code
+ */
+int
+TMH_MHD_handler_agpl_redirect (struct TMH_RequestHandler *rh,
+                                  struct MHD_Connection *connection,
+                                  void **connection_cls,
+                                  const char *upload_data,
+                                  size_t *upload_data_size)
+{
+  const char *agpl =
+    "This server is licensed under the Affero GPL. You will now be redirected 
to the source code.";
+  struct MHD_Response *response;
+  int ret;
+
+  response = MHD_create_response_from_buffer (strlen (agpl),
+                                              (void *) agpl,
+                                              MHD_RESPMEM_PERSISTENT);
+  if (NULL == response)
+  {
+    GNUNET_break (0);
+    return MHD_NO;
+  }
+  TMH_RESPONSE_add_global_headers (response);
+  if (NULL != rh->mime_type)
+    (void) MHD_add_response_header (response,
+                                    MHD_HTTP_HEADER_CONTENT_TYPE,
+                                    rh->mime_type);
+  if (MHD_NO ==
+      MHD_add_response_header (response,
+                               MHD_HTTP_HEADER_LOCATION,
+                               "http://www.git.taler.net/?p=exchange.git"))
+  {
+    GNUNET_break (0);
+    ret = MHD_NO;
+  }
+  else
+  {
+    ret = MHD_queue_response (connection,
+                              rh->response_code,
+                              response);
+  }
+  MHD_destroy_response (response);
+  return ret;
+}
+
+
+/**
+ * Function to call to handle the request by building a JSON
+ * reply with an error message from @a rh.
+ *
+ * @param rh context of the handler
+ * @param connection the MHD connection to handle
+ * @param[in,out] connection_cls the connection's closure (can be updated)
+ * @param upload_data upload data
+ * @param[in,out] upload_data_size number of bytes (left) in @a upload_data
+ * @return MHD result code
+ */
+int
+TMH_MHD_handler_send_json_pack_error (struct TMH_RequestHandler *rh,
+                                         struct MHD_Connection *connection,
+                                         void **connection_cls,
+                                         const char *upload_data,
+                                         size_t *upload_data_size)
+{
+  return TMH_RESPONSE_reply_json_pack (connection,
+                                       rh->response_code,
+                                       "{s:s}",
+                                       "error",
+                                       rh->data);
+}
+
+
+/* end of anastasis-httpd_mhd.c */
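A hedged sketch (not part of this commit) of how these helpers would typically be wired into the dispatch table consulted by url_handler() in anastasis-httpd.c; the endpoints and the greeting text are illustrative, and the field order follows struct TMH_RequestHandler from anastasis-httpd.h:

/* Hedged sketch only; endpoints and texts are illustrative. */
static struct TMH_RequestHandler handlers[] = {
  { "/", MHD_HTTP_METHOD_GET, "text/plain",
    "Hello, I'm the Anastasis backup/restore service.\n", 0,
    &TMH_MHD_handler_static_response, MHD_HTTP_OK },
  { "/agpl", MHD_HTTP_METHOD_GET, "text/plain",
    NULL, 0,
    &TMH_MHD_handler_agpl_redirect, MHD_HTTP_FOUND },
  { NULL, NULL, NULL, NULL, 0, NULL, 0 }
};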
diff --git a/src/backup/anastasis-httpd_parsing.c b/src/backup/anastasis-httpd_parsing.c
new file mode 100644
index 0000000..eca0099
--- /dev/null
+++ b/src/backup/anastasis-httpd_parsing.c
@@ -0,0 +1,272 @@
+/*
+  This file is part of TALER
+  Copyright (C) 2014, 2015, 2016 GNUnet e.V.
+
+  TALER is free software; you can redistribute it and/or modify
+  it under the terms of the GNU Affero General Public License as
+  published by the Free Software Foundation; either version 3,
+  or (at your option) any later version.
+
+  TALER is distributed in the hope that it will be useful, but
+  WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU Affero General Public License for more details.
+
+  You should have received a copy of the GNU Affero General Public
+  License along with TALER; see the file COPYING.  If not,
+  see <http://www.gnu.org/licenses/>
+*/
+
+/**
+ * @file anastasis-httpd_parsing.c
+ * @brief functions to parse incoming requests
+ *        (MHD arguments and JSON snippets)
+ * @author Florian Dold
+ * @author Benedikt Mueller
+ * @author Christian Grothoff
+ */
+#include "platform.h"
+#include <gnunet/gnunet_util_lib.h>
+#include <taler/taler_json_lib.h>
+#include "taler-merchant-httpd_parsing.h"
+#include "taler-merchant-httpd_responses.h"
+
+/* FIXME: de-duplicate code with taler-exchange-httpd_parsing.c
+   and taler-exchange-httpd_response.c */
+
+/**
+ * Initial size for POST request buffer.
+ */
+#define REQUEST_BUFFER_INITIAL (2*1024)
+
+/**
+ * Maximum POST request size.
+ */
+#define REQUEST_BUFFER_MAX (1024*1024)
+
+
+/**
+ * Buffer for POST requests.
+ */
+struct Buffer
+{
+  /**
+   * Allocated memory
+   */
+  char *data;
+
+  /**
+   * Number of valid bytes in buffer.
+   */
+  size_t fill;
+
+  /**
+   * Number of allocated bytes in buffer.
+   */
+  size_t alloc;
+};
+
+
+
+
+/**
+ * Free the data in a buffer.  Does *not* free
+ * the buffer object itself.
+ *
+ * @param buf buffer to de-initialize
+ */
+static void
+buffer_deinit (struct Buffer *buf)
+{
+  GNUNET_free_non_null (buf->data);
+  buf->data = NULL;
+}
+
+
+
+/**
+ * Function called whenever we are done with a request
+ * to clean up our state.
+ *
+ * @param con_cls value as it was left by
+ *        #TMH_PARSE_post_json(), to be cleaned up
+ */
+void
+TMH_PARSE_post_cleanup_callback (void *con_cls)
+{
+  struct Buffer *r = con_cls;
+
+  if (NULL != r)
+  {
+    buffer_deinit (r);
+    GNUNET_free (r);
+  }
+}
+
+
+/**
+ * Process a POST request containing a JSON object.  This function
+ * realizes an MHD POST processor that will (incrementally) process
+ * JSON data uploaded to the HTTP server.  It will store the
+ * required state in the @a con_cls, which must be cleaned up
+ * using #TMH_PARSE_post_cleanup_callback().
+ *
+ * @param connection the MHD connection
+ * @param con_cls the closure (points to a `struct Buffer *`)
+ * @param upload_data the POST data
+ * @param upload_data_size number of bytes in @a upload_data
+ * @param json the JSON object for a completed request
+ * @return
+ *    #GNUNET_YES if json object was parsed or at least
+ *               may be parsed in the future (call again);
+ *               `*json` will be NULL if we need to be called again,
+ *                and non-NULL if we are done.
+ *    #GNUNET_NO if request is incomplete or invalid
+ *               (error message was generated)
+ *    #GNUNET_SYSERR on internal error
+ *               (we could not even queue an error message,
+ *                close HTTP session with MHD_NO)
+ */
+int
+TMH_PARSE_post_json (struct MHD_Connection *connection,
+                     void **con_cls,
+                     const char *upload_data,
+                     size_t *upload_data_size,
+                     json_t **json)
+{
+  enum GNUNET_JSON_PostResult pr;
+
+  pr = GNUNET_JSON_post_parser (REQUEST_BUFFER_MAX,
+                                connection,
+                                con_cls,
+                                upload_data,
+                                upload_data_size,
+                                json);
+  switch (pr)
+  {
+
+  case GNUNET_JSON_PR_OUT_OF_MEMORY:
+    return (MHD_NO == TMH_RESPONSE_reply_internal_error
+      (connection,
+       TALER_EC_PARSER_OUT_OF_MEMORY,
+       "out of memory")) ? GNUNET_SYSERR : GNUNET_NO;
+
+  case GNUNET_JSON_PR_CONTINUE:
+    return GNUNET_YES;
+
+  case GNUNET_JSON_PR_REQUEST_TOO_LARGE:
+    return (MHD_NO == TMH_RESPONSE_reply_request_too_large
+      (connection)) ? GNUNET_SYSERR : GNUNET_NO;
+
+  case GNUNET_JSON_PR_JSON_INVALID:
+    return (MHD_YES ==
+            TMH_RESPONSE_reply_invalid_json (connection))
+      ? GNUNET_NO : GNUNET_SYSERR;
+  case GNUNET_JSON_PR_SUCCESS:
+    GNUNET_break (NULL != *json);
+    return GNUNET_YES;
+  }
+  /* this should never happen */
+  GNUNET_break (0);
+  return GNUNET_SYSERR;
+}
+
+
+/**
+ * Parse JSON object into components based on the given field
+ * specification.
+ *
+ * @param connection the connection to send an error response to
+ * @param root the JSON node to start the navigation at.
+ * @param spec field specification for the parser
+ * @return
+ *    #GNUNET_YES if navigation was successful (caller is responsible
+ *                for freeing allocated variable-size data using
+ *                #GNUNET_JSON_parse_free() when done)
+ *    #GNUNET_NO if json is malformed, error response was generated
+ *    #GNUNET_SYSERR on internal error
+ */
+int
+TMH_PARSE_json_data (struct MHD_Connection *connection,
+                     const json_t *root,
+                     struct GNUNET_JSON_Specification *spec)
+{
+  int ret;
+  const char *error_json_name;
+  unsigned int error_line;
+
+  ret = GNUNET_JSON_parse (root,
+                           spec,
+                           &error_json_name,
+                           &error_line);
+  if (GNUNET_SYSERR == ret)
+  {
+    if (NULL == error_json_name)
+      error_json_name = "<no field>";
+    GNUNET_log (GNUNET_ERROR_TYPE_WARNING,
+                "Parsing failed due to field '%s'\n",
+                error_json_name);
+    ret = (MHD_YES ==
+           TMH_RESPONSE_reply_json_pack (connection,
+                                         MHD_HTTP_BAD_REQUEST,
+                                         "{s:s, s:s, s:I}",
+                                         "error", "parse error",
+                                         "field", error_json_name,
+                                         "line", (json_int_t) error_line))
+      ? GNUNET_NO : GNUNET_SYSERR;
+    return ret;
+  }
+  return GNUNET_YES;
+}
+
+
+
+/**
+ * Extract base32crockford encoded data from request.
+ *
+ * Queues an error response to the connection if the parameter is
+ * missing or invalid.
+ *
+ * @param connection the MHD connection
+ * @param param_name the name of the parameter with the key
+ * @param[out] out_data pointer to store the result
+ * @param out_size expected size of data
+ * @return
+ *   #GNUNET_YES if the argument is present
+ *   #GNUNET_NO if the argument is absent or malformed
+ *   #GNUNET_SYSERR on internal error (error response could not be sent)
+ */
+int
+TMH_PARSE_mhd_request_arg_data (struct MHD_Connection *connection,
+                                const char *param_name,
+                                void *out_data,
+                                size_t out_size)
+{
+  const char *str;
+
+  str = MHD_lookup_connection_value (connection,
+                                     MHD_GET_ARGUMENT_KIND,
+                                     param_name);
+  if (NULL == str)
+  {
+    return (MHD_NO ==
+            TMH_RESPONSE_reply_arg_missing (connection,
+                                           TALER_EC_PARAMETER_MISSING,
+                                           param_name))
+      ? GNUNET_SYSERR : GNUNET_NO;
+  }
+  if (GNUNET_OK !=
+      GNUNET_STRINGS_string_to_data (str,
+                                     strlen (str),
+                                     out_data,
+                                     out_size))
+    return (MHD_NO ==
+            TMH_RESPONSE_reply_arg_invalid (connection,
+                                           TALER_EC_PARAMETER_MALFORMED,
+                                           param_name))
+      ? GNUNET_SYSERR : GNUNET_NO;
+  return GNUNET_YES;
+}
+
+
+/* end of anastasis-httpd_parsing.c */
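A sketch (not part of this commit) of the intended call sequence for TMH_PARSE_post_json() and TMH_PARSE_json_data(), as seen from a hypothetical POST handler; the handler name and the "backup" field are illustrative only:

/* Hedged sketch only; handler name and JSON field are illustrative. */
static int
handle_backup_post (struct TMH_RequestHandler *rh,
                    struct MHD_Connection *connection,
                    void **connection_cls,
                    const char *upload_data,
                    size_t *upload_data_size)
{
  json_t *root;
  int res;

  /* Feed the upload to the incremental JSON parser; while the body is
     incomplete this returns GNUNET_YES with 'root' still NULL. */
  res = TMH_PARSE_post_json (connection,
                             connection_cls,
                             upload_data,
                             upload_data_size,
                             &root);
  if (GNUNET_SYSERR == res)
    return MHD_NO;              /* could not even queue an error reply */
  if ( (GNUNET_NO == res) ||
       (NULL == root) )
    return MHD_YES;             /* error already queued, or need more data */
  {
    const char *backup;
    struct GNUNET_JSON_Specification spec[] = {
      GNUNET_JSON_spec_string ("backup", &backup),
      GNUNET_JSON_spec_end ()
    };

    res = TMH_PARSE_json_data (connection,
                               root,
                               spec);
    if (GNUNET_YES != res)
    {
      json_decref (root);
      return (GNUNET_NO == res) ? MHD_YES : MHD_NO;
    }
    /* ... hand 'backup' to the database plugin here ... */
    GNUNET_JSON_parse_free (spec);
  }
  json_decref (root);
  return TMH_RESPONSE_reply_json_pack (connection,
                                       MHD_HTTP_OK,
                                       "{s:s}",
                                       "status", "upload accepted");
}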
diff --git a/src/backup/anastasis-httpd_parsing.h b/src/backup/anastasis-httpd_parsing.h
new file mode 100644
index 0000000..d6e1039
--- /dev/null
+++ b/src/backup/anastasis-httpd_parsing.h
@@ -0,0 +1,93 @@
+/*
+  This file is part of TALER
+  Copyright (C) 2014, 2015, 2016 GNUnet e.V.
+
+  TALER is free software; you can redistribute it and/or modify it under the
+  terms of the GNU Affero General Public License as published by the Free Software
+  Foundation; either version 3, or (at your option) any later version.
+
+  TALER is distributed in the hope that it will be useful, but WITHOUT ANY
+  WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+  A PARTICULAR PURPOSE.  See the GNU Affero General Public License for more details.
+
+  You should have received a copy of the GNU Affero General Public License along with
+  TALER; see the file COPYING.  If not, see <http://www.gnu.org/licenses/>
+*/
+/**
+ * @file anastasis-httpd_parsing.h
+ * @brief functions to parse incoming requests
+ * @author Florian Dold
+ * @author Benedikt Mueller
+ * @author Christian Grothoff
+ */
+#ifndef ANASTASIS_HTTPD_PARSING_H
+#define ANASTASIS_HTTPD_PARSING_H
+
+#include <microhttpd.h>
+#include <taler/taler_util.h>
+#include <taler/taler_json_lib.h>
+
+/**
+ * Process a POST request containing a JSON object.  This
+ * function realizes an MHD POST processor that will
+ * (incrementally) process JSON data uploaded to the HTTP
+ * server.  It will store the required state in the
+ * "connection_cls", which must be cleaned up using
+ * #TMH_PARSE_post_cleanup_callback().
+ *
+ * @param connection the MHD connection
+ * @param con_cls the closure (points to a `struct Buffer *`)
+ * @param upload_data the POST data
+ * @param upload_data_size number of bytes in @a upload_data
+ * @param json the JSON object for a completed request
+ * @return
+ *    #GNUNET_YES if json object was parsed or at least
+ *               may be parsed in the future (call again);
+ *               `*json` will be NULL if we need to be called again,
+ *                and non-NULL if we are done.
+ *    #GNUNET_NO if request is incomplete or invalid
+ *               (error message was generated)
+ *    #GNUNET_SYSERR on internal error
+ *               (we could not even queue an error message,
+ *                close HTTP session with MHD_NO)
+ */
+int
+TMH_PARSE_post_json (struct MHD_Connection *connection,
+                     void **con_cls,
+                     const char *upload_data,
+                     size_t *upload_data_size,
+                     json_t **json);
+
+
+/**
+ * Function called whenever we are done with a request
+ * to clean up our state.
+ *
+ * @param con_cls value as it was left by
+ *        #TMH_PARSE_post_json(), to be cleaned up
+ */
+void
+TMH_PARSE_post_cleanup_callback (void *con_cls);
+
+
+/**
+ * Parse JSON object into components based on the given field
+ * specification.
+ *
+ * @param connection the connection to send an error response to
+ * @param root the JSON node to start the navigation at.
+ * @param spec field specification for the parser
+ * @return
+ *    #GNUNET_YES if navigation was successful (caller is responsible
+ *                for freeing allocated variable-size data using
+ *                #GNUNET_JSON_parse_free() when done)
+ *    #GNUNET_NO if json is malformed, error response was generated
+ *    #GNUNET_SYSERR on internal error
+ */
+int
+TMH_PARSE_json_data (struct MHD_Connection *connection,
+                     const json_t *root,
+                     struct GNUNET_JSON_Specification *spec);
+
+
+#endif /* ANASTASIS_HTTPD_PARSING_H */
diff --git a/src/backup/anastasis-httpd_responses.c b/src/backup/anastasis-httpd_responses.c
new file mode 100644
index 0000000..ca214ec
--- /dev/null
+++ b/src/backup/anastasis-httpd_responses.c
@@ -0,0 +1,407 @@
+/*
+  This file is part of TALER
+  Copyright (C) 2014-2017 GNUnet e.V.
+
+  TALER is free software; you can redistribute it and/or modify it under the
+  terms of the GNU Affero General Public License as published by the Free Software
+  Foundation; either version 3, or (at your option) any later version.
+
+  TALER is distributed in the hope that it will be useful, but WITHOUT ANY
+  WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+  A PARTICULAR PURPOSE.  See the GNU Affero General Public License for more details.
+
+  You should have received a copy of the GNU Affero General Public License along with
+  TALER; see the file COPYING.  If not, see <http://www.gnu.org/licenses/>
+*/
+/**
+ * @file anastasis-httpd_responses.c
+ * @brief API for generating the various replies of the backend; these
+ *        functions are called TMH_RESPONSE_reply_ and they generate
+ *        and queue MHD response objects for a given connection.
+ * @author Florian Dold
+ * @author Benedikt Mueller
+ * @author Christian Grothoff
+ */
+#include "platform.h"
+#include "taler-merchant-httpd.h"
+#include "taler-merchant-httpd_responses.h"
+#include <taler/taler_util.h>
+#include <taler/taler_json_lib.h>
+#include <gnunet/gnunet_util_lib.h>
+
+
+/**
+ * Make JSON response object.
+ *
+ * @param json the json object
+ * @return MHD response object
+ */
+struct MHD_Response *
+TMH_RESPONSE_make_json (const json_t *json)
+{
+  struct MHD_Response *resp;
+  char *json_str;
+
+  json_str = json_dumps (json,
+                         JSON_INDENT(2));
+  if (NULL == json_str)
+  {
+    GNUNET_break (0);
+    return NULL;
+  }
+  resp = MHD_create_response_from_buffer (strlen (json_str),
+                                          json_str,
+                                          MHD_RESPMEM_MUST_FREE);
+  if (NULL == resp)
+  {
+    free (json_str);
+    GNUNET_break (0);
+    return NULL;
+  }
+  GNUNET_break (MHD_YES ==
+                MHD_add_response_header (resp,
+                                         MHD_HTTP_HEADER_CONTENT_TYPE,
+                                         "application/json"));
+  return resp;
+}
+
+
+/**
+ * Send JSON object as response.
+ *
+ * @param connection the MHD connection
+ * @param json the json object
+ * @param response_code the http response code
+ * @return MHD result code
+ */
+int
+TMH_RESPONSE_reply_json (struct MHD_Connection *connection,
+                         const json_t *json,
+                         unsigned int response_code)
+{
+  struct MHD_Response *resp;
+  int ret;
+
+  resp = TMH_RESPONSE_make_json (json);
+  if (NULL == resp)
+    return MHD_NO;
+  ret = MHD_queue_response (connection,
+                            response_code,
+                            resp);
+  MHD_destroy_response (resp);
+  return ret;
+}
+
+
+/**
+ * Make JSON response object.
+ *
+ * @param fmt format string for pack
+ * @param ... varargs
+ * @return MHD response object
+ */
+struct MHD_Response *
+TMH_RESPONSE_make_json_pack (const char *fmt,
+                             ...)
+{
+  json_t *json;
+  va_list argp;
+  struct MHD_Response *ret;
+  json_error_t jerror;
+
+  va_start (argp, fmt);
+  json = json_vpack_ex (&jerror,
+                       0,
+                       fmt,
+                       argp);
+  va_end (argp);
+  if (NULL == json)
+  {
+    GNUNET_log (GNUNET_ERROR_TYPE_ERROR,
+                "Failed to pack JSON with format `%s': %s\n",
+                fmt,
+                jerror.text);
+    GNUNET_break (0);
+    return NULL;
+  }
+  ret = TMH_RESPONSE_make_json (json);
+  json_decref (json);
+  return ret;
+}
+
+
+/**
+ * Function to call to handle the request by building a JSON
+ * reply from a format string and varargs.
+ *
+ * @param connection the MHD connection to handle
+ * @param response_code HTTP response code to use
+ * @param fmt format string for pack
+ * @param ... varargs
+ * @return MHD result code
+ */
+int
+TMH_RESPONSE_reply_json_pack (struct MHD_Connection *connection,
+                              unsigned int response_code,
+                              const char *fmt,
+                              ...)
+{
+  json_t *json;
+  va_list argp;
+  int ret;
+  json_error_t jerror;
+
+  va_start (argp, fmt);
+  json = json_vpack_ex (&jerror,
+                        0,
+                        fmt,
+                        argp);
+  va_end (argp);
+  if (NULL == json)
+  {
+    GNUNET_log (GNUNET_ERROR_TYPE_ERROR,
+                "Failed to pack JSON with format `%s': %s\n",
+                fmt,
+                jerror.text);
+    GNUNET_break (0);
+    return MHD_NO;
+  }
+  ret = TMH_RESPONSE_reply_json (connection,
+                                 json,
+                                 response_code);
+  json_decref (json);
+  return ret;
+}
+
+
+/**
+ * Create a response indicating an internal error.
+ *
+ * @param ec error code to return
+ * @param hint hint about the internal error's nature
+ * @return a MHD response object
+ */
+struct MHD_Response *
+TMH_RESPONSE_make_error (enum TALER_ErrorCode ec,
+                         const char *hint)
+{
+  return TMH_RESPONSE_make_json_pack ("{s:I, s:s}",
+                                      "code", (json_int_t) ec,
+                                      "hint", hint);
+}
+
+
+/**
+ * Send a response indicating an internal error.
+ *
+ * @param connection the MHD connection to use
+ * @param ec error code to return
+ * @param hint hint about the internal error's nature
+ * @return a MHD result code
+ */
+int
+TMH_RESPONSE_reply_internal_error (struct MHD_Connection *connection,
+                                   enum TALER_ErrorCode ec,
+                                   const char *hint)
+{
+  return TMH_RESPONSE_reply_json_pack (connection,
+                                       MHD_HTTP_INTERNAL_SERVER_ERROR,
+                                       "{s:I, s:s}",
+                                       "code", (json_int_t) ec,
+                                       "hint", hint);
+}
+
+
+/**
+ * Send a response indicating that the request was too big.
+ *
+ * @param connection the MHD connection to use
+ * @return a MHD result code
+ */
+int
+TMH_RESPONSE_reply_request_too_large (struct MHD_Connection *connection)
+{
+  struct MHD_Response *resp;
+  int ret;
+
+  resp = MHD_create_response_from_buffer (0,
+                                          NULL,
+                                          MHD_RESPMEM_PERSISTENT);
+  if (NULL == resp)
+    return MHD_NO;
+  ret = MHD_queue_response (connection,
+                            MHD_HTTP_REQUEST_ENTITY_TOO_LARGE,
+                            resp);
+  MHD_destroy_response (resp);
+  return ret;
+}
+
+
+/**
+ * Send a response with a custom HTTP response code, Taler error
+ * code and human-readable diagnostic.
+ *
+ * @param connection the MHD connection to use
+ * @param response_code response code to use
+ * @param ec error code to return
+ * @param msg human-readable diagnostic
+ * @return a MHD result code
+ */
+int
+TMH_RESPONSE_reply_rc (struct MHD_Connection *connection,
+                       unsigned int response_code,
+                       enum TALER_ErrorCode ec,
+                       const char *msg)
+{
+  return TMH_RESPONSE_reply_json_pack (connection,
+                                       response_code,
+                                       "{s:I, s:s}",
+                                       "code", (json_int_t) ec,
+                                       "error", msg);
+}
+
+
+/**
+ * Send a response indicating that the JSON was malformed.
+ *
+ * @param connection the MHD connection to use
+ * @return a MHD result code
+ */
+int
+TMH_RESPONSE_reply_invalid_json (struct MHD_Connection *connection)
+{
+  return TMH_RESPONSE_reply_json_pack (connection,
+                                       MHD_HTTP_BAD_REQUEST,
+                                       "{s:I, s:s}",
+                                       "code", (json_int_t) TALER_EC_JSON_INVALID,
+                                       "error", "invalid json");
+}
+
+
+/**
+ * Send a response indicating that we did not find the @a object
+ * needed for the reply.
+ *
+ * @param connection the MHD connection to use
+ * @param ec error code to return
+ * @param object name of the object we did not find
+ * @return a MHD result code
+ */
+int
+TMH_RESPONSE_reply_not_found (struct MHD_Connection *connection,
+                              enum TALER_ErrorCode ec,
+                              const char *object)
+{
+  return TMH_RESPONSE_reply_json_pack (connection,
+                                       MHD_HTTP_NOT_FOUND,
+                                       "{s:I, s:s}",
+                                       "code", (json_int_t) ec,
+                                       "error", object);
+}
+
+
+/**
+ * Send a response indicating that the request was malformed.
+ *
+ * @param connection the MHD connection to use
+ * @param ec error code to return
+ * @param issue description of what was wrong with the request
+ * @return a MHD result code
+ */
+int
+TMH_RESPONSE_reply_bad_request (struct MHD_Connection *connection,
+                                enum TALER_ErrorCode ec,
+                                const char *issue)
+{
+  return TMH_RESPONSE_reply_json_pack (connection,
+                                       MHD_HTTP_BAD_REQUEST,
+                                       "{s:I, s:s}",
+                                       "code", (json_int_t) ec,
+                                       "error", issue);
+}
+
+
+/**
+ * Add headers we want to return in every response.
+ * Useful for testing, for example when we want to always close
+ * connections.
+ *
+ * @param response response to modify
+ */
+void
+TMH_RESPONSE_add_global_headers (struct MHD_Response *response)
+{
+  if (TMH_merchant_connection_close)
+    GNUNET_break (MHD_YES ==
+                  MHD_add_response_header (response,
+                                           MHD_HTTP_HEADER_CONNECTION,
+                                           "close"));
+}
+
+
+/**
+ * Send a response indicating an external error.
+ *
+ * @param connection the MHD connection to use
+ * @param ec error code to return
+ * @param hint hint about the error's nature
+ * @return a MHD result code
+ */
+int
+TMH_RESPONSE_reply_external_error (struct MHD_Connection *connection,
+                                   enum TALER_ErrorCode ec,
+                                   const char *hint)
+{
+  return TMH_RESPONSE_reply_json_pack (connection,
+                                       MHD_HTTP_BAD_REQUEST,
+                                       "{s:I, s:s}",
+                                       "code", (json_int_t) ec,
+                                       "hint", hint);
+}
+
+
+/**
+ * Send a response indicating a missing argument.
+ *
+ * @param connection the MHD connection to use
+ * @param ec error code to return
+ * @param param_name the parameter that is missing
+ * @return a MHD result code
+ */
+int
+TMH_RESPONSE_reply_arg_missing (struct MHD_Connection *connection,
+                                enum TALER_ErrorCode ec,
+                                const char *param_name)
+{
+  return TMH_RESPONSE_reply_json_pack (connection,
+                                       MHD_HTTP_BAD_REQUEST,
+                                       "{s:s, s:I, s:s}",
+                                       "error", "missing parameter",
+                                       "code", (json_int_t) ec,
+                                       "parameter", param_name);
+}
+
+
+/**
+ * Send a response indicating an invalid argument.
+ *
+ * @param connection the MHD connection to use
+ * @param ec error code to return
+ * @param param_name the parameter that is invalid
+ * @return a MHD result code
+ */
+int
+TMH_RESPONSE_reply_arg_invalid (struct MHD_Connection *connection,
+                                enum TALER_ErrorCode ec,
+                                const char *param_name)
+{
+  return TMH_RESPONSE_reply_json_pack (connection,
+                                       MHD_HTTP_BAD_REQUEST,
+                                       "{s:s, s:I, s:s}",
+                                       "error", "invalid parameter",
+                                       "code", (json_int_t) ec,
+                                       "parameter", param_name);
+}
+
+
+/* end of anastasis-httpd_responses.c */
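For illustration, a sketch (not part of this commit) of how a handler would typically use these reply helpers; reply_policy() is hypothetical and TALER_EC_PARAMETER_MISSING merely stands in for a more specific error code:

/* Hedged sketch only; function name and error code are illustrative. */
static int
reply_policy (struct MHD_Connection *connection,
              const json_t *policy)
{
  if (NULL == policy)
    return TMH_RESPONSE_reply_not_found (connection,
                                         TALER_EC_PARAMETER_MISSING,
                                         "policy");
  /* On success, serialize the object with HTTP 200 OK. */
  return TMH_RESPONSE_reply_json (connection,
                                  policy,
                                  MHD_HTTP_OK);
}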
diff --git a/src/backup/anastasis.conf b/src/backup/anastasis.conf
new file mode 100644
index 0000000..74e508a
--- /dev/null
+++ b/src/backup/anastasis.conf
@@ -0,0 +1,30 @@
+# This file is in the public domain.
+
+# These are default/sample settings for an Anastasis backend.
+
+
+# General settings for the backend.
+[anastasis]
+# Use TCP or UNIX domain sockets?
+SERVE = tcp
+
+# Which HTTP port does the backend listen on?  Only used if "SERVE" is 'tcp'.
+PORT = 9966
+
+# Which IP address should we bind to? i.e. 127.0.0.1 or ::1 for loopback.
+# Can also be given as a hostname.  We will bind to the wildcard (dual-stack)
+# if left empty.  Only used if "SERVE" is 'tcp'.
+# BIND_TO =
+
+
+# Which unix domain path should we bind to? Only used if "SERVE" is 'unix'.
+UNIXPATH = ${ANASTASIS_RUNTIME_DIR}/backup.http
+# What should be the file access permissions (see chmod) for "UNIXPATH"?
+UNIXPATH_MODE = 660
+
+# Which database backend do we use?
+DB = postgres
+
+# Configuration for postgres database.
+[anastasis-postgres]
+CONFIG = postgres:///anastasis
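For comparison, a sketch of the TCP section restricted to the loopback address via BIND_TO, as the comments above describe (leaving BIND_TO unset binds the dual-stack wildcard instead); values are examples only:

[anastasis]
SERVE = tcp
PORT = 9966
BIND_TO = 127.0.0.1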
diff --git a/src/include/Makefile.am b/src/include/Makefile.am
new file mode 100644
index 0000000..b2e3f07
--- /dev/null
+++ b/src/include/Makefile.am
@@ -0,0 +1,9 @@
+# This Makefile.am is in the public domain
+EXTRA_DIST = \
+  platform.h
+
+anastasisincludedir = $(includedir)/anastasis
+
+anastasisinclude_HEADERS = \
+  anastasis_database_plugin.h \
+  anastasis_service.h
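Given anastasisincludedir above, a dependent project would pull in the installed headers roughly as follows (a sketch; the headers themselves carry the actual API and are introduced separately):

#include <anastasis/anastasis_service.h>
#include <anastasis/anastasis_database_plugin.h>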
diff --git a/src/include/Makefile.in b/src/include/Makefile.in
new file mode 100644
index 0000000..8430d0a
--- /dev/null
+++ b/src/include/Makefile.in
@@ -0,0 +1,622 @@
+# Makefile.in generated by automake 1.16.1 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994-2018 Free Software Foundation, Inc.
+
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+
+VPATH = @srcdir@
+am__is_gnu_make = { \
+  if test -z '$(MAKELEVEL)'; then \
+    false; \
+  elif test -n '$(MAKE_HOST)'; then \
+    true; \
+  elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
+    true; \
+  else \
+    false; \
+  fi; \
+}
+am__make_running_with_option = \
+  case $${target_option-} in \
+      ?) ;; \
+      *) echo "am__make_running_with_option: internal error: invalid" \
+              "target option '$${target_option-}' specified" >&2; \
+         exit 1;; \
+  esac; \
+  has_opt=no; \
+  sane_makeflags=$$MAKEFLAGS; \
+  if $(am__is_gnu_make); then \
+    sane_makeflags=$$MFLAGS; \
+  else \
+    case $$MAKEFLAGS in \
+      *\\[\ \  ]*) \
+        bs=\\; \
+        sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
+          | sed "s/$$bs$$bs[$$bs $$bs  ]*//g"`;; \
+    esac; \
+  fi; \
+  skip_next=no; \
+  strip_trailopt () \
+  { \
+    flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
+  }; \
+  for flg in $$sane_makeflags; do \
+    test $$skip_next = yes && { skip_next=no; continue; }; \
+    case $$flg in \
+      *=*|--*) continue;; \
+        -*I) strip_trailopt 'I'; skip_next=yes;; \
+      -*I?*) strip_trailopt 'I';; \
+        -*O) strip_trailopt 'O'; skip_next=yes;; \
+      -*O?*) strip_trailopt 'O';; \
+        -*l) strip_trailopt 'l'; skip_next=yes;; \
+      -*l?*) strip_trailopt 'l';; \
+      -[dEDm]) skip_next=yes;; \
+      -[JT]) skip_next=yes;; \
+    esac; \
+    case $$flg in \
+      *$$target_option*) has_opt=yes; break;; \
+    esac; \
+  done; \
+  test $$has_opt = yes
+am__make_dryrun = (target_option=n; $(am__make_running_with_option))
+am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
+pkgdatadir = $(datadir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkglibexecdir = $(libexecdir)/@PACKAGE@
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+subdir = src/include
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/m4/ax_lib_postgresql.m4 \
+       $(top_srcdir)/m4/ax_prog_doxygen.m4 \
+       $(top_srcdir)/m4/libcurl.m4 $(top_srcdir)/m4/libgnurl.m4 \
+       $(top_srcdir)/m4/libtool.m4 $(top_srcdir)/m4/ltoptions.m4 \
+       $(top_srcdir)/m4/ltsugar.m4 $(top_srcdir)/m4/ltversion.m4 \
+       $(top_srcdir)/m4/lt~obsolete.m4 $(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+       $(ACLOCAL_M4)
+DIST_COMMON = $(srcdir)/Makefile.am $(talerinclude_HEADERS) \
+       $(am__DIST_COMMON)
+mkinstalldirs = $(install_sh) -d
+CONFIG_HEADER = $(top_builddir)/taler_merchant_config.h
+CONFIG_CLEAN_FILES =
+CONFIG_CLEAN_VPATH_FILES =
+AM_V_P = $(am__v_P_@AM_V@)
+am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
+am__v_P_0 = false
+am__v_P_1 = :
+AM_V_GEN = $(am__v_GEN_@AM_V@)
+am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
+am__v_GEN_0 = @echo "  GEN     " $@;
+am__v_GEN_1 = 
+AM_V_at = $(am__v_at_@AM_V@)
+am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
+am__v_at_0 = @
+am__v_at_1 = 
+SOURCES =
+DIST_SOURCES =
+am__can_run_installinfo = \
+  case $$AM_UPDATE_INFO_DIR in \
+    n|no|NO) false;; \
+    *) (install-info --version) >/dev/null 2>&1;; \
+  esac
+am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
+am__vpath_adj = case $$p in \
+    $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
+    *) f=$$p;; \
+  esac;
+am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
+am__install_max = 40
+am__nobase_strip_setup = \
+  srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
+am__nobase_strip = \
+  for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
+am__nobase_list = $(am__nobase_strip_setup); \
+  for p in $$list; do echo "$$p $$p"; done | \
+  sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
+  $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
+    if (++n[$$2] == $(am__install_max)) \
+      { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
+    END { for (dir in files) print dir, files[dir] }'
+am__base_list = \
+  sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
+  sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
+am__uninstall_files_from_dir = { \
+  test -z "$$files" \
+    || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
+    || { echo " ( cd '$$dir' && rm -f" $$files ")"; \
+         $(am__cd) "$$dir" && rm -f $$files; }; \
+  }
+am__installdirs = "$(DESTDIR)$(talerincludedir)"
+HEADERS = $(talerinclude_HEADERS)
+am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
+# Read a list of newline-separated strings from the standard input,
+# and print each of them once, without duplicates.  Input order is
+# *not* preserved.
+am__uniquify_input = $(AWK) '\
+  BEGIN { nonempty = 0; } \
+  { items[$$0] = 1; nonempty = 1; } \
+  END { if (nonempty) { for (i in items) print i; }; } \
+'
+# Make sure the list of sources is unique.  This is necessary because,
+# e.g., the same source file might be shared among _SOURCES variables
+# for different programs/libraries.
+am__define_uniq_tagged_files = \
+  list='$(am__tagged_files)'; \
+  unique=`for i in $$list; do \
+    if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+  done | $(am__uniquify_input)`
+ETAGS = etags
+CTAGS = ctags
+am__DIST_COMMON = $(srcdir)/Makefile.in
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+ACLOCAL = @ACLOCAL@
+AMTAR = @AMTAR@
+AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@
+AR = @AR@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+DLLTOOL = @DLLTOOL@
+DOXYGEN_PAPER_SIZE = @DOXYGEN_PAPER_SIZE@
+DSYMUTIL = @DSYMUTIL@
+DUMPBIN = @DUMPBIN@
+DX_CONFIG = @DX_CONFIG@
+DX_CONFIG2 = @DX_CONFIG2@
+DX_CONFIG3 = @DX_CONFIG3@
+DX_CONFIG4 = @DX_CONFIG4@
+DX_DOCDIR = @DX_DOCDIR@
+DX_DOCDIR2 = @DX_DOCDIR2@
+DX_DOCDIR3 = @DX_DOCDIR3@
+DX_DOCDIR4 = @DX_DOCDIR4@
+DX_DOT = @DX_DOT@
+DX_DOXYGEN = @DX_DOXYGEN@
+DX_DVIPS = @DX_DVIPS@
+DX_EGREP = @DX_EGREP@
+DX_ENV = @DX_ENV@
+DX_FLAG_chi = @DX_FLAG_chi@
+DX_FLAG_chm = @DX_FLAG_chm@
+DX_FLAG_doc = @DX_FLAG_doc@
+DX_FLAG_dot = @DX_FLAG_dot@
+DX_FLAG_html = @DX_FLAG_html@
+DX_FLAG_man = @DX_FLAG_man@
+DX_FLAG_pdf = @DX_FLAG_pdf@
+DX_FLAG_ps = @DX_FLAG_ps@
+DX_FLAG_rtf = @DX_FLAG_rtf@
+DX_FLAG_xml = @DX_FLAG_xml@
+DX_HHC = @DX_HHC@
+DX_LATEX = @DX_LATEX@
+DX_MAKEINDEX = @DX_MAKEINDEX@
+DX_PDFLATEX = @DX_PDFLATEX@
+DX_PERL = @DX_PERL@
+DX_PROJECT = @DX_PROJECT@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+FGREP = @FGREP@
+GREP = @GREP@
+INSTALL = @INSTALL@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+JANSSON_CFLAGS = @JANSSON_CFLAGS@
+JANSSON_LIBS = @JANSSON_LIBS@
+LD = @LD@
+LDFLAGS = @LDFLAGS@
+LIBCURL = @LIBCURL@
+LIBCURL_CPPFLAGS = @LIBCURL_CPPFLAGS@
+LIBGCRYPT_CFLAGS = @LIBGCRYPT_CFLAGS@
+LIBGCRYPT_CONFIG = @LIBGCRYPT_CONFIG@
+LIBGCRYPT_LIBS = @LIBGCRYPT_LIBS@
+LIBGNURL = @LIBGNURL@
+LIBGNURL_CPPFLAGS = @LIBGNURL_CPPFLAGS@
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LIPO = @LIPO@
+LN_S = @LN_S@
+LTLIBOBJS = @LTLIBOBJS@
+LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@
+MAKEINFO = @MAKEINFO@
+MANIFEST_TOOL = @MANIFEST_TOOL@
+MKDIR_P = @MKDIR_P@
+NM = @NM@
+NMEDIT = @NMEDIT@
+OBJDUMP = @OBJDUMP@
+OBJEXT = @OBJEXT@
+OTOOL = @OTOOL@
+OTOOL64 = @OTOOL64@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_URL = @PACKAGE_URL@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+PG_CONFIG = @PG_CONFIG@
+PKG_CONFIG = @PKG_CONFIG@
+PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@
+PKG_CONFIG_PATH = @PKG_CONFIG_PATH@
+POSTGRESQL_CPPFLAGS = @POSTGRESQL_CPPFLAGS@
+POSTGRESQL_LDFLAGS = @POSTGRESQL_LDFLAGS@
+POSTGRESQL_VERSION = @POSTGRESQL_VERSION@
+RANLIB = @RANLIB@
+SED = @SED@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+STRIP = @STRIP@
+VERSION = @VERSION@
+_libcurl_config = @_libcurl_config@
+_libgnurl_config = @_libgnurl_config@
+abs_builddir = @abs_builddir@
+abs_srcdir = @abs_srcdir@
+abs_top_builddir = @abs_top_builddir@
+abs_top_srcdir = @abs_top_srcdir@
+ac_ct_AR = @ac_ct_AR@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+builddir = @builddir@
+datadir = @datadir@
+datarootdir = @datarootdir@
+docdir = @docdir@
+dvidir = @dvidir@
+exec_prefix = @exec_prefix@
+gitcommand = @gitcommand@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+htmldir = @htmldir@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localedir = @localedir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+oldincludedir = @oldincludedir@
+pdfdir = @pdfdir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+psdir = @psdir@
+runstatedir = @runstatedir@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+srcdir = @srcdir@
+sysconfdir = @sysconfdir@
+target_alias = @target_alias@
+top_build_prefix = @top_build_prefix@
+top_builddir = @top_builddir@
+top_srcdir = @top_srcdir@
+tsc = @tsc@
+
+# This Makefile.am is in the public domain
+EXTRA_DIST = \
+  platform.h
+
+talerincludedir = $(includedir)/taler
+talerinclude_HEADERS = \
+  taler_merchantdb_lib.h \
+  taler_merchantdb_plugin.h \
+  taler_merchant_service.h
+
+all: all-am
+
+.SUFFIXES:
+$(srcdir)/Makefile.in:  $(srcdir)/Makefile.am  $(am__configure_deps)
+       @for dep in $?; do \
+         case '$(am__configure_deps)' in \
+           *$$dep*) \
+             ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
+               && { if test -f $@; then exit 0; else break; fi; }; \
+             exit 1;; \
+         esac; \
+       done; \
+       echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu src/include/Makefile'; \
+       $(am__cd) $(top_srcdir) && \
+         $(AUTOMAKE) --gnu src/include/Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+       @case '$?' in \
+         *config.status*) \
+           cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
+         *) \
+           echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles)'; \
+           cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles);; \
+       esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+       cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+$(top_srcdir)/configure:  $(am__configure_deps)
+       cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(ACLOCAL_M4):  $(am__aclocal_m4_deps)
+       cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(am__aclocal_m4_deps):
+
+mostlyclean-libtool:
+       -rm -f *.lo
+
+clean-libtool:
+       -rm -rf .libs _libs
+install-talerincludeHEADERS: $(talerinclude_HEADERS)
+       @$(NORMAL_INSTALL)
+       @list='$(talerinclude_HEADERS)'; test -n "$(talerincludedir)" || list=; \
+       if test -n "$$list"; then \
+         echo " $(MKDIR_P) '$(DESTDIR)$(talerincludedir)'"; \
+         $(MKDIR_P) "$(DESTDIR)$(talerincludedir)" || exit 1; \
+       fi; \
+       for p in $$list; do \
+         if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+         echo "$$d$$p"; \
+       done | $(am__base_list) | \
+       while read files; do \
+         echo " $(INSTALL_HEADER) $$files '$(DESTDIR)$(talerincludedir)'"; \
+         $(INSTALL_HEADER) $$files "$(DESTDIR)$(talerincludedir)" || exit $$?; \
+       done
+
+uninstall-talerincludeHEADERS:
+       @$(NORMAL_UNINSTALL)
+       @list='$(talerinclude_HEADERS)'; test -n "$(talerincludedir)" || list=; \
+       files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
+       dir='$(DESTDIR)$(talerincludedir)'; $(am__uninstall_files_from_dir)
+
+ID: $(am__tagged_files)
+       $(am__define_uniq_tagged_files); mkid -fID $$unique
+tags: tags-am
+TAGS: tags
+
+tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
+       set x; \
+       here=`pwd`; \
+       $(am__define_uniq_tagged_files); \
+       shift; \
+       if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
+         test -n "$$unique" || unique=$$empty_fix; \
+         if test $$# -gt 0; then \
+           $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+             "$$@" $$unique; \
+         else \
+           $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+             $$unique; \
+         fi; \
+       fi
+ctags: ctags-am
+
+CTAGS: ctags
+ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
+       $(am__define_uniq_tagged_files); \
+       test -z "$(CTAGS_ARGS)$$unique" \
+         || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
+            $$unique
+
+GTAGS:
+       here=`$(am__cd) $(top_builddir) && pwd` \
+         && $(am__cd) $(top_srcdir) \
+         && gtags -i $(GTAGS_ARGS) "$$here"
+cscopelist: cscopelist-am
+
+cscopelist-am: $(am__tagged_files)
+       list='$(am__tagged_files)'; \
+       case "$(srcdir)" in \
+         [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
+         *) sdir=$(subdir)/$(srcdir) ;; \
+       esac; \
+       for i in $$list; do \
+         if test -f "$$i"; then \
+           echo "$(subdir)/$$i"; \
+         else \
+           echo "$$sdir/$$i"; \
+         fi; \
+       done >> $(top_builddir)/cscope.files
+
+distclean-tags:
+       -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
+
+distdir: $(BUILT_SOURCES)
+       $(MAKE) $(AM_MAKEFLAGS) distdir-am
+
+distdir-am: $(DISTFILES)
+       @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+       topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+       list='$(DISTFILES)'; \
+         dist_files=`for file in $$list; do echo $$file; done | \
+         sed -e "s|^$$srcdirstrip/||;t" \
+             -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
+       case $$dist_files in \
+         */*) $(MKDIR_P) `echo "$$dist_files" | \
+                          sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
+                          sort -u` ;; \
+       esac; \
+       for file in $$dist_files; do \
+         if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+         if test -d $$d/$$file; then \
+           dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
+           if test -d "$(distdir)/$$file"; then \
+           find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
+           fi; \
+           if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+             cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
+             find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
+           fi; \
+           cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
+         else \
+           test -f "$(distdir)/$$file" \
+           || cp -p $$d/$$file "$(distdir)/$$file" \
+           || exit 1; \
+         fi; \
+       done
+check-am: all-am
+check: check-am
+all-am: Makefile $(HEADERS)
+installdirs:
+       for dir in "$(DESTDIR)$(talerincludedir)"; do \
+         test -z "$$dir" || $(MKDIR_P) "$$dir"; \
+       done
+install: install-am
+install-exec: install-exec-am
+install-data: install-data-am
+uninstall: uninstall-am
+
+install-am: all-am
+       @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-am
+install-strip:
+       if test -z '$(STRIP)'; then \
+         $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+           install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+             install; \
+       else \
+         $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+           install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+           "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
+       fi
+mostlyclean-generic:
+
+clean-generic:
+
+distclean-generic:
+       -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+       -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
+
+maintainer-clean-generic:
+       @echo "This command is intended for maintainers to use"
+       @echo "it deletes files that may require special tools to rebuild."
+clean: clean-am
+
+clean-am: clean-generic clean-libtool mostlyclean-am
+
+distclean: distclean-am
+       -rm -f Makefile
+distclean-am: clean-am distclean-generic distclean-tags
+
+dvi: dvi-am
+
+dvi-am:
+
+html: html-am
+
+html-am:
+
+info: info-am
+
+info-am:
+
+install-data-am: install-talerincludeHEADERS
+
+install-dvi: install-dvi-am
+
+install-dvi-am:
+
+install-exec-am:
+
+install-html: install-html-am
+
+install-html-am:
+
+install-info: install-info-am
+
+install-info-am:
+
+install-man:
+
+install-pdf: install-pdf-am
+
+install-pdf-am:
+
+install-ps: install-ps-am
+
+install-ps-am:
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-am
+       -rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-am
+
+mostlyclean-am: mostlyclean-generic mostlyclean-libtool
+
+pdf: pdf-am
+
+pdf-am:
+
+ps: ps-am
+
+ps-am:
+
+uninstall-am: uninstall-talerincludeHEADERS
+
+.MAKE: install-am install-strip
+
+.PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \
+       clean-libtool cscopelist-am ctags ctags-am distclean \
+       distclean-generic distclean-libtool distclean-tags distdir dvi \
+       dvi-am html html-am info info-am install install-am \
+       install-data install-data-am install-dvi install-dvi-am \
+       install-exec install-exec-am install-html install-html-am \
+       install-info install-info-am install-man install-pdf \
+       install-pdf-am install-ps install-ps-am install-strip \
+       install-talerincludeHEADERS installcheck installcheck-am \
+       installdirs maintainer-clean maintainer-clean-generic \
+       mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \
+       ps ps-am tags tags-am uninstall uninstall-am \
+       uninstall-talerincludeHEADERS
+
+.PRECIOUS: Makefile
+
+
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/src/include/anastasis_database_plugin.h b/src/include/anastasis_database_plugin.h
new file mode 100644
index 0000000..ddcc17c
--- /dev/null
+++ b/src/include/anastasis_database_plugin.h
@@ -0,0 +1,119 @@
+/*
+  This file is part of Anastasis
+  Copyright (C) 2019 Taler Systems SA
+
+  Anastasis is free software; you can redistribute it and/or modify it under the
+  terms of the GNU Lesser General Public License as published by the Free Software
+  Foundation; either version 3, or (at your option) any later version.
+
+  Anastasis is distributed in the hope that it will be useful, but WITHOUT ANY
+  WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+  A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License along with
+  Anastasis; see the file COPYING.GPL.  If not, see <http://www.gnu.org/licenses/>
+*/
+/**
+ * @file include/anastasis_database_plugin.h
+ * @brief database access for Anastasis
+ * @author Christian Grothoff
+ */
+#ifndef ANASTASIS_DATABASE_PLUGIN_H
+#define ANASTASIS_DATABASE_PLUGIN_H
+
+#include <gnunet/gnunet_util_lib.h>
+#include <gnunet/gnunet_db_lib.h>
+#include <jansson.h>
+
+/**
+ * Handle to interact with the database.
+ */
+struct AnastasisDatabasePlugin;
+
+
+/**
+ * Handle to interact with the database.
+ *
+ * Functions ending with "_TR" run their OWN transaction scope
+ * and MUST NOT be called from within a transaction set up by the
+ * caller.  Functions ending with "_NT" require the caller to
+ * set up a transaction scope.  Functions without a suffix are
+ * simple, single SQL queries that MAY be used either way.
+ */
+struct AnastasisDatabasePlugin
+{
+
+  /**
+   * Closure for all callbacks.
+   */
+  void *cls;
+
+  /**
+   * Name of the library which generated this plugin.  Set by the
+   * plugin loader.
+   */
+  char *library_name;
+
+  /**
+   * Drop Anastasis tables. Used for testcases.
+   *
+   * @param cls closure
+   * @return #GNUNET_OK upon success; #GNUNET_SYSERR upon failure
+   */
+  int
+  (*drop_tables) (void *cls);
+
+  /**
+   * Initialize Anastasis tables
+   *
+   * @param cls closure
+   * @return #GNUNET_OK upon success; #GNUNET_SYSERR upon failure
+   */
+  int
+  (*initialize) (void *cls);
+
+  /**
+   * Roll back the current transaction of a database connection.
+   *
+   * @param cls the plugin-specific state
+   */
+  void
+  (*rollback) (void *cls);
+
+  /**
+   * Start a transaction.
+   *
+   * @param cls the plugin-specific state
+   * @param name unique name identifying the transaction (for debugging),
+   *             must point to a constant
+   * @return #GNUNET_OK on success
+   */
+  int
+  (*start) (void *cls,
+            const char *name);
+
+
+  /**
+   * Do a pre-flight check that we are not in an uncommitted transaction.
+   * If we are, try to commit the previous transaction and output a warning.
+   * Does not return anything, as we will continue regardless of the outcome.
+   *
+   * @param cls the plugin-specific state
+   */
+  void
+  (*preflight) (void *cls);
+
+
+  /**
+   * Commit the current transaction of a database connection.
+   *
+   * @param cls the plugin-specific state
+   * @return transaction status code
+   */
+  enum GNUNET_DB_QueryStatus
+  (*commit) (void *cls);
+
+};
+
+#endif
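
The transaction conventions documented for struct AnastasisDatabasePlugin above
are easiest to see from the caller's side.  The sketch below is illustrative
only: the plugin handle is assumed to come from a plugin loader, and
run_queries() is a hypothetical stand-in for the "_NT"-style SQL operations;
only the callbacks listed in the struct itself are taken from the header.

/* Minimal usage sketch (illustrative, not part of this commit):
   drive one transaction through the plugin's callbacks. */
static enum GNUNET_DB_QueryStatus
run_in_transaction (struct AnastasisDatabasePlugin *plugin)
{
  enum GNUNET_DB_QueryStatus qs;

  /* Warn about (and try to clean up) any transaction left open earlier. */
  plugin->preflight (plugin->cls);
  if (GNUNET_OK !=
      plugin->start (plugin->cls,
                     "example transaction"))
    return GNUNET_DB_STATUS_HARD_ERROR;
  /* run_queries() is hypothetical; it stands for the "_NT"-style
     statements that require a caller-provided transaction scope. */
  qs = run_queries (plugin->cls);
  if (qs < 0)
  {
    plugin->rollback (plugin->cls);
    return qs;
  }
  return plugin->commit (plugin->cls);
}
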
diff --git a/src/include/anastasis_service.h b/src/include/anastasis_service.h
new file mode 100644
index 0000000..7a366ca
--- /dev/null
+++ b/src/include/anastasis_service.h
@@ -0,0 +1,45 @@
+/*
+  This file is part of Anastasis
+  Copyright (C) 2019 Taler Systems SA
+
+  Anastasis is free software; you can redistribute it and/or modify it under the
+  terms of the GNU Affero General Public License as published by the Free Software
+  Foundation; either version 3, or (at your option) any later version.
+
+  Anastasis is distributed in the hope that it will be useful, but WITHOUT ANY
+  WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+  A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more details.
+
+  You should have received a copy of the GNU Lesser General Public License along with
+  Anastasis; see the file COPYING.LIB.  If not, see <http://www.gnu.org/licenses/>
+*/
+/**
+ * @file include/anastasis_service.h
+ * @brief C interface of libanastasis, a C library to use the Anastasis backend's HTTP API
+ * @author Christian Grothoff
+ * @author Marcello Stanisci
+ */
+#ifndef _ANASTASIS_MERCHANT_SERVICE_H
+#define _ANASTASIS_MERCHANT_SERVICE_H
+
+#include <gnunet/gnunet_curl_lib.h>
+#include <jansson.h>
+
+/**
+ * Abort a pending payment at the backend identified by @a merchant_url.
+ */
+struct Anastasis_MERCHANT_Pay *
+Anastasis_MERCHANT_pay_abort (struct GNUNET_CURL_Context *ctx,
+                         const char *merchant_url,
+                         const char *instance,
+                         const struct GNUNET_HashCode *h_contract,
+                         const struct Anastasis_Amount *amount,
+                         const struct Anastasis_Amount *max_fee,
+                         const struct Anastasis_MerchantPublicKeyP *merchant_pub,
+                         const struct Anastasis_MerchantSignatureP *merchant_sig,
+                         struct GNUNET_TIME_Absolute timestamp,
+                         struct GNUNET_TIME_Absolute refund_deadline,
+                         struct GNUNET_TIME_Absolute pay_deadline,
+                         const struct GNUNET_HashCode *h_wire,
+                         const char *order_id);
+
+
+#endif  /* _ANASTASIS_MERCHANT_SERVICE_H */
diff --git a/src/include/platform.h b/src/include/platform.h
new file mode 100644
index 0000000..a2ed6f4
--- /dev/null
+++ b/src/include/platform.h
@@ -0,0 +1,60 @@
+/*
+  This file is part of TALER
+  Copyright (C) 2014, 2015, 2016 GNUnet e.V. and INRIA
+
+  TALER is free software; you can redistribute it and/or modify it under the
+  terms of the GNU Affero General Public License as published by the Free Software
+  Foundation; either version 3, or (at your option) any later version.
+
+  TALER is distributed in the hope that it will be useful, but WITHOUT ANY
+  WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+  A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License along with
+  TALER; see the file COPYING.  If not, see <http://www.gnu.org/licenses/>
+*/
+
+/**
+ * @file include/platform.h
+ * @brief This file contains the includes and definitions which are used by the
+ *        rest of the modules
+ * @author Sree Harsha Totakura <address@hidden>
+ */
+
+#ifndef PLATFORM_H_
+#define PLATFORM_H_
+
+/* Include our configuration header */
+#ifndef HAVE_USED_CONFIG_H
+# define HAVE_USED_CONFIG_H
+# ifdef HAVE_CONFIG_H
+#  include "taler_merchant_config.h"
+# endif
+#endif
+
+
+#if (GNUNET_EXTRA_LOGGING >= 1)
+#define VERBOSE(cmd) cmd
+#else
+#define VERBOSE(cmd) do { break; }while(0)
+#endif
+
+/* Include the features available for GNU source */
+#define _GNU_SOURCE
+
+/* Include GNUnet's platform file */
+#include <gnunet/platform.h>
+
+/* Do not use shortcuts for gcrypt mpi */
+#define GCRYPT_NO_MPI_MACROS 1
+
+/* Do not use deprecated functions from gcrypt */
+#define GCRYPT_NO_DEPRECATED 1
+
+/* Ignore MHD deprecations for now as we want to stay compatible
+   with "ancient" MHD releases. */
+#define MHD_NO_DEPRECATION 1
+
+#endif  /* PLATFORM_H_ */
+
+/* end of platform.h */
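
For clarity, a hypothetical call site for the VERBOSE() macro defined above:
the wrapped statement is compiled in only when GNUNET_EXTRA_LOGGING is at
least 1; otherwise the macro expands to a no-op.

/* Hypothetical call site (illustration only). */
#include "platform.h"
#include <stdio.h>

static void
example_trace (void)
{
  VERBOSE (fprintf (stderr,
                    "entering example_trace()\n"));
}
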
diff --git a/src/lib/Makefile.am b/src/lib/Makefile.am
new file mode 100644
index 0000000..813214f
--- /dev/null
+++ b/src/lib/Makefile.am
@@ -0,0 +1,32 @@
+# This Makefile.am is in the public domain
+AM_CPPFLAGS = -I$(top_srcdir)/src/include -I$(top_srcdir)/src/backend
+
+if USE_COVERAGE
+  AM_CFLAGS = --coverage -O0
+  XLIB = -lgcov
+endif
+
+lib_LTLIBRARIES = \
+  libanastasis.la
+
+libanastasis_la_LDFLAGS = \
+  -version-info 0:0:0 \
+  -no-undefined
+
+libanastasis_la_SOURCES = \
+  anastasis_api_salt.c
+
+libanastasis_la_LIBADD = \
+  -lgnunetcurl \
+  -lgnunetjson \
+  -lgnunetutil \
+  -ljansson \
+  $(XLIB)
+
+if HAVE_LIBCURL
+libanastasis_la_LIBADD += -lcurl
+else
+if HAVE_LIBGNURL
+libanastasis_la_LIBADD += -lgnurl
+endif
+endif
diff --git a/src/lib/anastasis_api_salt.c b/src/lib/anastasis_api_salt.c
new file mode 100644
index 0000000..a403c45
--- /dev/null
+++ b/src/lib/anastasis_api_salt.c
@@ -0,0 +1,58 @@
+/*
+  This file is part of TALER
+  Copyright (C) 2014-2017 GNUnet e.V. and INRIA
+
+  TALER is free software; you can redistribute it and/or modify
+  it under the terms of the GNU Lesser General Public License as
+  published by the Free Software Foundation; either version 2.1,
+  or (at your option) any later version.
+
+  TALER is distributed in the hope that it will be useful, but
+  WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU Lesser General Public License for more details.
+
+  You should have received a copy of the GNU Lesser General Public
+  License along with TALER; see the file COPYING.LGPL.  If not,
+  see <http://www.gnu.org/licenses/>
+*/
+
+/**
+ * @file lib/anastasis_api_salt.c
+ * @brief Implementation of the /salt GET
+ * @author Christian Grothoff
+ */
+#include "platform.h"
+#include <curl/curl.h>
+#include <jansson.h>
+#include <microhttpd.h> /* just for HTTP status codes */
+#include <gnunet/gnunet_util_lib.h>
+#include <gnunet/gnunet_curl_lib.h>
+#include "anastasis_service.h"
+
+
+/**
+ * @brief A Salt Operation Handle
+ */
+struct ANASTASIS_SaltOperation
+{
+
+  /**
+   * The url for this request.
+   */
+  char *url;
+
+  /**
+   * Handle for the request.
+   */
+  struct GNUNET_CURL_Job *job;
+
+  /**
+   * Reference to the execution context.
+   */
+  struct GNUNET_CURL_Context *ctx;
+
+};
+
+
+/* end of anastasis_api_salt.c */
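
For orientation, here is a sketch of how a handle like struct
ANASTASIS_SaltOperation is typically created and cancelled on top of GNUnet's
cURL wrapper.  The names ANASTASIS_salt() / ANASTASIS_salt_cancel(), the
completion-callback signature and the exact GNUNET_CURL_job_add() argument
list are assumptions for illustration, not part of this file.

/* Illustrative sketch only; names and the GNUNET_CURL_job_add()
   argument list are assumed, not taken from this commit. */

/* Hypothetical completion callback invoked once the HTTP request ends. */
static void
handle_salt_finished (void *cls,
                      long response_code,
                      const void *response)
{
  struct ANASTASIS_SaltOperation *so = cls;

  (void) response_code;
  (void) response;
  so->job = NULL;
  /* ... parse the /salt reply and notify the application here ... */
}


/* Start a GET /salt request against @a backend_url. */
struct ANASTASIS_SaltOperation *
ANASTASIS_salt (struct GNUNET_CURL_Context *ctx,
                const char *backend_url)
{
  struct ANASTASIS_SaltOperation *so;
  CURL *eh;

  so = GNUNET_new (struct ANASTASIS_SaltOperation);
  so->ctx = ctx;
  GNUNET_asprintf (&so->url,
                   "%s/salt",
                   backend_url);
  eh = curl_easy_init ();
  curl_easy_setopt (eh,
                    CURLOPT_URL,
                    so->url);
  so->job = GNUNET_CURL_job_add (ctx,
                                 eh,
                                 GNUNET_YES,
                                 &handle_salt_finished,
                                 so);
  return so;
}


/* Cancel a pending /salt request and release the handle. */
void
ANASTASIS_salt_cancel (struct ANASTASIS_SaltOperation *so)
{
  if (NULL != so->job)
  {
    GNUNET_CURL_job_cancel (so->job);
    so->job = NULL;
  }
  GNUNET_free (so->url);
  GNUNET_free (so);
}
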

-- 
To stop receiving notification emails like this one, please contact
address@hidden.


