diff options
99 files changed, 4853 insertions, 1845 deletions
@@ -12,6 +12,7 @@ Makefile.in aclocal.m4 autom4te.cache/ compile +config.cache config.guess config.log config.status @@ -56,3 +57,4 @@ sync_config.h sync_config.h.in src/sync/sync-httpd src/testing/test_sync_api_home/.local/share/taler/auditors/ +src/testing/test_sync_api_home/.local/share/taler/exchange-secmod-cs/ diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..6c91f29 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,4 @@ +[submodule "doc/prebuilt"] + path = doc/prebuilt + url = https://git.taler.net/docs.git + branch = prebuilt @@ -1,19 +1,22 @@ +Fri Apr 12 10:43:41 AM CEST 2024 + Releasing sync 0.10.1. -CG + Sun 08 Aug 2021 08:44:35 PM CEST - Update to latest Taler APIs. - Releasing sync 0.8.2. -CG + Update to latest Taler APIs. + Releasing sync 0.8.2. -CG Tue 10 Nov 2020 01:11:02 PM CET - Update to latest Taler APIs. - Releasing sync 0.8.1. -CG + Update to latest Taler APIs. + Releasing sync 0.8.1. -CG Sat 03 Oct 2020 04:37:37 PM CEST - Releasing sync 0.8.0. -CG + Releasing sync 0.8.0. -CG Tue 31 Mar 2020 03:44:33 PM CEST - Releasing sync 0.7.0. -CG + Releasing sync 0.7.0. -CG Tue 24 Dec 2019 11:01:21 PM CET - Releasing sync 0.6.0. -CG + Releasing sync 0.6.0. -CG Wed Nov 13 15:01:09 2019 +0100 - Initial project setup. -CG + Initial project setup. -CG @@ -5,7 +5,7 @@ #--------------------------------------------------------------------------- DOXYFILE_ENCODING = UTF-8 PROJECT_NAME = "Sync" -PROJECT_NUMBER = 0.5 +PROJECT_NUMBER = 0.9.3 OUTPUT_DIRECTORY = doxygen-doc/ CREATE_SUBDIRS = YES OUTPUT_LANGUAGE = English @@ -1,8 +1,8 @@ Installation Instructions ************************* - Copyright (C) 1994-1996, 1999-2002, 2004-2016 Free Software -Foundation, Inc. + Copyright (C) 1994-1996, 1999-2002, 2004-2017, 2020-2021 Free +Software Foundation, Inc. 
Copying and distribution of this file, with or without modification, are permitted in any medium without royalty provided the copyright @@ -225,7 +225,7 @@ order to use an ANSI C compiler: and if that doesn't work, install pre-built binaries of GCC for HP-UX. - HP-UX 'make' updates targets which have the same time stamps as their + HP-UX 'make' updates targets which have the same timestamps as their prerequisites, which makes it generally unusable when shipped generated files such as 'configure' are involved. Use GNU 'make' instead. diff --git a/Makefile.am b/Makefile.am index 3227908..d7a53ea 100644 --- a/Makefile.am +++ b/Makefile.am @@ -8,9 +8,9 @@ else endif else if ENABLE_DOC - SUBDIRS = . src doc + SUBDIRS = . src doc contrib else - SUBDIRS = . src doc + SUBDIRS = . src doc contrib endif endif @@ -20,7 +20,4 @@ ACLOCAL_AMFLAGS = -I m4 EXTRA_DIST = \ AUTHORS \ COPYING.AGPL \ - contrib/gnunet.tag \ - contrib/uncrustify.cfg \ - contrib/uncrustify_precommit \ Doxyfile @@ -68,6 +68,24 @@ At this time, Sync only works with a Postgres database. Other database backends could easily be written in the future. +Dependencies +============ + +See INSTALL for generic installation instructions. In addition, Sync +requires GNUnet, Taler Exchange, and Taler Merchant libraries. If these +are installed in a non-standard place, use the configure script options +--with-gnunet=DIR, --with-exchange=DIR, and --with-merchant=DIR, +respectively. + +The following are likewise required: + + libmicrohttpd --with-microhttpd=DIR + PostgreSQL --with-postgresql=PATH-TO-PG-CONFIG + +Other non-essential dependencies are indicated by --with-FOO +in the "./configure --help" output. 
+ + License: ======== diff --git a/configure.ac b/configure.ac index 7cb6627..46cd31b 100644 --- a/configure.ac +++ b/configure.ac @@ -4,7 +4,7 @@ # This configure file is in the public domain AC_PREREQ([2.69]) -AC_INIT([sync], [0.8.3], [taler-bug@gnu.org]) +AC_INIT([sync],[0.10.1],[taler-bug@gnu.org]) AC_CONFIG_SRCDIR([src/sync/sync-httpd.c]) AC_CONFIG_HEADERS([sync_config.h]) # support for non-recursive builds @@ -15,7 +15,7 @@ AM_SILENT_RULES([yes]) AC_CONFIG_MACRO_DIR([m4]) AC_PROG_AWK -AC_PROG_CC_C99 +AC_PROG_CC AC_PROG_OBJC AC_PROG_INSTALL AC_PROG_LN_S @@ -47,7 +47,6 @@ AS_IF([test "x$doc_only" != xyes],[ # Checks for programs. AC_PROG_CC -AC_PROG_CC_C99 # Force some CFLAGS CFLAGS="-Wall -Wno-address-of-packed-member $CFLAGS" @@ -90,11 +89,8 @@ AS_CASE([$with_gnunet], [no], [AC_MSG_ERROR([--with-gnunet is required])], [LDFLAGS="-L$with_gnunet/lib $LDFLAGS" CPPFLAGS="-I$with_gnunet/include $CPPFLAGS"]) -AC_CHECK_HEADERS([gnunet/platform.h gnunet/gnunet_util_lib.h], - [AC_CHECK_LIB([gnunetutil], [GNUNET_SCHEDULER_run], libgnunetutil=1)], - [], [#ifdef HAVE_GNUNET_PLATFORM_H - #include <gnunet/platform.h> - #endif]) +AC_CHECK_HEADERS([gnunet/gnunet_util_lib.h], + [AC_CHECK_LIB([gnunetutil], [GNUNET_SCHEDULER_run], libgnunetutil=1)]) AS_IF([test $libgnunetutil != 1], [AC_MSG_ERROR([[ *** @@ -105,7 +101,7 @@ AS_IF([test $libgnunetutil != 1], # test for postgres -AX_LIB_POSTGRESQL([9.3]) +AX_LIB_POSTGRESQL([15]) AS_IF([test "x$found_postgresql" = "xyes"],[postgres=true]) AM_CONDITIONAL(HAVE_POSTGRESQL, test x$postgres = xtrue) @@ -133,10 +129,7 @@ CPPFLAGS="$CPPFLAGS $POSTGRESQL_CPPFLAGS" LDFLAGS="$LDFLAGS -L/usr/local/lib" AC_CHECK_HEADERS([gnunet/gnunet_pq_lib.h], - [AC_CHECK_LIB([gnunetpq], [GNUNET_PQ_connect_with_cfg], libgnunetpq=1)], - [], [#ifdef HAVE_GNUNET_PLATFORM_H - #include <gnunet/platform.h> - #endif]) + [AC_CHECK_LIB([gnunetpq], [GNUNET_PQ_connect_with_cfg], libgnunetpq=1)]) AM_CONDITIONAL(HAVE_GNUNETPQ, test x$libgnunetpq = x1) @@ -172,16 +165,6 @@ 
PKG_CHECK_MODULES([JANSSON], [jansson >= 2.3], *** You need libjansson to build this program. ***]])]) -# check for libgnurl -# libgnurl -LIBGNURL_CHECK_CONFIG(,7.34.0,gnurl=1,gnurl=0) -AS_IF([test "x$gnurl" = x1],[ - AM_CONDITIONAL(HAVE_LIBGNURL, true) - AC_DEFINE([HAVE_LIBGNURL],[1],[Have libgnurl]) -],[ - AM_CONDITIONAL(HAVE_LIBGNURL, false) -]) - # libcurl-gnutls LIBCURL_CHECK_CONFIG(,7.34.0,[curl=true],[curl=false]) AS_IF([test "x$curl" = xtrue], @@ -199,30 +182,16 @@ AS_IF([test "x$curl" = xtrue], [curl=false])]) # cURL must support CURLINFO_TLS_SESSION, version >= 7.34 -# Check for curl/curl.h and gnurl/curl.h so we can use #ifdef -# HAVE_CURL_CURL_H later (the above LIBCURL_CHECK_CONFIG accepted -# *either* header set). +# Check for curl/curl.h AC_CHECK_HEADERS([curl/curl.h],, - curl=false - AC_CHECK_HEADERS([gnurl/curl.h],, - gnurl=false)) - -# libgnurl -AS_IF([test "x$gnurl" = "x0"], - [AS_IF([test "x$curl" = "x0"], - [AC_MSG_NOTICE([NOTICE: libgnurl not found. taler-bank support will not be compiled.])], - [AC_MSG_NOTICE([WARNING: libgnurl not found, trying to use libcurl-gnutls instead.])])]) + curl=false) AS_IF([test x$curl = xfalse], - [AM_CONDITIONAL(HAVE_LIBCURL, false) - AS_IF([test "x$gnurl" = "x0"], - [AC_MSG_WARN([GNU Taler requires libcurl-gnutls >= 7.34])])], - [AM_CONDITIONAL(HAVE_LIBCURL, true) - AC_DEFINE([HAVE_LIBCURL],[1],[Have CURL])]) + [AC_MSG_ERROR([GNU Taler requires libcurl-gnutls >= 7.34])]) # check for libtalertwistertesting twistertesting=0 -AC_MSG_CHECKING([for talerwtistertesting]) +AC_MSG_CHECKING([for talertwistertesting]) AC_ARG_WITH([twister], [AS_HELP_STRING([--with-twister=PFX], [base of libtalertwistertesting])], [AC_MSG_RESULT([given as $with_twister])], @@ -235,10 +204,7 @@ AS_CASE([$with_twister], CPPFLAGS="-I$with_twister/include $CPPFLAGS"]) AC_CHECK_HEADERS([taler/taler_twister_testing_lib.h], - [AC_CHECK_LIB([talertwistertesting], [TALER_TESTING_run_twister], twistertesting=1,, [-ltalerexchange -ltalerbank])], - 
[], [#ifdef HAVE_GNUNET_PLATFORM_H - #include <gnunet/platform.h> - #endif]) + [AC_CHECK_LIB([talertwistertesting], [TALER_TESTING_run_twister], twistertesting=1,, [-ltalerexchange -ltalerbank])]) AM_CONDITIONAL(HAVE_TWISTER, test x$twistertesting = x1) # gcov compilation @@ -316,8 +282,6 @@ AM_CONDITIONAL([ENABLE_DOC], [test "x$enable_doc" = "xyes"]) # logic if doc_only is set, make sure conditionals are still defined AM_CONDITIONAL([HAVE_GNUNETPQ], [false]) AM_CONDITIONAL([HAVE_POSTGRESQL], [false]) -AM_CONDITIONAL([HAVE_LIBCURL], [false]) -AM_CONDITIONAL([HAVE_LIBGNURL], [false]) AM_CONDITIONAL([USE_COVERAGE], [false]) AM_CONDITIONAL([ENABLE_DOC], [true]) AM_CONDITIONAL([HAVE_TWISTER], [true]) @@ -338,7 +302,9 @@ AM_CONDITIONAL([HAVE_EXPERIMENTAL], [test "x$enable_experimental" = "xyes"]) AC_CONFIG_FILES([Makefile +contrib/Makefile doc/Makefile +doc/doxygen/Makefile src/Makefile src/include/Makefile src/lib/Makefile diff --git a/contrib/Makefile.am b/contrib/Makefile.am new file mode 100644 index 0000000..b6eac2f --- /dev/null +++ b/contrib/Makefile.am @@ -0,0 +1,14 @@ +# This file is in the public domain. + +SUBDIRS = . 
+ +bin_SCRIPTS = \ + sync-dbconfig + +EXTRA_DIST = \ + $(bin_SCRIPTS) \ + gnunet.tag \ + microhttpd.tag \ + taler-exchange.tag \ + uncrustify.cfg \ + uncrustify_precommit diff --git a/contrib/ci/Containerfile b/contrib/ci/Containerfile new file mode 100644 index 0000000..96c9b28 --- /dev/null +++ b/contrib/ci/Containerfile @@ -0,0 +1,81 @@ +FROM docker.io/library/debian:bookworm + +ENV DEBIAN_FRONTEND=noninteractive + +RUN apt-get update -yqq && \ + apt-get install -yqq \ + git \ + autoconf \ + libjansson-dev \ + libgcrypt-dev \ + libqrencode-dev \ + libpq-dev \ + pkg-config \ + libtool \ + recutils \ + make \ + python3-pip \ + python3-sphinx \ + python3-sphinx-rtd-theme \ + texinfo \ + autopoint \ + curl \ + wget \ + libcurl4-gnutls-dev \ + libsodium-dev \ + libidn11-dev \ + zlib1g-dev \ + libunistring-dev + +# Debian packaging tools +RUN apt-get install -yqq \ + po-debconf \ + build-essential \ + debhelper-compat \ + devscripts \ + git-buildpackage + +RUN pip3 install --break-system-packages requests click poetry uwsgi htmlark + +# Install docs generation utils +RUN apt-get update -yqq && \ + apt-get install -yqq \ + graphviz \ + doxygen \ + && rm -rf /var/lib/apt/lists/* + +# Install Taler (and friends) packages +RUN curl -sS https://deb.taler.net/apt-nightly/taler-bookworm-ci.sources \ + | tee /etc/apt/sources.list.d/taler-bookworm-ci.sources + +RUN echo '\ +Package: * \n\ +Pin: origin "deb.taler.net" \n\ +Pin-Priority: 999' > /etc/apt/preferences.d/taler + +# FIXME: we need libeufin-bank here for the CI to work! 
+RUN cat /etc/apt/preferences.d/taler && \ + apt-get update -y && \ + apt-get install -y \ + libgnunet-dev \ + libgnunet \ + libtalerexchange-dev \ + libtalerexchange \ + libtalermerchant-dev \ + libtalermerchant \ + taler-exchange \ + taler-merchant \ + taler-exchange-database \ + taler-exchange-offline \ + taler-auditor \ + && rm -rf /var/lib/apt/lists/* + +RUN apt-get update -yqq && \ + apt-get install -yqq \ + postgresql \ + sudo \ + jq + +WORKDIR /workdir + +CMD ["bash", "/workdir/ci/ci.sh"] diff --git a/contrib/ci/ci.sh b/contrib/ci/ci.sh new file mode 100755 index 0000000..0719015 --- /dev/null +++ b/contrib/ci/ci.sh @@ -0,0 +1,34 @@ +#!/bin/bash +set -exvuo pipefail + +# Requires podman +# Fails if not found in PATH +OCI_RUNTIME=$(which podman) +REPO_NAME=$(basename "${PWD}") +JOB_NAME="${1}" +JOB_ARCH=$((grep CONTAINER_ARCH contrib/ci/jobs/${JOB_NAME}/config.ini | cut -d' ' -f 3) || echo "${2:-amd64}") +JOB_CONTAINER=$((grep CONTAINER_NAME contrib/ci/jobs/${JOB_NAME}/config.ini | cut -d' ' -f 3) || echo "localhost/${REPO_NAME}:${JOB_ARCH}") +CONTAINER_BUILD=$((grep CONTAINER_BUILD contrib/ci/jobs/${JOB_NAME}/config.ini | cut -d' ' -f 3) || echo "True") + +echo "Image name: ${JOB_CONTAINER}" + +if [ "${CONTAINER_BUILD}" = "True" ] ; then + "${OCI_RUNTIME}" build \ + --arch "${JOB_ARCH}" \ + -t "${JOB_CONTAINER}" \ + -f contrib/ci/Containerfile . 
+fi + +"${OCI_RUNTIME}" run \ + --rm \ + -ti \ + --arch "${JOB_ARCH}" \ + --env CI_COMMIT_REF="$(git rev-parse HEAD)" \ + --volume "${PWD}":/workdir \ + --workdir /workdir \ + "${JOB_CONTAINER}" \ + contrib/ci/jobs/"${JOB_NAME}"/job.sh + +top_dir=$(dirname "${BASH_SOURCE[0]}") + +#"${top_dir}"/build.sh diff --git a/contrib/ci/jobs/0-codespell/config.ini b/contrib/ci/jobs/0-codespell/config.ini new file mode 100644 index 0000000..bd7d738 --- /dev/null +++ b/contrib/ci/jobs/0-codespell/config.ini @@ -0,0 +1,6 @@ +[build] +HALT_ON_FAILURE = False +WARN_ON_FAILURE = True +CONTAINER_BUILD = False +CONTAINER_NAME = nixery.dev/shell/codespell +CONTAINER_ARCH = amd64 diff --git a/contrib/ci/jobs/0-codespell/dictionary.txt b/contrib/ci/jobs/0-codespell/dictionary.txt new file mode 100644 index 0000000..5ad828f --- /dev/null +++ b/contrib/ci/jobs/0-codespell/dictionary.txt @@ -0,0 +1,15 @@ +# List of "words" that codespell should ignore in our sources. +# +# Note: The word sensitivity depends on how the to-be-ignored word is +# spelled in codespell_lib/data/dictionary.txt. F.e. if there is a word +# 'foo' and you add 'Foo' _here_, codespell will continue to complain +# about 'Foo'. 
+# +ifset +bu +fIDN +ECT +complet +ges +UE +Te diff --git a/contrib/ci/jobs/0-codespell/job.sh b/contrib/ci/jobs/0-codespell/job.sh new file mode 100755 index 0000000..bb02feb --- /dev/null +++ b/contrib/ci/jobs/0-codespell/job.sh @@ -0,0 +1,31 @@ +#!/bin/bash +set -exuo pipefail + +job_dir=$(dirname "${BASH_SOURCE[0]}") + +skip=$(cat <<EOF +ABOUT-NLS +configure +config.guess +configure~ +*/debian/upstream/* +*/debian/.debhelper/* +*/doc/prebuilt/* +*/testing/test_sync_api_home/* +*build-aux* +*.cache/* +*/.git/* +*/contrib/ci/* +depcomp +*libtool* +ltmain.sh +*.log +*/m4/* +*.m4 +*.rpath +EOF +); + +echo Current directory: `pwd` + +codespell -I "${job_dir}"/dictionary.txt -S ${skip//$'\n'/,} diff --git a/contrib/ci/jobs/1-build/build.sh b/contrib/ci/jobs/1-build/build.sh new file mode 100755 index 0000000..7d1b502 --- /dev/null +++ b/contrib/ci/jobs/1-build/build.sh @@ -0,0 +1,10 @@ +#!/bin/bash +set -exuo pipefail + +./bootstrap +./configure CFLAGS="-ggdb -O0" \ + --prefix=/usr \ + --enable-logging=verbose \ + --disable-doc + +make diff --git a/contrib/ci/jobs/1-build/job.sh b/contrib/ci/jobs/1-build/job.sh new file mode 100755 index 0000000..c1fc4e3 --- /dev/null +++ b/contrib/ci/jobs/1-build/job.sh @@ -0,0 +1,9 @@ +#!/bin/bash +set -exuo pipefail + +apt-get update -yq +apt-get upgrade -yq + +job_dir=$(dirname "${BASH_SOURCE[0]}") + +"${job_dir}"/build.sh diff --git a/contrib/ci/jobs/2-test/config.ini b/contrib/ci/jobs/2-test/config.ini new file mode 100644 index 0000000..d62c4d4 --- /dev/null +++ b/contrib/ci/jobs/2-test/config.ini @@ -0,0 +1,6 @@ +[build] +HALT_ON_FAILURE = False +WARN_ON_FAILURE = True +CONTAINER_BUILD = True +CONTAINER_NAME = localhost/sync +CONTAINER_ARCH = amd64 diff --git a/contrib/ci/jobs/2-test/job.sh b/contrib/ci/jobs/2-test/job.sh new file mode 100755 index 0000000..bfb24e3 --- /dev/null +++ b/contrib/ci/jobs/2-test/job.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -exuo pipefail + +job_dir=$(dirname "${BASH_SOURCE[0]}") + 
+"${job_dir}"/test.sh diff --git a/contrib/ci/jobs/2-test/test.sh b/contrib/ci/jobs/2-test/test.sh new file mode 100755 index 0000000..aebbe01 --- /dev/null +++ b/contrib/ci/jobs/2-test/test.sh @@ -0,0 +1,51 @@ +#!/bin/bash +set -evu + +apt-get update +apt-get upgrade -yqq + +./bootstrap +./configure CFLAGS="-ggdb -O0" \ + --prefix=/usr \ + --enable-logging=verbose \ + --disable-doc +make -j install + +sudo -u postgres /usr/lib/postgresql/15/bin/postgres -D /etc/postgresql/15/main -h localhost -p 5432 & +sleep 10 +sudo -u postgres createuser -p 5432 root +sudo -u postgres createdb -p 5432 -O root synccheck + +check_command() +{ + # Set LD_LIBRARY_PATH so tests can find the installed libs + LD_LIBRARY_PATH=/usr/lib/x86_64-linux-gnu/taler:/usr/lib:/usr/lib/taler PGPORT=5432 make check +} + +print_logs() +{ + for i in src/*/test-suite.log + do + for FAILURE in $(grep '^FAIL:' ${i} | cut -d' ' -f2) + do + echo "Printing ${FAILURE}.log" + echo "========BEGIN======" + cat "$(dirname $i)/${FAILURE}.log" + echo "=========END=======" + echo "End of ${FAILURE}.log" + done + done + for LOGFILE in src/testing/*.log + do + echo "Printing ${LOGFILE}" + echo "========BEGIN======" + cat "${LOGFILE}" + echo "=========END=======" + echo "End of ${LOGFILE}" + done +} + +if ! 
check_command ; then + print_logs + exit 1 +fi diff --git a/contrib/ci/jobs/3-docs/config.ini b/contrib/ci/jobs/3-docs/config.ini new file mode 100644 index 0000000..d62c4d4 --- /dev/null +++ b/contrib/ci/jobs/3-docs/config.ini @@ -0,0 +1,6 @@ +[build] +HALT_ON_FAILURE = False +WARN_ON_FAILURE = True +CONTAINER_BUILD = True +CONTAINER_NAME = localhost/sync +CONTAINER_ARCH = amd64 diff --git a/contrib/ci/jobs/3-docs/docs.sh b/contrib/ci/jobs/3-docs/docs.sh new file mode 100755 index 0000000..fe2b968 --- /dev/null +++ b/contrib/ci/jobs/3-docs/docs.sh @@ -0,0 +1,11 @@ +#!/bin/bash +set -exuo pipefail + +./bootstrap +./configure --enable-only-doc + +pushd ./doc/doxygen/ + +make full + +popd diff --git a/contrib/ci/jobs/3-docs/job.sh b/contrib/ci/jobs/3-docs/job.sh new file mode 100755 index 0000000..a72bca4 --- /dev/null +++ b/contrib/ci/jobs/3-docs/job.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -exuo pipefail + +job_dir=$(dirname "${BASH_SOURCE[0]}") + +"${job_dir}"/docs.sh diff --git a/contrib/ci/jobs/4-deb-package/job.sh b/contrib/ci/jobs/4-deb-package/job.sh new file mode 100755 index 0000000..42636ed --- /dev/null +++ b/contrib/ci/jobs/4-deb-package/job.sh @@ -0,0 +1,23 @@ +#!/bin/bash +set -exuo pipefail +# This file is in the public domain. +# Helper script to build the latest DEB packages in the container. + + +unset LD_LIBRARY_PATH + +# Install build-time dependencies. 
+# Update apt cache first +apt-get update +apt-get upgrade -y +mk-build-deps --install --tool='apt-get -o Debug::pkgProblemResolver=yes --no-install-recommends --yes' debian/control + +export VERSION="$(./contrib/ci/jobs/4-deb-package/version.sh)" +echo "Building package version ${VERSION}" +EMAIL=none gbp dch --ignore-branch --debian-tag="%(version)s" --git-author --new-version="${VERSION}" +./bootstrap +dpkg-buildpackage -rfakeroot -b -uc -us + +ls -alh ../*.deb +mkdir -p /artifacts/sync/${CI_COMMIT_REF} # Variable comes from CI environment +mv ../*.deb /artifacts/sync/${CI_COMMIT_REF}/ diff --git a/contrib/ci/jobs/4-deb-package/version.sh b/contrib/ci/jobs/4-deb-package/version.sh new file mode 100755 index 0000000..52031b2 --- /dev/null +++ b/contrib/ci/jobs/4-deb-package/version.sh @@ -0,0 +1,17 @@ +#!/bin/sh +set -ex + +BRANCH=$(git name-rev --name-only HEAD) +if [ -z "${BRANCH}" ]; then + exit 1 +else + # "Unshallow" our checkout, but only our current branch, and exclude the submodules. 
+ git fetch --no-recurse-submodules --tags --depth=1000 origin "${BRANCH}" + RECENT_VERSION_TAG=$(git describe --tags --match 'v*.*.*' --exclude '*-dev*' --always --abbrev=0 HEAD || exit 1) + commits="$(git rev-list ${RECENT_VERSION_TAG}..HEAD --count)" + if [ "${commits}" = "0" ]; then + git describe --tag HEAD | sed -r 's/^v//' || exit 1 + else + echo $(echo ${RECENT_VERSION_TAG} | sed -r 's/^v//')-${commits}-$(git rev-parse --short=8 HEAD) + fi +fi diff --git a/contrib/ci/jobs/5-deploy-package/config.ini b/contrib/ci/jobs/5-deploy-package/config.ini new file mode 100644 index 0000000..08c106f --- /dev/null +++ b/contrib/ci/jobs/5-deploy-package/config.ini @@ -0,0 +1,6 @@ +[build] +HALT_ON_FAILURE = True +WARN_ON_FAILURE = True +CONTAINER_BUILD = False +CONTAINER_NAME = nixery.dev/shell/rsync +CONTAINER_ARCH = amd64 diff --git a/contrib/ci/jobs/5-deploy-package/job.sh b/contrib/ci/jobs/5-deploy-package/job.sh new file mode 100755 index 0000000..9ad8f21 --- /dev/null +++ b/contrib/ci/jobs/5-deploy-package/job.sh @@ -0,0 +1,14 @@ +#!/bin/bash +set -exuo pipefail + +ARTIFACT_PATH="/artifacts/sync/${CI_COMMIT_REF}/*.deb" + +RSYNC_HOST="taler.host.internal" +RSYNC_PORT=424242 +RSYNC_PATH="incoming_packages/bookworm-taler-ci/" +RSYNC_DEST="rsync://${RSYNC_HOST}/${RSYNC_PATH}" + + +rsync -vP \ + --port ${RSYNC_PORT} \ + ${ARTIFACT_PATH} ${RSYNC_DEST} diff --git a/contrib/microhttpd.tag b/contrib/microhttpd.tag index 8fab93d..ade11c6 100644 --- a/contrib/microhttpd.tag +++ b/contrib/microhttpd.tag @@ -24,6 +24,18 @@ </member> <member kind="define"> <type>#define</type> + <name>MHD_HTTP_NOT_FOUND</name> + <anchorfile>microhttpd.h</anchorfile> + <arglist></arglist> + </member> + <member kind="define"> + <type>#define</type> + <name>MHD_HTTP_CONFLICT</name> + <anchorfile>microhttpd.h</anchorfile> + <arglist></arglist> + </member> + <member kind="define"> + <type>#define</type> <name>MHD_HTTP_NO_CONTENT</name> <anchorfile>microhttpd.h</anchorfile> <arglist></arglist> @@ 
-64,11 +76,29 @@ <anchorfile>microhttpd.h</anchorfile> <arglist></arglist> </member> + <member kind="define"> + <type>function</type> + <name>MHD_run</name> + <anchorfile>microhttpd.h</anchorfile> + <arglist></arglist> + </member> + <member kind="define"> + <type>function</type> + <name>MHD_get_connection_values</name> + <anchorfile>microhttpd.h</anchorfile> + <arglist></arglist> + </member> <member kind="typedef"> <type>int</type> <name>MHD_AccessHandlerCallback</name> <anchorfile>microhttpd.h</anchorfile> - <arglist>)(void *cls, struct MHD_Connection *connection, const char *url, const char *method, const char *version, const char *upload_data, size_t *upload_data_size, void **con_cls)</arglist> + <arglist></arglist> + </member> + <member kind="typedef"> + <type>int</type> + <name>MHD_RequestCompletedCallback</name> + <anchorfile>microhttpd.h</anchorfile> + <arglist></arglist> </member> </compound> </tagfile> diff --git a/contrib/sync-dbconfig b/contrib/sync-dbconfig new file mode 100755 index 0000000..d0d3a4b --- /dev/null +++ b/contrib/sync-dbconfig @@ -0,0 +1,149 @@ +#!/bin/bash +# This file is part of GNU TALER. +# Copyright (C) 2023 Taler Systems SA +# +# TALER is free software; you can redistribute it and/or modify it under the +# terms of the GNU Lesser General Public License as published by the Free Software +# Foundation; either version 2.1, or (at your option) any later version. +# +# TALER is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License along with +# TALER; see the file COPYING. 
If not, see <http://www.gnu.org/licenses/> +# +# @author Christian Grothoff +# +# +# Error checking on +set -eu + +RESET_DB=0 +SKIP_DBINIT=0 +DBUSER="sync-httpd" +CFGFILE="/etc/sync/sync.conf" + +# Parse command-line options +while getopts 'c:hrsu:' OPTION; do + case "$OPTION" in + c) + CFGFILE="$OPTARG" + ;; + h) + echo 'Supported options:' + echo " -c FILENAME -- write configuration to FILENAME (default: $CFGFILE)" + echo " -r -- reset database (dangerous)" + echo " -s -- skip database initialization" + echo " -u USER -- sync-httpd to be run by USER (default: $DBUSER)" + exit 0 + ;; + r) + RESET_DB="1" + ;; + s) + SKIP_DBINIT="1" + ;; + u) + DBUSER="$OPTARG" + ;; + ?) + echo "Unrecognized command line option" 1>&2 ; exit 1 + ;; + esac +done + +if ! id postgres > /dev/null +then + echo "Could not find 'postgres' user. Please install Postgresql first" + exit 1 +fi + +if [ "$(id -u)" -ne 0 ] +then + echo "This script must be run as root" + exit 1 +fi + +if [ 0 = "$SKIP_DBINIT" ] +then + if ! sync-dbinit -v 2> /dev/null + then + echo "Required 'sync-dbinit' not found. Please fix your installation." + exit 1 + fi + DBINIT=$(which sync-dbinit) +fi + +if ! id "$DBUSER" > /dev/null +then + echo "Could not find '$DBUSER' user. Please set it up first" + exit 1 +fi + +echo "Setting up database user $DBUSER." 1>&2 + +if ! sudo -i -u postgres createuser "$DBUSER" 2> /dev/null +then + echo "Database user '$DBUSER' already existed. Continuing anyway." 1>&2 +fi + +DBPATH=$(sync-config \ + -c "$CFGFILE" \ + -s syncdb-postgres \ + -o CONFIG) + +if ! echo "$DBPATH" | grep "postgres://" > /dev/null +then + echo "Invalid database configuration value '$DBPATH'." 1>&2 + exit 1 +fi + +DBNAME=$(echo "$DBPATH" \ + | sed \ + -e "s/postgres:\/\/.*\///" \ + -e "s/?.*//") + +if sudo -i -u postgres psql "$DBNAME" < /dev/null 2> /dev/null +then + if [ 1 = "$RESET_DB" ] + then + echo "Deleting existing database $DBNAME." 1>&2 + if !
sudo -i -u postgres dropdb "$DBNAME" + then + echo "Failed to delete existing database '$DBNAME'" + exit 1 + fi + DO_CREATE=1 + else + echo "Database '$DBNAME' already exists, continuing anyway." + DO_CREATE=0 + fi +else + DO_CREATE=1 +fi + +if [ 1 = "$DO_CREATE" ] +then + echo "Creating database $DBNAME." 1>&2 + + if ! sudo -i -u postgres createdb -O "$DBUSER" "$DBNAME" + then + echo "Failed to create database '$DBNAME'" + exit 1 + fi +fi + +if [ 0 = "$SKIP_DBINIT" ] +then + echo "Initializing database $DBNAME." 1>&2 + if ! sudo -u "$DBUSER" "$DBINIT" -c "$CFGFILE" + then + echo "Failed to initialize database schema" + exit 1 + fi +fi + +echo "Database configuration finished." 1>&2 + +exit 0 diff --git a/contrib/taler-exchange.tag b/contrib/taler-exchange.tag new file mode 100644 index 0000000..11dcbb5 --- /dev/null +++ b/contrib/taler-exchange.tag @@ -0,0 +1,118 @@ +<?xml version='1.0' encoding='UTF-8' standalone='yes' ?> +<tagfile doxygen_version="1.9.4"> + <compound kind="file"> + <name>taler_error_codes.h</name> + <path>/research/taler/exchange/src/include/</path> + <filename>d5/dcb/taler__error__codes_8h.html</filename> + <member kind="enumeration"> + <type></type> + <name>TALER_ErrorCode</name> + <anchorfile>d5/dcb/taler__error__codes_8h.html</anchorfile> + <anchor>a05ff23648cb502e6128a53a5aef8d49a</anchor> + <arglist></arglist> + </member> + <member kind="enumvalue"> + <name>TALER_EC_SYNC_ACCOUNT_UNKNOWN</name> + <anchorfile>d5/dcb/taler__error__codes_8h.html</anchorfile> + <anchor>a05ff23648cb502e6128a53a5aef8d49aaeffc5c26407ed73d7638a5588de7c1dd</anchor> + <arglist></arglist> + </member> + <member kind="enumvalue"> + <name>TALER_EC_SYNC_BAD_IF_NONE_MATCH</name> + <anchorfile>d5/dcb/taler__error__codes_8h.html</anchorfile> + <anchor>a05ff23648cb502e6128a53a5aef8d49aab4c0f2469a013643db71e9d1093a17f0</anchor> + <arglist></arglist> + </member> + <member kind="enumvalue"> + <name>TALER_EC_SYNC_BAD_IF_MATCH</name>
<anchorfile>d5/dcb/taler__error__codes_8h.html</anchorfile> + <anchor>a05ff23648cb502e6128a53a5aef8d49aa5deccc6c9d196e9a5d15d2b9e8a462ab</anchor> + <arglist></arglist> + </member> + <member kind="enumvalue"> + <name>TALER_EC_SYNC_BAD_SYNC_SIGNATURE</name> + <anchorfile>d5/dcb/taler__error__codes_8h.html</anchorfile> + <anchor>a05ff23648cb502e6128a53a5aef8d49aa6c88e2f7b362aa75422e188c205501d3</anchor> + <arglist></arglist> + </member> + <member kind="enumvalue"> + <name>TALER_EC_SYNC_INVALID_SIGNATURE</name> + <anchorfile>d5/dcb/taler__error__codes_8h.html</anchorfile> + <anchor>a05ff23648cb502e6128a53a5aef8d49aa0c79eda43c2de90e28521b466bfeb958</anchor> + <arglist></arglist> + </member> + <member kind="enumvalue"> + <name>TALER_EC_SYNC_MALFORMED_CONTENT_LENGTH</name> + <anchorfile>d5/dcb/taler__error__codes_8h.html</anchorfile> + <anchor>a05ff23648cb502e6128a53a5aef8d49aaa811843c4362e3b91ff16563a7f11a9f</anchor> + <arglist></arglist> + </member> + <member kind="enumvalue"> + <name>TALER_EC_SYNC_EXCESSIVE_CONTENT_LENGTH</name> + <anchorfile>d5/dcb/taler__error__codes_8h.html</anchorfile> + <anchor>a05ff23648cb502e6128a53a5aef8d49aac67bcfb7de5cd35d554280caaec494f9</anchor> + <arglist></arglist> + </member> + <member kind="enumvalue"> + <name>TALER_EC_SYNC_OUT_OF_MEMORY_ON_CONTENT_LENGTH</name> + <anchorfile>d5/dcb/taler__error__codes_8h.html</anchorfile> + <anchor>a05ff23648cb502e6128a53a5aef8d49aa8dced986853a382e1fe1478cd4115d57</anchor> + <arglist></arglist> + </member> + <member kind="enumvalue"> + <name>TALER_EC_SYNC_INVALID_UPLOAD</name> + <anchorfile>d5/dcb/taler__error__codes_8h.html</anchorfile> + <anchor>a05ff23648cb502e6128a53a5aef8d49aa4ad086f449ddbabebd4c09af46054728</anchor> + <arglist></arglist> + </member> + <member kind="enumvalue"> + <name>TALER_EC_SYNC_PAYMENT_GENERIC_TIMEOUT</name> + <anchorfile>d5/dcb/taler__error__codes_8h.html</anchorfile> + <anchor>a05ff23648cb502e6128a53a5aef8d49aacdf25dccb3b6e11b8705f1954e881aac</anchor> + <arglist></arglist> 
+ </member> + <member kind="enumvalue"> + <name>TALER_EC_SYNC_PAYMENT_CREATE_BACKEND_ERROR</name> + <anchorfile>d5/dcb/taler__error__codes_8h.html</anchorfile> + <anchor>a05ff23648cb502e6128a53a5aef8d49aabbb8e55ff86dbe38c25747fadb64e2a6</anchor> + <arglist></arglist> + </member> + <member kind="enumvalue"> + <name>TALER_EC_SYNC_PREVIOUS_BACKUP_UNKNOWN</name> + <anchorfile>d5/dcb/taler__error__codes_8h.html</anchorfile> + <anchor>a05ff23648cb502e6128a53a5aef8d49aaeb094aed793a576fdf76fdbca7643686</anchor> + <arglist></arglist> + </member> + <member kind="enumvalue"> + <name>TALER_EC_SYNC_MISSING_CONTENT_LENGTH</name> + <anchorfile>d5/dcb/taler__error__codes_8h.html</anchorfile> + <anchor>a05ff23648cb502e6128a53a5aef8d49aad04abf794980705173d77fd28435d08a</anchor> + <arglist></arglist> + </member> + <member kind="enumvalue"> + <name>TALER_EC_SYNC_GENERIC_BACKEND_ERROR</name> + <anchorfile>d5/dcb/taler__error__codes_8h.html</anchorfile> + <anchor>a05ff23648cb502e6128a53a5aef8d49aab534c9117dccea53d74e87ace8e4fea1</anchor> + <arglist></arglist> + </member> + <member kind="enumvalue"> + <name>TALER_EC_SYNC_GENERIC_BACKEND_TIMEOUT</name> + <anchorfile>d5/dcb/taler__error__codes_8h.html</anchorfile> + <anchor>a05ff23648cb502e6128a53a5aef8d49aa264068a45ffad5779c0de179fdf79658</anchor> + <arglist></arglist> + </member> + </compound> + <compound kind="file"> + <name>taler_mhd_lib.h</name> + <path>/research/taler/exchange/src/include/</path> + <filename>df/d6d/taler__mhd__lib_8h.html</filename> + <includes id="d5/dcb/taler__error__codes_8h" name="taler_error_codes.h" local="yes" imported="no">taler_error_codes.h</includes> + <member kind="define"> + <type>#define</type> + <name>TALER_SIGNATURE_SYNC_BACKUP_UPLOAD</name> + <anchorfile>dc/d61/taler__signatures_8h.html</anchorfile> + <anchor>af15ac86c81f8c3993b56a59a4ccdaa1a</anchor> + <arglist></arglist> + </member> + </compound> +</tagfile>
\ No newline at end of file diff --git a/contrib/uncrustify.cfg b/contrib/uncrustify.cfg index f56c8e7..af2d8e6 100644 --- a/contrib/uncrustify.cfg +++ b/contrib/uncrustify.cfg @@ -28,7 +28,7 @@ ls_code_width=true pos_arith=lead # Fully parenthesize boolean exprs -mod_full_paren_if_bool=true +mod_full_paren_if_bool=false # Braces should be on their own line nl_fdef_brace=add @@ -49,8 +49,12 @@ nl_assign_brace=remove # No extra newlines that cause noisy diffs nl_start_of_file=remove +nl_after_func_proto = 2 +nl_after_func_body = 3 # If there's no new line, it's not a text file! nl_end_of_file=add +nl_max_blank_in_func = 3 +nl_max = 3 sp_inside_paren = remove @@ -69,6 +73,7 @@ sp_between_ptr_star = remove sp_before_sparen = add sp_inside_fparen = remove +sp_inside_sparen = remove # add space before function call and decl: "foo (x)" sp_func_call_paren = add @@ -76,3 +81,15 @@ sp_func_proto_paren = add sp_func_proto_paren_empty = add sp_func_def_paren = add sp_func_def_paren_empty = add + +# We'd want it for "if ( (foo) || (bar) )", but not for "if (m())", +# so as uncrustify doesn't give exactly what we want => ignore +sp_paren_paren = ignore +sp_inside_paren = remove +sp_bool = force + +nl_func_type_name = force +#nl_branch_else = add +nl_else_brace = add +nl_elseif_brace = add +nl_for_brace = add diff --git a/contrib/uncrustify.sh b/contrib/uncrustify.sh new file mode 100755 index 0000000..e8e05d3 --- /dev/null +++ b/contrib/uncrustify.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash + +set -eu + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" + +if ! 
uncrustify --version >/dev/null; then + echo "you need to install uncrustify for indentation" + exit 1 +fi + +find "$DIR/../src" \( -name "*.cpp" -o -name "*.c" -o -name "*.h" \) \ + -exec uncrustify -c "$DIR/uncrustify.cfg" --replace --no-backup {} + \ + || true diff --git a/debian/changelog b/debian/changelog index cb9f177..ca93df4 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,76 @@ +sync (0.10.1) unstable; urgency=low + + * Updating man page submodule. + + -- Christian Grothoff <grothoff@gnu.org> Fri, 12 Apr 2024 09:50:12 +0200 + +sync (0.10.0) unstable; urgency=low + + * Fix spelling issues + * Make drop.sql more robust to future changes + + -- Christian Grothoff <grothoff@gnu.org> Tue, 9 Apr 2024 09:50:12 +0200 + +sync (0.9.4-2) unstable; urgency=low + + * v0.9.4a bugfix release. + + -- Christian Grothoff <grothoff@gnu.org> Mon, 3 Mar 2024 21:50:12 +0200 + +sync (0.9.4-1) unstable; urgency=low + + * Actual v0.9.4 release. + + -- Christian Grothoff <grothoff@gnu.org> Sat, 10 Feb 2024 03:50:12 +0200 + +sync (0.9.4) unstable; urgency=low + + * First work towards packaging v0.9.4. + + -- Christian Grothoff <grothoff@gnu.org> Sun, 21 Jan 2024 23:50:12 +0200 + +sync (0.9.3-1) unstable; urgency=low + + * Actual v0.9.3 release. + + -- Christian Grothoff <grothoff@gnu.org> Wed, 27 Sep 2023 03:50:12 +0200 + +sync (0.9.3) unstable; urgency=low + + * First work towards packaging v0.9.3. + + -- Christian Grothoff <grothoff@gnu.org> Thu, 7 Sep 2023 23:50:12 +0200 + +sync (0.9.2) unstable; urgency=low + + * Packaging latest release. + + -- Christian Grothoff <grothoff@gnu.org> Tue, 21 Feb 2023 13:50:12 +0200 + +sync (0.9.1) unstable; urgency=low + + * Packaging latest release. + + -- Christian Grothoff <grothoff@gnu.org> Tue, 17 Jan 2023 11:50:12 +0200 + +sync (0.9.0) unstable; urgency=low + + * Packaging official release. 
+ + -- Christian Grothoff <grothoff@gnu.org> Sat, 5 Nov 2022 09:50:12 +0200 + +sync (0.8.99-2) unstable; urgency=low + + * Packaging latest pre-release from Git. + + -- Christian Grothoff <grothoff@gnu.org> Mon, 26 Sep 2022 09:50:12 +0200 + +sync (0.8.99) unstable; urgency=low + + * Updating to latest pre-release from Git. + + -- Christian Grothoff <grothoff@taler.net> Mon, 20 Jun 2022 13:12:58 +0200 + sync (0.8.3) unstable; urgency=medium * Initial Debian package. diff --git a/debian/control b/debian/control index d951a91..4f02003 100644 --- a/debian/control +++ b/debian/control @@ -8,16 +8,14 @@ Build-Depends: autopoint, debhelper-compat (= 12), gettext, - libgnunet-dev, - libtalerexchange-dev (>= 0.8.5), - libtalermerchant-dev (>= 0.8.4), - libpq-dev (>=9.5), + libgnunet-dev (>=0.21), + libtalerexchange-dev (>= 0.10.2), + libtalermerchant-dev (>= 0.10.2), + libpq-dev (>=14.0), pkg-config, po-debconf, - zlib1g-dev -Build-Conflicts: - autoconf2.13, - automake1.4 + zlib1g-dev, + texinfo Standards-Version: 4.5.0 Vcs-Git: https://salsa.debian.org/debian/sync.git Vcs-browser: https://salsa.debian.org/debian/sync @@ -33,34 +31,45 @@ Depends: ${misc:Depends}, ${shlibs:Depends} Description: libraries to talk to a Sync provider. + . + A Sync provider enables a GNU Taler wallet to store + an encrypted backup (possibly in return for payment). + The C library in this package implements the protocol + to talk to a Sync provider. Package: sync-httpd Architecture: any Pre-Depends: ${misc:Pre-Depends} Depends: - libtalerexchange (>= 0.8.4), - libtalermerchant (>= 0.8.3), + libtalerexchange (>= 0.10.2), + libtalermerchant (>= 0.10.2), libgnutls30 (>= 3.7.1), adduser, lsb-base, netbase, - apache2 | nginx | httpd, - postgresql, - dbconfig-pgsql | dbconfig-no-thanks, ${misc:Depends}, ${shlibs:Depends} +Recommends: + postgresql (>=14.0), + apache2 | nginx | httpd Description: GNU Taler's encrypted backup store. + . 
+ A Sync provider enables a GNU Taler wallet to store + an encrypted backup (possibly in return for payment). + This is the HTTP(S) REST backend that a Sync provider + must run to implement the protocol server-side. Package: libsync-dev Section: libdevel Architecture: any Depends: - libtalermerchant-dev (>= 0.8.3), - libtalerexchange-dev (>= 0.8.4), - libgnunet-dev (>=0.15.3), + libtalermerchant-dev (>= 0.10.2), + libtalerexchange-dev (>= 0.10.2), + libgnunet-dev (>=0.21), ${misc:Depends}, ${shlibs:Depends} -Description: libraries to talk to a Sync provider (development) +Description: libraries to talk to a Sync provider (development). . - This package contains the development files. + This package contains the development files for + libsync. diff --git a/debian/db/install/pgsql b/debian/db/install/pgsql deleted file mode 100755 index 4695224..0000000 --- a/debian/db/install/pgsql +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash - -set -eu - -conf_sync_db=/etc/sync/secrets/sync-db.secret.conf -conf_override=/etc/sync/override.conf - -# Get database settings from dbconfig-common and write sync configuration files. -if [ -f /etc/dbconfig-common/sync-httpd.conf ]; then - . /etc/dbconfig-common/sync-httpd.conf - case "$dbc_dbtype" in - pgsql) - echo -e "# Config file auto-generated by Debian.\n[sync]\nDB=postgres\n\n" > \ - $conf_override - # We assume ident auth here. We might support password auth later. - echo -e "[stasis-postgres]\nCONFIG=postgres:///${dbc_dbname}\n\n" > \ - $conf_sync_db - - # Allow the taler-merchant-httpd user to create schemas, needed by dbinit - echo "GRANT CREATE on database \"$dbc_dbname\" to \"sync-httpd\";" | sudo -u postgres psql -f - - # Run database initialization logic - sudo -u sync-httpd sync-dbinit -c /etc/sync/sync.conf - ;; - sqlite3) - # Later: use something like: - # sqlite:///$DATA_DIR/sync.db - # But for now, sqlite is unsupported: - echo "Unsupported database type $dbc_type." 
- exit 1 - ;; - "") ;; - - *) - echo "Unsupported database type $dbc_type." - exit 1 - ;; - esac -fi diff --git a/debian/etc/sync/override.conf b/debian/etc/sync/override.conf deleted file mode 100644 index 7392b1a..0000000 --- a/debian/etc/sync/override.conf +++ /dev/null @@ -1,2 +0,0 @@ -# Do not edit by hand. -# This file is used by tooling to override configuration settings. diff --git a/debian/etc/sync/secrets/sync-db.secret.conf b/debian/etc/sync/secrets/sync-db.secret.conf index 742d509..c7e3f12 100644 --- a/debian/etc/sync/secrets/sync-db.secret.conf +++ b/debian/etc/sync/secrets/sync-db.secret.conf @@ -1,3 +1,8 @@ [syncdb-postgres] -#The connection string the plugin has to use for connecting to the database -CONFIG = postgres:///sync + +# Typically, there should only be a single line here, of the form: + +CONFIG=postgres:///taler-sync + +# The details of the URI depend on where the database lives and how +# access control was configured. diff --git a/debian/libsync.install b/debian/libsync.install index fd41a4c..d9cfb32 100644 --- a/debian/libsync.install +++ b/debian/libsync.install @@ -1,3 +1,5 @@ usr/bin/sync-config usr/lib/*/libsync.so.* usr/lib/*/libsyncutil.so.* +usr/share/man/man5/sync.conf.5 +usr/share/man/man1/sync-config.1 diff --git a/debian/rules b/debian/rules index 17e3c0b..fd158ef 100755 --- a/debian/rules +++ b/debian/rules @@ -7,6 +7,9 @@ include /usr/share/dpkg/architecture.mk %: dh ${@} +override_dh_builddeb: + dh_builddeb -- -Zgzip + override_dh_auto_configure-arch: dh_auto_configure -- --disable-rpath --with-microhttpd=yes $(shell dpkg-buildflags --export=configure) @@ -30,6 +33,11 @@ override_dh_auto_install-indep: override_dh_auto_clean: dh_auto_clean +override_dh_installsystemd: + dh_installsystemd -psync-httpd --name=sync-httpd --no-start --no-enable + # final invocation to generate daemon reload + dh_installsystemd + override_dh_install: dh_install # Done manually for debhelper-compat<13 diff --git a/debian/sync-httpd.config 
b/debian/sync-httpd.config deleted file mode 100644 index 36e67d1..0000000 --- a/debian/sync-httpd.config +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/sh - -set -e - -. /usr/share/debconf/confmodule - -_USERNAME=sync-httpd -_GROUPNAME=www-data - -# For now, we only support postgres -dbc_dbtypes=pgsql -dbc_dbuser=${_USERNAME} - -dbc_authmethod_user=ident -dbc_authmethod_admin=ident - -if [ -f /usr/share/dbconfig-common/dpkg/config.pgsql ]; then - . /usr/share/dbconfig-common/dpkg/config.pgsql - dbc_go sync-httpd "$@" -fi - -db_stop diff --git a/debian/sync-httpd.install b/debian/sync-httpd.install index 24dffb1..a764eed 100644 --- a/debian/sync-httpd.install +++ b/debian/sync-httpd.install @@ -3,5 +3,8 @@ usr/lib/*/libsyncdb.so.* usr/lib/*/sync/libsync_plugin*.* usr/share/sync/sql/* usr/share/sync/config.d/* +usr/share/man/man1/sync-dbconfig.1 +usr/share/man/man1/sync-dbinit.1 +usr/share/man/man1/sync-httpd.1 + debian/etc/* /etc/ -debian/db/install/* usr/share/dbconfig-common/scripts/sync/install/ diff --git a/debian/sync-httpd.postinst b/debian/sync-httpd.postinst index 60393ad..8fcf255 100644 --- a/debian/sync-httpd.postinst +++ b/debian/sync-httpd.postinst @@ -2,15 +2,26 @@ set -e +if [ -d /run/systemd/system ]; then + systemctl --system daemon-reload >/dev/null || true +fi +if [ "$1" = "remove" ]; then + if [ -x "/usr/bin/deb-systemd-helper" ]; then + deb-systemd-helper mask 'sync-httpd.service' >/dev/null || true + fi +fi + +if [ "$1" = "purge" ]; then + if [ -x "/usr/bin/deb-systemd-helper" ]; then + deb-systemd-helper purge 'sync-httpd.service' >/dev/null || true + deb-systemd-helper unmask 'sync-httpd.service' >/dev/null || true + fi +fi + SYNC_HOME="/var/lib/sync/" _USERNAME=sync-httpd _GROUPNAME=www-data -# Set permissions for sqlite3 file -# (for when we support sqlite3 in the future) -dbc_dbfile_owner="${_USERNAME}:${_GROUPNAME}" -dbc_dbfile_perms="0600" - . 
/usr/share/debconf/confmodule case "${1}" in @@ -20,19 +31,12 @@ configure) adduser --quiet --system --ingroup ${_GROUPNAME} --no-create-home --home ${SYNC_HOME} ${_USERNAME} fi - if ! dpkg-statoverride --list /etc/sync/secrets/sync-db.secret.conf >/dev/null 2>&1; then - dpkg-statoverride --add --update \ - sync-httpd root 460 \ - /etc/sync/secrets/sync-db.secret.conf + if ! dpkg-statoverride --list /etc/sync/secrets/sync-db.secret.conf >/dev/null 2>&1; + then + dpkg-statoverride --add --update \ + sync-httpd root 640 \ + /etc/sync/secrets/sync-db.secret.conf fi - - # Setup postgres database (needs dbconfig-pgsql package) - if [ -f /usr/share/dbconfig-common/dpkg/postinst.pgsql ]; then - . /usr/share/dbconfig-common/dpkg/postinst.pgsql - dbc_pgsql_createdb_encoding="UTF8" - dbc_go sync-httpd "$@" - fi - ;; abort-upgrade | abort-remove | abort-deconfigure) ;; diff --git a/debian/sync-httpd.postrm b/debian/sync-httpd.postrm index 1cd85bc..60aa9a5 100644 --- a/debian/sync-httpd.postrm +++ b/debian/sync-httpd.postrm @@ -6,14 +6,14 @@ if [ -f /usr/share/debconf/confmodule ]; then . /usr/share/debconf/confmodule fi -if [ -f /usr/share/dbconfig-common/dpkg/postrm.pgsql ]; then - . /usr/share/dbconfig-common/dpkg/postrm.pgsql - dbc_go sync-httpd "$@" -fi +_USERNAME=sync-httpd case "${1}" in purge) rm -rf /var/lib/sync/httpd/ + rm -f /etc/sync/override.conf + dpkg-statoverride --remove /etc/sync/secrets/sync-db.secret.conf + deluser --system --quiet ${_USERNAME} || true ;; remove | upgrade | failed-upgrade | abort-install | abort-upgrade | disappear) ;; diff --git a/debian/sync-httpd.preinst b/debian/sync-httpd.preinst deleted file mode 100644 index 9588b73..0000000 --- a/debian/sync-httpd.preinst +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash - -# We prevent a few questions from being asked -# upon installation by specifying defaults. Namely, -# we want the database to be accessed via Unix domain -# sockets and password-less. 
- -set -e - -# When purging this package after the selections in the preinst have been made, -# the debconf database is left in an inconsistent state and the package cannot -# be installed again. This happens because dbconf-common will create a -# template for these questions with a shared owner. Purging will only delete -# one of the two templates, leading to a DB state where debconf-set-selections -# fails. We work around this by manually fixing up the debconf database. -# -# Unfortunately we can't do this in "postrm", because during "postrm" -# the configuration database is locked (even after db_stop). -# -# https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=487300 -if [ -x /usr/share/debconf/fix_db.pl ]; then - /usr/share/debconf/fix_db.pl || true -fi - -echo sync-httpd sync-httpd/pgsql/method select Unix socket | debconf-set-selections -echo sync-httpd sync-httpd/pgsql/authmethod-user select ident | debconf-set-selections -echo sync-httpd sync-httpd/pgsql/app-pass password | debconf-set-selections - -exit 0 diff --git a/debian/sync-httpd.prerm b/debian/sync-httpd.prerm index 24bed99..317d020 100644 --- a/debian/sync-httpd.prerm +++ b/debian/sync-httpd.prerm @@ -2,15 +2,8 @@ set -e -if [ -f /usr/share/debconf/confmodule ]; then - . /usr/share/debconf/confmodule +if [ -d /run/systemd/system ] && [ "$1" = remove ]; then + deb-systemd-invoke stop 'sync-httpd.service' >/dev/null || true fi -. /usr/share/dbconfig-common/dpkg/prerm -if [ -f /usr/share/dbconfig-common/dpkg/prerm.pgsql ]; then - . 
/usr/share/dbconfig-common/dpkg/prerm.pgsql - dbc_go sync-httpd "$@" -fi - -db_stop exit 0 diff --git a/debian/sync-httpd.service b/debian/sync-httpd.service index 2c97c37..74fb8b4 100644 --- a/debian/sync-httpd.service +++ b/debian/sync-httpd.service @@ -4,8 +4,12 @@ Description=Sync backup backend [Service] User=sync-httpd Type=simple -Restart=on-failure -ExecStart=/usr/bin/sync-httpd -c /etc/sync/sync.conf +Restart=always +RestartMode=direct +RestartSec=1s +RestartPreventExitStatus=2 3 4 5 6 9 +RuntimeMaxSec=3600s +ExecStart=/usr/bin/sync-httpd -c /etc/sync/sync.conf -L INFO [Install] WantedBy=multi-user.target diff --git a/doc/Makefile.am b/doc/Makefile.am index 737ac0c..71ceca2 100644 --- a/doc/Makefile.am +++ b/doc/Makefile.am @@ -1,3 +1,13 @@ # This Makefile is in the public domain AM_CPPFLAGS = -I$(top_srcdir)/src/include -SUBDIRS = . +SUBDIRS = . doxygen + +man_MANS = \ + prebuilt/man/sync-config.1 \ + prebuilt/man/sync-dbconfig.1 \ + prebuilt/man/sync-dbinit.1 \ + prebuilt/man/sync-httpd.1 \ + prebuilt/man/sync.conf.5 + +EXTRA_DIST = \ + $(man_MANS) diff --git a/doc/doxygen/.gitignore b/doc/doxygen/.gitignore new file mode 100644 index 0000000..e992a9c --- /dev/null +++ b/doc/doxygen/.gitignore @@ -0,0 +1,3 @@ +html/ +taler-exchange.tag +sync.tag diff --git a/doc/doxygen/Makefile.am b/doc/doxygen/Makefile.am new file mode 100644 index 0000000..1e171de --- /dev/null +++ b/doc/doxygen/Makefile.am @@ -0,0 +1,18 @@ +# This Makefile.am is in the public domain +all: + @echo -e \ +"Generate documentation:\n" \ +"\tmake full - full documentation with dependency graphs (slow)\n" \ +"\tmake fast - fast mode without dependency graphs" + +full: sync.doxy + doxygen $< + +fast: sync.doxy + sed 's/\(HAVE_DOT.*=\).*/\1 NO/' $< | doxygen - + +clean: + rm -rf html + +EXTRA_DIST = \ + sync.doxy diff --git a/doc/doxygen/logo.svg b/doc/doxygen/logo.svg new file mode 100644 index 0000000..ddb8425 --- /dev/null +++ b/doc/doxygen/logo.svg @@ -0,0 +1,87 @@ +<?xml version="1.0" 
encoding="UTF-8" standalone="no"?> +<svg + xmlns:dc="http://purl.org/dc/elements/1.1/" + xmlns:cc="http://creativecommons.org/ns#" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:svg="http://www.w3.org/2000/svg" + xmlns="http://www.w3.org/2000/svg" + xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" + xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" + viewBox="0 0 180 40" + version="1.1" + id="svg14" + sodipodi:docname="logo-2018-dold.svg" + inkscape:version="0.92.2 2405546, 2018-03-11"> + <metadata + id="metadata20"> + <rdf:RDF> + <cc:Work + rdf:about=""> + <dc:format>image/svg+xml</dc:format> + <dc:type + rdf:resource="http://purl.org/dc/dcmitype/StillImage" /> + <dc:title></dc:title> + </cc:Work> + </rdf:RDF> + </metadata> + <defs + id="defs18" /> + <sodipodi:namedview + pagecolor="#ffffff" + bordercolor="#666666" + borderopacity="1" + objecttolerance="10" + gridtolerance="10" + guidetolerance="10" + inkscape:pageopacity="0" + inkscape:pageshadow="2" + inkscape:window-width="1916" + inkscape:window-height="1041" + id="namedview16" + showgrid="false" + inkscape:zoom="1.8833333" + inkscape:cx="91.061947" + inkscape:cy="20" + inkscape:window-x="0" + inkscape:window-y="18" + inkscape:window-maximized="0" + inkscape:current-layer="text12" /> + <style + id="style2"> + .ts1 { fill: #aa3939; letter-spacing:0; } + .ts2 { letter-spacing:0; } + </style> + <g + aria-label="❬Taler❭" + style="color:#ff0000;font-weight:bold;font-size:36px;font-family:'Lucida Console', Monaco, monospace;letter-spacing:0.2em" + id="text12"> + <path + d="M 15.978516,31.285156 H 12.234375 L 5.6953125,18.154297 12.234375,5.0058594 h 3.744141 L 9.4042969,18.154297 Z" + style="letter-spacing:0;fill:#aa3939" + id="path3725" /> + <path + d="M 35.085937,29 H 29.900391 V 7.2910156 h -6.66211 V 2.7558594 h 18.509766 v 4.5351562 h -6.66211 z" + style="" + id="path3727" /> + <path + d="m 62.817188,19.753906 q -2.882812,0 -4.02539,0.738281 -1.142578,0.738282 
-1.142578,2.53125 0,1.335938 0.791015,2.126954 0.791016,0.791015 2.144531,0.791015 2.039063,0 3.164063,-1.529297 1.125,-1.546875 1.125,-4.30664 v -0.351563 z m 7.171875,-1.986328 V 29 h -5.115234 v -2.197266 q -0.931641,1.300782 -2.390625,2.003907 -1.458984,0.703125 -3.216797,0.703125 -3.357422,0 -5.238281,-1.775391 -1.863281,-1.775391 -1.863281,-4.957031 0,-3.445313 2.232421,-5.080078 2.232422,-1.652344 6.908204,-1.652344 h 3.568359 v -0.861328 q 0,-1.248047 -0.914063,-1.88086 -0.896484,-0.65039 -2.654296,-0.65039 -1.845704,0 -3.585938,0.474609 -1.722656,0.457031 -3.603516,1.476563 v -4.394532 q 1.705079,-0.7031246 3.462891,-1.037109 1.757813,-0.3339844 3.726563,-0.3339844 4.798828,0 6.732421,1.9511724 1.951172,1.951171 1.951172,6.978515 z" + style="" + id="path3729" /> + <path + d="M 86.171486,20.791016 V 5.6035156 H 80.950783 V 1.6484375 H 91.321877 V 20.791016 q 0,2.320312 0.720703,3.287109 0.720703,0.966797 2.443359,0.966797 H 98.59922 V 29 h -5.554687 q -3.673828,0 -5.273438,-1.898438 -1.599609,-1.898437 -1.599609,-6.310546 z" + style="" + id="path3731" /> + <path + d="m 127.59609,28.033203 q -1.79297,0.738281 -3.65625,1.107422 -1.86328,0.369141 -3.9375,0.369141 -4.93945,0 -7.55859,-2.636719 -2.60156,-2.654297 -2.60156,-7.628906 0,-4.816407 2.51367,-7.611328 2.51367,-2.7949224 6.85547,-2.7949224 4.37695,0 6.78515,2.6015624 2.42578,2.583985 2.42578,7.294922 v 2.091797 h -13.34179 q 0.0176,2.320312 1.37109,3.46289 1.35352,1.142579 4.04297,1.142579 1.77539,0 3.49805,-0.509766 1.72265,-0.509766 3.60351,-1.617188 z m -4.35937,-11.074219 q -0.0352,-2.039062 -1.05469,-3.076171 -1.00195,-1.054688 -2.9707,-1.054688 -1.77539,0 -2.83008,1.089844 -1.05469,1.072265 -1.24805,3.058593 z" + style="" + id="path3733" /> + <path + d="m 157.31367,14.744141 q -0.84375,-0.773438 -1.98632,-1.160157 -1.125,-0.386718 -2.47852,-0.386718 -1.63476,0 -2.86523,0.580078 -1.21289,0.5625 -1.88086,1.652344 -0.42188,0.667968 -0.59766,1.617187 -0.1582,0.949219 -0.1582,2.882812 V 29 h -5.15039 V 
9.3125 h 5.15039 v 3.058594 q 0.75586,-1.6875 2.32031,-2.6015627 1.56445,-0.9316407 3.65625,-0.9316407 1.05469,0 2.05664,0.2636719 1.01953,0.2460938 1.93359,0.7382813 z" + style="letter-spacing:0" + id="path3735" /> + <path + d="m 164.43282,31.285156 6.55664,-13.130859 -6.53907,-13.1484376 h 3.72657 l 6.53906,13.1484376 -6.53906,13.130859 z" + style="letter-spacing:0;fill:#aa3939" + id="path3737" /> + </g> +</svg> diff --git a/doc/doxygen/sync.doxy b/doc/doxygen/sync.doxy new file mode 100644 index 0000000..a224fdf --- /dev/null +++ b/doc/doxygen/sync.doxy @@ -0,0 +1,2575 @@ +# Doxyfile 1.9.1 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project. +# +# All text after a double hash (##) is considered a comment and is placed in +# front of the TAG it is preceding. +# +# All text after a single hash (#) is considered a comment and will be ignored. +# The format is: +# TAG = value [value, ...] +# For lists, items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (\" \"). + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the configuration +# file that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# https://www.gnu.org/software/libiconv/ for the list of possible encodings. +# The default value is: UTF-8. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by +# double-quotes, unless you are using Doxywizard) that should identify the +# project for which the documentation is generated. 
This name is used in the +# title of most generated pages and in a few other places. +# The default value is: My Project. + +PROJECT_NAME = "GNU Taler: Sync" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. This +# could be handy for archiving the generated documentation or if some version +# control system is used. + +PROJECT_NUMBER = 0.10.1 + +# Using the PROJECT_BRIEF tag one can provide an optional one line description +# for a project that appears at the top of each page and should give viewer a +# quick idea about the purpose of the project. Keep the description short. + +PROJECT_BRIEF = Encrypted backup storage service with a REST API + +# With the PROJECT_LOGO tag one can specify a logo or an icon that is included +# in the documentation. The maximum height of the logo should not exceed 55 +# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy +# the logo to the output directory. + +PROJECT_LOGO = logo.svg + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path +# into which the generated documentation will be written. If a relative path is +# entered, it will be relative to the location where doxygen was started. If +# left blank the current directory will be used. + +OUTPUT_DIRECTORY = . + +# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- +# directories (in 2 levels) under the output directory of each output format and +# will distribute the generated files over these directories. Enabling this +# option can be useful when feeding doxygen a huge amount of source files, where +# putting all generated files in the same directory would otherwise causes +# performance problems for the file system. +# The default value is: NO. + +CREATE_SUBDIRS = YES + +# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII +# characters to appear in the names of generated files. 
If set to NO, non-ASCII +# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode +# U+3044. +# The default value is: NO. + +ALLOW_UNICODE_NAMES = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese, +# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States), +# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian, +# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages), +# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian, +# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian, +# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish, +# Ukrainian and Vietnamese. +# The default value is: English. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member +# descriptions after the members that are listed in the file and class +# documentation (similar to Javadoc). Set to NO to disable this. +# The default value is: YES. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief +# description of a member or function before the detailed description +# +# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. +# The default value is: YES. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator that is +# used to form the text in various listings. Each string in this list, if found +# as the leading text of the brief description, will be stripped from the text +# and the result, after processing the whole list, is used as the annotated +# text. 
Otherwise, the brief description is used as-is. If left blank, the +# following values are used ($name is automatically replaced with the name of +# the entity):The $name class, The $name widget, The $name file, is, provides, +# specifies, contains, represents, a, an and the. + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# doxygen will generate a detailed section even if there is only a brief +# description. +# The default value is: NO. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. +# The default value is: NO. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path +# before files name in the file list and in the header files. If set to NO the +# shortest path that makes the file name unique will be used +# The default value is: YES. + +FULL_PATH_NAMES = YES + +# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. +# Stripping is only done if one of the specified strings matches the left-hand +# part of the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the path to +# strip. +# +# Note that you can specify absolute paths here, but also relative paths, which +# will be relative from the directory where doxygen is started. +# This tag requires that the tag FULL_PATH_NAMES is set to YES. + +STRIP_FROM_PATH = ../.. 
+ +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the +# path mentioned in the documentation of a class, which tells the reader which +# header file to include in order to use a class. If left blank only the name of +# the header file containing the class definition is used. Otherwise one should +# specify the list of include paths that are normally passed to the compiler +# using the -I flag. + +STRIP_FROM_INC_PATH = ../../src/include \ + src/include \ + include + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but +# less readable) file names. This can be useful is your file systems doesn't +# support long names like on DOS, Mac, or CD-ROM. +# The default value is: NO. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the +# first line (until the first dot) of a Javadoc-style comment as the brief +# description. If set to NO, the Javadoc-style will behave just like regular Qt- +# style comments (thus requiring an explicit @brief command for a brief +# description.) +# The default value is: NO. + +JAVADOC_AUTOBRIEF = YES + +# If the JAVADOC_BANNER tag is set to YES then doxygen will interpret a line +# such as +# /*************** +# as being the beginning of a Javadoc-style comment "banner". If set to NO, the +# Javadoc-style will behave just like regular comments and it will not be +# interpreted by doxygen. +# The default value is: NO. + +JAVADOC_BANNER = NO + +# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first +# line (until the first dot) of a Qt-style comment as the brief description. If +# set to NO, the Qt-style will behave just like regular Qt-style comments (thus +# requiring an explicit \brief command for a brief description.) +# The default value is: NO. + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a +# multi-line C++ special comment block (i.e. a block of //! 
or /// comments) as +# a brief description. This used to be the default behavior. The new default is +# to treat a multi-line C++ comment block as a detailed description. Set this +# tag to YES if you prefer the old behavior instead. +# +# Note that setting this tag to YES also means that rational rose comments are +# not recognized any more. +# The default value is: NO. + +MULTILINE_CPP_IS_BRIEF = NO + +# By default Python docstrings are displayed as preformatted text and doxygen's +# special commands cannot be used. By setting PYTHON_DOCSTRING to NO the +# doxygen's special commands can be used and the contents of the docstring +# documentation blocks is shown as doxygen documentation. +# The default value is: YES. + +PYTHON_DOCSTRING = YES + +# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the +# documentation from any documented member that it re-implements. +# The default value is: YES. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new +# page for each member. If set to NO, the documentation of a member will be part +# of the file/class/namespace that contains it. +# The default value is: NO. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen +# uses this value to replace tabs by spaces in code fragments. +# Minimum value: 1, maximum value: 16, default value: 4. + +TAB_SIZE = 8 + +# This tag can be used to specify a number of aliases that act as commands in +# the documentation. An alias has the form: +# name=value +# For example adding +# "sideeffect=@par Side Effects:\n" +# will allow you to put the command \sideeffect (or @sideeffect) in the +# documentation, which will result in a user-defined paragraph with heading +# "Side Effects:". You can put \n's in the value part of an alias to insert +# newlines (in the resulting output). 
You can put ^^ in the value part of an +# alias to insert a newline as if a physical newline was in the original file. +# When you need a literal { or } or , in the value part of an alias you have to +# escape them by means of a backslash (\), this can lead to conflicts with the +# commands \{ and \} for these it is advised to use the version @{ and @} or use +# a double escape (\\{ and \\}) + +ALIASES = + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources +# only. Doxygen will then generate output that is more tailored for C. For +# instance, some of the names that are used will be different. The list of all +# members will be omitted, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_FOR_C = YES + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or +# Python sources only. Doxygen will then generate output that is more tailored +# for that language. For instance, namespaces will be presented as packages, +# qualified scopes will look different, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources. Doxygen will then generate output that is tailored for Fortran. +# The default value is: NO. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for VHDL. +# The default value is: NO. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice +# sources only. Doxygen will then generate output that is more tailored for that +# language. For instance, namespaces will be presented as modules, types will be +# separated into more groups, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_SLICE = NO + +# Doxygen selects the parser to use depending on the extension of the files it +# parses. 
With this tag you can assign which parser to use for a given +# extension. Doxygen has a built-in mapping, but you can override or extend it +# using this tag. The format is ext=language, where ext is a file extension, and +# language is one of the parsers supported by doxygen: IDL, Java, JavaScript, +# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice, VHDL, +# Fortran (fixed format Fortran: FortranFixed, free formatted Fortran: +# FortranFree, unknown formatted Fortran: Fortran. In the later case the parser +# tries to guess whether the code is fixed or free formatted code, this is the +# default for Fortran type files). For instance to make doxygen treat .inc files +# as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. +# +# Note: For files without extension you can use no_extension as a placeholder. +# +# Note that for custom extensions you also need to set FILE_PATTERNS otherwise +# the files are not read by doxygen. When specifying no_extension you should add +# * to the FILE_PATTERNS. +# +# Note see also the list of default file extension mappings. + +EXTENSION_MAPPING = + +# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments +# according to the Markdown format, which allows for more readable +# documentation. See https://daringfireball.net/projects/markdown/ for details. +# The output of markdown processing is further processed by doxygen, so you can +# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in +# case of backward compatibilities issues. +# The default value is: YES. + +MARKDOWN_SUPPORT = YES + +# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up +# to that level are automatically included in the table of contents, even if +# they do not have an id attribute. +# Note: This feature currently applies only to Markdown headings. +# Minimum value: 0, maximum value: 99, default value: 5. 
+# This tag requires that the tag MARKDOWN_SUPPORT is set to YES. + +TOC_INCLUDE_HEADINGS = 5 + +# When enabled doxygen tries to link words that correspond to documented +# classes, or namespaces to their corresponding documentation. Such a link can +# be prevented in individual cases by putting a % sign in front of the word or +# globally by setting AUTOLINK_SUPPORT to NO. +# The default value is: YES. + +AUTOLINK_SUPPORT = YES + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should set this +# tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); +# versus func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. +# The default value is: NO. + +BUILTIN_STL_SUPPORT = NO + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. +# The default value is: NO. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip (see: +# https://www.riverbankcomputing.com/software/sip/intro) sources only. Doxygen +# will parse them like normal C++ but will assume all classes use public instead +# of private inheritance when no explicit protection keyword is present. +# The default value is: NO. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate +# getter and setter methods for a property. Setting this option to YES will make +# doxygen to replace the get and set methods by a property in the documentation. +# This will only work if the methods are indeed getting or setting a simple +# type. If this is not the case, or you want to show the methods anyway, you +# should set this option to NO. +# The default value is: YES. 
+ +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. +# The default value is: NO. + +DISTRIBUTE_GROUP_DOC = NO + +# If one adds a struct or class to a group and this option is enabled, then also +# any nested class or struct is added to the same group. By default this option +# is disabled and one has to add nested compounds explicitly via \ingroup. +# The default value is: NO. + +GROUP_NESTED_COMPOUNDS = NO + +# Set the SUBGROUPING tag to YES to allow class member groups of the same type +# (for instance a group of public functions) to be put as a subgroup of that +# type (e.g. under the Public Functions section). Set it to NO to prevent +# subgrouping. Alternatively, this can be done per class using the +# \nosubgrouping command. +# The default value is: YES. + +SUBGROUPING = YES + +# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions +# are shown inside the group in which they are included (e.g. using \ingroup) +# instead of on a separate page (for HTML and Man pages) or section (for LaTeX +# and RTF). +# +# Note that this feature does not work in combination with +# SEPARATE_MEMBER_PAGES. +# The default value is: NO. + +INLINE_GROUPED_CLASSES = NO + +# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions +# with only public data fields or simple typedef fields will be shown inline in +# the documentation of the scope in which they are defined (i.e. file, +# namespace, or group documentation), provided this scope is documented. If set +# to NO, structs, classes, and unions are shown on a separate page (for HTML and +# Man pages) or section (for LaTeX and RTF). +# The default value is: NO. 
+
+INLINE_SIMPLE_STRUCTS = NO
+
+# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
+# enum is documented as struct, union, or enum with the name of the typedef. So
+# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
+# with name TypeT. When disabled the typedef will appear as a member of a file,
+# namespace, or class. And the struct will be named TypeS. This can typically be
+# useful for C code in case the coding convention dictates that all compound
+# types are typedef'ed and only the typedef is referenced, never the tag name.
+# The default value is: NO.
+
+TYPEDEF_HIDES_STRUCT = NO
+
+# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
+# cache is used to resolve symbols given their name and scope. Since this can be
+# an expensive process and often the same symbol appears multiple times in the
+# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
+# doxygen will become slower. If the cache is too large, memory is wasted. The
+# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
+# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
+# symbols. At the end of a run doxygen will report the cache usage and suggest
+# the optimal cache size from a speed point of view.
+# Minimum value: 0, maximum value: 9, default value: 0.
+
+LOOKUP_CACHE_SIZE = 0
+
+# The NUM_PROC_THREADS specifies the number of threads doxygen is allowed to use
+# during processing. When set to 0 doxygen will base this on the number of
+# cores available in the system. You can set it explicitly to a value larger
+# than 0 to get more control over the balance between CPU load and processing
+# speed. At this moment only the input processing can be done using multiple
+# threads. Since this is still an experimental feature the default is set to 1,
+# which effectively disables parallel processing. Please report any issues you
+# encounter. 
Generating dot graphs in parallel is controlled by the +# DOT_NUM_THREADS setting. +# Minimum value: 0, maximum value: 32, default value: 1. + +NUM_PROC_THREADS = 1 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in +# documentation are documented, even if no documentation was available. Private +# class members and static file members will be hidden unless the +# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. +# Note: This will also disable the warnings about undocumented members that are +# normally produced when WARNINGS is set to YES. +# The default value is: NO. + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will +# be included in the documentation. +# The default value is: NO. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_PRIV_VIRTUAL tag is set to YES, documented private virtual +# methods of a class will be included in the documentation. +# The default value is: NO. + +EXTRACT_PRIV_VIRTUAL = NO + +# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal +# scope will be included in the documentation. +# The default value is: NO. + +EXTRACT_PACKAGE = NO + +# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be +# included in the documentation. +# The default value is: NO. + +EXTRACT_STATIC = YES + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined +# locally in source files will be included in the documentation. If set to NO, +# only classes defined in header files are included. Does not have any effect +# for Java sources. +# The default value is: YES. + +EXTRACT_LOCAL_CLASSES = NO + +# This flag is only useful for Objective-C code. 
If set to YES, local methods,
+# which are defined in the implementation section but not in the interface are
+# included in the documentation. If set to NO, only methods in the interface are
+# included.
+# The default value is: NO.
+
+EXTRACT_LOCAL_METHODS = NO
+
+# If this flag is set to YES, the members of anonymous namespaces will be
+# extracted and appear in the documentation as a namespace called
+# 'anonymous_namespace{file}', where file will be replaced with the base name of
+# the file that contains the anonymous namespace. By default anonymous namespaces
+# are hidden.
+# The default value is: NO.
+
+EXTRACT_ANON_NSPACES = YES
+
+# If this flag is set to YES, the name of an unnamed parameter in a declaration
+# will be determined by the corresponding definition. By default unnamed
+# parameters remain unnamed in the output.
+# The default value is: YES.
+
+RESOLVE_UNNAMED_PARAMS = YES
+
+# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
+# undocumented members inside documented classes or files. If set to NO these
+# members will be included in the various overviews, but no documentation
+# section is generated. This option has no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_MEMBERS = NO
+
+# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
+# undocumented classes that are normally visible in the class hierarchy. If set
+# to NO, these classes will be included in the various overviews. This option
+# has no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_CLASSES = NO
+
+# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
+# declarations. If set to NO, these declarations will be included in the
+# documentation.
+# The default value is: NO.
+
+HIDE_FRIEND_COMPOUNDS = NO
+
+# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
+# documentation blocks found inside the body of a function. 
If set to NO, these
+# blocks will be appended to the function's detailed documentation block.
+# The default value is: NO.
+
+HIDE_IN_BODY_DOCS = NO
+
+# The INTERNAL_DOCS tag determines if documentation that is typed after a
+# \internal command is included. If the tag is set to NO then the documentation
+# will be excluded. Set it to YES to include the internal documentation.
+# The default value is: NO.
+
+INTERNAL_DOCS = YES
+
+# With the correct setting of option CASE_SENSE_NAMES doxygen will better be
+# able to match the capabilities of the underlying filesystem. In case the
+# filesystem is case sensitive (i.e. it supports files in the same directory
+# whose names only differ in casing), the option must be set to YES to properly
+# deal with such files in case they appear in the input. For filesystems that
+# are not case sensitive the option should be set to NO to properly deal with
+# output files written for symbols that only differ in casing, such as for two
+# classes, one named CLASS and the other named Class, and to also support
+# references to files without having to specify the exact matching casing. On
+# Windows (including Cygwin) and MacOS, users should typically set this option
+# to NO, whereas on Linux or other Unix flavors it should typically be set to
+# YES.
+# The default value is: system dependent.
+
+CASE_SENSE_NAMES = YES
+
+# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
+# their full class and namespace scopes in the documentation. If set to YES, the
+# scope will be hidden.
+# The default value is: NO.
+
+HIDE_SCOPE_NAMES = NO
+
+# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will
+# append additional text to a page's title, such as Class Reference. If set to
+# YES the compound reference will be hidden.
+# The default value is: NO.
+ +HIDE_COMPOUND_REFERENCE= NO + +# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of +# the files that are included by a file in the documentation of that file. +# The default value is: YES. + +SHOW_INCLUDE_FILES = YES + +# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each +# grouped member an include statement to the documentation, telling the reader +# which file to include in order to use the member. +# The default value is: NO. + +SHOW_GROUPED_MEMB_INC = NO + +# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include +# files with double quotes in the documentation rather than with sharp brackets. +# The default value is: NO. + +FORCE_LOCAL_INCLUDES = NO + +# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the +# documentation for inline members. +# The default value is: YES. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the +# (detailed) documentation of file and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. +# The default value is: YES. + +SORT_MEMBER_DOCS = NO + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief +# descriptions of file, namespace and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. Note that +# this will also influence the order of the classes in the class list. +# The default value is: NO. + +SORT_BRIEF_DOCS = NO + +# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the +# (brief and detailed) documentation of class members so that constructors and +# destructors are listed first. If set to NO the constructors will appear in the +# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. +# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief +# member documentation. 
+# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting +# detailed member documentation. +# The default value is: NO. + +SORT_MEMBERS_CTORS_1ST = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy +# of group names into alphabetical order. If set to NO the group names will +# appear in their defined order. +# The default value is: NO. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by +# fully-qualified names, including namespaces. If set to NO, the class list will +# be sorted only by class name, not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the alphabetical +# list. +# The default value is: NO. + +SORT_BY_SCOPE_NAME = NO + +# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper +# type resolution of all parameters of a function it will reject a match between +# the prototype and the implementation of a member function even if there is +# only one candidate or it is obvious which candidate to choose by doing a +# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still +# accept a match between prototype and implementation in such cases. +# The default value is: NO. + +STRICT_PROTO_MATCHING = NO + +# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo +# list. This list is created by putting \todo commands in the documentation. +# The default value is: YES. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test +# list. This list is created by putting \test commands in the documentation. +# The default value is: YES. + +GENERATE_TESTLIST = YES + +# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug +# list. This list is created by putting \bug commands in the documentation. 
+# The default value is: YES. + +GENERATE_BUGLIST = YES + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO) +# the deprecated list. This list is created by putting \deprecated commands in +# the documentation. +# The default value is: YES. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional documentation +# sections, marked by \if <section_label> ... \endif and \cond <section_label> +# ... \endcond blocks. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the +# initial value of a variable or macro / define can have for it to appear in the +# documentation. If the initializer consists of more lines than specified here +# it will be hidden. Use a value of 0 to hide initializers completely. The +# appearance of the value of individual variables and macros / defines can be +# controlled using \showinitializer or \hideinitializer command in the +# documentation regardless of this setting. +# Minimum value: 0, maximum value: 10000, default value: 30. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at +# the bottom of the documentation of classes and structs. If set to YES, the +# list will mention the files that were used to generate the documentation. +# The default value is: YES. + +SHOW_USED_FILES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This +# will remove the Files entry from the Quick Index and from the Folder Tree View +# (if specified). +# The default value is: YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces +# page. This will remove the Namespaces entry from the Quick Index and from the +# Folder Tree View (if specified). +# The default value is: YES. 
+ +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command command input-file, where command is the value of the +# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided +# by doxygen. Whatever the program writes to standard output is used as the file +# version. For an example see the documentation. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed +# by doxygen. The layout file controls the global structure of the generated +# output files in an output format independent way. To create the layout file +# that represents doxygen's defaults, run doxygen with the -l option. You can +# optionally specify a file name after the option, if omitted DoxygenLayout.xml +# will be used as the name of the layout file. +# +# Note that if you run doxygen from a directory containing a file called +# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE +# tag is left empty. + +LAYOUT_FILE = + +# The CITE_BIB_FILES tag can be used to specify one or more bib files containing +# the reference definitions. This must be a list of .bib files. The .bib +# extension is automatically appended if omitted. This requires the bibtex tool +# to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info. +# For LaTeX the style of the bibliography can be controlled using +# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the +# search path. See also \cite for info how to create references. 
+ +CITE_BIB_FILES = + +#--------------------------------------------------------------------------- +# Configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated to +# standard output by doxygen. If QUIET is set to YES this implies that the +# messages are off. +# The default value is: NO. + +QUIET = YES + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES +# this implies that the warnings are on. +# +# Tip: Turn warnings on while writing the documentation. +# The default value is: YES. + +WARNINGS = YES + +# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate +# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag +# will automatically be disabled. +# The default value is: YES. + +WARN_IF_UNDOCUMENTED = YES + +# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some parameters +# in a documented function, or documenting parameters that don't exist or using +# markup commands wrongly. +# The default value is: YES. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that +# are documented, but have no documentation for their parameters or return +# value. If set to NO, doxygen will only warn about wrong or incomplete +# parameter documentation, but not about the absence of documentation. If +# EXTRACT_ALL is set to YES then this flag will automatically be disabled. +# The default value is: NO. + +WARN_NO_PARAMDOC = YES + +# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when +# a warning is encountered. 
If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS +# then doxygen will continue running as if WARN_AS_ERROR tag is set to NO, but +# at the end of the doxygen process doxygen will return with a non-zero status. +# Possible values are: NO, YES and FAIL_ON_WARNINGS. +# The default value is: NO. + +WARN_AS_ERROR = NO + +# The WARN_FORMAT tag determines the format of the warning messages that doxygen +# can produce. The string should contain the $file, $line, and $text tags, which +# will be replaced by the file and line number from which the warning originated +# and the warning text. Optionally the format may contain $version, which will +# be replaced by the version of the file (if it could be obtained via +# FILE_VERSION_FILTER) +# The default value is: $file:$line: $text. + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning and error +# messages should be written. If left blank the output is written to standard +# error (stderr). + +WARN_LOGFILE = + +#--------------------------------------------------------------------------- +# Configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag is used to specify the files and/or directories that contain +# documented source files. You may enter file names like myfile.cpp or +# directories like /usr/src/myproject. Separate the files or directories with +# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING +# Note: If this tag is empty the current directory is searched. + +INPUT = ../../src/ + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses +# libiconv (or the iconv built into libc) for the transcoding. See the libiconv +# documentation (see: +# https://www.gnu.org/software/libiconv/) for the list of possible encodings. +# The default value is: UTF-8. 
+ +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and +# *.h) to filter out the source-files in the directories. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# read by doxygen. +# +# Note the list of default checked file patterns might differ from the list of +# default file extension mappings. +# +# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp, +# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, +# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, +# *.m, *.markdown, *.md, *.mm, *.dox (to be provided as doxygen C comment), +# *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, *.f18, *.f, *.for, *.vhd, *.vhdl, +# *.ucf, *.qsf and *.ice. + +FILE_PATTERNS = *.c \ + *.h + +# The RECURSIVE tag can be used to specify whether or not subdirectories should +# be searched for input files as well. +# The default value is: NO. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should be +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. +# +# Note that relative paths are relative to the directory from which doxygen is +# run. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or +# directories that are symbolic links (a Unix file system feature) are excluded +# from the input. +# The default value is: NO. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. 
+# +# Note that the wildcards are matched against the file with absolute path, so to +# exclude all test directories for example use the pattern */test/* + +EXCLUDE_PATTERNS = */test_* \ + */.git/* \ + */perf_* \ + */tls_test_* \ + sync_config.h + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test +# +# Note that the wildcards are matched against the file with absolute path, so to +# exclude all test directories use the pattern */test/* + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or directories +# that contain example code fragments that are included (see the \include +# command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and +# *.h) to filter out the source-files in the directories. If left blank all +# files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude commands +# irrespective of the value of the RECURSIVE tag. +# The default value is: NO. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or directories +# that contain images that are to be included in the documentation (see the +# \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. 
Doxygen will invoke the filter program +# by executing (via popen()) the command: +# +# <filter> <input-file> +# +# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the +# name of an input file. Doxygen will then use the output that the filter +# program writes to standard output. If FILTER_PATTERNS is specified, this tag +# will be ignored. +# +# Note that the filter must not add or remove lines; it is applied before the +# code is scanned, but not when the output code is generated. If lines are added +# or removed, the anchors will not be placed correctly. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# properly processed by doxygen. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. The filters are a list of the form: pattern=filter +# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how +# filters are used. If the FILTER_PATTERNS tag is empty or if none of the +# patterns match the file name, INPUT_FILTER is applied. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# properly processed by doxygen. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will also be used to filter the input files that are used for +# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES). +# The default value is: NO. + +FILTER_SOURCE_FILES = NO + +# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file +# pattern. 
A pattern will override the setting for FILTER_PATTERN (if any) and +# it is also possible to disable source filtering for a specific pattern using +# *.ext= (so without naming a filter). +# This tag requires that the tag FILTER_SOURCE_FILES is set to YES. + +FILTER_SOURCE_PATTERNS = + +# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that +# is part of the input, its contents will be placed on the main page +# (index.html). This can be useful if you have a project on for instance GitHub +# and want to reuse the introduction page also for the doxygen output. + +USE_MDFILE_AS_MAINPAGE = + +#--------------------------------------------------------------------------- +# Configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will be +# generated. Documented entities will be cross-referenced with these sources. +# +# Note: To get rid of all source code in the generated output, make sure that +# also VERBATIM_HEADERS is set to NO. +# The default value is: NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body of functions, +# classes and enums directly into the documentation. +# The default value is: NO. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any +# special comment blocks from generated source code fragments. Normal C, C++ and +# Fortran comments will always remain visible. +# The default value is: YES. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES then for each documented +# entity all documented functions referencing it will be listed. +# The default value is: NO. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES then for each documented function +# all documented entities called/used by that function will be listed. 
+# The default value is: NO. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set +# to YES then the hyperlinks from functions in REFERENCES_RELATION and +# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will +# link to the documentation. +# The default value is: YES. + +REFERENCES_LINK_SOURCE = YES + +# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the +# source code will show a tooltip with additional information such as prototype, +# brief description and links to the definition and documentation. Since this +# will make the HTML file larger and loading of large files a bit slower, you +# can opt to disable this feature. +# The default value is: YES. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +SOURCE_TOOLTIPS = YES + +# If the USE_HTAGS tag is set to YES then the references to source code will +# point to the HTML generated by the htags(1) tool instead of doxygen built-in +# source browser. The htags tool is part of GNU's global source tagging system +# (see https://www.gnu.org/software/global/global.html). You will need version +# 4.8.6 or higher. +# +# To use it do the following: +# - Install the latest version of global +# - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file +# - Make sure the INPUT points to the root of the source tree +# - Run doxygen as normal +# +# Doxygen will invoke htags (and that will in turn invoke gtags), so these +# tools must be available from the command line (i.e. in the search path). +# +# The result: instead of the source browser generated by doxygen, the links to +# source code will now point to the output of htags. +# The default value is: NO. +# This tag requires that the tag SOURCE_BROWSER is set to YES. 
+ +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a +# verbatim copy of the header file for each class for which an include is +# specified. Set to NO to disable this. +# See also: Section \class. +# The default value is: YES. + +VERBATIM_HEADERS = NO + +# If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the +# clang parser (see: +# http://clang.llvm.org/) for more accurate parsing at the cost of reduced +# performance. This can be particularly helpful with template rich C++ code for +# which doxygen's built-in parser lacks the necessary type information. +# Note: The availability of this option depends on whether or not doxygen was +# generated with the -Duse_libclang=ON option for CMake. +# The default value is: NO. + +CLANG_ASSISTED_PARSING = NO + +# If clang assisted parsing is enabled and the CLANG_ADD_INC_PATHS tag is set to +# YES then doxygen will add the directory of each input to the include path. +# The default value is: YES. + +CLANG_ADD_INC_PATHS = YES + +# If clang assisted parsing is enabled you can provide the compiler with command +# line options that you would normally use when invoking the compiler. Note that +# the include paths will already be set by doxygen for the files and directories +# specified with INPUT and INCLUDE_PATH. +# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. + +CLANG_OPTIONS = + +# If clang assisted parsing is enabled you can provide the clang parser with the +# path to the directory containing a file called compile_commands.json. This +# file is the compilation database (see: +# http://clang.llvm.org/docs/HowToSetupToolingForLLVM.html) containing the +# options used when the source files were built. This is equivalent to +# specifying the -p option to a clang tool, such as clang-check. These options +# will then be passed to the parser. Any options specified with CLANG_OPTIONS +# will be added as well. 
+# Note: The availability of this option depends on whether or not doxygen was +# generated with the -Duse_libclang=ON option for CMake. + +CLANG_DATABASE_PATH = + +#--------------------------------------------------------------------------- +# Configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all +# compounds will be generated. Enable this if the project contains a lot of +# classes, structs, unions or interfaces. +# The default value is: YES. + +ALPHABETICAL_INDEX = YES + +# In case all classes in a project start with a common prefix, all classes will +# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag +# can be used to specify a prefix (or a list of prefixes) that should be ignored +# while generating the index headers. +# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. + +IGNORE_PREFIX = SYNC_ + +#--------------------------------------------------------------------------- +# Configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output +# The default value is: YES. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. +# The default directory is: html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_OUTPUT = html + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each +# generated HTML page (for example: .htm, .php, .asp). +# The default value is: .html. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a user-defined HTML header file for +# each generated HTML page. If the tag is left blank doxygen will generate a +# standard header. +# +# To get valid HTML the header file that includes any scripts and style sheets +# that doxygen needs, which is dependent on the configuration options used (e.g. +# the setting GENERATE_TREEVIEW). It is highly recommended to start with a +# default header using +# doxygen -w html new_header.html new_footer.html new_stylesheet.css +# YourConfigFile +# and then modify the file new_header.html. See also section "Doxygen usage" +# for information on how to generate the default header that doxygen normally +# uses. +# Note: The header is subject to change so you typically have to regenerate the +# default header when upgrading to a newer version of doxygen. For a description +# of the possible markers and block names see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_HEADER = + +# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each +# generated HTML page. If the tag is left blank doxygen will generate a standard +# footer. See HTML_HEADER for more information on how to generate a default +# footer and what special commands can be used inside the footer. See also +# section "Doxygen usage" for information on how to generate the default footer +# that doxygen normally uses. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FOOTER = + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style +# sheet that is used by each HTML page. It can be used to fine-tune the look of +# the HTML output. If left blank doxygen will generate a default style sheet. +# See also section "Doxygen usage" for information on how to generate the style +# sheet that doxygen normally uses. 
+# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as +# it is more robust and this tag (HTML_STYLESHEET) will in the future become +# obsolete. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_STYLESHEET = + +# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined +# cascading style sheets that are included after the standard style sheets +# created by doxygen. Using this option one can overrule certain style aspects. +# This is preferred over using HTML_STYLESHEET since it does not replace the +# standard style sheet and is therefore more robust against future updates. +# Doxygen will copy the style sheet files to the output directory. +# Note: The order of the extra style sheet files is of importance (e.g. the last +# style sheet in the list overrules the setting of the previous ones in the +# list). For an example see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_STYLESHEET = + +# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or +# other source files which should be copied to the HTML output directory. Note +# that these files will be copied to the base HTML output directory. Use the +# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these +# files. In the HTML_STYLESHEET file, use the file name only. Also note that the +# files will be copied as-is; there are no commands or markers available. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_FILES = + +# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen +# will adjust the colors in the style sheet and background images according to +# this color. Hue is specified as an angle on a colorwheel, see +# https://en.wikipedia.org/wiki/Hue for more information. For instance the value +# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 +# purple, and 360 is red again. 
+# Minimum value: 0, maximum value: 359, default value: 220. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_HUE = 220 + +# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors +# in the HTML output. For a value of 0 the output will use grayscales only. A +# value of 255 will produce the most vivid colors. +# Minimum value: 0, maximum value: 255, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_SAT = 100 + +# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the +# luminance component of the colors in the HTML output. Values below 100 +# gradually make the output lighter, whereas values above 100 make the output +# darker. The value divided by 100 is the actual gamma applied, so 80 represents +# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not +# change the gamma. +# Minimum value: 40, maximum value: 240, default value: 80. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_GAMMA = 80 + +# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML +# page will contain the date and time when the page was generated. Setting this +# to YES can help to show when doxygen was last run and thus if the +# documentation is up to date. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_TIMESTAMP = NO + +# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML +# documentation will contain a main index with vertical navigation menus that +# are dynamically created via JavaScript. If disabled, the navigation index will +# consists of multiple levels of tabs that are statically embedded in every HTML +# page. Disable this option to support browsers that do not have JavaScript, +# like the Qt help browser. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +HTML_DYNAMIC_MENUS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_DYNAMIC_SECTIONS = NO + +# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries +# shown in the various tree structured indices initially; the user can expand +# and collapse entries dynamically later on. Doxygen will expand the tree to +# such a level that at most the specified number of entries are visible (unless +# a fully collapsed tree already exceeds this amount). So setting the number of +# entries 1 will produce a full collapsed tree by default. 0 is a special value +# representing an infinite number of entries and will result in a full expanded +# tree by default. +# Minimum value: 0, maximum value: 9999, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_INDEX_NUM_ENTRIES = 100 + +# If the GENERATE_DOCSET tag is set to YES, additional index files will be +# generated that can be used as input for Apple's Xcode 3 integrated development +# environment (see: +# https://developer.apple.com/xcode/), introduced with OSX 10.5 (Leopard). To +# create a documentation set, doxygen will generate a Makefile in the HTML +# output directory. Running make will produce the docset in that directory and +# running make install will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at +# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy +# genXcode/_index.html for more information. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_DOCSET = NO + +# This tag determines the name of the docset feed. 
A documentation feed provides +# an umbrella under which multiple documentation sets from a single provider +# (such as a company or product suite) can be grouped. +# The default value is: Doxygen generated docs. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_FEEDNAME = "GNU Taler Source Documentation" + +# This tag specifies a string that should uniquely identify the documentation +# set bundle. This should be a reverse domain-name style string, e.g. +# com.mycompany.MyDocSet. Doxygen will append .docset to the name. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_BUNDLE_ID = net.taler.sync + +# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify +# the documentation publisher. This should be a reverse domain-name style +# string, e.g. com.mycompany.MyDocSet.documentation. +# The default value is: org.doxygen.Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_ID = net.taler + +# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. +# The default value is: Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_NAME = Taler Systems SA + +# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three +# additional HTML index files: index.hhp, index.hhc, and index.hhk. The +# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop +# (see: +# https://www.microsoft.com/en-us/download/details.aspx?id=21138) on Windows. +# +# The HTML Help Workshop contains a compiler that can convert all HTML output +# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML +# files are now used as the Windows 98 help format, and will replace the old +# Windows help format (.hlp) on all Windows platforms in the future. 
Compressed +# HTML files also contain an index, a table of contents, and you can search for +# words in the documentation. The HTML workshop also contains a viewer for +# compressed HTML files. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_HTMLHELP = NO + +# The CHM_FILE tag can be used to specify the file name of the resulting .chm +# file. You can add a path in front of the file if the result should not be +# written to the html output directory. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_FILE = + +# The HHC_LOCATION tag can be used to specify the location (absolute path +# including file name) of the HTML help compiler (hhc.exe). If non-empty, +# doxygen will try to run the HTML help compiler on the generated index.hhp. +# The file has to be specified with full path. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +HHC_LOCATION = + +# The GENERATE_CHI flag controls if a separate .chi index file is generated +# (YES) or that it should be included in the main .chm file (NO). +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +GENERATE_CHI = NO + +# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc) +# and project file content. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_INDEX_ENCODING = + +# The BINARY_TOC flag controls whether a binary table of contents is generated +# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it +# enables the Previous and Next buttons. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +BINARY_TOC = NO + +# The TOC_EXPAND flag can be set to YES to add extra items for group members to +# the table of contents of the HTML help documentation and to the tree view. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. 
+ +TOC_EXPAND = NO + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and +# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that +# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help +# (.qch) of the generated HTML documentation. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify +# the file name of the resulting .qch file. The path specified is relative to +# the HTML output folder. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help +# Project output. For more information please see Qt Help Project / Namespace +# (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace). +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_NAMESPACE = org.doxygen.Project + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt +# Help Project output. For more information please see Qt Help Project / Virtual +# Folders (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-folders). +# The default value is: doc. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_VIRTUAL_FOLDER = doc + +# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom +# filter to add. For more information please see Qt Help Project / Custom +# Filters (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the +# custom filter to add. 
For more information please see Qt Help Project / Custom +# Filters (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this +# project's filter section matches. Qt Help Project / Filter Attributes (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_SECT_FILTER_ATTRS = + +# The QHG_LOCATION tag can be used to specify the location (absolute path +# including file name) of Qt's qhelpgenerator. If non-empty doxygen will try to +# run qhelpgenerator on the generated .qhp file. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHG_LOCATION = + +# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be +# generated, together with the HTML files, they form an Eclipse help plugin. To +# install this plugin and make it available under the help contents menu in +# Eclipse, the contents of the directory containing the HTML and XML files needs +# to be copied into the plugins directory of eclipse. The name of the directory +# within the plugins directory should be the same as the ECLIPSE_DOC_ID value. +# After copying Eclipse needs to be restarted before the help appears. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_ECLIPSEHELP = NO + +# A unique identifier for the Eclipse help plugin. When installing the plugin +# the directory name containing the HTML and XML files should also have this +# name. Each documentation set should have its own identifier. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. 
+ +ECLIPSE_DOC_ID = org.doxygen.Project + +# If you want full control over the layout of the generated HTML pages it might +# be necessary to disable the index and replace it with your own. The +# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top +# of each HTML page. A value of NO enables the index and the value YES disables +# it. Since the tabs in the index contain the same information as the navigation +# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +DISABLE_INDEX = NO + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. If the tag +# value is set to YES, a side panel will be generated containing a tree-like +# index structure (just like the one that is generated for HTML Help). For this +# to work a browser that supports JavaScript, DHTML, CSS and frames is required +# (i.e. any modern browser). Windows users are probably better off using the +# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can +# further fine-tune the look of the index. As an example, the default style +# sheet generated by doxygen has an example that shows how to put an image at +# the root of the tree instead of the PROJECT_NAME. Since the tree basically has +# the same information as the tab index, you could consider setting +# DISABLE_INDEX to YES when enabling this option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_TREEVIEW = YES + +# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that +# doxygen will group on one line in the generated HTML documentation. +# +# Note that a value of 0 will completely suppress the enum values from appearing +# in the overview section. +# Minimum value: 0, maximum value: 20, default value: 4. 
+# This tag requires that the tag GENERATE_HTML is set to YES. + +ENUM_VALUES_PER_LINE = 4 + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used +# to set the initial width (in pixels) of the frame in which the tree is shown. +# Minimum value: 0, maximum value: 1500, default value: 250. +# This tag requires that the tag GENERATE_HTML is set to YES. + +TREEVIEW_WIDTH = 250 + +# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to +# external symbols imported via tag files in a separate window. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +EXT_LINKS_IN_WINDOW = NO + +# If the HTML_FORMULA_FORMAT option is set to svg, doxygen will use the pdf2svg +# tool (see https://github.com/dawbarton/pdf2svg) or inkscape (see +# https://inkscape.org) to generate formulas as SVG images instead of PNGs for +# the HTML output. These images will generally look nicer at scaled resolutions. +# Possible values are: png (the default) and svg (looks nicer but requires the +# pdf2svg or inkscape tool). +# The default value is: png. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FORMULA_FORMAT = png + +# Use this tag to change the font size of LaTeX formulas included as images in +# the HTML documentation. When you change the font size after a successful +# doxygen run you need to manually remove any form_*.png images from the HTML +# output directory to force them to be regenerated. +# Minimum value: 8, maximum value: 50, default value: 10. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_FONTSIZE = 10 + +# Use the FORMULA_TRANSPARENT tag to determine whether or not the images +# generated for formulas are transparent PNGs. Transparent PNGs are not +# supported properly for IE 6.0, but are supported on all modern browsers. 
+# +# Note that when changing this option you need to delete any form_*.png files in +# the HTML output directory before the changes have effect. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_TRANSPARENT = YES + +# The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands +# to create new LaTeX commands to be used in formulas as building blocks. See +# the section "Including formulas" for details. + +FORMULA_MACROFILE = + +# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see +# https://www.mathjax.org) which uses client side JavaScript for the rendering +# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX +# installed or if you want to formulas look prettier in the HTML output. When +# enabled you may also need to install MathJax separately and configure the path +# to it using the MATHJAX_RELPATH option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +USE_MATHJAX = NO + +# When MathJax is enabled you can set the default output format to be used for +# the MathJax output. See the MathJax site (see: +# http://docs.mathjax.org/en/v2.7-latest/output.html) for more details. +# Possible values are: HTML-CSS (which is slower, but has the best +# compatibility), NativeMML (i.e. MathML) and SVG. +# The default value is: HTML-CSS. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_FORMAT = HTML-CSS + +# When MathJax is enabled you need to specify the location relative to the HTML +# output directory using the MATHJAX_RELPATH option. The destination directory +# should contain the MathJax.js script. For instance, if the mathjax directory +# is located at the same level as the HTML output directory, then +# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax +# Content Delivery Network so you can quickly see the result without installing +# MathJax. 
However, it is strongly recommended to install a local copy of +# MathJax from https://www.mathjax.org before deployment. +# The default value is: https://cdn.jsdelivr.net/npm/mathjax@2. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_RELPATH = https://cdn.jsdelivr.net/npm/mathjax@2 + +# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax +# extension names that should be enabled during MathJax rendering. For example +# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_EXTENSIONS = + +# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces +# of code that will be used on startup of the MathJax code. See the MathJax site +# (see: +# http://docs.mathjax.org/en/v2.7-latest/output.html) for more details. For an +# example see the documentation. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_CODEFILE = + +# When the SEARCHENGINE tag is enabled doxygen will generate a search box for +# the HTML output. The underlying search engine uses javascript and DHTML and +# should work on any modern browser. Note that when using HTML help +# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET) +# there is already a search function so this one should typically be disabled. +# For large projects the javascript based search engine can be slow, then +# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to +# search using the keyboard; to jump to the search box use <access key> + S +# (what the <access key> is depends on the OS and browser, but it is typically +# <CTRL>, <ALT>/<option>, or both). Inside the search box use the <cursor down +# key> to jump into the search results window, the results can be navigated +# using the <cursor keys>. Press <Enter> to select an item or <escape> to cancel +# the search. 
The filter options can be selected when the cursor is inside the +# search box by pressing <Shift>+<cursor down>. Also here use the <cursor keys> +# to select a filter and <Enter> or <escape> to activate or cancel the filter +# option. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +SEARCHENGINE = YES + +# When the SERVER_BASED_SEARCH tag is enabled the search engine will be +# implemented using a web server instead of a web client using JavaScript. There +# are two flavors of web server based searching depending on the EXTERNAL_SEARCH +# setting. When disabled, doxygen will generate a PHP script for searching and +# an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing +# and searching needs to be provided by external tools. See the section +# "External Indexing and Searching" for details. +# The default value is: NO. +# This tag requires that the tag SEARCHENGINE is set to YES. + +SERVER_BASED_SEARCH = NO + +# When EXTERNAL_SEARCH tag is enabled doxygen will no longer generate the PHP +# script for searching. Instead the search results are written to an XML file +# which needs to be processed by an external indexer. Doxygen will invoke an +# external search engine pointed to by the SEARCHENGINE_URL option to obtain the +# search results. +# +# Doxygen ships with an example indexer (doxyindexer) and search engine +# (doxysearch.cgi) which are based on the open source search engine library +# Xapian (see: +# https://xapian.org/). +# +# See the section "External Indexing and Searching" for details. +# The default value is: NO. +# This tag requires that the tag SEARCHENGINE is set to YES. + +EXTERNAL_SEARCH = NO + +# The SEARCHENGINE_URL should point to a search engine hosted by a web server +# which will return the search results when EXTERNAL_SEARCH is enabled. 
+# +# Doxygen ships with an example indexer (doxyindexer) and search engine +# (doxysearch.cgi) which are based on the open source search engine library +# Xapian (see: +# https://xapian.org/). See the section "External Indexing and Searching" for +# details. +# This tag requires that the tag SEARCHENGINE is set to YES. + +SEARCHENGINE_URL = + +# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed +# search data is written to a file for indexing by an external tool. With the +# SEARCHDATA_FILE tag the name of this file can be specified. +# The default file is: searchdata.xml. +# This tag requires that the tag SEARCHENGINE is set to YES. + +SEARCHDATA_FILE = searchdata.xml + +# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the +# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is +# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple +# projects and redirect the results back to the right project. +# This tag requires that the tag SEARCHENGINE is set to YES. + +EXTERNAL_SEARCH_ID = + +# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen +# projects other than the one defined by this configuration file, but that are +# all added to the same external search index. Each project needs to have a +# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id of +# to a relative location where the documentation can be found. The format is: +# EXTRA_SEARCH_MAPPINGS = tagname1=loc1 tagname2=loc2 ... +# This tag requires that the tag SEARCHENGINE is set to YES. + +EXTRA_SEARCH_MAPPINGS = + +#--------------------------------------------------------------------------- +# Configuration options related to the LaTeX output +#--------------------------------------------------------------------------- + +# If the GENERATE_LATEX tag is set to YES, doxygen will generate LaTeX output. +# The default value is: YES. 
+ +GENERATE_LATEX = NO + +# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. +# The default directory is: latex. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_OUTPUT = latex + +# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be +# invoked. +# +# Note that when not enabling USE_PDFLATEX the default is latex when enabling +# USE_PDFLATEX the default is pdflatex and when in the later case latex is +# chosen this is overwritten by pdflatex. For specific output languages the +# default can have been set differently, this depends on the implementation of +# the output language. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_CMD_NAME = latex + +# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate +# index for LaTeX. +# Note: This tag is used in the Makefile / make.bat. +# See also: LATEX_MAKEINDEX_CMD for the part in the generated output file +# (.tex). +# The default file is: makeindex. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +MAKEINDEX_CMD_NAME = makeindex + +# The LATEX_MAKEINDEX_CMD tag can be used to specify the command name to +# generate index for LaTeX. In case there is no backslash (\) as first character +# it will be automatically added in the LaTeX code. +# Note: This tag is used in the generated output file (.tex). +# See also: MAKEINDEX_CMD_NAME for the part in the Makefile / make.bat. +# The default value is: makeindex. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_MAKEINDEX_CMD = makeindex + +# If the COMPACT_LATEX tag is set to YES, doxygen generates more compact LaTeX +# documents. This may be useful for small projects and may help to save some +# trees in general. +# The default value is: NO. +# This tag requires that the tag GENERATE_LATEX is set to YES. 
+ +COMPACT_LATEX = YES + +# The PAPER_TYPE tag can be used to set the paper type that is used by the +# printer. +# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x +# 14 inches) and executive (7.25 x 10.5 inches). +# The default value is: a4. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +PAPER_TYPE = a4 + +# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names +# that should be included in the LaTeX output. The package can be specified just +# by its name or with the correct syntax as to be used with the LaTeX +# \usepackage command. To get the times font for instance you can specify : +# EXTRA_PACKAGES=times or EXTRA_PACKAGES={times} +# To use the option intlimits with the amsmath package you can specify: +# EXTRA_PACKAGES=[intlimits]{amsmath} +# If left blank no extra packages will be included. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +EXTRA_PACKAGES = + +# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the +# generated LaTeX document. The header should contain everything until the first +# chapter. If it is left blank doxygen will generate a standard header. See +# section "Doxygen usage" for information on how to let doxygen write the +# default header to a separate file. +# +# Note: Only use a user-defined header if you know what you are doing! The +# following commands have a special meaning inside the header: $title, +# $datetime, $date, $doxygenversion, $projectname, $projectnumber, +# $projectbrief, $projectlogo. Doxygen will replace $title with the empty +# string, for the replacement values of the other commands the user is referred +# to HTML_HEADER. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_HEADER = + +# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the +# generated LaTeX document. The footer should contain everything after the last +# chapter. 
If it is left blank doxygen will generate a standard footer. See +# LATEX_HEADER for more information on how to generate a default footer and what +# special commands can be used inside the footer. +# +# Note: Only use a user-defined footer if you know what you are doing! +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_FOOTER = + +# The LATEX_EXTRA_STYLESHEET tag can be used to specify additional user-defined +# LaTeX style sheets that are included after the standard style sheets created +# by doxygen. Using this option one can overrule certain style aspects. Doxygen +# will copy the style sheet files to the output directory. +# Note: The order of the extra style sheet files is of importance (e.g. the last +# style sheet in the list overrules the setting of the previous ones in the +# list). +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_EXTRA_STYLESHEET = + +# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or +# other source files which should be copied to the LATEX_OUTPUT output +# directory. Note that the files will be copied as-is; there are no commands or +# markers available. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_EXTRA_FILES = + +# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is +# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will +# contain links (just like the HTML output) instead of page references. This +# makes the output suitable for online browsing using a PDF viewer. +# The default value is: YES. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +PDF_HYPERLINKS = YES + +# If the USE_PDFLATEX tag is set to YES, doxygen will use the engine as +# specified with LATEX_CMD_NAME to generate the PDF file directly from the LaTeX +# files. Set this option to YES, to get a higher quality PDF documentation. +# +# See also section LATEX_CMD_NAME for selecting the engine. 
+# The default value is: YES. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +USE_PDFLATEX = YES + +# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode +# command to the generated LaTeX files. This will instruct LaTeX to keep running +# if errors occur, instead of asking the user for help. This option is also used +# when generating formulas in HTML. +# The default value is: NO. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_BATCHMODE = NO + +# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the +# index chapters (such as File Index, Compound Index, etc.) in the output. +# The default value is: NO. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_HIDE_INDICES = NO + +# The LATEX_BIB_STYLE tag can be used to specify the style to use for the +# bibliography, e.g. plainnat, or ieeetr. See +# https://en.wikipedia.org/wiki/BibTeX and \cite for more info. +# The default value is: plain. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_BIB_STYLE = plain + +# If the LATEX_TIMESTAMP tag is set to YES then the footer of each generated +# page will contain the date and time when the page was generated. Setting this +# to NO can help when comparing the output of multiple runs. +# The default value is: NO. +# This tag requires that the tag GENERATE_LATEX is set to YES. + +LATEX_TIMESTAMP = NO + +# The LATEX_EMOJI_DIRECTORY tag is used to specify the (relative or absolute) +# path from which the emoji images will be read. If a relative path is entered, +# it will be relative to the LATEX_OUTPUT directory. If left blank the +# LATEX_OUTPUT directory will be used. +# This tag requires that the tag GENERATE_LATEX is set to YES. 
+ +LATEX_EMOJI_DIRECTORY = + +#--------------------------------------------------------------------------- +# Configuration options related to the RTF output +#--------------------------------------------------------------------------- + +# If the GENERATE_RTF tag is set to YES, doxygen will generate RTF output. The +# RTF output is optimized for Word 97 and may not look too pretty with other RTF +# readers/editors. +# The default value is: NO. + +GENERATE_RTF = NO + +# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. +# The default directory is: rtf. +# This tag requires that the tag GENERATE_RTF is set to YES. + +RTF_OUTPUT = rtf + +# If the COMPACT_RTF tag is set to YES, doxygen generates more compact RTF +# documents. This may be useful for small projects and may help to save some +# trees in general. +# The default value is: NO. +# This tag requires that the tag GENERATE_RTF is set to YES. + +COMPACT_RTF = YES + +# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will +# contain hyperlink fields. The RTF file will contain links (just like the HTML +# output) instead of page references. This makes the output suitable for online +# browsing using Word or some other Word compatible readers that support those +# fields. +# +# Note: WordPad (write) and others do not support links. +# The default value is: NO. +# This tag requires that the tag GENERATE_RTF is set to YES. + +RTF_HYPERLINKS = NO + +# Load stylesheet definitions from file. Syntax is similar to doxygen's +# configuration file, i.e. a series of assignments. You only have to provide +# replacements, missing definitions are set to their default value. +# +# See also section "Doxygen usage" for information on how to generate the +# default style sheet that doxygen normally uses. +# This tag requires that the tag GENERATE_RTF is set to YES. 
+ +RTF_STYLESHEET_FILE = + +# Set optional variables used in the generation of an RTF document. Syntax is +# similar to doxygen's configuration file. A template extensions file can be +# generated using doxygen -e rtf extensionFile. +# This tag requires that the tag GENERATE_RTF is set to YES. + +RTF_EXTENSIONS_FILE = + +#--------------------------------------------------------------------------- +# Configuration options related to the man page output +#--------------------------------------------------------------------------- + +# If the GENERATE_MAN tag is set to YES, doxygen will generate man pages for +# classes and files. +# The default value is: NO. + +GENERATE_MAN = NO + +# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. A directory man3 will be created inside the directory specified by +# MAN_OUTPUT. +# The default directory is: man. +# This tag requires that the tag GENERATE_MAN is set to YES. + +MAN_OUTPUT = man + +# The MAN_EXTENSION tag determines the extension that is added to the generated +# man pages. In case the manual section does not start with a number, the number +# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is +# optional. +# The default value is: .3. +# This tag requires that the tag GENERATE_MAN is set to YES. + +MAN_EXTENSION = .3 + +# The MAN_SUBDIR tag determines the name of the directory created within +# MAN_OUTPUT in which the man pages are placed. If defaults to man followed by +# MAN_EXTENSION with the initial . removed. +# This tag requires that the tag GENERATE_MAN is set to YES. + +MAN_SUBDIR = + +# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it +# will generate one additional man file for each entity documented in the real +# man page(s). 
These additional files only source the real man page, but without +# them the man command would be unable to find the correct page. +# The default value is: NO. +# This tag requires that the tag GENERATE_MAN is set to YES. + +MAN_LINKS = NO + +#--------------------------------------------------------------------------- +# Configuration options related to the XML output +#--------------------------------------------------------------------------- + +# If the GENERATE_XML tag is set to YES, doxygen will generate an XML file that +# captures the structure of the code including all documentation. +# The default value is: NO. + +GENERATE_XML = NO + +# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. +# The default directory is: xml. +# This tag requires that the tag GENERATE_XML is set to YES. + +XML_OUTPUT = xml + +# If the XML_PROGRAMLISTING tag is set to YES, doxygen will dump the program +# listings (including syntax highlighting and cross-referencing information) to +# the XML output. Note that enabling this will significantly increase the size +# of the XML output. +# The default value is: YES. +# This tag requires that the tag GENERATE_XML is set to YES. + +XML_PROGRAMLISTING = YES + +# If the XML_NS_MEMB_FILE_SCOPE tag is set to YES, doxygen will include +# namespace members in file scope as well, matching the HTML output. +# The default value is: NO. +# This tag requires that the tag GENERATE_XML is set to YES. + +XML_NS_MEMB_FILE_SCOPE = NO + +#--------------------------------------------------------------------------- +# Configuration options related to the DOCBOOK output +#--------------------------------------------------------------------------- + +# If the GENERATE_DOCBOOK tag is set to YES, doxygen will generate Docbook files +# that can be used to generate PDF. +# The default value is: NO. 
+ +GENERATE_DOCBOOK = NO + +# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put. +# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in +# front of it. +# The default directory is: docbook. +# This tag requires that the tag GENERATE_DOCBOOK is set to YES. + +DOCBOOK_OUTPUT = docbook + +#--------------------------------------------------------------------------- +# Configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- + +# If the GENERATE_AUTOGEN_DEF tag is set to YES, doxygen will generate an +# AutoGen Definitions (see http://autogen.sourceforge.net/) file that captures +# the structure of the code including all documentation. Note that this feature +# is still experimental and incomplete at the moment. +# The default value is: NO. + +GENERATE_AUTOGEN_DEF = NO + +#--------------------------------------------------------------------------- +# Configuration options related to the Perl module output +#--------------------------------------------------------------------------- + +# If the GENERATE_PERLMOD tag is set to YES, doxygen will generate a Perl module +# file that captures the structure of the code including all documentation. +# +# Note that this feature is still experimental and incomplete at the moment. +# The default value is: NO. + +GENERATE_PERLMOD = NO + +# If the PERLMOD_LATEX tag is set to YES, doxygen will generate the necessary +# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI +# output from the Perl module output. +# The default value is: NO. +# This tag requires that the tag GENERATE_PERLMOD is set to YES. + +PERLMOD_LATEX = NO + +# If the PERLMOD_PRETTY tag is set to YES, the Perl module output will be nicely +# formatted so it can be parsed by a human reader. This is useful if you want to +# understand what is going on. 
On the other hand, if this tag is set to NO, the +# size of the Perl module output will be much smaller and Perl will parse it +# just the same. +# The default value is: YES. +# This tag requires that the tag GENERATE_PERLMOD is set to YES. + +PERLMOD_PRETTY = YES + +# The names of the make variables in the generated doxyrules.make file are +# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. This is useful +# so different doxyrules.make files included by the same Makefile don't +# overwrite each other's variables. +# This tag requires that the tag GENERATE_PERLMOD is set to YES. + +PERLMOD_MAKEVAR_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- + +# If the ENABLE_PREPROCESSING tag is set to YES, doxygen will evaluate all +# C-preprocessor directives found in the sources and include files. +# The default value is: YES. + +ENABLE_PREPROCESSING = YES + +# If the MACRO_EXPANSION tag is set to YES, doxygen will expand all macro names +# in the source code. If set to NO, only conditional compilation will be +# performed. Macro expansion can be done in a controlled way by setting +# EXPAND_ONLY_PREDEF to YES. +# The default value is: NO. +# This tag requires that the tag ENABLE_PREPROCESSING is set to YES. + +MACRO_EXPANSION = YES + +# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then +# the macro expansion is limited to the macros specified with the PREDEFINED and +# EXPAND_AS_DEFINED tags. +# The default value is: NO. +# This tag requires that the tag ENABLE_PREPROCESSING is set to YES. + +EXPAND_ONLY_PREDEF = NO + +# If the SEARCH_INCLUDES tag is set to YES, the include files in the +# INCLUDE_PATH will be searched if a #include is found. +# The default value is: YES. +# This tag requires that the tag ENABLE_PREPROCESSING is set to YES. 
+ +SEARCH_INCLUDES = YES + +# The INCLUDE_PATH tag can be used to specify one or more directories that +# contain include files that are not input files but should be processed by the +# preprocessor. +# This tag requires that the tag SEARCH_INCLUDES is set to YES. + +INCLUDE_PATH = + +# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard +# patterns (like *.h and *.hpp) to filter out the header-files in the +# directories. If left blank, the patterns specified with FILE_PATTERNS will be +# used. +# This tag requires that the tag ENABLE_PREPROCESSING is set to YES. + +INCLUDE_FILE_PATTERNS = + +# The PREDEFINED tag can be used to specify one or more macro names that are +# defined before the preprocessor is started (similar to the -D option of e.g. +# gcc). The argument of the tag is a list of macros of the form: name or +# name=definition (no spaces). If the definition and the "=" are omitted, "=1" +# is assumed. To prevent a macro definition from being undefined via #undef or +# recursively expanded use the := operator instead of the = operator. +# This tag requires that the tag ENABLE_PREPROCESSING is set to YES. + +PREDEFINED = + +# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this +# tag can be used to specify a list of macro names that should be expanded. The +# macro definition that is found in the sources will be used. Use the PREDEFINED +# tag if you want to use a different macro definition that overrules the +# definition found in the source code. +# This tag requires that the tag ENABLE_PREPROCESSING is set to YES. + +EXPAND_AS_DEFINED = + +# If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will +# remove all references to function-like macros that are alone on a line, have +# an all uppercase name, and do not end with a semicolon. Such function macros +# are typically used for boiler-plate code, and will confuse the parser if not +# removed. +# The default value is: YES. 
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES. + +SKIP_FUNCTION_MACROS = YES + +#--------------------------------------------------------------------------- +# Configuration options related to external references +#--------------------------------------------------------------------------- + +# The TAGFILES tag can be used to specify one or more tag files. For each tag +# file the location of the external documentation should be added. The format of +# a tag file without this location is as follows: +# TAGFILES = file1 file2 ... +# Adding location for the tag files is done as follows: +# TAGFILES = file1=loc1 "file2 = loc2" ... +# where loc1 and loc2 can be relative or absolute paths or URLs. See the +# section "Linking to external documentation" for more information about the use +# of tag files. +# Note: Each tag file must have a unique name (where the name does NOT include +# the path). If a tag file is not located in the directory in which doxygen is +# run, you must also specify the path to the tagfile here. + +TAGFILES = ../../contrib/gnunet.tag ../../contrib/microhttpd.tag ../../contrib/taler-exchange.tag + +# When a file name is specified after GENERATE_TAGFILE, doxygen will create a +# tag file that is based on the input files it reads. See section "Linking to +# external documentation" for more information about the usage of tag files. + +GENERATE_TAGFILE = sync.tag + +# If the ALLEXTERNALS tag is set to YES, all external class will be listed in +# the class index. If set to NO, only the inherited external classes will be +# listed. +# The default value is: NO. + +ALLEXTERNALS = NO + +# If the EXTERNAL_GROUPS tag is set to YES, all external groups will be listed +# in the modules index. If set to NO, only the current project's groups will be +# listed. +# The default value is: YES. + +EXTERNAL_GROUPS = YES + +# If the EXTERNAL_PAGES tag is set to YES, all external pages will be listed in +# the related pages index. 
If set to NO, only the current project's pages will +# be listed. +# The default value is: YES. + +EXTERNAL_PAGES = YES + +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- + +# You can include diagrams made with dia in doxygen documentation. Doxygen will +# then run dia to produce the diagram and insert it in the documentation. The +# DIA_PATH tag allows you to specify the directory where the dia binary resides. +# If left empty dia is assumed to be found in the default search path. + +DIA_PATH = + +# If set to YES the inheritance and collaboration graphs will hide inheritance +# and usage relations if the target is undocumented or is not a class. +# The default value is: YES. + +HIDE_UNDOC_RELATIONS = YES + +# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is +# available from the path. This tool is part of Graphviz (see: +# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent +# Bell Labs. The other options in this section have no effect if this option is +# set to NO +# The default value is: YES. + +HAVE_DOT = YES + +# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed +# to run in parallel. When set to 0 doxygen will base this on the number of +# processors available in the system. You can set it explicitly to a value +# larger than 0 to get control over the balance between CPU load and processing +# speed. +# Minimum value: 0, maximum value: 32, default value: 0. +# This tag requires that the tag HAVE_DOT is set to YES. + +DOT_NUM_THREADS = 0 + +# When you want a differently looking font in the dot files that doxygen +# generates you can specify the font name using DOT_FONTNAME. 
You need to make +# sure dot is able to find the font, which can be done by putting it in a +# standard location or by setting the DOTFONTPATH environment variable or by +# setting DOT_FONTPATH to the directory containing the font. +# The default value is: Helvetica. +# This tag requires that the tag HAVE_DOT is set to YES. + +DOT_FONTNAME = Helvetica + +# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of +# dot graphs. +# Minimum value: 4, maximum value: 24, default value: 10. +# This tag requires that the tag HAVE_DOT is set to YES. + +DOT_FONTSIZE = 10 + +# By default doxygen will tell dot to use the default font as specified with +# DOT_FONTNAME. If you specify a different font using DOT_FONTNAME you can set +# the path where dot can find it using this tag. +# This tag requires that the tag HAVE_DOT is set to YES. + +DOT_FONTPATH = + +# If the CLASS_GRAPH tag is set to YES then doxygen will generate a graph for +# each documented class showing the direct and indirect inheritance relations. +# Setting this tag to YES will force the CLASS_DIAGRAMS tag to NO. +# The default value is: YES. +# This tag requires that the tag HAVE_DOT is set to YES. + +CLASS_GRAPH = NO + +# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a +# graph for each documented class showing the direct and indirect implementation +# dependencies (inheritance, containment, and class references variables) of the +# class with other documented classes. +# The default value is: YES. +# This tag requires that the tag HAVE_DOT is set to YES. + +COLLABORATION_GRAPH = NO + +# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for +# groups, showing the direct groups dependencies. +# The default value is: YES. +# This tag requires that the tag HAVE_DOT is set to YES. 
+ +GROUP_GRAPHS = NO + +# If the UML_LOOK tag is set to YES, doxygen will generate inheritance and +# collaboration diagrams in a style similar to the OMG's Unified Modeling +# Language. +# The default value is: NO. +# This tag requires that the tag HAVE_DOT is set to YES. + +UML_LOOK = NO + +# If the UML_LOOK tag is enabled, the fields and methods are shown inside the +# class node. If there are many fields or methods and many nodes the graph may +# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the +# number of items for each type to make the size more manageable. Set this to 0 +# for no limit. Note that the threshold may be exceeded by 50% before the limit +# is enforced. So when you set the threshold to 10, up to 15 fields may appear, +# but if the number exceeds 15, the total amount of fields shown is limited to +# 10. +# Minimum value: 0, maximum value: 100, default value: 10. +# This tag requires that the tag UML_LOOK is set to YES. + +UML_LIMIT_NUM_FIELDS = 10 + +# If the DOT_UML_DETAILS tag is set to NO, doxygen will show attributes and +# methods without types and arguments in the UML graphs. If the DOT_UML_DETAILS +# tag is set to YES, doxygen will add type and arguments for attributes and +# methods in the UML graphs. If the DOT_UML_DETAILS tag is set to NONE, doxygen +# will not generate fields with class member information in the UML graphs. The +# class diagrams will look similar to the default class diagrams but using UML +# notation for the relationships. +# Possible values are: NO, YES and NONE. +# The default value is: NO. +# This tag requires that the tag UML_LOOK is set to YES. + +DOT_UML_DETAILS = NO + +# The DOT_WRAP_THRESHOLD tag can be used to set the maximum number of characters +# to display on a single line. If the actual line length exceeds this threshold +# significantly it will wrapped across multiple lines. Some heuristics are apply +# to avoid ugly line breaks. 
+# Minimum value: 0, maximum value: 1000, default value: 17. +# This tag requires that the tag HAVE_DOT is set to YES. + +DOT_WRAP_THRESHOLD = 17 + +# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and +# collaboration graphs will show the relations between templates and their +# instances. +# The default value is: NO. +# This tag requires that the tag HAVE_DOT is set to YES. + +TEMPLATE_RELATIONS = NO + +# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to +# YES then doxygen will generate a graph for each documented file showing the +# direct and indirect include dependencies of the file with other documented +# files. +# The default value is: YES. +# This tag requires that the tag HAVE_DOT is set to YES. + +INCLUDE_GRAPH = YES + +# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are +# set to YES then doxygen will generate a graph for each documented file showing +# the direct and indirect include dependencies of the file with other documented +# files. +# The default value is: YES. +# This tag requires that the tag HAVE_DOT is set to YES. + +INCLUDED_BY_GRAPH = YES + +# If the CALL_GRAPH tag is set to YES then doxygen will generate a call +# dependency graph for every global function or class method. +# +# Note that enabling this option will significantly increase the time of a run. +# So in most cases it will be better to enable call graphs for selected +# functions only using the \callgraph command. Disabling a call graph can be +# accomplished by means of the command \hidecallgraph. +# The default value is: NO. +# This tag requires that the tag HAVE_DOT is set to YES. + +CALL_GRAPH = YES + +# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller +# dependency graph for every global function or class method. +# +# Note that enabling this option will significantly increase the time of a run. 
+# So in most cases it will be better to enable caller graphs for selected +# functions only using the \callergraph command. Disabling a caller graph can be +# accomplished by means of the command \hidecallergraph. +# The default value is: NO. +# This tag requires that the tag HAVE_DOT is set to YES. + +CALLER_GRAPH = YES + +# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will graphical +# hierarchy of all classes instead of a textual one. +# The default value is: YES. +# This tag requires that the tag HAVE_DOT is set to YES. + +GRAPHICAL_HIERARCHY = NO + +# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the +# dependencies a directory has on other directories in a graphical way. The +# dependency relations are determined by the #include relations between the +# files in the directories. +# The default value is: YES. +# This tag requires that the tag HAVE_DOT is set to YES. + +DIRECTORY_GRAPH = YES + +# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images +# generated by dot. For an explanation of the image formats see the section +# output formats in the documentation of the dot tool (Graphviz (see: +# http://www.graphviz.org/)). +# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order +# to make the SVG files visible in IE 9+ (other browsers do not have this +# requirement). +# Possible values are: png, png:cairo, png:cairo:cairo, png:cairo:gd, png:gd, +# png:gd:gd, jpg, jpg:cairo, jpg:cairo:gd, jpg:gd, jpg:gd:gd, gif, gif:cairo, +# gif:cairo:gd, gif:gd, gif:gd:gd, svg, png:gd, png:gd:gd, png:cairo, +# png:cairo:gd, png:cairo:cairo, png:cairo:gdiplus, png:gdiplus and +# png:gdiplus:gdiplus. +# The default value is: png. +# This tag requires that the tag HAVE_DOT is set to YES. + +DOT_IMAGE_FORMAT = png + +# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to +# enable generation of interactive SVG images that allow zooming and panning. 
+# +# Note that this requires a modern browser other than Internet Explorer. Tested +# and working are Firefox, Chrome, Safari, and Opera. +# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make +# the SVG files visible. Older versions of IE do not have SVG support. +# The default value is: NO. +# This tag requires that the tag HAVE_DOT is set to YES. + +INTERACTIVE_SVG = NO + +# The DOT_PATH tag can be used to specify the path where the dot tool can be +# found. If left blank, it is assumed the dot tool can be found in the path. +# This tag requires that the tag HAVE_DOT is set to YES. + +DOT_PATH = + +# The DOTFILE_DIRS tag can be used to specify one or more directories that +# contain dot files that are included in the documentation (see the \dotfile +# command). +# This tag requires that the tag HAVE_DOT is set to YES. + +DOTFILE_DIRS = + +# The MSCFILE_DIRS tag can be used to specify one or more directories that +# contain msc files that are included in the documentation (see the \mscfile +# command). + +MSCFILE_DIRS = + +# The DIAFILE_DIRS tag can be used to specify one or more directories that +# contain dia files that are included in the documentation (see the \diafile +# command). + +DIAFILE_DIRS = + +# When using plantuml, the PLANTUML_JAR_PATH tag should be used to specify the +# path where java can find the plantuml.jar file. If left blank, it is assumed +# PlantUML is not used or called during a preprocessing step. Doxygen will +# generate a warning when it encounters a \startuml command in this case and +# will not generate output for the diagram. + +PLANTUML_JAR_PATH = + +# When using plantuml, the PLANTUML_CFG_FILE tag can be used to specify a +# configuration file for plantuml. + +PLANTUML_CFG_FILE = + +# When using plantuml, the specified paths are searched for files specified by +# the !include statement in a plantuml block. 
+ +PLANTUML_INCLUDE_PATH = + +# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes +# that will be shown in the graph. If the number of nodes in a graph becomes +# larger than this value, doxygen will truncate the graph, which is visualized +# by representing a node as a red box. Note that doxygen if the number of direct +# children of the root node in a graph is already larger than +# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note that +# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. +# Minimum value: 0, maximum value: 10000, default value: 50. +# This tag requires that the tag HAVE_DOT is set to YES. + +DOT_GRAPH_MAX_NODES = 100 + +# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs +# generated by dot. A depth value of 3 means that only nodes reachable from the +# root by following a path via at most 3 edges will be shown. Nodes that lay +# further from the root node will be omitted. Note that setting this option to 1 +# or 2 may greatly reduce the computation time needed for large code bases. Also +# note that the size of a graph can be further restricted by +# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. +# Minimum value: 0, maximum value: 1000, default value: 0. +# This tag requires that the tag HAVE_DOT is set to YES. + +MAX_DOT_GRAPH_DEPTH = 2 + +# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent +# background. This is disabled by default, because dot on Windows does not seem +# to support this out of the box. +# +# Warning: Depending on the platform used, enabling this option may lead to +# badly anti-aliased labels on the edges of a graph (i.e. they become hard to +# read). +# The default value is: NO. +# This tag requires that the tag HAVE_DOT is set to YES. + +DOT_TRANSPARENT = YES + +# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output +# files in one run (i.e. 
multiple -o and -T options on the command line). This +# makes dot run faster, but since only newer versions of dot (>1.8.10) support +# this, this feature is disabled by default. +# The default value is: NO. +# This tag requires that the tag HAVE_DOT is set to YES. + +DOT_MULTI_TARGETS = NO + +# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page +# explaining the meaning of the various boxes and arrows in the dot generated +# graphs. +# The default value is: YES. +# This tag requires that the tag HAVE_DOT is set to YES. + +GENERATE_LEGEND = YES + +# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate +# files that are used to generate the various graphs. +# +# Note: This setting is not only used for dot files but also for msc and +# plantuml temporary files. +# The default value is: YES. + +DOT_CLEANUP = YES diff --git a/doc/prebuilt b/doc/prebuilt new file mode 160000 +Subproject b8d2d2fa2ed2a771880f451725176f256583cb2 diff --git a/m4/ax_compare_version.m4 b/m4/ax_compare_version.m4 new file mode 100644 index 0000000..ffb4997 --- /dev/null +++ b/m4/ax_compare_version.m4 @@ -0,0 +1,177 @@ +# =========================================================================== +# https://www.gnu.org/software/autoconf-archive/ax_compare_version.html +# =========================================================================== +# +# SYNOPSIS +# +# AX_COMPARE_VERSION(VERSION_A, OP, VERSION_B, [ACTION-IF-TRUE], [ACTION-IF-FALSE]) +# +# DESCRIPTION +# +# This macro compares two version strings. Due to the various number of +# minor-version numbers that can exist, and the fact that string +# comparisons are not compatible with numeric comparisons, this is not +# necessarily trivial to do in a autoconf script. This macro makes doing +# these comparisons easy. +# +# The six basic comparisons are available, as well as checking equality +# limited to a certain number of minor-version levels. 
+# +# The operator OP determines what type of comparison to do, and can be one +# of: +# +# eq - equal (test A == B) +# ne - not equal (test A != B) +# le - less than or equal (test A <= B) +# ge - greater than or equal (test A >= B) +# lt - less than (test A < B) +# gt - greater than (test A > B) +# +# Additionally, the eq and ne operator can have a number after it to limit +# the test to that number of minor versions. +# +# eq0 - equal up to the length of the shorter version +# ne0 - not equal up to the length of the shorter version +# eqN - equal up to N sub-version levels +# neN - not equal up to N sub-version levels +# +# When the condition is true, shell commands ACTION-IF-TRUE are run, +# otherwise shell commands ACTION-IF-FALSE are run. The environment +# variable 'ax_compare_version' is always set to either 'true' or 'false' +# as well. +# +# Examples: +# +# AX_COMPARE_VERSION([3.15.7],[lt],[3.15.8]) +# AX_COMPARE_VERSION([3.15],[lt],[3.15.8]) +# +# would both be true. +# +# AX_COMPARE_VERSION([3.15.7],[eq],[3.15.8]) +# AX_COMPARE_VERSION([3.15],[gt],[3.15.8]) +# +# would both be false. +# +# AX_COMPARE_VERSION([3.15.7],[eq2],[3.15.8]) +# +# would be true because it is only comparing two minor versions. +# +# AX_COMPARE_VERSION([3.15.7],[eq0],[3.15]) +# +# would be true because it is only comparing the lesser number of minor +# versions of the two values. +# +# Note: The characters that separate the version numbers do not matter. An +# empty string is the same as version 0. OP is evaluated by autoconf, not +# configure, so must be a string, not a variable. +# +# The author would like to acknowledge Guido Draheim whose advice about +# the m4_case and m4_ifvaln functions make this macro only include the +# portions necessary to perform the specific comparison specified by the +# OP argument in the final configure script. 
+# +# LICENSE +# +# Copyright (c) 2008 Tim Toolan <toolan@ele.uri.edu> +# +# Copying and distribution of this file, with or without modification, are +# permitted in any medium without royalty provided the copyright notice +# and this notice are preserved. This file is offered as-is, without any +# warranty. + +#serial 13 + +dnl ######################################################################### +AC_DEFUN([AX_COMPARE_VERSION], [ + AC_REQUIRE([AC_PROG_AWK]) + + # Used to indicate true or false condition + ax_compare_version=false + + # Convert the two version strings to be compared into a format that + # allows a simple string comparison. The end result is that a version + # string of the form 1.12.5-r617 will be converted to the form + # 0001001200050617. In other words, each number is zero padded to four + # digits, and non digits are removed. + AS_VAR_PUSHDEF([A],[ax_compare_version_A]) + A=`echo "$1" | sed -e 's/\([[0-9]]*\)/Z\1Z/g' \ + -e 's/Z\([[0-9]]\)Z/Z0\1Z/g' \ + -e 's/Z\([[0-9]][[0-9]]\)Z/Z0\1Z/g' \ + -e 's/Z\([[0-9]][[0-9]][[0-9]]\)Z/Z0\1Z/g' \ + -e 's/[[^0-9]]//g'` + + AS_VAR_PUSHDEF([B],[ax_compare_version_B]) + B=`echo "$3" | sed -e 's/\([[0-9]]*\)/Z\1Z/g' \ + -e 's/Z\([[0-9]]\)Z/Z0\1Z/g' \ + -e 's/Z\([[0-9]][[0-9]]\)Z/Z0\1Z/g' \ + -e 's/Z\([[0-9]][[0-9]][[0-9]]\)Z/Z0\1Z/g' \ + -e 's/[[^0-9]]//g'` + + dnl # In the case of le, ge, lt, and gt, the strings are sorted as necessary + dnl # then the first line is used to determine if the condition is true. + dnl # The sed right after the echo is to remove any indented white space. 
+ m4_case(m4_tolower($2), + [lt],[ + ax_compare_version=`echo "x$A +x$B" | sed 's/^ *//' | sort -r | sed "s/x${A}/false/;s/x${B}/true/;1q"` + ], + [gt],[ + ax_compare_version=`echo "x$A +x$B" | sed 's/^ *//' | sort | sed "s/x${A}/false/;s/x${B}/true/;1q"` + ], + [le],[ + ax_compare_version=`echo "x$A +x$B" | sed 's/^ *//' | sort | sed "s/x${A}/true/;s/x${B}/false/;1q"` + ], + [ge],[ + ax_compare_version=`echo "x$A +x$B" | sed 's/^ *//' | sort -r | sed "s/x${A}/true/;s/x${B}/false/;1q"` + ],[ + dnl Split the operator from the subversion count if present. + m4_bmatch(m4_substr($2,2), + [0],[ + # A count of zero means use the length of the shorter version. + # Determine the number of characters in A and B. + ax_compare_version_len_A=`echo "$A" | $AWK '{print(length)}'` + ax_compare_version_len_B=`echo "$B" | $AWK '{print(length)}'` + + # Set A to no more than B's length and B to no more than A's length. + A=`echo "$A" | sed "s/\(.\{$ax_compare_version_len_B\}\).*/\1/"` + B=`echo "$B" | sed "s/\(.\{$ax_compare_version_len_A\}\).*/\1/"` + ], + [[0-9]+],[ + # A count greater than zero means use only that many subversions + A=`echo "$A" | sed "s/\(\([[0-9]]\{4\}\)\{m4_substr($2,2)\}\).*/\1/"` + B=`echo "$B" | sed "s/\(\([[0-9]]\{4\}\)\{m4_substr($2,2)\}\).*/\1/"` + ], + [.+],[ + AC_WARNING( + [invalid OP numeric parameter: $2]) + ],[]) + + # Pad zeros at end of numbers to make same length. + ax_compare_version_tmp_A="$A`echo $B | sed 's/./0/g'`" + B="$B`echo $A | sed 's/./0/g'`" + A="$ax_compare_version_tmp_A" + + # Check for equality or inequality as necessary. + m4_case(m4_tolower(m4_substr($2,0,2)), + [eq],[ + test "x$A" = "x$B" && ax_compare_version=true + ], + [ne],[ + test "x$A" != "x$B" && ax_compare_version=true + ],[ + AC_WARNING([invalid OP parameter: $2]) + ]) + ]) + + AS_VAR_POPDEF([A])dnl + AS_VAR_POPDEF([B])dnl + + dnl # Execute ACTION-IF-TRUE / ACTION-IF-FALSE. 
+ if test "$ax_compare_version" = "true" ; then + m4_ifvaln([$4],[$4],[:])dnl + m4_ifvaln([$5],[else $5])dnl + fi +]) dnl AX_COMPARE_VERSION diff --git a/m4/ax_lib_postgresql.m4 b/m4/ax_lib_postgresql.m4 index 11b6991..cc8e750 100644 --- a/m4/ax_lib_postgresql.m4 +++ b/m4/ax_lib_postgresql.m4 @@ -1,10 +1,10 @@ # =========================================================================== -# http://www.gnu.org/software/autoconf-archive/ax_lib_postgresql.html +# https://www.gnu.org/software/autoconf-archive/ax_lib_postgresql.html # =========================================================================== # # SYNOPSIS # -# AX_LIB_POSTGRESQL([MINIMUM-VERSION]) +# AX_LIB_POSTGRESQL([MINIMUM-VERSION],[ACTION-IF-FOUND],[ACTION-IF-NOT-FOUND]) # # DESCRIPTION # @@ -23,133 +23,225 @@ # should be in the PATH) # # path - complete path to pg_config utility, use this option if pg_config -# can't be found in the PATH +# can't be found in the PATH (You could set also PG_CONFIG variable) # # This macro calls: # # AC_SUBST(POSTGRESQL_CPPFLAGS) # AC_SUBST(POSTGRESQL_LDFLAGS) +# AC_SUBST(POSTGRESQL_LIBS) # AC_SUBST(POSTGRESQL_VERSION) # # And sets: # # HAVE_POSTGRESQL # +# It execute if found ACTION-IF-FOUND (empty by default) and +# ACTION-IF-NOT-FOUND (AC_MSG_FAILURE by default) if not found. +# # LICENSE # # Copyright (c) 2008 Mateusz Loskot <mateusz@loskot.net> +# Copyright (c) 2014 Sree Harsha Totakura <sreeharsha@totakura.in> +# Copyright (c) 2018 Bastien Roucaries <rouca@debian.org> # # Copying and distribution of this file, with or without modification, are # permitted in any medium without royalty provided the copyright notice # and this notice are preserved. This file is offered as-is, without any # warranty. 
-#serial 9 +#serial 22 + +AC_DEFUN([_AX_LIB_POSTGRESQL_OLD],[ + found_postgresql="no" + _AX_LIB_POSTGRESQL_OLD_fail="no" + while true; do + AC_CACHE_CHECK([for the pg_config program], [ac_cv_path_PG_CONFIG], + [AC_PATH_PROGS_FEATURE_CHECK([PG_CONFIG], [pg_config], + [[ac_cv_path_PG_CONFIG="";$ac_path_PG_CONFIG --includedir > /dev/null \ + && ac_cv_path_PG_CONFIG=$ac_path_PG_CONFIG ac_path_PG_CONFIG_found=:]], + [ac_cv_path_PG_CONFIG=""])]) + PG_CONFIG=$ac_cv_path_PG_CONFIG + AS_IF([test "X$PG_CONFIG" = "X"],[break]) + + AC_CACHE_CHECK([for the PostgreSQL libraries CPPFLAGS],[ac_cv_POSTGRESQL_CPPFLAGS], + [ac_cv_POSTGRESQL_CPPFLAGS="-I`$PG_CONFIG --includedir`" || _AX_LIB_POSTGRESQL_OLD_fail=yes]) + AS_IF([test "X$_AX_LIB_POSTGRESQL_OLD_fail" = "Xyes"],[break]) + POSTGRESQL_CPPFLAGS="$ac_cv_POSTGRESQL_CPPFLAGS" + + AC_CACHE_CHECK([for the PostgreSQL libraries LDFLAGS],[ac_cv_POSTGRESQL_LDFLAGS], + [ac_cv_POSTGRESQL_LDFLAGS="-L`$PG_CONFIG --libdir`" || _AX_LIB_POSTGRESQL_OLD_fail=yes]) + AS_IF([test "X$_AX_LIB_POSTGRESQL_OLD_fail" = "Xyes"],[break]) + POSTGRESQL_LDFLAGS="$ac_cv_POSTGRESQL_LDFLAGS" + + AC_CACHE_CHECK([for the PostgreSQL libraries LIBS],[ac_cv_POSTGRESQL_LIBS], + [ac_cv_POSTGRESQL_LIBS="-lpq"]) + POSTGRESQL_LIBS="$ac_cv_POSTGRESQL_LIBS" + + AC_CACHE_CHECK([for the PostgreSQL version],[ac_cv_POSTGRESQL_VERSION], + [ + ac_cv_POSTGRESQL_VERSION=`$PG_CONFIG --version | sed "s/^PostgreSQL[[[:space:]]][[[:space:]]]*\([[0-9.]][[0-9.]]*\).*/\1/"` \ + || _AX_LIB_POSTGRESQL_OLD_fail=yes + ]) + AS_IF([test "X$_AX_LIB_POSTGRESQL_OLD_fail" = "Xyes"],[break]) + POSTGRESQL_VERSION="$ac_cv_POSTGRESQL_VERSION" + + + dnl + dnl Check if required version of PostgreSQL is available + dnl + AS_IF([test X"$postgresql_version_req" != "X"],[ + AC_MSG_CHECKING([if PostgreSQL version $POSTGRESQL_VERSION is >= $postgresql_version_req]) + AX_COMPARE_VERSION([$POSTGRESQL_VERSION],[ge],[$postgresql_version_req], + [found_postgresql_req_version=yes],[found_postgresql_req_version=no]) 
+ AC_MSG_RESULT([$found_postgresql_req_version]) + ]) + AS_IF([test "Xfound_postgresql_req_version" = "Xno"],[break]) + + found_postgresql="yes" + break + done +]) -AC_DEFUN([AX_LIB_POSTGRESQL], +AC_DEFUN([_AX_LIB_POSTGRESQL_PKG_CONFIG], [ - AC_ARG_WITH([postgresql], - AS_HELP_STRING([--with-postgresql=@<:@ARG@:>@], - [use PostgreSQL library @<:@default=yes@:>@, optionally specify path to pg_config] - ), - [ - if test "$withval" = "no"; then - want_postgresql="no" - elif test "$withval" = "yes"; then - want_postgresql="yes" - else - want_postgresql="yes" - PG_CONFIG="$withval" - fi - ], - [want_postgresql="yes"] - ) + AC_REQUIRE([PKG_PROG_PKG_CONFIG]) + found_postgresql=no - POSTGRESQL_CPPFLAGS="" - POSTGRESQL_LDFLAGS="" - POSTGRESQL_VERSION="" + while true; do + PKG_PROG_PKG_CONFIG + AS_IF([test X$PKG_CONFIG = X],[break]) - dnl - dnl Check PostgreSQL libraries (libpq) - dnl + _AX_LIB_POSTGRESQL_PKG_CONFIG_fail=no; + AS_IF([test "X$postgresql_version_req" = "X"], + [PKG_CHECK_EXISTS([libpq],[found_postgresql_pkg_config=yes],[found_postgresql=no])], + [PKG_CHECK_EXISTS([libpq >= "$postgresql_version_req"], + [found_postgresql=yes],[found_postgresql=no])]) + AS_IF([test "X$found_postgresql" = "no"],[break]) + + AC_CACHE_CHECK([for the PostgreSQL libraries CPPFLAGS],[ac_cv_POSTGRESQL_CPPFLAGS], + [ac_cv_POSTGRESQL_CPPFLAGS="`$PKG_CONFIG libpq --cflags-only-I`" || _AX_LIB_POSTGRESQL_PKG_CONFIG_fail=yes]) + AS_IF([test "X$_AX_LIB_POSTGRESQL_PKG_CONFIG_fail" = "Xyes"],[break]) + POSTGRESQL_CPPFLAGS="$ac_cv_POSTGRESQL_CPPFLAGS" - if test "$want_postgresql" = "yes"; then - if test -z "$PG_CONFIG" -o test; then - AC_PATH_PROG([PG_CONFIG], [pg_config], []) - fi + AC_CACHE_CHECK([for the PostgreSQL libraries LDFLAGS],[ac_cv_POSTGRESQL_LDFLAGS], + [ac_cv_POSTGRESQL_LDFLAGS="`$PKG_CONFIG libpq --libs-only-L --libs-only-other`" || _AX_LIB_POSTGRESQL_PKG_CONFIG_fail=yes]) + AS_IF([test "X$_AX_LIB_POSTGRESQL_PKG_CONFIG_fail" = "Xyes"],[break]) + 
POSTGRESQL_LDFLAGS="$ac_cv_POSTGRESQL_LDFLAGS" - if test ! -x "$PG_CONFIG"; then - dnl AC_MSG_ERROR([$PG_CONFIG does not exist or it is not an exectuable file]) - PG_CONFIG="no" - found_postgresql="no" - fi - if test "$PG_CONFIG" != "no"; then - AC_MSG_CHECKING([for PostgreSQL libraries]) + AC_CACHE_CHECK([for the PostgreSQL libraries LIBS],[ac_cv_POSTGRESQL_LIBS], + [ac_cv_POSTGRESQL_LIBS="`$PKG_CONFIG libpq --libs-only-l`" || _AX_LIB_POSTGRESQL_PKG_CONFIG_fail=ye]) + AS_IF([test "X$_AX_LIB_POSTGRESQL_PKG_CONFIG_fail" = "Xyes"],[break]) + POSTGRESQL_LIBS="$ac_cv_POSTGRESQL_LIBS" - POSTGRESQL_CPPFLAGS="-I`$PG_CONFIG --includedir`" - POSTGRESQL_LDFLAGS="-L`$PG_CONFIG --libdir`" + dnl already checked by exist but need to be recovered + AC_CACHE_CHECK([for the PostgreSQL version],[ac_cv_POSTGRESQL_VERSION], + [ac_cv_POSTGRESQL_VERSION="`$PKG_CONFIG libpq --modversion`" || _AX_LIB_POSTGRESQL_PKG_CONFIG_fail=yes]) + AS_IF([test "X$_AX_LIB_POSTGRESQL_PKG_CONFIG_fail" = "Xyes"],[break]) + POSTGRESQL_VERSION="$ac_cv_POSTGRESQL_VERSION" - POSTGRESQL_VERSION=`$PG_CONFIG --version | sed -e 's#PostgreSQL ##' | awk '{print $1}'` + found_postgresql=yes + break; + done - AC_DEFINE([HAVE_POSTGRESQL], [1], - [Define to 1 if PostgreSQL libraries are available]) +]) - found_postgresql="yes" - AC_MSG_RESULT([yes]) - else - found_postgresql="no" - AC_MSG_RESULT([no]) - fi - fi - dnl - dnl Check if required version of PostgreSQL is available - dnl +AC_DEFUN([AX_LIB_POSTGRESQL], +[ + AC_ARG_WITH([postgresql], + AS_HELP_STRING([--with-postgresql=@<:@ARG@:>@], + [use PostgreSQL library @<:@default=yes@:>@, optionally specify path to pg_config] + ), + [ + AS_CASE([$withval], + [[[nN]][[oO]]],[want_postgresql="no"], + [[[yY]][[eE]][[sS]]],[want_postgresql="yes"], + [ + want_postgresql="yes" + PG_CONFIG="$withval" + ]) + ], + [want_postgresql="yes"] + ) + + AC_ARG_VAR([POSTGRESQL_CPPFLAGS],[cpp flags for PostgreSQL overriding detected flags]) + AC_ARG_VAR([POSTGRESQL_LIBFLAGS],[libs for 
PostgreSQL overriding detected flags]) + AC_ARG_VAR([POSTGRESQL_LDFLAGS],[linker flags for PostgreSQL overriding detected flags]) + + # populate cache + AS_IF([test "X$POSTGRESQL_CPPFLAGS" != X],[ac_cv_POSTGRESQL_CPPFLAGS="$POSTGRESQL_CPPFLAGS"]) + AS_IF([test "X$POSTGRESQL_LDFLAGS" != X],[ac_cv_POSTGRESQL_LDFLAGS="$POSTGRESQL_LDFLAGS"]) + AS_IF([test "X$POSTGRESQL_LIBS" != X],[ac_cv_POSTGRESQL_LIBS="$POSTGRESQL_LIBS"]) postgresql_version_req=ifelse([$1], [], [], [$1]) + found_postgresql="no" - if test "$found_postgresql" = "yes" -a -n "$postgresql_version_req"; then - - AC_MSG_CHECKING([if PostgreSQL version $POSTGRESQL_VERSION is >= $postgresql_version_req]) - - dnl Decompose required version string of PostgreSQL - dnl and calculate its number representation - postgresql_version_req_major=`expr $postgresql_version_req : '\([[0-9]]*\)'` - postgresql_version_req_minor=`expr $postgresql_version_req : '[[0-9]]*\.\([[0-9]]*\)'` - postgresql_version_req_micro=`expr $postgresql_version_req : '[[0-9]]*\.[[0-9]]*\.\([[0-9]]*\)'` - if test "x$postgresql_version_req_micro" = "x"; then - postgresql_version_req_micro="0" - fi - - postgresql_version_req_number=`expr $postgresql_version_req_major \* 1000000 \ - \+ $postgresql_version_req_minor \* 1000 \ - \+ $postgresql_version_req_micro` - - dnl Decompose version string of installed PostgreSQL - dnl and calculate its number representation - postgresql_version_major=`expr $POSTGRESQL_VERSION : '\([[0-9]]*\)'` - postgresql_version_minor=`expr $POSTGRESQL_VERSION : '[[0-9]]*\.\([[0-9]]*\)'` - postgresql_version_micro=`expr $POSTGRESQL_VERSION : '[[0-9]]*\.[[0-9]]*\.\([[0-9]]*\)'` - if test "x$postgresql_version_micro" = "x"; then - postgresql_version_micro="0" - fi - - postgresql_version_number=`expr $postgresql_version_major \* 1000000 \ - \+ $postgresql_version_minor \* 1000 \ - \+ $postgresql_version_micro` - - postgresql_version_check=`expr $postgresql_version_number \>\= $postgresql_version_req_number` - if test 
"$postgresql_version_check" = "1"; then - AC_MSG_RESULT([yes]) - else - AC_MSG_RESULT([no]) - fi - fi + POSTGRESQL_VERSION="" + + dnl + dnl Check PostgreSQL libraries (libpq) + dnl + AS_IF([test X"$want_postgresql" = "Xyes"],[ + _AX_LIB_POSTGRESQL_PKG_CONFIG + + + AS_IF([test X"$found_postgresql" = "Xno"], + [_AX_LIB_POSTGRESQL_OLD]) + + AS_IF([test X"$found_postgresql" = Xyes],[ + _AX_LIB_POSTGRESQL_OLD_CPPFLAGS="$CPPFLAGS" + CPPFLAGS="$CPPFLAGS $POSTGRESQL_CPPFLAGS" + _AX_LIB_POSTGRESQL_OLD_LDFLAGS="$LDFLAGS" + LDFLAGS="$LDFLAGS $POSTGRESQL_LDFLAGS" + _AX_LIB_POSTGRESQL_OLD_LIBS="$LIBS" + LIBS="$LIBS $POSTGRESQL_LIBS" + while true; do + dnl try to compile + AC_CHECK_HEADER([libpq-fe.h],[],[found_postgresql=no]) + AS_IF([test "X$found_postgresql" = "Xno"],[break]) + dnl try now to link + AC_CACHE_CHECK([for the PostgreSQL library linking is working],[ac_cv_postgresql_found], + [ + AC_LINK_IFELSE([ + AC_LANG_PROGRAM( + [ + #include <libpq-fe.h> + ], + [[ + char conninfo[]="dbname = postgres"; + PGconn *conn; + conn = PQconnectdb(conninfo); + ]] + ) + ],[ac_cv_postgresql_found=yes], + [ac_cv_postgresql_found=no]) + ]) + found_postgresql="$ac_cv_postgresql_found" + AS_IF([test "X$found_postgresql" = "Xno"],[break]) + break + done + CPPFLAGS="$_AX_LIB_POSTGRESQL_OLD_CPPFLAGS" + LDFLAGS="$_AX_LIB_POSTGRESQL_OLD_LDFLAGS" + LIBS="$_AX_LIB_POSTGRESQL_OLD_LIBS" + ]) + + + AS_IF([test "x$found_postgresql" = "xyes"],[ + AC_DEFINE([HAVE_POSTGRESQL], [1], + [Define to 1 if PostgreSQL libraries are available])]) + ]) AC_SUBST([POSTGRESQL_VERSION]) AC_SUBST([POSTGRESQL_CPPFLAGS]) AC_SUBST([POSTGRESQL_LDFLAGS]) + AC_SUBST([POSTGRESQL_LIBS]) + + AS_IF([test "x$found_postgresql" = "xyes"], + [ifelse([$2], , :, [$2])], + [ifelse([$3], , AS_IF([test X"$want_postgresql" = "Xyes"],[AC_MSG_ERROR([Library requirements (PostgreSQL) not met.])],[:]), [$3])]) + ]) diff --git a/m4/libcurl.m4 b/m4/libcurl.m4 index a84077a..047260b 100644 --- a/m4/libcurl.m4 +++ b/m4/libcurl.m4 @@ -61,7 +61,7 
@@ AC_DEFUN([LIBCURL_CHECK_CONFIG], AH_TEMPLATE([LIBCURL_PROTOCOL_SMTP],[Defined if libcurl supports SMTP]) AC_ARG_WITH(libcurl, - AC_HELP_STRING([--with-libcurl=PREFIX],[look for the curl library in PREFIX/lib and headers in PREFIX/include]), + AS_HELP_STRING([--with-libcurl=PREFIX],[look for the curl library in PREFIX/lib and headers in PREFIX/include]), [_libcurl_with=$withval],[_libcurl_with=ifelse([$1],,[yes],[$1])]) if test "$_libcurl_with" != "no" ; then diff --git a/m4/libgnurl.m4 b/m4/libgnurl.m4 deleted file mode 100644 index 69aa166..0000000 --- a/m4/libgnurl.m4 +++ /dev/null @@ -1,250 +0,0 @@ -# LIBGNURL_CHECK_CONFIG ([DEFAULT-ACTION], [MINIMUM-VERSION], -# [ACTION-IF-YES], [ACTION-IF-NO]) -# ---------------------------------------------------------- -# David Shaw <dshaw@jabberwocky.com> May-09-2006 -# -# Checks for libgnurl. DEFAULT-ACTION is the string yes or no to -# specify whether to default to --with-libgnurl or --without-libgnurl. -# If not supplied, DEFAULT-ACTION is yes. MINIMUM-VERSION is the -# minimum version of libgnurl to accept. Pass the version as a regular -# version number like 7.10.1. If not supplied, any version is -# accepted. ACTION-IF-YES is a list of shell commands to run if -# libgnurl was successfully found and passed the various tests. -# ACTION-IF-NO is a list of shell commands that are run otherwise. -# Note that using --without-libgnurl does run ACTION-IF-NO. -# -# This macro #defines HAVE_LIBGNURL if a working libgnurl setup is -# found, and sets @LIBGNURL@ and @LIBGNURL_CPPFLAGS@ to the necessary -# values. Other useful defines are LIBGNURL_FEATURE_xxx where xxx are -# the various features supported by libgnurl, and LIBGNURL_PROTOCOL_yyy -# where yyy are the various protocols supported by libgnurl. Both xxx -# and yyy are capitalized. See the list of AH_TEMPLATEs at the top of -# the macro for the complete list of possible defines. 
Shell -# variables $libgnurl_feature_xxx and $libgnurl_protocol_yyy are also -# defined to 'yes' for those features and protocols that were found. -# Note that xxx and yyy keep the same capitalization as in the -# gnurl-config list (e.g. it's "HTTP" and not "http"). -# -# Users may override the detected values by doing something like: -# LIBGNURL="-lgnurl" LIBGNURL_CPPFLAGS="-I/usr/myinclude" ./configure -# -# For the sake of sanity, this macro assumes that any libgnurl that is -# found is after version 7.7.2, the first version that included the -# gnurl-config script. Note that it is very important for people -# packaging binary versions of libgnurl to include this script! -# Without gnurl-config, we can only guess what protocols are available, -# or use gnurl_version_info to figure it out at runtime. - -AC_DEFUN([LIBGNURL_CHECK_CONFIG], -[ - AH_TEMPLATE([LIBGNURL_FEATURE_SSL],[Defined if libgnurl supports SSL]) - AH_TEMPLATE([LIBGNURL_FEATURE_KRB4],[Defined if libgnurl supports KRB4]) - AH_TEMPLATE([LIBGNURL_FEATURE_IPV6],[Defined if libgnurl supports IPv6]) - AH_TEMPLATE([LIBGNURL_FEATURE_LIBZ],[Defined if libgnurl supports libz]) - AH_TEMPLATE([LIBGNURL_FEATURE_ASYNCHDNS],[Defined if libgnurl supports AsynchDNS]) - AH_TEMPLATE([LIBGNURL_FEATURE_IDN],[Defined if libgnurl supports IDN]) - AH_TEMPLATE([LIBGNURL_FEATURE_SSPI],[Defined if libgnurl supports SSPI]) - AH_TEMPLATE([LIBGNURL_FEATURE_NTLM],[Defined if libgnurl supports NTLM]) - - AH_TEMPLATE([LIBGNURL_PROTOCOL_HTTP],[Defined if libgnurl supports HTTP]) - AH_TEMPLATE([LIBGNURL_PROTOCOL_HTTPS],[Defined if libgnurl supports HTTPS]) - AH_TEMPLATE([LIBGNURL_PROTOCOL_FTP],[Defined if libgnurl supports FTP]) - AH_TEMPLATE([LIBGNURL_PROTOCOL_FTPS],[Defined if libgnurl supports FTPS]) - AH_TEMPLATE([LIBGNURL_PROTOCOL_FILE],[Defined if libgnurl supports FILE]) - AH_TEMPLATE([LIBGNURL_PROTOCOL_TELNET],[Defined if libgnurl supports TELNET]) - AH_TEMPLATE([LIBGNURL_PROTOCOL_LDAP],[Defined if libgnurl supports LDAP]) - 
AH_TEMPLATE([LIBGNURL_PROTOCOL_DICT],[Defined if libgnurl supports DICT]) - AH_TEMPLATE([LIBGNURL_PROTOCOL_TFTP],[Defined if libgnurl supports TFTP]) - AH_TEMPLATE([LIBGNURL_PROTOCOL_RTSP],[Defined if libgnurl supports RTSP]) - AH_TEMPLATE([LIBGNURL_PROTOCOL_POP3],[Defined if libgnurl supports POP3]) - AH_TEMPLATE([LIBGNURL_PROTOCOL_IMAP],[Defined if libgnurl supports IMAP]) - AH_TEMPLATE([LIBGNURL_PROTOCOL_SMTP],[Defined if libgnurl supports SMTP]) - - AC_ARG_WITH(libgnurl, - AC_HELP_STRING([--with-libgnurl=PREFIX],[look for the gnurl library in PREFIX/lib and headers in PREFIX/include]), - [_libgnurl_with=$withval],[_libgnurl_with=ifelse([$1],,[yes],[$1])]) - - if test "$_libgnurl_with" != "no" ; then - - AC_PROG_AWK - - _libgnurl_version_parse="eval $AWK '{split(\$NF,A,\".\"); X=256*256*A[[1]]+256*A[[2]]+A[[3]]; print X;}'" - - _libgnurl_try_link=yes - - if test -d "$_libgnurl_with" ; then - LIBGNURL_CPPFLAGS="-I$withval/include" - _libgnurl_ldflags="-L$withval/lib" - AC_PATH_PROG([_libgnurl_config],[gnurl-config],[], - ["$withval/bin"]) - else - AC_PATH_PROG([_libgnurl_config],[gnurl-config],[],[$PATH]) - fi - - if test x$_libgnurl_config != "x" ; then - AC_CACHE_CHECK([for the version of libgnurl], - [libgnurl_cv_lib_gnurl_version], - [libgnurl_cv_lib_gnurl_version=`$_libgnurl_config --version | $AWK '{print $[]2}'`]) - - _libgnurl_version=`echo $libgnurl_cv_lib_gnurl_version | $_libgnurl_version_parse` - _libgnurl_wanted=`echo ifelse([$2],,[0],[$2]) | $_libgnurl_version_parse` - - if test $_libgnurl_wanted -gt 0 ; then - AC_CACHE_CHECK([for libgnurl >= version $2], - [libgnurl_cv_lib_version_ok], - [ - if test $_libgnurl_version -ge $_libgnurl_wanted ; then - libgnurl_cv_lib_version_ok=yes - else - libgnurl_cv_lib_version_ok=no - fi - ]) - fi - - if test $_libgnurl_wanted -eq 0 || test x$libgnurl_cv_lib_version_ok = xyes ; then - if test x"$LIBGNURL_CPPFLAGS" = "x" ; then - LIBGNURL_CPPFLAGS=`$_libgnurl_config --cflags` - fi - if test x"$LIBGNURL" = "x" ; 
then - LIBGNURL=`$_libgnurl_config --libs` - - # This is so silly, but Apple actually has a bug in their - # gnurl-config script. Fixed in Tiger, but there are still - # lots of Panther installs around. - case "${host}" in - powerpc-apple-darwin7*) - LIBGNURL=`echo $LIBGNURL | sed -e 's|-arch i386||g'` - ;; - esac - fi - - # All gnurl-config scripts support --feature - _libgnurl_features=`$_libgnurl_config --feature` - - # Is it modern enough to have --protocols? (7.12.4) - if test $_libgnurl_version -ge 461828 ; then - _libgnurl_protocols=`$_libgnurl_config --protocols` - fi - else - _libgnurl_try_link=no - fi - - unset _libgnurl_wanted - fi - - if test $_libgnurl_try_link = yes ; then - - # we didn't find gnurl-config, so let's see if the user-supplied - # link line (or failing that, "-lgnurl") is enough. - LIBGNURL=${LIBGNURL-"$_libgnurl_ldflags -lgnurl"} - - AC_CACHE_CHECK([whether libgnurl is usable], - [libgnurl_cv_lib_gnurl_usable], - [ - _libgnurl_save_cppflags=$CPPFLAGS - CPPFLAGS="$LIBGNURL_CPPFLAGS $CPPFLAGS" - _libgnurl_save_libs=$LIBS - LIBS="$LIBGNURL $LIBS" - - AC_LINK_IFELSE([AC_LANG_PROGRAM([#include <curl/curl.h>],[ -/* Try and use a few common options to force a failure if we are - missing symbols or can't link. */ -int x; -curl_easy_setopt(NULL,CURLOPT_URL,NULL); -x=CURL_ERROR_SIZE; -x=CURLOPT_WRITEFUNCTION; -x=CURLOPT_FILE; -x=CURLOPT_ERRORBUFFER; -x=CURLOPT_STDERR; -x=CURLOPT_VERBOSE; -])],libgnurl_cv_lib_gnurl_usable=yes,libgnurl_cv_lib_gnurl_usable=no) - - CPPFLAGS=$_libgnurl_save_cppflags - LIBS=$_libgnurl_save_libs - unset _libgnurl_save_cppflags - unset _libgnurl_save_libs - ]) - - if test $libgnurl_cv_lib_gnurl_usable = yes ; then - - # Does gnurl_free() exist in this version of libgnurl? 
- # If not, fake it with free() - - _libgnurl_save_cppflags=$CPPFLAGS - CPPFLAGS="$CPPFLAGS $LIBGNURL_CPPFLAGS" - _libgnurl_save_libs=$LIBS - LIBS="$LIBS $LIBGNURL" - - AC_CHECK_FUNC(curl_free,, - AC_DEFINE(curl_free,free, - [Define curl_free() as free() if our version of gnurl lacks curl_free.])) - - CPPFLAGS=$_libgnurl_save_cppflags - LIBS=$_libgnurl_save_libs - unset _libgnurl_save_cppflags - unset _libgnurl_save_libs - - AC_DEFINE(HAVE_LIBGNURL,1, - [Define to 1 if you have a functional gnurl library.]) - AC_SUBST(LIBGNURL_CPPFLAGS) - AC_SUBST(LIBGNURL) - - for _libgnurl_feature in $_libgnurl_features ; do - AC_DEFINE_UNQUOTED(AS_TR_CPP(libgnurl_feature_$_libgnurl_feature),[1]) - eval AS_TR_SH(libgnurl_feature_$_libgnurl_feature)=yes - done - - if test "x$_libgnurl_protocols" = "x" ; then - - # We don't have --protocols, so just assume that all - # protocols are available - _libgnurl_protocols="HTTP FTP FILE TELNET LDAP DICT TFTP" - - if test x$libgnurl_feature_SSL = xyes ; then - _libgnurl_protocols="$_libgnurl_protocols HTTPS" - - # FTPS wasn't standards-compliant until version - # 7.11.0 (0x070b00 == 461568) - if test $_libgnurl_version -ge 461568; then - _libgnurl_protocols="$_libgnurl_protocols FTPS" - fi - fi - - # RTSP, IMAP, POP3 and SMTP were added in - # 7.20.0 (0x071400 == 463872) - if test $_libgnurl_version -ge 463872; then - _libgnurl_protocols="$_libgnurl_protocols RTSP IMAP POP3 SMTP" - fi - fi - - for _libgnurl_protocol in $_libgnurl_protocols ; do - AC_DEFINE_UNQUOTED(AS_TR_CPP(libgnurl_protocol_$_libgnurl_protocol),[1]) - eval AS_TR_SH(libgnurl_protocol_$_libgnurl_protocol)=yes - done - else - unset LIBGNURL - unset LIBGNURL_CPPFLAGS - fi - fi - - unset _libgnurl_try_link - unset _libgnurl_version_parse - unset _libgnurl_config - unset _libgnurl_feature - unset _libgnurl_features - unset _libgnurl_protocol - unset _libgnurl_protocols - unset _libgnurl_version - unset _libgnurl_ldflags - fi - - if test x$_libgnurl_with = xno || test 
x$libgnurl_cv_lib_gnurl_usable != xyes ; then - # This is the IF-NO path - ifelse([$4],,:,[$4]) - else - # This is the IF-YES path - ifelse([$3],,:,[$3]) - fi - - unset _libgnurl_with -])dnl diff --git a/src/include/platform.h b/src/include/platform.h index 2fbbe24..cfc42eb 100644 --- a/src/include/platform.h +++ b/src/include/platform.h @@ -42,8 +42,197 @@ /* Include the features available for GNU source */ #define _GNU_SOURCE -/* Include GNUnet's platform file */ -#include <gnunet/platform.h> +#ifdef HAVE_SYS_TYPES_H +#include <sys/types.h> +#endif + +#ifdef __clang__ +#undef HAVE_STATIC_ASSERT +#endif + +/** + * These may be expensive, but good for debugging... + */ +#define ALLOW_EXTRA_CHECKS GNUNET_YES + +/** + * For strptime (glibc2 needs this). + */ +#ifndef _XOPEN_SOURCE +#define _XOPEN_SOURCE 499 +#endif + +#ifndef _REENTRANT +#define _REENTRANT +#endif + +/* configuration options */ + +#define VERBOSE_STATS 0 + +#include <netdb.h> +#include <sys/socket.h> +#include <sys/un.h> +#if HAVE_NETINET_IN_H +#include <netinet/in.h> +#endif +#if HAVE_NETINET_IN_SYSTM_H +#include <netinet/in_systm.h> +#endif +#if HAVE_NETINET_IP_H +#include <netinet/ip.h> /* superset of previous */ +#endif +#include <arpa/inet.h> +#include <netinet/tcp.h> +#include <pwd.h> +#include <sys/ioctl.h> +#include <sys/wait.h> +#include <grp.h> + +#include <string.h> +#include <stdio.h> +#include <stdlib.h> +#include <stdint.h> +#include <stdarg.h> +#include <stdbool.h> +#include <errno.h> +#include <signal.h> +#include <libgen.h> +#ifdef HAVE_MALLOC_H +#include <malloc.h> /* for mallinfo on GNU */ +#endif +#include <unistd.h> /* KLB_FIX */ +#include <sys/stat.h> +#include <sys/types.h> +#include <dirent.h> /* KLB_FIX */ +#include <fcntl.h> +#include <math.h> +#if HAVE_SYS_PARAM_H +#include <sys/param.h> +#endif +#if HAVE_SYS_TIME_H +#include <sys/time.h> +#endif +#include <time.h> +#ifdef BSD +#include <net/if.h> +#endif +#if defined(BSD) && defined(__FreeBSD__) && 
defined(__FreeBSD_kernel__) +#include <semaphore.h> +#endif +#ifdef DARWIN +#include <dlfcn.h> +#include <semaphore.h> +#include <net/if.h> +#endif +#if defined(__linux__) || defined(GNU) +#include <net/if.h> +#endif +#ifdef SOLARIS +#include <sys/sockio.h> +#include <sys/filio.h> +#include <sys/loadavg.h> +#include <semaphore.h> +#endif +#if HAVE_UCRED_H +#include <ucred.h> +#endif +#if HAVE_SYS_UCRED_H +#include <sys/ucred.h> +#endif +#if HAVE_IFADDRS_H +#include <ifaddrs.h> +#endif +#include <errno.h> +#include <limits.h> + +#if HAVE_VFORK_H +#include <vfork.h> +#endif + +#include <ctype.h> +#if HAVE_SYS_RESOURCE_H +#include <sys/resource.h> +#endif + +#if HAVE_ENDIAN_H +#include <endian.h> +#endif +#if HAVE_SYS_ENDIAN_H +#include <sys/endian.h> +#endif + +#define DIR_SEPARATOR '/' +#define DIR_SEPARATOR_STR "/" +#define PATH_SEPARATOR ':' +#define PATH_SEPARATOR_STR ":" +#define NEWLINE "\n" + +#include <locale.h> +#include <sys/mman.h> + +/* FreeBSD_kernel is not defined on the now discontinued kFreeBSD */ +#if defined(BSD) && defined(__FreeBSD__) && defined(__FreeBSD_kernel__) +#define __BYTE_ORDER BYTE_ORDER +#define __BIG_ENDIAN BIG_ENDIAN +#endif + +#ifdef DARWIN +#define __BYTE_ORDER BYTE_ORDER +#define __BIG_ENDIAN BIG_ENDIAN +/* not available on darwin, override configure */ +#undef HAVE_STAT64 +#undef HAVE_MREMAP +#endif + +#if ! HAVE_ATOLL +long long +atoll (const char *nptr); + +#endif + +#if ENABLE_NLS +#include "langinfo.h" +#endif + +#ifndef SIZE_MAX +#define SIZE_MAX ((size_t) (-1)) +#endif + +#ifndef O_LARGEFILE +#define O_LARGEFILE 0 +#endif + +/** + * AI_NUMERICSERV not defined in windows. Then we just do without. + */ +#ifndef AI_NUMERICSERV +#define AI_NUMERICSERV 0 +#endif + + +#if defined(__sparc__) +#define MAKE_UNALIGNED(val) ({ __typeof__((val)) __tmp; memmove (&__tmp, &(val), \ + sizeof((val))); \ + __tmp; }) +#else +#define MAKE_UNALIGNED(val) val +#endif + + +#ifndef PATH_MAX +/** + * Assumed maximum path length. 
+ */ +#define PATH_MAX 4096 +#endif + +#if HAVE_THREAD_LOCAL_GCC +#define GNUNET_THREAD_LOCAL __thread +#else +#define GNUNET_THREAD_LOCAL +#endif + /* Do not use shortcuts for gcrypt mpi */ #define GCRYPT_NO_MPI_MACROS 1 @@ -54,22 +243,42 @@ /* LSB-style exit status codes */ #ifndef EXIT_INVALIDARGUMENT +/** + * Command-line arguments are invalid. + * Restarting useless. + */ #define EXIT_INVALIDARGUMENT 2 #endif #ifndef EXIT_NOTIMPLEMENTED +/** + * The requested operation is not implemented. + * Restarting useless. + */ #define EXIT_NOTIMPLEMENTED 3 #endif #ifndef EXIT_NOPERMISSION +/** + * Permissions needed to run are not available. + * Restarting useless. + */ #define EXIT_NOPERMISSION 4 #endif #ifndef EXIT_NOTINSTALLED +/** + * Key resources are not installed. + * Restarting useless. + */ #define EXIT_NOTINSTALLED 5 #endif #ifndef EXIT_NOTCONFIGURED +/** + * Key configuration settings are missing or invalid. + * Restarting useless. + */ #define EXIT_NOTCONFIGURED 6 #endif @@ -78,6 +287,15 @@ #endif +#ifndef EXIT_NO_RESTART +/** + * Exit code from 'main' if we do not want to be restarted, + * except by manual intervention (hard failure). + */ +#define EXIT_NO_RESTART 9 +#endif + + /* Ignore MHD deprecations for now as we want to be compatible to "ancient" MHD releases. */ #define MHD_NO_DEPRECATION 1 diff --git a/src/include/sync_database_lib.h b/src/include/sync_database_lib.h index 2a67ec3..3311ce2 100644 --- a/src/include/sync_database_lib.h +++ b/src/include/sync_database_lib.h @@ -35,7 +35,7 @@ SYNC_DB_plugin_load (const struct GNUNET_CONFIGURATION_Handle *cfg); /** * Shutdown the plugin. 
* - * @param plugin plugin to unload + * @param[in] plugin plugin to unload */ void SYNC_DB_plugin_unload (struct SYNC_DatabasePlugin *plugin); diff --git a/src/include/sync_database_plugin.h b/src/include/sync_database_plugin.h index a779fab..ca54088 100644 --- a/src/include/sync_database_plugin.h +++ b/src/include/sync_database_plugin.h @@ -81,7 +81,7 @@ enum SYNC_DB_QueryStatus */ typedef void (*SYNC_DB_PaymentPendingIterator)(void *cls, - struct GNUNET_TIME_Absolute timestamp, + struct GNUNET_TIME_Timestamp timestamp, const char *order_id, const struct TALER_ClaimTokenP *token, const struct TALER_Amount *amount); diff --git a/src/include/sync_service.h b/src/include/sync_service.h index 8b47f33..1aac1cd 100644 --- a/src/include/sync_service.h +++ b/src/include/sync_service.h @@ -1,6 +1,6 @@ /* This file is part of TALER - Copyright (C) 2019 Taler Systems SA + Copyright (C) 2019-2023 Taler Systems SA Anastasis is free software; you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software @@ -138,19 +138,40 @@ enum SYNC_UploadStatus */ struct SYNC_UploadDetails { + + /** + * Taler error code. + */ + enum TALER_ErrorCode ec; + + /** + * HTTP status of the request. + */ + unsigned int http_status; + /** * High level status of the upload operation. */ enum SYNC_UploadStatus us; + /** + * Details depending on @e us. + */ union { /** - * Hash of the synchronized backup, returned if - * @e us is #SYNC_US_SUCCESS. + * Data returned if @e us is #SYNC_US_SUCCESS. */ - const struct GNUNET_HashCode *curr_backup_hash; + struct + { + + /** + * Hash of the synchronized backup. + */ + const struct GNUNET_HashCode *curr_backup_hash; + + } success; /** * Previous backup. Returned if @e us is @@ -177,11 +198,15 @@ struct SYNC_UploadDetails } recovered_backup; - /** - * A taler://pay/-URI with a request to pay the annual fee for - * the service. Returned if @e us is #SYNC_US_PAYMENT_REQUIRED. 
- */ - const char *payment_request; + struct + { + /** + * A taler://pay/-URI with a request to pay the annual fee for + * the service. Returned if @e us is #SYNC_US_PAYMENT_REQUIRED. + */ + const char *payment_request; + + } payment_required; } details; @@ -192,14 +217,10 @@ struct SYNC_UploadDetails * Function called with the results of a #SYNC_upload(). * * @param cls closure - * @param ec Taler error code - * @param http_status HTTP status of the request * @param ud details about the upload operation */ typedef void (*SYNC_UploadCallback)(void *cls, - enum TALER_ErrorCode ec, - unsigned int http_status, const struct SYNC_UploadDetails *ud); @@ -256,8 +277,8 @@ enum SYNC_PaymentOptions * @param backup_size number of bytes in @a backup * @param backup the encrypted backup, must remain in * memory until we are done with the operation! - * @param payment_requested #GNUNET_YES if the client wants to pay more for the account now * @param po payment options + * @param paid_order_id ID of the paid order, NULL if no payment was made so far * @param cb function to call with the result * @param cb_cls closure for @a cb * @return handle for the operation @@ -280,7 +301,7 @@ SYNC_upload (struct GNUNET_CURL_Context *ctx, * that it did not complete, it is possible that the server did * receive the full request before the upload is aborted. * - * @param uo operation to cancel. + * @param[in] uo operation to cancel. */ void SYNC_upload_cancel (struct SYNC_UploadOperation *uo); @@ -291,50 +312,63 @@ SYNC_upload_cancel (struct SYNC_UploadOperation *uo); */ struct SYNC_DownloadDetails { - /** - * Signature (already verified). - */ - struct SYNC_AccountSignatureP sig; /** - * Hash of the previous version. + * HTTP status code. */ - struct GNUNET_HashCode prev_backup_hash; + unsigned int http_status; /** - * Hash over @e backup and @e backup_size. + * Details depending on @e http_status. */ - struct GNUNET_HashCode curr_backup_hash; + union + { - /** - * The backup we downloaded. 
- */ - const void *backup; + /** + * Details if status is #MHD_HTTP_OK. + */ + struct + { - /** - * Number of bytes in @e backup. - */ - size_t backup_size; + /** + * Signature (already verified). + */ + struct SYNC_AccountSignatureP sig; + + /** + * Hash of the previous version. + */ + struct GNUNET_HashCode prev_backup_hash; + + /** + * Hash over @e backup and @e backup_size. + */ + struct GNUNET_HashCode curr_backup_hash; + + /** + * The backup we downloaded. + */ + const void *backup; + + /** + * Number of bytes in @e backup. + */ + size_t backup_size; + } ok; + + } details; }; + /** * Function called with the results of a #SYNC_download(). * * @param cls closure - * @param sig signature of the account owner, affirming the - * integrity of the backup (already verified) - * @param prev_backup_hash hash of the previous backup (used - * to verify the signature, could be used by clients - * to verify backup chains) - * @param curr_backup_hash hash over @a backup (verified) - * @param backup_size number of bytes in @a backup - * @param backup the latest backup as downloaded from the - * server and affirmed by @a sig + * @param dd download details */ typedef void (*SYNC_DownloadCallback)(void *cls, - unsigned int http_status, const struct SYNC_DownloadDetails *dd); @@ -365,7 +399,7 @@ SYNC_download (struct GNUNET_CURL_Context *ctx, /** * Cancel the download. * - * @param do operation to cancel. + * @param[in] download operation to cancel. */ void SYNC_download_cancel (struct SYNC_DownloadOperation *download); diff --git a/src/include/sync_testing_lib.h b/src/include/sync_testing_lib.h index 5a0d657..f0737de 100644 --- a/src/include/sync_testing_lib.h +++ b/src/include/sync_testing_lib.h @@ -29,124 +29,6 @@ #include <taler/taler_testing_lib.h> #include <microhttpd.h> -/** - * Index used in #SYNC_TESTING_get_trait_hash() for the current hash. - */ -#define SYNC_TESTING_TRAIT_HASH_CURRENT 0 - -/** - * Index used in #SYNC_TESTING_get_trait_hash() for the previous hash. 
- */ -#define SYNC_TESTING_TRAIT_HASH_PREVIOUS 1 - - -/** - * Obtain a hash from @a cmd. - * - * @param cmd command to extract the number from. - * @param index the number's index number, #SYNC_TESTING_TRAIT_HASH_CURRENT or - * #SYNC_TESTING_TRAIT_HASH_PREVIOUS - * @param h[out] set to the hash coming from @a cmd. - * @return #GNUNET_OK on success. - */ -int -SYNC_TESTING_get_trait_hash (const struct TALER_TESTING_Command *cmd, - unsigned int index, - const struct GNUNET_HashCode **h); - - -/** - * Offer a hash. - * - * @param index the number's index number. - * @param h the hash to offer. - * @return #GNUNET_OK on success. - */ -struct TALER_TESTING_Trait -SYNC_TESTING_make_trait_hash (unsigned int index, - const struct GNUNET_HashCode *h); - - -/** - * Obtain an account public key from @a cmd. - * - * @param cmd command to extract the public key from. - * @param index usually 0 - * @param pub[out] set to the account public key used in @a cmd. - * @return #GNUNET_OK on success. - */ -int -SYNC_TESTING_get_trait_account_pub (const struct TALER_TESTING_Command *cmd, - unsigned int index, - const struct SYNC_AccountPublicKeyP **pub); - - -/** - * Offer an account public key. - * - * @param index usually zero - * @param h the account_pub to offer. - * @return #GNUNET_OK on success. - */ -struct TALER_TESTING_Trait -SYNC_TESTING_make_trait_account_pub (unsigned int index, - const struct SYNC_AccountPublicKeyP *h); - - -/** - * Obtain an account private key from @a cmd. - * - * @param cmd command to extract the number from. - * @param index must be 0 - * @param priv[out] set to the account private key used in @a cmd. - * @return #GNUNET_OK on success. - */ -int -SYNC_TESTING_get_trait_account_priv (const struct TALER_TESTING_Command *cmd, - unsigned int index, - const struct - SYNC_AccountPrivateKeyP **priv); - - -/** - * Offer an account private key. - * - * @param index usually zero - * @param priv the account_priv to offer. - * @return #GNUNET_OK on success. 
- */ -struct TALER_TESTING_Trait -SYNC_TESTING_make_trait_account_priv (unsigned int index, - const struct - SYNC_AccountPrivateKeyP *priv); - - -/** - * Start the sync backend process. Assume the port - * is available and the database is clean. Use the "prepare - * sync" function to do such tasks. - * - * @param config_filename configuration filename. - * - * @return the process, or NULL if the process could not - * be started. - */ -struct GNUNET_OS_Process * -SYNC_TESTING_run_sync (const char *config_filename, - const char *sync_url); - - -/** - * Prepare the sync execution. Create tables and check if - * the port is available. - * - * @param config_filename configuration filename. - * @return the base url, or NULL upon errors. Must be freed - * by the caller. - */ -char * -SYNC_TESTING_prepare_sync (const char *config_filename); - /** * Make the "backup download" command for a non-existent upload. @@ -234,4 +116,20 @@ SYNC_TESTING_cmd_backup_upload (const char *label, const void *backup_data, size_t backup_data_size); +/** + * Call @a op on all simple traits. + * + * @param op macro to call + */ +#define SYNC_TESTING_SIMPLE_TRAITS(op) \ + op (account_pub, const struct SYNC_AccountPublicKeyP) \ + op (account_priv, const struct SYNC_AccountPrivateKeyP) \ + op (prev_hash, const struct GNUNET_HashCode) \ + op (curr_hash, const struct GNUNET_HashCode) + + +/* FIXME: eventually switch to libgnunettesting with the SYNC_ prefix for + the symbols instead of TALER_TESTING_! 
*/ +SYNC_TESTING_SIMPLE_TRAITS (TALER_TESTING_MAKE_DECL_SIMPLE_TRAIT) + #endif diff --git a/src/lib/Makefile.am b/src/lib/Makefile.am index f4979b5..0ccd964 100644 --- a/src/lib/Makefile.am +++ b/src/lib/Makefile.am @@ -7,7 +7,7 @@ if USE_COVERAGE endif lib_LTLIBRARIES = \ - libsync.la + libsync.la libsync_la_LDFLAGS = \ -version-info 0:0:0 \ @@ -20,8 +20,8 @@ libsync_la_LIBADD = \ -lgnunetcurl \ -lgnunetjson \ -ltalerjson \ + -ltalercurl \ -ltalerutil \ -lgnunetutil \ -ljansson \ $(XLIB) - diff --git a/src/lib/sync_api_curl_defaults.c b/src/lib/sync_api_curl_defaults.c index 5a114ed..0827bc3 100644 --- a/src/lib/sync_api_curl_defaults.c +++ b/src/lib/sync_api_curl_defaults.c @@ -19,29 +19,28 @@ * @brief curl easy handle defaults * @author Florian Dold */ - +#include "platform.h" +#include <taler/taler_curl_lib.h> #include "sync_api_curl_defaults.h" -/** - * Get a curl handle with the right defaults - * for the sync lib. - * - * @param url URL to query - */ + CURL * SYNC_curl_easy_get_ (const char *url) { CURL *eh; eh = curl_easy_init (); + if (NULL == eh) + { + GNUNET_break (0); + return NULL; + } GNUNET_assert (CURLE_OK == curl_easy_setopt (eh, CURLOPT_URL, url)); - GNUNET_assert (CURLE_OK == - curl_easy_setopt (eh, - CURLOPT_FOLLOWLOCATION, - 1L)); + TALER_curl_set_secure_redirect_policy (eh, + url); GNUNET_assert (CURLE_OK == curl_easy_setopt (eh, CURLOPT_TCP_FASTOPEN, diff --git a/src/lib/sync_api_download.c b/src/lib/sync_api_download.c index 9a7c8f5..7c58957 100644 --- a/src/lib/sync_api_download.c +++ b/src/lib/sync_api_download.c @@ -91,7 +91,8 @@ struct SYNC_DownloadOperation * * @param cls the `struct SYNC_DownloadOperation` * @param response_code HTTP response code, 0 on error - * @param response + * @param data data we downloaded + * @param data_size number of bytes in @a data */ static void handle_download_finished (void *cls, @@ -100,6 +101,9 @@ handle_download_finished (void *cls, size_t data_size) { struct SYNC_DownloadOperation *download = cls; + 
struct SYNC_DownloadDetails dd = { + .http_status = (unsigned int) response_code + }; download->job = NULL; switch (response_code) @@ -108,7 +112,6 @@ handle_download_finished (void *cls, break; case MHD_HTTP_OK: { - struct SYNC_DownloadDetails dd; struct SYNC_UploadSignaturePS usp = { .purpose.purpose = htonl (TALER_SIGNATURE_SYNC_BACKUP_UPLOAD), .purpose.size = htonl (sizeof (usp)), @@ -125,18 +128,16 @@ handle_download_finished (void *cls, &download->account_pub.eddsa_pub)) { GNUNET_break_op (0); - response_code = 0; + dd.http_status = 0; break; } /* Success, call callback with all details! */ - memset (&dd, 0, sizeof (dd)); - dd.sig = download->account_sig; - dd.prev_backup_hash = download->sync_previous; - dd.curr_backup_hash = usp.new_backup_hash; - dd.backup = data; - dd.backup_size = data_size; + dd.details.ok.sig = download->account_sig; + dd.details.ok.prev_backup_hash = download->sync_previous; + dd.details.ok.curr_backup_hash = usp.new_backup_hash; + dd.details.ok.backup = data; + dd.details.ok.backup_size = data_size; download->cb (download->cb_cls, - response_code, &dd); download->cb = NULL; SYNC_download_cancel (download); @@ -159,14 +160,13 @@ handle_download_finished (void *cls, "Unexpected response code %u\n", (unsigned int) response_code); GNUNET_break (0); - response_code = 0; + dd.http_status = 0; break; } if (NULL != download->cb) { download->cb (download->cb_cls, - response_code, - NULL); + &dd); download->cb = NULL; } SYNC_download_cancel (download); @@ -248,16 +248,6 @@ handle_header (char *buffer, } -/** - * Download the latest version of a backup for account @a pub. 
- * - * @param ctx for HTTP client request processing - * @param base_url base URL of the Sync server - * @param pub account public key - * @param cb function to call with the backup - * @param cb_cls closure for @a cb - * @return handle for the operation - */ struct SYNC_DownloadOperation * SYNC_download (struct GNUNET_CURL_Context *ctx, const char *base_url, @@ -301,11 +291,6 @@ SYNC_download (struct GNUNET_CURL_Context *ctx, } -/** - * Cancel the download. - * - * @param do operation to cancel. - */ void SYNC_download_cancel (struct SYNC_DownloadOperation *download) { diff --git a/src/lib/sync_api_upload.c b/src/lib/sync_api_upload.c index 4ac5ee9..32f6376 100644 --- a/src/lib/sync_api_upload.c +++ b/src/lib/sync_api_upload.c @@ -83,7 +83,8 @@ struct SYNC_UploadOperation * * @param cls the `struct SYNC_UploadOperation` * @param response_code HTTP response code, 0 on error - * @param response + * @param data data we downloaded + * @param data_size number of bytes in @a data */ static void handle_upload_finished (void *cls, @@ -92,44 +93,40 @@ handle_upload_finished (void *cls, size_t data_size) { struct SYNC_UploadOperation *uo = cls; - enum TALER_ErrorCode ec = TALER_EC_INVALID; - struct SYNC_UploadDetails ud; - struct SYNC_UploadDetails *udp; + struct SYNC_UploadDetails ud = { + .http_status = (unsigned int) response_code, + .ec = TALER_EC_INVALID + }; uo->job = NULL; - udp = NULL; - memset (&ud, 0, sizeof (ud)); switch (response_code) { case 0: break; case MHD_HTTP_NO_CONTENT: ud.us = SYNC_US_SUCCESS; - ud.details.curr_backup_hash = &uo->new_upload_hash; - udp = &ud; - ec = TALER_EC_NONE; + ud.details.success.curr_backup_hash = &uo->new_upload_hash; + ud.ec = TALER_EC_NONE; break; case MHD_HTTP_NOT_MODIFIED: ud.us = SYNC_US_SUCCESS; - ud.details.curr_backup_hash = &uo->new_upload_hash; - udp = &ud; - ec = TALER_EC_NONE; + ud.details.success.curr_backup_hash = &uo->new_upload_hash; + ud.ec = TALER_EC_NONE; break; case MHD_HTTP_BAD_REQUEST: GNUNET_break (0); - 
ec = TALER_JSON_get_error_code2 (data, - data_size); + ud.ec = TALER_JSON_get_error_code2 (data, + data_size); break; case MHD_HTTP_PAYMENT_REQUIRED: ud.us = SYNC_US_PAYMENT_REQUIRED; - ud.details.payment_request = uo->pay_uri; - udp = &ud; - ec = TALER_EC_NONE; + ud.details.payment_required.payment_request = uo->pay_uri; + ud.ec = TALER_EC_NONE; break; case MHD_HTTP_FORBIDDEN: GNUNET_break (0); - ec = TALER_JSON_get_error_code2 (data, - data_size); + ud.ec = TALER_JSON_get_error_code2 (data, + data_size); break; case MHD_HTTP_CONFLICT: ud.us = SYNC_US_CONFLICTING_BACKUP; @@ -140,23 +137,22 @@ handle_upload_finished (void *cls, = data_size; ud.details.recovered_backup.existing_backup = data; - udp = &ud; - ec = TALER_EC_NONE; + ud.ec = TALER_EC_NONE; break; case MHD_HTTP_GONE: - ec = TALER_JSON_get_error_code2 (data, - data_size); + ud.ec = TALER_JSON_get_error_code2 (data, + data_size); break; case MHD_HTTP_LENGTH_REQUIRED: GNUNET_break (0); break; case MHD_HTTP_REQUEST_ENTITY_TOO_LARGE: - ec = TALER_JSON_get_error_code2 (data, - data_size); + ud.ec = TALER_JSON_get_error_code2 (data, + data_size); break; case MHD_HTTP_TOO_MANY_REQUESTS: - ec = TALER_JSON_get_error_code2 (data, - data_size); + ud.ec = TALER_JSON_get_error_code2 (data, + data_size); break; case MHD_HTTP_INTERNAL_SERVER_ERROR: GNUNET_log (GNUNET_ERROR_TYPE_WARNING, @@ -168,9 +164,7 @@ handle_upload_finished (void *cls, if (NULL != uo->cb) { uo->cb (uo->cb_cls, - ec, - response_code, - udp); + &ud); uo->cb = NULL; } SYNC_upload_cancel (uo); @@ -299,7 +293,7 @@ SYNC_upload (struct GNUNET_CURL_Context *ctx, val = GNUNET_STRINGS_data_to_string_alloc (&usp.new_backup_hash, sizeof (struct GNUNET_HashCode)); GNUNET_asprintf (&hdr, - "%s: %s", + "%s: \"%s\"", MHD_HTTP_HEADER_IF_NONE_MATCH, val); GNUNET_free (val); @@ -321,7 +315,7 @@ SYNC_upload (struct GNUNET_CURL_Context *ctx, sizeof (struct GNUNET_HashCode)); GNUNET_asprintf (&hdr, - "If-Match: %s", + "If-Match: \"%s\"", val); GNUNET_free (val); ext = 
curl_slist_append (job_headers, @@ -356,46 +350,46 @@ SYNC_upload (struct GNUNET_CURL_Context *ctx, if (0 != (po & SYNC_PO_FRESH_ORDER)) { uo->url = (0 != (po & SYNC_PO_FORCE_PAYMENT)) - ? TALER_url_join (base_url, - path, - "fresh", - "y", - "pay", - "y", - (NULL != paid_order_id) - ? "paying" - : NULL, - paid_order_id, - NULL) - : TALER_url_join (base_url, - path, - "fresh", - "y", - (NULL != paid_order_id) - ? "paying" - : NULL, - paid_order_id, - NULL); + ? TALER_url_join (base_url, + path, + "fresh", + "y", + "pay", + "y", + (NULL != paid_order_id) + ? "paying" + : NULL, + paid_order_id, + NULL) + : TALER_url_join (base_url, + path, + "fresh", + "y", + (NULL != paid_order_id) + ? "paying" + : NULL, + paid_order_id, + NULL); } else { uo->url = (0 != (po & SYNC_PO_FORCE_PAYMENT)) - ? TALER_url_join (base_url, - path, - "pay", - "y", - (NULL != paid_order_id) - ? "paying" - : NULL, - paid_order_id, - NULL) - : TALER_url_join (base_url, - path, - (NULL != paid_order_id) - ? "paying" - : NULL, - paid_order_id, - NULL); + ? TALER_url_join (base_url, + path, + "pay", + "y", + (NULL != paid_order_id) + ? "paying" + : NULL, + paid_order_id, + NULL) + : TALER_url_join (base_url, + path, + (NULL != paid_order_id) + ? "paying" + : NULL, + paid_order_id, + NULL); } GNUNET_free (path); @@ -430,13 +424,6 @@ SYNC_upload (struct GNUNET_CURL_Context *ctx, } -/** - * Cancel the upload. Note that aborting an upload does NOT guarantee - * that it did not complete, it is possible that the server did - * receive the full request before the upload is aborted. - * - * @param uo operation to cancel. 
- */ void SYNC_upload_cancel (struct SYNC_UploadOperation *uo) { diff --git a/src/sync/sync-httpd.c b/src/sync/sync-httpd.c index edb8212..fefefaa 100644 --- a/src/sync/sync-httpd.c +++ b/src/sync/sync-httpd.c @@ -77,7 +77,7 @@ static struct GNUNET_SCHEDULER_Task *mhd_task; /** * Global return code */ -static int result; +static int global_ret; /** * The MHD Daemon @@ -136,6 +136,7 @@ struct TALER_Amount SH_insurance; * * @param cls argument given together with the function * pointer when the handler was registered with MHD + * @param connection connection handle * @param url the requested url * @param method the HTTP method used (#MHD_HTTP_METHOD_GET, * #MHD_HTTP_METHOD_PUT, etc.) @@ -194,14 +195,13 @@ url_handler (void *cls, &SH_MHD_handler_static_response, MHD_HTTP_NOT_FOUND }; - struct TM_HandlerContext *hc; + struct TM_HandlerContext *hc = *con_cls; struct GNUNET_AsyncScopeId aid; const char *correlation_id = NULL; struct SYNC_AccountPublicKeyP account_pub; (void) cls; (void) version; - hc = *con_cls; if (NULL == hc) { GNUNET_async_scope_fresh (&aid); @@ -466,7 +466,7 @@ SH_trigger_curl () * Function that queries MHD's select sets and * starts the task waiting for them. 
* - * @param daemon_handle HTTP server to prepare to run + * @return task that runs the HTTP server */ static struct GNUNET_SCHEDULER_Task * prepare_daemon (void) @@ -540,13 +540,9 @@ run (void *cls, if (SH_sync_connection_close) go |= TALER_MHD_GO_FORCE_CONNECTION_CLOSE; TALER_MHD_setup (go); - result = EXIT_NOTCONFIGURED; + global_ret = EXIT_NOTCONFIGURED; GNUNET_SCHEDULER_add_shutdown (&do_shutdown, NULL); - GNUNET_assert (GNUNET_OK == - GNUNET_log_setup ("sync-httpd", - "WARNING", - NULL)); if (GNUNET_OK != GNUNET_CONFIGURATION_get_value_number (config, "sync", @@ -647,7 +643,7 @@ run (void *cls, if (NULL == (db = SYNC_DB_plugin_load (config))) { - result = EXIT_NOTINSTALLED; + global_ret = EXIT_NOTCONFIGURED; GNUNET_SCHEDULER_shutdown (); return; } @@ -665,7 +661,7 @@ run (void *cls, if ( (0 == port) && (-1 == fh) ) { - result = EXIT_NOPERMISSION; + global_ret = EXIT_NO_RESTART; GNUNET_SCHEDULER_shutdown (); return; } @@ -681,13 +677,13 @@ run (void *cls, MHD_OPTION_END); if (NULL == mhd) { - result = EXIT_FAILURE; GNUNET_log (GNUNET_ERROR_TYPE_ERROR, "Failed to launch HTTP service, exiting.\n"); + global_ret = EXIT_NO_RESTART; GNUNET_SCHEDULER_shutdown (); return; } - result = EXIT_SUCCESS; + global_ret = EXIT_SUCCESS; mhd_task = prepare_daemon (); } @@ -746,5 +742,5 @@ main (int argc, return EXIT_SUCCESS; if (GNUNET_SYSERR == ret) return EXIT_INVALIDARGUMENT; - return result; + return global_ret; } diff --git a/src/sync/sync-httpd_backup.c b/src/sync/sync-httpd_backup.c index 801fc14..d0313eb 100644 --- a/src/sync/sync-httpd_backup.c +++ b/src/sync/sync-httpd_backup.c @@ -94,13 +94,16 @@ SH_backup_get (struct MHD_Connection *connection, inm = MHD_lookup_connection_value (connection, MHD_HEADER_KIND, MHD_HTTP_HEADER_IF_NONE_MATCH); - if (NULL != inm) + if ( (NULL != inm) && + (2 < strlen (inm)) && + ('"' == inm[0]) && + ('=' == inm[strlen (inm) - 1]) ) { struct GNUNET_HashCode inm_h; if (GNUNET_OK != - GNUNET_STRINGS_string_to_data (inm, - strlen (inm), + 
GNUNET_STRINGS_string_to_data (inm + 1, + strlen (inm) - 2, &inm_h, sizeof (inm_h))) { @@ -221,6 +224,7 @@ SH_return_backup (struct MHD_Connection *connection, char *sig_s; char *prev_s; char *etag; + char *etagq; sig_s = GNUNET_STRINGS_data_to_string_alloc (&account_sig, sizeof (account_sig)); @@ -236,10 +240,14 @@ SH_return_backup (struct MHD_Connection *connection, MHD_add_response_header (resp, "Sync-Previous", prev_s)); + GNUNET_asprintf (&etagq, + "\"%s\"", + etag); GNUNET_break (MHD_YES == MHD_add_response_header (resp, MHD_HTTP_HEADER_ETAG, - etag)); + etagq)); + GNUNET_free (etagq); GNUNET_free (etag); GNUNET_free (prev_s); GNUNET_free (sig_s); diff --git a/src/sync/sync-httpd_backup.h b/src/sync/sync-httpd_backup.h index ebfbe5f..72cc109 100644 --- a/src/sync/sync-httpd_backup.h +++ b/src/sync/sync-httpd_backup.h @@ -14,7 +14,7 @@ TALER; see the file COPYING. If not, see <http://www.gnu.org/licenses/> */ /** - * @file sync-httpd_policy.h + * @file sync-httpd_backup.h * @brief functions to handle incoming requests on /backup/ * @author Christian Grothoff */ @@ -59,8 +59,10 @@ SH_backup_get (struct MHD_Connection *connection, /** + * Handle POST /backup requests. + * * @param connection the MHD connection to handle - * @param[in,out] connection_cls the connection's closure (can be updated) + * @param[in,out] con_cls the connection's closure (can be updated) * @param account public key of the account the request is for * @param upload_data upload data * @param[in,out] upload_data_size number of bytes (left) in @a upload_data diff --git a/src/sync/sync-httpd_backup_post.c b/src/sync/sync-httpd_backup_post.c index 962142d..f4bb2da 100644 --- a/src/sync/sync-httpd_backup_post.c +++ b/src/sync/sync-httpd_backup_post.c @@ -127,7 +127,7 @@ struct BackupContext * Timestamp of the order in @e existing_order_id. Used to * select the most recent unpaid offer. 
*/ - struct GNUNET_TIME_Absolute existing_order_timestamp; + struct GNUNET_TIME_Timestamp existing_order_timestamp; /** * Expected total upload size. @@ -215,7 +215,6 @@ cleanup_ctx (struct TM_HandlerContext *hc) /** * Transmit a payment request for @a order_id on @a connection * - * @param connection MHD connection * @param order_id our backend's order ID * @param token the claim token generated by the merchant (NULL if * it wasn't generated). @@ -371,7 +370,7 @@ proposal_cb (void *cls, */ static void ongoing_payment_cb (void *cls, - struct GNUNET_TIME_Absolute timestamp, + struct GNUNET_TIME_Timestamp timestamp, const char *order_id, const struct TALER_ClaimTokenP *token, const struct TALER_Amount *amount) @@ -381,9 +380,11 @@ ongoing_payment_cb (void *cls, (void) amount; if (0 != TALER_amount_cmp (amount, &SH_annual_fee)) - return; /* can't re-use, fees changed */ + return; /* can't reuse, fees changed */ if ( (NULL == bc->existing_order_id) || - (bc->existing_order_timestamp.abs_value_us < timestamp.abs_value_us) ) + (GNUNET_TIME_timestamp_cmp (bc->existing_order_timestamp, + <, + timestamp)) ) { GNUNET_free (bc->existing_order_id); bc->existing_order_id = GNUNET_strdup (order_id); @@ -398,27 +399,57 @@ ongoing_payment_cb (void *cls, * Callback to process a GET /check-payment request * * @param cls our `struct BackupContext` - * @param hr HTTP response details * @param osr order status */ static void check_payment_cb (void *cls, - const struct TALER_MERCHANT_HttpResponse *hr, const struct TALER_MERCHANT_OrderStatusResponse *osr) { struct BackupContext *bc = cls; + const struct TALER_MERCHANT_HttpResponse *hr = &osr->hr; /* refunds are not supported, verify */ bc->omgh = NULL; - GNUNET_log (GNUNET_ERROR_TYPE_INFO, - "Payment status checked: %d\n", - osr->status); GNUNET_CONTAINER_DLL_remove (bc_head, bc_tail, bc); MHD_resume_connection (bc->con); SH_trigger_daemon (); - switch (osr->status) + switch (hr->http_status) + { + case 0: + /* Likely timeout, complain! 
*/ + bc->response_code = MHD_HTTP_GATEWAY_TIMEOUT; + bc->resp = TALER_MHD_make_error ( + TALER_EC_SYNC_GENERIC_BACKEND_TIMEOUT, + NULL); + return; + case MHD_HTTP_OK: + break; /* handled below */ + default: + /* Unexpected backend response */ + bc->response_code = MHD_HTTP_BAD_GATEWAY; + bc->resp = TALER_MHD_MAKE_JSON_PACK ( + GNUNET_JSON_pack_uint64 ("code", + TALER_EC_SYNC_GENERIC_BACKEND_ERROR), + GNUNET_JSON_pack_string ("hint", + TALER_ErrorCode_get_hint ( + TALER_EC_SYNC_GENERIC_BACKEND_ERROR)), + GNUNET_JSON_pack_uint64 ("backend-ec", + (json_int_t) hr->ec), + GNUNET_JSON_pack_uint64 ("backend-http-status", + (json_int_t) hr->http_status), + GNUNET_JSON_pack_allow_null ( + GNUNET_JSON_pack_object_incref ("backend-reply", + (json_t *) hr->reply))); + return; + } + + GNUNET_assert (MHD_HTTP_OK == hr->http_status); + GNUNET_log (GNUNET_ERROR_TYPE_INFO, + "Payment status checked: %d\n", + osr->details.ok.status); + switch (osr->details.ok.status) { case TALER_MERCHANT_OSC_PAID: { @@ -485,7 +516,6 @@ await_payment (struct BackupContext *bc, SH_backend_url, order_id, NULL /* our payments are NOT session-bound */, - false, timeout, &check_payment_cb, bc); @@ -508,6 +538,7 @@ static MHD_RESULT begin_payment (struct BackupContext *bc, int pay_req) { + static const char *no_uuids[1] = { NULL }; json_t *order; if (! bc->force_fresh_order) @@ -565,7 +596,7 @@ begin_payment (struct BackupContext *bc, 0, NULL, /* no inventory products */ 0, - NULL, /* no uuids */ + no_uuids, /* no uuids */ false, /* do NOT require claim token */ &proposal_cb, bc); @@ -650,16 +681,6 @@ handle_database_error (struct BackupContext *bc, } -/** - * Handle a client POSTing a backup to us. 
- * - * @param connection the MHD connection to handle - * @param[in,out] connection_cls the connection's closure (can be updated) - * @param account public key of the account the request is for - * @param upload_data upload data - * @param[in,out] upload_data_size number of bytes (left) in @a upload_data - * @return MHD result code - */ MHD_RESULT SH_backup_post (struct MHD_Connection *connection, void **con_cls, @@ -737,18 +758,23 @@ SH_backup_post (struct MHD_Connection *connection, im = MHD_lookup_connection_value (connection, MHD_HEADER_KIND, MHD_HTTP_HEADER_IF_MATCH); - if ( (NULL != im) && - (GNUNET_OK != - GNUNET_STRINGS_string_to_data (im, - strlen (im), - &bc->old_backup_hash, - sizeof (bc->old_backup_hash))) ) + if (NULL != im) { - GNUNET_break_op (0); - return TALER_MHD_reply_with_error (connection, - MHD_HTTP_BAD_REQUEST, - TALER_EC_SYNC_BAD_IF_MATCH, - NULL); + if ( (2 >= strlen (im)) || + ('"' != im[0]) || + ('"' != im[strlen (im) - 1]) || + (GNUNET_OK != + GNUNET_STRINGS_string_to_data (im + 1, + strlen (im) - 2, + &bc->old_backup_hash, + sizeof (bc->old_backup_hash))) ) + { + GNUNET_break_op (0); + return TALER_MHD_reply_with_error (connection, + MHD_HTTP_BAD_REQUEST, + TALER_EC_SYNC_BAD_IF_MATCH, + NULL); + } } } { @@ -778,9 +804,12 @@ SH_backup_post (struct MHD_Connection *connection, MHD_HEADER_KIND, MHD_HTTP_HEADER_IF_NONE_MATCH); if ( (NULL == etag) || + (2 >= strlen (etag)) || + ('"' != etag[0]) || + ('"' != etag[strlen (etag) - 1]) || (GNUNET_OK != - GNUNET_STRINGS_string_to_data (etag, - strlen (etag), + GNUNET_STRINGS_string_to_data (etag + 1, + strlen (etag) - 2, &bc->new_backup_hash, sizeof (bc->new_backup_hash))) ) { diff --git a/src/sync/sync-httpd_config.c b/src/sync/sync-httpd_config.c index b75c1d5..c1c40ca 100644 --- a/src/sync/sync-httpd_config.c +++ b/src/sync/sync-httpd_config.c @@ -1,6 +1,6 @@ /* This file is part of Sync - Copyright (C) 2020 Taler Systems SA + Copyright (C) 2020,2024 Taler Systems SA Sync is free software; you 
can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software @@ -14,7 +14,7 @@ Sync; see the file COPYING.GPL. If not, see <http://www.gnu.org/licenses/> */ /** - * @file backend/sync-httpd_config.c + * @file sync/sync-httpd_config.c * @brief headers for /config handler * @author Christian Grothoff */ @@ -31,17 +31,6 @@ * to be created */ -/** - * Manages a /config call. - * - * @param rh context of the handler - * @param connection the MHD connection to handle - * @param[in,out] connection_cls the connection's closure (can be updated) - * @param upload_data upload data - * @param[in,out] upload_data_size number of bytes (left) in @a upload_data - * @param mi merchant backend instance, never NULL - * @return MHD result code - */ MHD_RESULT SH_handler_config (struct SH_RequestHandler *rh, struct MHD_Connection *connection, @@ -49,19 +38,59 @@ SH_handler_config (struct SH_RequestHandler *rh, const char *upload_data, size_t *upload_data_size) { - return TALER_MHD_REPLY_JSON_PACK ( - connection, - MHD_HTTP_OK, - GNUNET_JSON_pack_string ("name", - "sync"), - GNUNET_JSON_pack_uint64 ("storage_limit_in_megabytes", - SH_upload_limit_mb), - TALER_JSON_pack_amount ("liability_limit", - &SH_insurance), - TALER_JSON_pack_amount ("annual_fee", - &SH_annual_fee), - GNUNET_JSON_pack_string ("version", - "1:0:1")); + static struct MHD_Response *response; + static struct GNUNET_TIME_Absolute a; + + (void) connection_cls; + (void) upload_data; + (void) upload_data_size; + if ( (GNUNET_TIME_absolute_is_past (a)) && + (NULL != response) ) + { + MHD_destroy_response (response); + response = NULL; + } + if (NULL == response) + { + struct GNUNET_TIME_Timestamp km; + char dat[128]; + + a = GNUNET_TIME_relative_to_absolute (GNUNET_TIME_UNIT_DAYS); + /* Round up to next full day to ensure the expiration + time does not become a fingerprint! 
*/ + a = GNUNET_TIME_absolute_round_down (a, + GNUNET_TIME_UNIT_DAYS); + a = GNUNET_TIME_absolute_add (a, + GNUNET_TIME_UNIT_DAYS); + /* => /config response stays at most 48h in caches! */ + km = GNUNET_TIME_absolute_to_timestamp (a); + TALER_MHD_get_date_string (km.abs_time, + dat); + response = TALER_MHD_MAKE_JSON_PACK ( + GNUNET_JSON_pack_string ("name", + "sync"), + GNUNET_JSON_pack_string ("implementation", + "urn:net:taler:specs:sync:c-reference"), + GNUNET_JSON_pack_uint64 ("storage_limit_in_megabytes", + SH_upload_limit_mb), + TALER_JSON_pack_amount ("liability_limit", + &SH_insurance), + TALER_JSON_pack_amount ("annual_fee", + &SH_annual_fee), + GNUNET_JSON_pack_string ("version", + "2:2:0")); + GNUNET_break (MHD_YES == + MHD_add_response_header (response, + MHD_HTTP_HEADER_EXPIRES, + dat)); + GNUNET_break (MHD_YES == + MHD_add_response_header (response, + MHD_HTTP_HEADER_CACHE_CONTROL, + "public,max-age=21600")); /* 6h */ + } + return MHD_queue_response (connection, + MHD_HTTP_OK, + response); } diff --git a/src/sync/sync-httpd_config.h b/src/sync/sync-httpd_config.h index 0c882e2..3cafade 100644 --- a/src/sync/sync-httpd_config.h +++ b/src/sync/sync-httpd_config.h @@ -14,7 +14,7 @@ Sync; see the file COPYING.GPL. If not, see <http://www.gnu.org/licenses/> */ /** - * @file backend/sync-httpd_config.h + * @file sync/sync-httpd_config.h * @brief headers for /config handler * @author Christian Grothoff */ diff --git a/src/sync/sync-httpd_mhd.c b/src/sync/sync-httpd_mhd.c index fa2344b..c7c4824 100644 --- a/src/sync/sync-httpd_mhd.c +++ b/src/sync/sync-httpd_mhd.c @@ -1,6 +1,6 @@ /* This file is part of TALER - Copyright (C) 2014, 2015, 2016 GNUnet e.V. 
and INRIA + Copyright (C) 2014, 2015, 2016 Taler Systems SA TALER is free software; you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software @@ -27,17 +27,6 @@ #include "sync-httpd_mhd.h" -/** - * Function to call to handle the request by sending - * back static data from the @a rh. - * - * @param rh context of the handler - * @param connection the MHD connection to handle - * @param[in,out] connection_cls the connection's closure (can be updated) - * @param upload_data upload data - * @param[in,out] upload_data_size number of bytes (left) in @a upload_data - * @return MHD result code - */ MHD_RESULT SH_MHD_handler_static_response (struct SH_RequestHandler *rh, struct MHD_Connection *connection, @@ -58,17 +47,6 @@ SH_MHD_handler_static_response (struct SH_RequestHandler *rh, } -/** - * Function to call to handle the request by sending - * back a redirect to the AGPL source code. - * - * @param rh context of the handler - * @param connection the MHD connection to handle - * @param[in,out] connection_cls the connection's closure (can be updated) - * @param upload_data upload data - * @param[in,out] upload_data_size number of bytes (left) in @a upload_data - * @return MHD result code - */ MHD_RESULT SH_MHD_handler_agpl_redirect (struct SH_RequestHandler *rh, struct MHD_Connection *connection, diff --git a/src/sync/sync-httpd_mhd.h b/src/sync/sync-httpd_mhd.h index 5a7a9b2..5b1431b 100644 --- a/src/sync/sync-httpd_mhd.h +++ b/src/sync/sync-httpd_mhd.h @@ -37,7 +37,6 @@ * @param[in,out] connection_cls the connection's closure (can be updated) * @param upload_data upload data * @param[in,out] upload_data_size number of bytes (left) in @a upload_data - * @param mi merchant backend instance, NULL is allowed in this case! 
* @return MHD result code */ MHD_RESULT @@ -57,7 +56,6 @@ SH_MHD_handler_static_response (struct SH_RequestHandler *rh, * @param[in,out] connection_cls the connection's closure (can be updated) * @param upload_data upload data * @param[in,out] upload_data_size number of bytes (left) in @a upload_data - * @param mi merchant backend instance, never NULL * @return MHD result code */ MHD_RESULT diff --git a/src/sync/sync.conf b/src/sync/sync.conf index af3203f..2eccdc1 100644 --- a/src/sync/sync.conf +++ b/src/sync/sync.conf @@ -42,43 +42,3 @@ PAYMENT_BACKEND_URL = http://localhost:9966/ # API key to pass when accessing the merchant backend. # API_KEY = SECRET_VALUE - -# Configuration for postgres database. -[syncdb-postgres] -CONFIG = postgres:///sync - - -[PATHS] -# The PATHS section is special, as filenames including $-expression are -# expanded using the values from PATHS or the system environment (PATHS -# is checked first). libgnunetutil supports expanding $-expressions using -# defaults with the syntax "${VAR:-default}". Here, "default" can again -# be a $-expression. -# -# We usually want $HOME for $SYNC_HOME -# -SYNC_HOME = ${HOME:-${USERPROFILE}} - -# see XDG Base Directory Specification at -# http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html -# for how these should be used. - -# Persistent data storage -SYNC_DATA_HOME = ${XDG_DATA_HOME:-$SYNC_HOME/.local/share}/sync/ - -# Configuration files -SYNC_CONFIG_HOME = ${XDG_CONFIG_HOME:-$SYNC_HOME/.config}/sync/ - -# Cached data, no big deal if lost -SYNC_CACHE_HOME = ${XDG_CACHE_HOME:-$SYNC_HOME/.cache}/sync/ - -# Runtime data (i.e UNIX domain sockets, locks, always lost on system boot) -SYNC_RUNTIME_DIR = ${TMPDIR:-${TMP:-/tmp}}/sync-runtime/ - -# Directory to use for temporary files. 
-SYNC_TMP = ${TMPDIR:-${TMP:-/tmp}}/sync/ - -# DEFAULTCONFIG = /etc/sync.conf -# If 'DEFAULTCONFIG' is not defined, the current -# configuration file is assumed to be the default, -# which is what we want by default... diff --git a/src/syncdb/Makefile.am b/src/syncdb/Makefile.am index d71e24e..09b577c 100644 --- a/src/syncdb/Makefile.am +++ b/src/syncdb/Makefile.am @@ -1,6 +1,11 @@ # This Makefile.am is in the public domain AM_CPPFLAGS = -I$(top_srcdir)/src/include +pkgcfgdir = $(prefix)/share/sync/config.d/ + +pkgcfg_DATA = \ + sync_db_postgres.conf + plugindir = $(libdir)/sync if HAVE_POSTGRESQL @@ -18,9 +23,9 @@ endif sqldir = $(prefix)/share/sync/sql/ sql_DATA = \ - sync-0000.sql \ + versioning.sql \ sync-0001.sql \ - drop0001.sql + drop.sql bin_PROGRAMS = \ sync-dbinit @@ -53,14 +58,14 @@ libsyncdb_la_LDFLAGS = \ libsync_plugin_db_postgres_la_SOURCES = \ plugin_syncdb_postgres.c -libsync_plugin_db_postgres_la_LIBADD = \ - $(LTLIBINTL) libsync_plugin_db_postgres_la_LDFLAGS = \ - $(SYNC_PLUGIN_LDFLAGS) \ - -lgnunetpq \ - -lpq \ + $(SYNC_PLUGIN_LDFLAGS) +libsync_plugin_db_postgres_la_LIBADD = \ + $(LTLIBINTL) \ -ltalerpq \ + -lgnunetpq \ -lgnunetutil \ + -lpq \ $(XLIB) check_PROGRAMS = \ @@ -76,8 +81,11 @@ test_sync_db_postgres_LDFLAGS = \ -ltalerutil \ $(XLIB) +AM_TESTS_ENVIRONMENT=export SYNC_PREFIX=$${SYNC_PREFIX:-@libdir@};export PATH=$${SYNC_PREFIX:-@prefix@}/bin:$$PATH; TESTS = \ test_sync_db-postgres EXTRA_DIST = \ + $(pkgcfg_DATA) \ + $(sql_DATA) \ test_sync_db_postgres.conf diff --git a/src/syncdb/drop0001.sql b/src/syncdb/drop.sql index 9253517..aeaa102 100644 --- a/src/syncdb/drop0001.sql +++ b/src/syncdb/drop.sql @@ -1,6 +1,6 @@ -- -- This file is part of TALER --- Copyright (C) 2021 Taler Systems SA +-- Copyright (C) 2021, 2022 Taler Systems SA -- -- TALER is free software; you can redistribute it and/or modify it under the -- terms of the GNU General Public License as published by the Free Software @@ -17,18 +17,15 @@ -- Everything in one big 
transaction BEGIN; --- This script DROPs all of the tables we create. --- --- Unlike the other SQL files, it SHOULD be updated to reflect the --- latest requirements for dropping tables. - --- Drops for 0001.sql -DROP TABLE IF EXISTS payments CASCADE; -DROP TABLE IF EXISTS backups CASCADE; -DROP TABLE IF EXISTS accounts CASCADE; +WITH xpatches AS ( + SELECT patch_name + FROM _v.patches + WHERE starts_with(patch_name,'sync-') +) + SELECT _v.unregister_patch(xpatches.patch_name) + FROM xpatches; --- Unregister patch (0001.sql) -SELECT _v.unregister_patch('sync-0001'); +DROP SCHEMA sync CASCADE; -- And we're out of here... COMMIT; diff --git a/src/syncdb/plugin_syncdb_postgres.c b/src/syncdb/plugin_syncdb_postgres.c index 8e294cb..d250c82 100644 --- a/src/syncdb/plugin_syncdb_postgres.c +++ b/src/syncdb/plugin_syncdb_postgres.c @@ -1,6 +1,6 @@ /* This file is part of TALER - (C) 2014--2021 Taler Systems SA + (C) 2014--2022 Taler Systems SA TALER is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software @@ -14,7 +14,7 @@ TALER; see the file COPYING. 
If not, see <http://www.gnu.org/licenses/> */ /** - * @file sync/plugin_syncdb_postgres.c + * @file syncdb/plugin_syncdb_postgres.c * @brief database helper functions for postgres used by sync * @author Christian Grothoff */ @@ -78,16 +78,25 @@ postgres_drop_tables (void *cls) { struct PostgresClosure *pg = cls; struct GNUNET_PQ_Context *conn; + enum GNUNET_GenericReturnValue ret; + if (NULL != pg->conn) + { + GNUNET_PQ_disconnect (pg->conn); + pg->conn = NULL; + pg->init = false; + } conn = GNUNET_PQ_connect_with_cfg (pg->cfg, "syncdb-postgres", - "drop", + NULL, NULL, NULL); if (NULL == conn) return GNUNET_SYSERR; + ret = GNUNET_PQ_exec_sql (conn, + "drop"); GNUNET_PQ_disconnect (conn); - return GNUNET_OK; + return ret; } @@ -107,19 +116,16 @@ prepare_statements (void *cls) "(account_pub" ",expiration_date" ") VALUES " - "($1,$2);", - 2), + "($1,$2);"), GNUNET_PQ_make_prepare ("payment_insert", "INSERT INTO payments " "(account_pub" ",order_id" ",token" ",timestamp" - ",amount_val" - ",amount_frac" + ",amount" ") VALUES " - "($1,$2,$3,$4,$5,$6);", - 6), + "($1,$2,$3,$4,$5);"), GNUNET_PQ_make_prepare ("payment_done", "UPDATE payments " "SET" @@ -129,57 +135,48 @@ prepare_statements (void *cls) " AND" " account_pub=$2" " AND" - " paid=FALSE;", - 2), + " paid=FALSE;"), GNUNET_PQ_make_prepare ("account_update", "UPDATE accounts " "SET" " expiration_date=$1 " "WHERE" - " account_pub=$2;", - 2), + " account_pub=$2;"), GNUNET_PQ_make_prepare ("account_select", "SELECT" " expiration_date " "FROM" " accounts " "WHERE" - " account_pub=$1;", - 1), + " account_pub=$1;"), GNUNET_PQ_make_prepare ("payments_select", "SELECT" " account_pub" ",order_id" - ",amount_val" - ",amount_frac" + ",amount" " FROM payments" - " WHERE paid=FALSE;", - 0), + " WHERE paid=FALSE;"), GNUNET_PQ_make_prepare ("payments_select_by_account", "SELECT" " timestamp" ",order_id" ",token" - ",amount_val" - ",amount_frac" + ",amount" " FROM payments" " WHERE" " paid=FALSE" " AND" - " account_pub=$1;", - 
1), + " account_pub=$1;"), GNUNET_PQ_make_prepare ("gc_accounts", "DELETE FROM accounts " "WHERE" - " expiration_date < $1;", - 1), + " expiration_date < $1;"), GNUNET_PQ_make_prepare ("gc_pending_payments", "DELETE FROM payments " "WHERE" " paid=FALSE" " AND" - " timestamp < $1;", - 1), + " timestamp < $1;"), GNUNET_PQ_make_prepare ("backup_insert", "INSERT INTO backups " "(account_pub" @@ -188,8 +185,7 @@ prepare_statements (void *cls) ",backup_hash" ",data" ") VALUES " - "($1,$2,$3,$4,$5);", - 5), + "($1,$2,$3,$4,$5);"), GNUNET_PQ_make_prepare ("backup_update", "UPDATE backups " " SET" @@ -200,16 +196,14 @@ prepare_statements (void *cls) " WHERE" " account_pub=$5" " AND" - " backup_hash=$6;", - 6), + " backup_hash=$6;"), GNUNET_PQ_make_prepare ("backup_select_hash", "SELECT " " backup_hash " "FROM" " backups " "WHERE" - " account_pub=$1;", - 1), + " account_pub=$1;"), GNUNET_PQ_make_prepare ("backup_select", "SELECT " " account_sig" @@ -219,11 +213,9 @@ prepare_statements (void *cls) "FROM" " backups " "WHERE" - " account_pub=$1;", - 1), + " account_pub=$1;"), GNUNET_PQ_make_prepare ("do_commit", - "COMMIT", - 0), + "COMMIT"), GNUNET_PQ_PREPARED_STATEMENT_END }; enum GNUNET_GenericReturnValue ret; @@ -264,10 +256,14 @@ internal_setup (struct PostgresClosure *pg, "SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL SERIALIZABLE;"), GNUNET_PQ_make_try_execute ("SET enable_sort=OFF;"), GNUNET_PQ_make_try_execute ("SET enable_seqscan=OFF;"), + GNUNET_PQ_make_execute ("SET search_path TO sync;"), GNUNET_PQ_EXECUTE_STATEMENT_END }; #else - struct GNUNET_PQ_ExecuteStatement *es = NULL; + struct GNUNET_PQ_ExecuteStatement es[] = { + GNUNET_PQ_make_execute ("SET search_path TO sync;"), + GNUNET_PQ_EXECUTE_STATEMENT_END + }; #endif struct GNUNET_PQ_Context *db_conn; @@ -314,7 +310,11 @@ postgres_preflight (void *cls) if (GNUNET_OK != internal_setup (pg, false)) + { + GNUNET_log (GNUNET_ERROR_TYPE_ERROR, + "Failed to ensure DB is initialized\n"); return 
GNUNET_SYSERR; + } } if (NULL == pg->transaction_name) return GNUNET_OK; /* all good */ @@ -452,11 +452,11 @@ postgres_gc (void *cls, { struct PostgresClosure *pg = cls; struct GNUNET_PQ_QueryParam params[] = { - TALER_PQ_query_param_absolute_time (&expire_backups), + GNUNET_PQ_query_param_absolute_time (&expire_backups), GNUNET_PQ_query_param_end }; struct GNUNET_PQ_QueryParam params2[] = { - TALER_PQ_query_param_absolute_time (&expire_pending_payments), + GNUNET_PQ_query_param_absolute_time (&expire_pending_payments), GNUNET_PQ_query_param_end }; enum GNUNET_DB_QueryStatus qs; @@ -496,13 +496,14 @@ postgres_store_payment (void *cls, struct PostgresClosure *pg = cls; enum GNUNET_DB_QueryStatus qs; struct TALER_ClaimTokenP tok; - struct GNUNET_TIME_Absolute now = GNUNET_TIME_absolute_get (); + struct GNUNET_TIME_Timestamp now = GNUNET_TIME_timestamp_get (); struct GNUNET_PQ_QueryParam params[] = { GNUNET_PQ_query_param_auto_from_type (account_pub), GNUNET_PQ_query_param_string (order_id), GNUNET_PQ_query_param_auto_from_type (&tok), - GNUNET_PQ_query_param_absolute_time (&now), - TALER_PQ_query_param_amount (amount), + GNUNET_PQ_query_param_timestamp (&now), + TALER_PQ_query_param_amount (pg->conn, + amount), GNUNET_PQ_query_param_end }; @@ -570,7 +571,7 @@ struct PaymentIteratorContext * * @param cls closure of type `struct PaymentIteratorContext *` * @param result the postgres result - * @param num_result the number of results in @a result + * @param num_results the number of results in @a result */ static void payment_by_account_cb (void *cls, @@ -581,13 +582,13 @@ payment_by_account_cb (void *cls, for (unsigned int i = 0; i < num_results; i++) { - struct GNUNET_TIME_Absolute timestamp; + struct GNUNET_TIME_Timestamp timestamp; char *order_id; struct TALER_Amount amount; struct TALER_ClaimTokenP token; struct GNUNET_PQ_ResultSpec rs[] = { - GNUNET_PQ_result_spec_absolute_time ("timestamp", - ×tamp), + GNUNET_PQ_result_spec_timestamp ("timestamp", + ×tamp), 
GNUNET_PQ_result_spec_string ("order_id", &order_id), GNUNET_PQ_result_spec_auto_from_type ("token", @@ -709,7 +710,6 @@ postgres_store_backup (void *cls, GNUNET_break (0); return SYNC_DB_SOFT_ERROR; case GNUNET_DB_STATUS_SUCCESS_NO_RESULTS: - GNUNET_break (0); return SYNC_DB_NO_RESULTS; case GNUNET_DB_STATUS_SUCCESS_ONE_RESULT: return SYNC_DB_ONE_RESULT; @@ -723,7 +723,7 @@ postgres_store_backup (void *cls, /* First, check if account exists */ { - struct GNUNET_TIME_Absolute ed; + struct GNUNET_TIME_Timestamp ed; struct GNUNET_PQ_QueryParam params[] = { GNUNET_PQ_query_param_auto_from_type (account_pub), GNUNET_PQ_query_param_end @@ -864,7 +864,7 @@ postgres_update_backup (void *cls, /* First, check if account exists */ { - struct GNUNET_TIME_Absolute ed; + struct GNUNET_TIME_Timestamp ed; struct GNUNET_PQ_QueryParam params[] = { GNUNET_PQ_query_param_auto_from_type (account_pub), GNUNET_PQ_query_param_end @@ -953,7 +953,7 @@ postgres_update_backup (void *cls, * * @param cls closure * @param account_pub account to store @a backup under - * @param backup_hash[OUT] set to hash of @a backup + * @param[out] backup_hash set to hash of @a backup * @return transaction status */ static enum SYNC_DB_QueryStatus @@ -1000,7 +1000,7 @@ postgres_lookup_account (void *cls, /* check if account exists */ { - struct GNUNET_TIME_Absolute expiration; + struct GNUNET_TIME_Timestamp expiration; struct GNUNET_PQ_ResultSpec rs[] = { GNUNET_PQ_result_spec_auto_from_type ("expiration_date", &expiration), @@ -1037,11 +1037,11 @@ postgres_lookup_account (void *cls, * * @param cls closure * @param account_pub account to store @a backup under - * @param account_sig[OUT] set to signature affirming storage request - * @param prev_hash[OUT] set to hash of previous @a backup, all zeros if none - * @param backup_hash[OUT] set to hash of @a backup - * @param backup_size[OUT] set to number of bytes in @a backup - * @param backup[OUT] set to raw data to backup, caller MUST FREE + * @param[out] 
account_sig set to signature affirming storage request + * @param[out] prev_hash set to hash of previous @a backup, all zeros if none + * @param[out] backup_hash set to hash of @a backup + * @param[out] backup_size set to number of bytes in @a backup + * @param[out] backup set to raw data to backup, caller MUST FREE */ static enum SYNC_DB_QueryStatus postgres_lookup_backup (void *cls, @@ -1111,7 +1111,7 @@ postgres_increment_lifetime (void *cls, struct GNUNET_TIME_Relative lifetime) { struct PostgresClosure *pg = cls; - struct GNUNET_TIME_Absolute expiration; + struct GNUNET_TIME_Timestamp expiration; enum GNUNET_DB_QueryStatus qs; check_connection (pg); @@ -1157,8 +1157,8 @@ postgres_increment_lifetime (void *cls, GNUNET_PQ_query_param_end }; struct GNUNET_PQ_ResultSpec rs[] = { - TALER_PQ_result_spec_absolute_time ("expiration_date", - &expiration), + GNUNET_PQ_result_spec_timestamp ("expiration_date", + &expiration), GNUNET_PQ_result_spec_end }; @@ -1180,11 +1180,11 @@ postgres_increment_lifetime (void *cls, { struct GNUNET_PQ_QueryParam params[] = { GNUNET_PQ_query_param_auto_from_type (account_pub), - GNUNET_PQ_query_param_absolute_time (&expiration), + GNUNET_PQ_query_param_timestamp (&expiration), GNUNET_PQ_query_param_end }; - expiration = GNUNET_TIME_relative_to_absolute (lifetime); + expiration = GNUNET_TIME_relative_to_timestamp (lifetime); qs = GNUNET_PQ_eval_prepared_non_select (pg->conn, "account_insert", params); @@ -1193,13 +1193,14 @@ postgres_increment_lifetime (void *cls, case GNUNET_DB_STATUS_SUCCESS_ONE_RESULT: { struct GNUNET_PQ_QueryParam params[] = { - GNUNET_PQ_query_param_absolute_time (&expiration), + GNUNET_PQ_query_param_timestamp (&expiration), GNUNET_PQ_query_param_auto_from_type (account_pub), GNUNET_PQ_query_param_end }; - expiration = GNUNET_TIME_absolute_add (expiration, - lifetime); + expiration = GNUNET_TIME_absolute_to_timestamp ( + GNUNET_TIME_absolute_add (expiration.abs_time, + lifetime)); qs = 
GNUNET_PQ_eval_prepared_non_select (pg->conn, "account_update", params); @@ -1258,11 +1259,15 @@ postgres_create_tables (void *cls) { struct PostgresClosure *pc = cls; struct GNUNET_PQ_Context *conn; + struct GNUNET_PQ_ExecuteStatement es[] = { + GNUNET_PQ_make_execute ("SET search_path TO sync;"), + GNUNET_PQ_EXECUTE_STATEMENT_END + }; conn = GNUNET_PQ_connect_with_cfg (pc->cfg, "syncdb-postgres", "sync-", - NULL, + es, NULL); if (NULL == conn) return GNUNET_SYSERR; @@ -1351,6 +1356,7 @@ libsync_plugin_db_postgres_done (void *cls) struct PostgresClosure *pg = plugin->cls; GNUNET_PQ_disconnect (pg->conn); + GNUNET_free (pg->currency); GNUNET_free (pg->sql_dir); GNUNET_free (pg); GNUNET_free (plugin); diff --git a/src/syncdb/sync-0001.sql b/src/syncdb/sync-0001.sql index a463d72..c487923 100644 --- a/src/syncdb/sync-0001.sql +++ b/src/syncdb/sync-0001.sql @@ -1,6 +1,6 @@ -- -- This file is part of TALER --- Copyright (C) 2021 Taler Systems SA +-- Copyright (C) 2021, 2023 Taler Systems SA -- -- TALER is free software; you can redistribute it and/or modify it under the -- terms of the GNU General Public License as published by the Free Software @@ -20,6 +20,19 @@ BEGIN; -- Check patch versioning is in place. 
SELECT _v.register_patch('sync-0001', NULL, NULL); +CREATE SCHEMA sync; +COMMENT ON SCHEMA sync IS 'sync data'; + +SET search_path TO sync; + +CREATE TYPE taler_amount + AS + (val INT8 + ,frac INT4 + ); +COMMENT ON TYPE taler_amount + IS 'Stores an amount, fraction is in units of 1/100000000 of the base value'; + CREATE TABLE IF NOT EXISTS accounts (account_pub BYTEA PRIMARY KEY CHECK (length(account_pub)=32) ,expiration_date INT8 NOT NULL); @@ -30,11 +43,10 @@ CREATE INDEX IF NOT EXISTS accounts_expire ON CREATE TABLE IF NOT EXISTS payments (account_pub BYTEA CHECK (length(account_pub)=32) - ,order_id VARCHAR PRIMARY KEY + ,order_id TEXT PRIMARY KEY ,token BYTEA CHECK (length(token)=16) ,timestamp INT8 NOT NULL - ,amount_val INT8 NOT NULL - ,amount_frac INT4 NOT NULL + ,amount taler_amount NOT NULL ,paid BOOLEAN NOT NULL DEFAULT FALSE); CREATE INDEX IF NOT EXISTS payments_timestamp ON diff --git a/src/syncdb/sync-dbinit.c b/src/syncdb/sync-dbinit.c index be7b2ae..d1c9e39 100644 --- a/src/syncdb/sync-dbinit.c +++ b/src/syncdb/sync-dbinit.c @@ -14,7 +14,7 @@ TALER; see the file COPYING. If not, see <http://www.gnu.org/licenses/> */ /** - * @file util/sync-dbinit.c + * @file syncdb/sync-dbinit.c * @brief Create tables for the sync database. * @author Christian Grothoff */ diff --git a/src/syncdb/sync_db_plugin.c b/src/syncdb/sync_db_plugin.c index 2c3bd48..6739e4d 100644 --- a/src/syncdb/sync_db_plugin.c +++ b/src/syncdb/sync_db_plugin.c @@ -24,12 +24,6 @@ #include <ltdl.h> -/** - * Initialize the plugin. - * - * @param cfg configuration to use - * @return NULL on failure - */ struct SYNC_DatabasePlugin * SYNC_DB_plugin_load (const struct GNUNET_CONFIGURATION_Handle *cfg) { @@ -62,11 +56,6 @@ SYNC_DB_plugin_load (const struct GNUNET_CONFIGURATION_Handle *cfg) } -/** - * Shutdown the plugin. 
- * - * @param plugin the plugin to unload - */ void SYNC_DB_plugin_unload (struct SYNC_DatabasePlugin *plugin) { @@ -102,7 +91,7 @@ plugin_init () if (err > 0) { fprintf (stderr, - _ ("Initialization of plugin mechanism failed: %s!\n"), + "Initialization of plugin mechanism failed: %s!\n", lt_dlerror ()); return; } diff --git a/src/syncdb/sync_db_postgres.conf b/src/syncdb/sync_db_postgres.conf index db41bd0..ddf7d06 100644 --- a/src/syncdb/sync_db_postgres.conf +++ b/src/syncdb/sync_db_postgres.conf @@ -1,7 +1,3 @@ -[anastasis] -#The DB plugin to use -DB = postgres - [syncdb-postgres] #The connection string the plugin has to use for connecting to the database CONFIG = postgres:///sync diff --git a/src/syncdb/test_sync_db.c b/src/syncdb/test_sync_db.c index 8bea684..d01941b 100644 --- a/src/syncdb/test_sync_db.c +++ b/src/syncdb/test_sync_db.c @@ -60,7 +60,7 @@ static struct SYNC_DatabasePlugin *plugin; */ static void payment_it (void *cls, - struct GNUNET_TIME_Absolute timestamp, + struct GNUNET_TIME_Timestamp timestamp, const char *order_id, const struct TALER_ClaimTokenP *token, const struct TALER_Amount *amount) @@ -235,7 +235,6 @@ run (void *cls) 4, "DATA")); ts = GNUNET_TIME_relative_to_absolute (GNUNET_TIME_UNIT_YEARS); - (void) GNUNET_TIME_round_abs (&ts); FAILIF (0 > plugin->gc (plugin->cls, ts, @@ -275,7 +274,9 @@ main (int argc, GNUNET_break (0); return EXIT_FAILURE; } - GNUNET_log_setup (argv[0], "DEBUG", NULL); + GNUNET_log_setup (argv[0], + "DEBUG", + NULL); (void) TALER_project_data_default (); GNUNET_OS_init (SYNC_project_data_default ()); plugin_name++; diff --git a/src/syncdb/sync-0000.sql b/src/syncdb/versioning.sql index 116f409..444cf95 100644 --- a/src/syncdb/sync-0000.sql +++ b/src/syncdb/versioning.sql @@ -146,12 +146,13 @@ BEGIN; + -- This file adds versioning support to database it will be loaded to. -- It requires that PL/pgSQL is already loaded - will raise exception otherwise. 
-- All versioning "stuff" (tables, functions) is in "_v" schema. -- All functions are defined as 'RETURNS SETOF INT4' to be able to make them to RETURN literally nothing (0 rows). --- >> RETURNS VOID<< IS similar, but it still outputs "empty line" in psql when calling. +-- >> RETURNS VOID<< IS similar, but it still outputs "empty line" in psql when calling CREATE SCHEMA IF NOT EXISTS _v; COMMENT ON SCHEMA _v IS 'Schema for versioning data and functionality.'; diff --git a/src/testing/.gitignore b/src/testing/.gitignore index 89ebda4..c81b1be 100644 --- a/src/testing/.gitignore +++ b/src/testing/.gitignore @@ -13,3 +13,5 @@ test_sync_api_home/.local/share/taler/exchange/offline-keys/secm_tofus.pub test_sync_api_home/.local/share/taler/taler-exchange-secmod-eddsa/ test_sync_api_home/.local/share/taler/taler-exchange-secmod-rsa/ +test_sync_api.conf.edited +libeufin-bank.pid diff --git a/src/testing/Makefile.am b/src/testing/Makefile.am index 13ff35f..0fd3d03 100644 --- a/src/testing/Makefile.am +++ b/src/testing/Makefile.am @@ -15,10 +15,7 @@ libsynctesting_la_LDFLAGS = \ libsynctesting_la_SOURCES = \ testing_api_cmd_backup_download.c \ testing_api_cmd_backup_upload.c \ - testing_api_helpers.c \ - testing_api_trait_account_pub.c \ - testing_api_trait_account_priv.c \ - testing_api_trait_hash.c + testing_api_traits.c libsynctesting_la_LIBADD = \ $(top_builddir)/src/lib/libsync.la \ -ltalermerchant \ @@ -32,6 +29,8 @@ libsynctesting_la_LIBADD = \ -ltalertesting \ $(XLIB) +AM_TESTS_ENVIRONMENT=export SYNC_PREFIX=$${SYNC_PREFIX:-@libdir@};export PATH=$${SYNC_PREFIX:-@prefix@}/bin:$$PATH; + check_PROGRAMS = \ test_sync_api @@ -40,8 +39,7 @@ TESTS = \ EXTRA_DIST = \ test_sync_api.conf \ - test_sync_api_home/.config/taler/exchange/account-2.json \ - test_sync_api_home/.local/share/taler/exchange/offline-keys/master.priv + test_sync_api_home/.local/share/taler/exchange-offline/master.priv test_sync_api_SOURCES = \ test_sync_api.c diff --git a/src/testing/test_sync_api.c 
b/src/testing/test_sync_api.c index a05b39a..f4e7d51 100644 --- a/src/testing/test_sync_api.c +++ b/src/testing/test_sync_api.c @@ -1,6 +1,6 @@ /* This file is part of TALER - Copyright (C) 2014-2020 Taler Systems SA + Copyright (C) 2014-2023 Taler Systems SA TALER is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as @@ -80,32 +80,17 @@ static char *merchant_payto; /** * Configuration of the bank. */ -static struct TALER_TESTING_BankConfiguration bc; - -/** - * Configuration of the exchange. - */ -static struct TALER_TESTING_ExchangeConfiguration ec; +static struct TALER_TESTING_Credentials cred; /** * Merchant base URL. */ -static char *merchant_url; +static const char *merchant_url = "http://localhost:8080/"; /** * Sync base URL. */ -static char *sync_url; - -/** - * Merchant process. - */ -static struct GNUNET_OS_Process *merchantd; - -/** - * Sync-httpd process. - */ -static struct GNUNET_OS_Process *syncd; +static const char *sync_url = "http://localhost:8084/"; /** @@ -136,7 +121,7 @@ cmd_transfer_to_exchange (const char *label, { return TALER_TESTING_cmd_admin_add_incoming (label, amount, - &bc.exchange_auth, + &cred.ba, payer_payto); } @@ -152,28 +137,26 @@ run (void *cls, struct TALER_TESTING_Interpreter *is) { struct TALER_TESTING_Command commands[] = { - /* general setup */ - TALER_TESTING_cmd_auditor_add ("add-auditor-OK", - MHD_HTTP_NO_CONTENT, - false), - TALER_TESTING_cmd_wire_add ("add-wire-account", - "payto://x-taler-bank/localhost/2", - MHD_HTTP_NO_CONTENT, - false), - TALER_TESTING_cmd_exec_offline_sign_keys ("offline-sign-future-keys", - CONFIG_FILE), - TALER_TESTING_cmd_exec_offline_sign_fees ("offline-sign-fees", - CONFIG_FILE, - "EUR:0.01", - "EUR:0.01"), - TALER_TESTING_cmd_check_keys_pull_all_keys ("refetch /keys", - 1), + TALER_TESTING_cmd_system_start ("start-taler", + CONFIG_FILE, + "-efms", + "-u", "exchange-account-exchange", + NULL), + TALER_TESTING_cmd_get_exchange 
("get-exchange", + cred.cfg, + NULL, + true, + true), TALER_TESTING_cmd_merchant_post_instances ("instance-create-default", merchant_url, "default", - merchant_payto, - "EUR", MHD_HTTP_NO_CONTENT), + TALER_TESTING_cmd_merchant_post_account ( + "instance-create-default-account", + merchant_url, + merchant_payto, + NULL, NULL, + MHD_HTTP_OK), /** * Move money to the exchange's bank account. @@ -188,10 +171,12 @@ run (void *cls, TALER_TESTING_cmd_withdraw_amount ("withdraw-coin-1", "create-reserve-1", "EUR:5", + 0, MHD_HTTP_OK), TALER_TESTING_cmd_withdraw_amount ("withdraw-coin-2", "create-reserve-1", "EUR:5", + 0, MHD_HTTP_OK), /* Failed download: no backup exists */ SYNC_TESTING_cmd_backup_nx ("backup-download-nx", @@ -270,9 +255,8 @@ run (void *cls, TALER_TESTING_cmd_end () }; - TALER_TESTING_run_with_fakebank (is, - commands, - bc.exchange_auth.wire_gateway_url); + TALER_TESTING_run (is, + commands); } @@ -280,76 +264,23 @@ int main (int argc, char *const *argv) { - unsigned int ret; - /* These environment variables get in the way... 
*/ - unsetenv ("XDG_DATA_HOME"); - unsetenv ("XDG_CONFIG_HOME"); - - GNUNET_log_setup ("test-sync-api", - "DEBUG", - NULL); - if (GNUNET_OK != - TALER_TESTING_prepare_fakebank (CONFIG_FILE, - "exchange-account-exchange", - &bc)) - return 77; - payer_payto = ("payto://x-taler-bank/localhost/" USER_ACCOUNT_NAME); - exchange_payto = ("payto://x-taler-bank/localhost/" EXCHANGE_ACCOUNT_NAME); - merchant_payto = ("payto://x-taler-bank/localhost/" MERCHANT_ACCOUNT_NAME); - if (NULL == - (merchant_url = TALER_TESTING_prepare_merchant (CONFIG_FILE))) - return 77; - TALER_TESTING_cleanup_files (CONFIG_FILE); - - if (NULL == - (sync_url = SYNC_TESTING_prepare_sync (CONFIG_FILE))) - return 77; - - TALER_TESTING_cleanup_files (CONFIG_FILE); - - switch (TALER_TESTING_prepare_exchange (CONFIG_FILE, - GNUNET_YES, - &ec)) - { - case GNUNET_SYSERR: - GNUNET_break (0); - return 1; - case GNUNET_NO: - return 77; - - case GNUNET_OK: - - if (NULL == (merchantd = - TALER_TESTING_run_merchant (CONFIG_FILE, - merchant_url))) - return 1; - - if (NULL == (syncd = - SYNC_TESTING_run_sync (CONFIG_FILE, - sync_url))) - return 1; - - ret = TALER_TESTING_setup_with_exchange (&run, - NULL, - CONFIG_FILE); - - GNUNET_OS_process_kill (merchantd, SIGTERM); - GNUNET_OS_process_kill (syncd, SIGTERM); - GNUNET_OS_process_wait (merchantd); - GNUNET_OS_process_wait (syncd); - GNUNET_OS_process_destroy (merchantd); - GNUNET_OS_process_destroy (syncd); - GNUNET_free (merchant_url); - GNUNET_free (sync_url); - - if (GNUNET_OK != ret) - return 1; - break; - default: - GNUNET_break (0); - return 1; - } - return 0; + (void) argc; + payer_payto = + "payto://x-taler-bank/localhost/" USER_ACCOUNT_NAME "?receiver-name=user"; + exchange_payto = + "payto://x-taler-bank/localhost/" EXCHANGE_ACCOUNT_NAME + "?receiver-name=exchange"; + merchant_payto = + "payto://x-taler-bank/localhost/" MERCHANT_ACCOUNT_NAME + "?receiver-name=merchant"; + return TALER_TESTING_main (argv, + "DEBUG", + CONFIG_FILE, + 
"exchange-account-exchange", + TALER_TESTING_BS_FAKEBANK, + &cred, + &run, + NULL); } diff --git a/src/testing/test_sync_api.conf b/src/testing/test_sync_api.conf index 54d2af7..3a0d655 100644 --- a/src/testing/test_sync_api.conf +++ b/src/testing/test_sync_api.conf @@ -1,84 +1,54 @@ # This file is in the public domain. # [PATHS] -# Persistent data storage for the testcase TALER_TEST_HOME = test_sync_api_home/ TALER_RUNTIME_DIR = ${TMPDIR:-${TMP:-/tmp}}/${USER:-}/taler-system-runtime/ - -# Persistent data storage TALER_DATA_HOME = $TALER_HOME/.local/share/taler/ - -# Configuration files TALER_CONFIG_HOME = $TALER_HOME/.config/taler/ - -# Cached data, no big deal if lost TALER_CACHE_HOME = $TALER_HOME/.cache/taler/ [taler] -# What currency do we use? CURRENCY = EUR CURRENCY_ROUND_UNIT = EUR:0.01 [taler-helper-crypto-rsa] -# Reduce from 1 year to speed up test LOOKAHEAD_SIGN = 24 days [taler-helper-crypto-eddsa] -# Reduce from 1 year to speed up test LOOKAHEAD_SIGN = 24 days -# Reduce from 12 weeks to ensure we have multiple DURATION = 14 days [bank] HTTP_PORT = 8082 -# Sync config +[libeufin-bank] +CURRENCY = EUR +PORT = 8082 + +[libeufin-bankdb-postgres] +CONFIG = postgres:///synccheck +SQL_DIR = $DATADIR/sql/ [sync] -# Which port do we run the *sync* backend on? (HTTP server) PORT = 8084 - -# Where does our payment backend run? Must match PORT under [merchant] -PAYMENT_BACKEND_URL = http://localhost:8080/ - -# Annual fee we charge. +PAYMENT_BACKEND_URL = "http://localhost:8080/" ANNUAL_FEE = EUR:4.99 - -# Upload limit UPLOAD_LIMIT_MB = 1 +SERVE = tcp [syncdb-postgres] CONFIG = postgres:///synccheck - -# Where are the SQL files to setup our tables? -# Important: this MUST end with a "/"! SQL_DIR = $DATADIR/sql/ -########################################## -# Configuration for the merchant backend # -########################################## - [merchant] -# Which port do we run the backend on? 
(HTTP server) PORT = 8080 - -# How quickly do we want the exchange to send us our money? -# Used only if the frontend does not specify a value. WIRE_TRANSFER_DELAY = 0 s - -# Which plugin (backend) do we use for the DB. DB = postgres - -# Default choice for maximum wire fee. DEFAULT_MAX_WIRE_FEE = EUR:0.10 - -# Default choice for maximum deposit fee. DEFAULT_MAX_DEPOSIT_FEE = EUR:0.10 - -# This specifies which database the postgres backend uses. [merchantdb-postgres] -CONFIG = postgres:///talercheck +CONFIG = postgres:///synccheck # Different instances operated by this merchant: [instance-default] @@ -95,59 +65,29 @@ CURRENCY = EUR [auditor] BASE_URL = http://the.auditor/ -# Auditors must be in sections "auditor-", the rest of the section -# name could be anything. -[auditor-ezb] -# Informal name of the auditor. Just for the user. -NAME = European Central Bank - -# URL of the auditor (especially for in the future, when the -# auditor offers an automated issue reporting system). -# Not really used today. -URL = http://taler.ezb.eu/ - -# This is the important bit: the signing key of the auditor. -PUBLIC_KEY = 9QXF7XY7E9VPV47B5Z806NDFSX2VJ79SVHHD29QEQ3BG31ANHZ60 - -# Which currency is this auditor trusted for? -CURRENCY = EUR - - -################################################### -# Configuration for the exchange for the testcase # -################################################### - [exchange] -# How to access our database +AML_THRESHOLD = EUR:1000000 DB = postgres - -# HTTP port the exchange listens to PORT = 8081 - -# How long are signing keys valid? SIGNKEY_LEGAL_DURATION = 2 years - -# Our public key MASTER_PUBLIC_KEY = EAQTXN8S6QP081WRWG03N8SQ34PX492ATXSSWASJGRXCBP0QM7HG - -# Base URL of the exchange. 
BASE_URL = "http://localhost:8081/" +STEFAN_ABS = "EUR:5" [exchangedb-postgres] -CONFIG = "postgres:///talercheck" - -[auditordb-postgres] -CONFIG = postgres:///talercheck +CONFIG = "postgres:///synccheck" -# Account of the EXCHANGE [exchange-account-exchange] -# What is the exchange's bank account (with the "Taler Bank" demo system)? -PAYTO_URI = "payto://x-taler-bank/localhost:8082/2" +PAYTO_URI = "payto://x-taler-bank/localhost:8082/2?receiver-name=exchange" ENABLE_DEBIT = YES ENABLE_CREDIT = YES [exchange-accountcredentials-exchange] -WIRE_GATEWAY_URL = "http://localhost:8082/2/" +WIRE_GATEWAY_URL = "http://localhost:8082/accounts/2/taler-wire-gateway/" +WIRE_GATEWAY_AUTH_METHOD = NONE + +[admin-accountcredentials-exchange] +WIRE_GATEWAY_URL = "http://localhost:8082/accounts/2/taler-wire-gateway/" WIRE_GATEWAY_AUTH_METHOD = NONE [coin_eur_ct_1] @@ -160,6 +100,7 @@ fee_deposit = EUR:0.00 fee_refresh = EUR:0.01 fee_refund = EUR:0.01 rsa_keysize = 1024 +cipher = RSA [coin_eur_ct_10] value = EUR:0.10 @@ -171,6 +112,7 @@ fee_deposit = EUR:0.01 fee_refresh = EUR:0.03 fee_refund = EUR:0.01 rsa_keysize = 1024 +cipher = RSA [coin_eur_1] value = EUR:1 @@ -182,6 +124,7 @@ fee_deposit = EUR:0.01 fee_refresh = EUR:0.03 fee_refund = EUR:0.01 rsa_keysize = 1024 +cipher = RSA [coin_eur_5] value = EUR:5 @@ -193,3 +136,4 @@ fee_deposit = EUR:0.01 fee_refresh = EUR:0.03 fee_refund = EUR:0.01 rsa_keysize = 1024 +cipher = RSA diff --git a/src/testing/test_sync_api_home/.config/taler/exchange/account-2.json b/src/testing/test_sync_api_home/.config/taler/exchange/account-2.json deleted file mode 100644 index 567fc91..0000000 --- a/src/testing/test_sync_api_home/.config/taler/exchange/account-2.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "payto_uri": "payto://x-taler-bank/localhost:8082/2", - "master_sig": "AM32QB4RYMWK548PE63PJXJMWSA001TFFWTZZPSSD8HQ8JE4D5V5X8WTSYSX59ANF4YRTRMF5Q4Q12CE2KTA8KQ03CM11YDTK75SJ20" -}
\ No newline at end of file diff --git a/src/testing/test_sync_api_home/.local/share/taler/exchange/offline-keys/master.priv b/src/testing/test_sync_api_home/.local/share/taler/exchange/offline-keys/master.priv deleted file mode 100644 index c20942d..0000000 --- a/src/testing/test_sync_api_home/.local/share/taler/exchange/offline-keys/master.priv +++ /dev/null @@ -1 +0,0 @@ -k;d_U}A.w"!Gv_m"_
\ No newline at end of file diff --git a/src/testing/testing_api_cmd_backup_download.c b/src/testing/testing_api_cmd_backup_download.c index e6bfad4..15ae376 100644 --- a/src/testing/testing_api_cmd_backup_download.c +++ b/src/testing/testing_api_cmd_backup_download.c @@ -17,7 +17,7 @@ <http://www.gnu.org/licenses/> */ /** - * @file lib/testing_api_cmd_backup_download.c + * @file testing/testing_api_cmd_backup_download.c * @brief command to download data to the sync backend service. * @author Christian Grothoff */ @@ -80,40 +80,34 @@ struct BackupDownloadState * Function called with the results of a #SYNC_download(). * * @param cls closure - * @param http_status HTTP status of the request - * @param ud details about the download operation + * @param dd details about the download operation */ static void backup_download_cb (void *cls, - unsigned int http_status, const struct SYNC_DownloadDetails *dd) { struct BackupDownloadState *bds = cls; bds->download = NULL; - if (http_status != bds->http_status) + if (dd->http_status != bds->http_status) { - GNUNET_log (GNUNET_ERROR_TYPE_ERROR, - "Unexpected response code %u to command %s in %s:%u\n", - http_status, - bds->is->commands[bds->is->ip].label, - __FILE__, - __LINE__); - TALER_TESTING_interpreter_fail (bds->is); + TALER_TESTING_unexpected_status (bds->is, + dd->http_status, + bds->http_status); return; } if (NULL != bds->upload_reference) { - if ( (MHD_HTTP_OK == http_status) && - (0 != GNUNET_memcmp (&dd->curr_backup_hash, + if ( (MHD_HTTP_OK == dd->http_status) && + (0 != GNUNET_memcmp (&dd->details.ok.curr_backup_hash, bds->upload_hash)) ) { GNUNET_break (0); TALER_TESTING_interpreter_fail (bds->is); return; } - if ( (MHD_HTTP_OK == http_status) && - (0 != GNUNET_memcmp (&dd->prev_backup_hash, + if ( (MHD_HTTP_OK == dd->http_status) && + (0 != GNUNET_memcmp (&dd->details.ok.prev_backup_hash, bds->prev_upload_hash)) ) { GNUNET_break (0); @@ -155,27 +149,24 @@ backup_download_run (void *cls, return; } if (GNUNET_OK 
!= - SYNC_TESTING_get_trait_hash (upload_cmd, - SYNC_TESTING_TRAIT_HASH_CURRENT, - &bds->upload_hash)) + TALER_TESTING_get_trait_curr_hash (upload_cmd, + &bds->upload_hash)) { GNUNET_break (0); TALER_TESTING_interpreter_fail (bds->is); return; } if (GNUNET_OK != - SYNC_TESTING_get_trait_hash (upload_cmd, - SYNC_TESTING_TRAIT_HASH_PREVIOUS, - &bds->prev_upload_hash)) + TALER_TESTING_get_trait_prev_hash (upload_cmd, + &bds->prev_upload_hash)) { GNUNET_break (0); TALER_TESTING_interpreter_fail (bds->is); return; } if (GNUNET_OK != - SYNC_TESTING_get_trait_account_pub (upload_cmd, - 0, - &sync_pub)) + TALER_TESTING_get_trait_account_pub (upload_cmd, + &sync_pub)) { GNUNET_break (0); TALER_TESTING_interpreter_fail (bds->is); @@ -183,7 +174,7 @@ backup_download_run (void *cls, } bds->sync_pub = *sync_pub; } - bds->download = SYNC_download (is->ctx, + bds->download = SYNC_download (TALER_TESTING_interpreter_get_context (is), bds->sync_url, &bds->sync_pub, &backup_download_cb, diff --git a/src/testing/testing_api_cmd_backup_upload.c b/src/testing/testing_api_cmd_backup_upload.c index 1491db0..3de4db8 100644 --- a/src/testing/testing_api_cmd_backup_upload.c +++ b/src/testing/testing_api_cmd_backup_upload.c @@ -1,6 +1,6 @@ /* This file is part of SYNC - Copyright (C) 2014-2019 Taler Systems SA + Copyright (C) 2014-2023 Taler Systems SA SYNC is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as @@ -17,7 +17,7 @@ <http://www.gnu.org/licenses/> */ /** - * @file lib/testing_api_cmd_backup_upload.c + * @file testing/testing_api_cmd_backup_upload.c * @brief command to upload data to the sync backend service. * @author Christian Grothoff */ @@ -82,7 +82,7 @@ struct BackupUploadState /** * Payment order ID we got back, if any. Otherwise NULL. */ - char *payment_order_id; + const char *payment_order_id; /** * Claim token we got back, if any. Otherwise all zeros. 
@@ -126,100 +126,90 @@ struct BackupUploadState * Function called with the results of a #SYNC_upload(). * * @param cls closure - * @param ec Taler error code - * @param http_status HTTP status of the request * @param ud details about the upload operation */ static void backup_upload_cb (void *cls, - enum TALER_ErrorCode ec, - unsigned int http_status, const struct SYNC_UploadDetails *ud) { struct BackupUploadState *bus = cls; bus->uo = NULL; - if (http_status != bus->http_status) + if (ud->http_status != bus->http_status) { - GNUNET_log (GNUNET_ERROR_TYPE_ERROR, - "Unexpected response code %u to command %s in %s:%u\n", - http_status, - bus->is->commands[bus->is->ip].label, - __FILE__, - __LINE__); - TALER_TESTING_interpreter_fail (bus->is); + TALER_TESTING_unexpected_status (bus->is, + ud->http_status, + bus->http_status); return; } - if (NULL != ud) + switch (ud->us) { - switch (ud->us) + case SYNC_US_SUCCESS: + if (0 != GNUNET_memcmp ( + &bus->curr_hash, + ud->details.success.curr_backup_hash)) + { + GNUNET_break (0); + TALER_TESTING_interpreter_fail (bus->is); + return; + } + break; + case SYNC_US_PAYMENT_REQUIRED: { - case SYNC_US_SUCCESS: - if (0 != GNUNET_memcmp (&bus->curr_hash, - ud->details.curr_backup_hash)) + struct TALER_MERCHANT_PayUriData pd; + + if (GNUNET_OK != + TALER_MERCHANT_parse_pay_uri ( + ud->details.payment_required.payment_request, + &pd)) { GNUNET_break (0); TALER_TESTING_interpreter_fail (bus->is); return; } - break; - case SYNC_US_PAYMENT_REQUIRED: - { - struct TALER_MERCHANT_PayUriData pd; - - if (GNUNET_OK != - TALER_MERCHANT_parse_pay_uri (ud->details.payment_request, - &pd)) - { - GNUNET_break (0); - TALER_TESTING_interpreter_fail (bus->is); - return; - } - bus->payment_order_id = GNUNET_strdup (pd.order_id); - if (NULL != pd.claim_token) - bus->token = *pd.claim_token; - TALER_MERCHANT_parse_pay_uri_free (&pd); - GNUNET_log (GNUNET_ERROR_TYPE_INFO, - "Order ID from Sync service is `%s'\n", - bus->payment_order_id); - memset 
(&bus->curr_hash, - 0, - sizeof (struct GNUNET_HashCode)); - } - break; - case SYNC_US_CONFLICTING_BACKUP: + bus->payment_order_id = GNUNET_strdup (pd.order_id); + if (NULL != pd.claim_token) + bus->token = *pd.claim_token; + TALER_MERCHANT_parse_pay_uri_free (&pd); + GNUNET_log (GNUNET_ERROR_TYPE_INFO, + "Order ID from Sync service is `%s'\n", + bus->payment_order_id); + memset (&bus->curr_hash, + 0, + sizeof (struct GNUNET_HashCode)); + } + break; + case SYNC_US_CONFLICTING_BACKUP: + { + const struct TALER_TESTING_Command *ref; + const struct GNUNET_HashCode *h; + + ref = TALER_TESTING_interpreter_lookup_command + (bus->is, + bus->last_upload); + GNUNET_assert (NULL != ref); + GNUNET_assert (GNUNET_OK == + TALER_TESTING_get_trait_curr_hash (ref, + &h)); + if (0 != GNUNET_memcmp (h, + &ud->details.recovered_backup. + existing_backup_hash)) { - const struct TALER_TESTING_Command *ref; - const struct GNUNET_HashCode *h; - - ref = TALER_TESTING_interpreter_lookup_command - (bus->is, - bus->last_upload); - GNUNET_assert (NULL != ref); - GNUNET_assert (GNUNET_OK == - SYNC_TESTING_get_trait_hash (ref, - SYNC_TESTING_TRAIT_HASH_CURRENT, - &h)); - if (0 != GNUNET_memcmp (h, - &ud->details.recovered_backup. 
- existing_backup_hash)) - { - GNUNET_break (0); - TALER_TESTING_interpreter_fail (bus->is); - return; - } + GNUNET_break (0); + TALER_TESTING_interpreter_fail (bus->is); + return; } - case SYNC_US_HTTP_ERROR: - break; - case SYNC_US_CLIENT_ERROR: - GNUNET_break (0); - TALER_TESTING_interpreter_fail (bus->is); - return; - case SYNC_US_SERVER_ERROR: - GNUNET_break (0); - TALER_TESTING_interpreter_fail (bus->is); - return; } + case SYNC_US_HTTP_ERROR: + break; + case SYNC_US_CLIENT_ERROR: + GNUNET_break (0); + TALER_TESTING_interpreter_fail (bus->is); + return; + case SYNC_US_SERVER_ERROR: + GNUNET_break (0); + TALER_TESTING_interpreter_fail (bus->is); + return; } TALER_TESTING_interpreter_next (bus->is); } @@ -257,9 +247,8 @@ backup_upload_run (void *cls, const struct GNUNET_HashCode *h; if (GNUNET_OK == - SYNC_TESTING_get_trait_hash (ref, - SYNC_TESTING_TRAIT_HASH_CURRENT, - &h)) + TALER_TESTING_get_trait_curr_hash (ref, + &h)) { bus->prev_hash = *h; } @@ -268,9 +257,8 @@ backup_upload_run (void *cls, const struct SYNC_AccountPrivateKeyP *priv; if (GNUNET_OK != - SYNC_TESTING_get_trait_account_priv (ref, - 0, - &priv)) + TALER_TESTING_get_trait_account_priv (ref, + &priv)) { GNUNET_break (0); TALER_TESTING_interpreter_fail (bus->is); @@ -282,9 +270,8 @@ backup_upload_run (void *cls, const struct SYNC_AccountPublicKeyP *pub; if (GNUNET_OK != - SYNC_TESTING_get_trait_account_pub (ref, - 0, - &pub)) + TALER_TESTING_get_trait_account_pub (ref, + &pub)) { GNUNET_break (0); TALER_TESTING_interpreter_fail (bus->is); @@ -298,7 +285,6 @@ backup_upload_run (void *cls, if (GNUNET_OK != TALER_TESTING_get_trait_order_id (ref, - 0, &order_id)) { GNUNET_break (0); @@ -327,7 +313,7 @@ backup_upload_run (void *cls, GNUNET_CRYPTO_hash (bus->backup, bus->backup_size, &bus->curr_hash); - bus->uo = SYNC_upload (is->ctx, + bus->uo = SYNC_upload (TALER_TESTING_interpreter_get_context (is), bus->sync_url, &bus->sync_priv, ( ( (NULL != bus->prev_upload) && @@ -375,7 +361,6 @@ 
backup_upload_cleanup (void *cls, SYNC_upload_cancel (bus->uo); bus->uo = NULL; } - GNUNET_free (bus->payment_order_id); GNUNET_free (bus); } @@ -384,12 +369,12 @@ backup_upload_cleanup (void *cls, * Offer internal data to other commands. * * @param cls closure - * @param ret[out] result (could be anything) + * @param[out] ret result (could be anything) * @param trait name of the trait * @param index index number of the object to extract. * @return #GNUNET_OK on success */ -static int +static enum GNUNET_GenericReturnValue backup_upload_traits (void *cls, const void **ret, const char *trait, @@ -397,29 +382,19 @@ backup_upload_traits (void *cls, { struct BackupUploadState *bus = cls; struct TALER_TESTING_Trait straits[] = { - SYNC_TESTING_make_trait_hash (SYNC_TESTING_TRAIT_HASH_CURRENT, - &bus->curr_hash), - SYNC_TESTING_make_trait_hash (SYNC_TESTING_TRAIT_HASH_PREVIOUS, - &bus->prev_hash), - TALER_TESTING_make_trait_claim_token (0, - &bus->token), - SYNC_TESTING_make_trait_account_pub (0, - &bus->sync_pub), - SYNC_TESTING_make_trait_account_priv (0, - &bus->sync_priv), - TALER_TESTING_make_trait_order_id (0, - bus->payment_order_id), + TALER_TESTING_make_trait_curr_hash (&bus->curr_hash), + TALER_TESTING_make_trait_prev_hash (&bus->prev_hash), + TALER_TESTING_make_trait_claim_token (&bus->token), + TALER_TESTING_make_trait_account_pub (&bus->sync_pub), + TALER_TESTING_make_trait_account_priv (&bus->sync_priv), + TALER_TESTING_make_trait_order_id (bus->payment_order_id), TALER_TESTING_trait_end () }; struct TALER_TESTING_Trait ftraits[] = { - TALER_TESTING_make_trait_claim_token (0, - &bus->token), - SYNC_TESTING_make_trait_account_pub (0, - &bus->sync_pub), - SYNC_TESTING_make_trait_account_priv (0, - &bus->sync_priv), - TALER_TESTING_make_trait_order_id (0, - bus->payment_order_id), + TALER_TESTING_make_trait_claim_token (&bus->token), + TALER_TESTING_make_trait_account_pub (&bus->sync_pub), + TALER_TESTING_make_trait_account_priv (&bus->sync_priv), + 
TALER_TESTING_make_trait_order_id (bus->payment_order_id), TALER_TESTING_trait_end () }; diff --git a/src/testing/testing_api_helpers.c b/src/testing/testing_api_helpers.c deleted file mode 100644 index a83c391..0000000 --- a/src/testing/testing_api_helpers.c +++ /dev/null @@ -1,196 +0,0 @@ -/* - This file is part of SYNC - Copyright (C) 2014-2019 Taler Systems SA - - SYNC is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as - published by the Free Software Foundation; either version 3, or - (at your option) any later version. - - SYNC is distributed in the hope that it will be useful, but - WITHOUT ANY WARRANTY; without even the implied warranty of - SYNCABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public - License along with SYNC; see the file COPYING. If not, see - <http://www.gnu.org/licenses/> -*/ -/** - * @file lib/testing_api_helpers.c - * @brief helper functions for test library. - * @author Christian Grothoff - * @author Marcello Stanisci - */ -#include "platform.h" -#include <taler/taler_testing_lib.h> -#include "sync_testing_lib.h" -#include <gnunet/gnunet_curl_lib.h> - - -/** - * Start the sync backend process. Assume the port - * is available and the database is clean. Use the "prepare - * sync" function to do such tasks. - * - * @param config_filename configuration filename. - * - * @return the process, or NULL if the process could not - * be started. 
- */ -struct GNUNET_OS_Process * -SYNC_TESTING_run_sync (const char *config_filename, - const char *sync_url) -{ - struct GNUNET_OS_Process *sync_proc; - unsigned int iter; - char *wget_cmd; - - sync_proc - = GNUNET_OS_start_process (GNUNET_OS_INHERIT_STD_ALL, - NULL, NULL, NULL, - "sync-httpd", - "sync-httpd", - "--log=INFO", - "-c", config_filename, - NULL); - if (NULL == sync_proc) - { - GNUNET_break (0); - return NULL; - } - GNUNET_asprintf (&wget_cmd, - "wget -q -t 1 -T 1" - " %s" - " -o /dev/null -O /dev/null", - sync_url); - - /* give child time to start and bind against the socket */ - fprintf (stderr, - "Waiting for `sync-httpd' to be ready\n"); - iter = 0; - do - { - if (10 == iter) - { - fprintf (stderr, - "Failed to launch" - " `sync-httpd' (or `wget')\n"); - GNUNET_OS_process_kill (sync_proc, - SIGTERM); - GNUNET_OS_process_wait (sync_proc); - GNUNET_OS_process_destroy (sync_proc); - GNUNET_break (0); - return NULL; - } - fprintf (stderr, ".\n"); - sleep (1); - iter++; - } - while (0 != system (wget_cmd)); - GNUNET_free (wget_cmd); - fprintf (stderr, "\n"); - return sync_proc; -} - - -/** - * Prepare the sync execution. Create tables and check if - * the port is available. - * - * @param config_filename configuration filename. - * @return the base url, or NULL upon errors. Must be freed - * by the caller. 
- */ -char * -SYNC_TESTING_prepare_sync (const char *config_filename) -{ - struct GNUNET_CONFIGURATION_Handle *cfg; - unsigned long long port; - struct GNUNET_OS_Process *dbinit_proc; - enum GNUNET_OS_ProcessStatusType type; - unsigned long code; - char *base_url; - - cfg = GNUNET_CONFIGURATION_create (); - if (GNUNET_OK != - GNUNET_CONFIGURATION_load (cfg, - config_filename)) - { - GNUNET_break (0); - return NULL; - } - if (GNUNET_OK != - GNUNET_CONFIGURATION_get_value_number (cfg, - "sync", - "PORT", - &port)) - { - GNUNET_log_config_missing (GNUNET_ERROR_TYPE_ERROR, - "sync", - "PORT"); - GNUNET_CONFIGURATION_destroy (cfg); - GNUNET_break (0); - return NULL; - } - GNUNET_CONFIGURATION_destroy (cfg); - if (GNUNET_OK != - GNUNET_NETWORK_test_port_free (IPPROTO_TCP, - (uint16_t) port)) - { - fprintf (stderr, - "Required port %llu not available, skipping.\n", - port); - GNUNET_break (0); - return NULL; - } - - /* DB preparation */ - if (NULL == (dbinit_proc = GNUNET_OS_start_process ( - GNUNET_OS_INHERIT_STD_ALL, - NULL, NULL, NULL, - "sync-dbinit", - "sync-dbinit", - "-c", config_filename, - "-r", - NULL))) - { - GNUNET_log (GNUNET_ERROR_TYPE_ERROR, - "Failed to run sync-dbinit." 
- " Check your PATH.\n"); - GNUNET_break (0); - return NULL; - } - - if (GNUNET_SYSERR == - GNUNET_OS_process_wait_status (dbinit_proc, - &type, - &code)) - { - GNUNET_OS_process_destroy (dbinit_proc); - GNUNET_break (0); - return NULL; - } - if ( (type == GNUNET_OS_PROCESS_EXITED) && - (0 != code) ) - { - fprintf (stderr, - "Failed to setup database\n"); - GNUNET_break (0); - return NULL; - } - if ( (type != GNUNET_OS_PROCESS_EXITED) || - (0 != code) ) - { - fprintf (stderr, - "Unexpected error running" - " `sync-dbinit'!\n"); - GNUNET_break (0); - return NULL; - } - GNUNET_OS_process_destroy (dbinit_proc); - GNUNET_asprintf (&base_url, - "http://localhost:%llu/", - port); - return base_url; -} diff --git a/src/testing/testing_api_trait_account_priv.c b/src/testing/testing_api_trait_account_priv.c deleted file mode 100644 index b8bfb5b..0000000 --- a/src/testing/testing_api_trait_account_priv.c +++ /dev/null @@ -1,73 +0,0 @@ -/* - This file is part of TALER - Copyright (C) 2019 Taler Systems SA - - TALER is free software; you can redistribute it and/or modify it - under the terms of the GNU General Public License as published - by the Free Software Foundation; either version 3, or (at your - option) any later version. - - TALER is distributed in the hope that it will be useful, but - WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - General Privlic License for more details. - - You should have received a copy of the GNU General Privlic - License along with TALER; see the file COPYING. If not, see - <http://www.gnu.org/licenses/> -*/ -/** - * @file lib/testing_api_trait_account_priv.c - * @brief traits to offer a account_priv - * @author Christian Grothoff - */ -#include "platform.h" -#include "sync_service.h" -#include "sync_testing_lib.h" - -#define SYNC_TESTING_TRAIT_ACCOUNT_PRIV "sync-account_priv" - - -/** - * Obtain an account private key from @a cmd. 
- * - * @param cmd command to extract the private key from. - * @param index the private key's index number. - * @param n[out] set to the private key coming from @a cmd. - * @return #GNUNET_OK on success. - */ -int -SYNC_TESTING_get_trait_account_priv - (const struct TALER_TESTING_Command *cmd, - unsigned int index, - const struct SYNC_AccountPrivateKeyP **priv) -{ - return cmd->traits (cmd->cls, - (const void **) priv, - SYNC_TESTING_TRAIT_ACCOUNT_PRIV, - index); -} - - -/** - * Offer an account private key. - * - * @param index usually zero - * @param priv the account_priv to offer. - * @return #GNUNET_OK on success. - */ -struct TALER_TESTING_Trait -SYNC_TESTING_make_trait_account_priv - (unsigned int index, - const struct SYNC_AccountPrivateKeyP *priv) -{ - struct TALER_TESTING_Trait ret = { - .index = index, - .trait_name = SYNC_TESTING_TRAIT_ACCOUNT_PRIV, - .ptr = (const void *) priv - }; - return ret; -} - - -/* end of testing_api_trait_account_priv.c */ diff --git a/src/testing/testing_api_trait_account_pub.c b/src/testing/testing_api_trait_account_pub.c deleted file mode 100644 index 9fecb18..0000000 --- a/src/testing/testing_api_trait_account_pub.c +++ /dev/null @@ -1,73 +0,0 @@ -/* - This file is part of TALER - Copyright (C) 2019 Taler Systems SA - - TALER is free software; you can redistribute it and/or modify it - under the terms of the GNU General Public License as published - by the Free Software Foundation; either version 3, or (at your - option) any later version. - - TALER is distributed in the hope that it will be useful, but - WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - General Public License for more details. - - You should have received a copy of the GNU General Public - License along with TALER; see the file COPYING. 
If not, see - <http://www.gnu.org/licenses/> -*/ -/** - * @file lib/testing_api_trait_account_pub.c - * @brief traits to offer a account_pub - * @author Christian Grothoff - */ -#include "platform.h" -#include "sync_service.h" -#include "sync_testing_lib.h" - -#define SYNC_TESTING_TRAIT_ACCOUNT_PUB "sync-account_pub" - - -/** - * Obtain an account public key from @a cmd. - * - * @param cmd command to extract the public key from. - * @param index the public key's index number. - * @param n[out] set to the public key coming from @a cmd. - * @return #GNUNET_OK on success. - */ -int -SYNC_TESTING_get_trait_account_pub - (const struct TALER_TESTING_Command *cmd, - unsigned int index, - const struct SYNC_AccountPublicKeyP **pub) -{ - return cmd->traits (cmd->cls, - (const void **) pub, - SYNC_TESTING_TRAIT_ACCOUNT_PUB, - index); -} - - -/** - * Offer an account public key. - * - * @param index usually zero - * @param h the account_pub to offer. - * @return #GNUNET_OK on success. - */ -struct TALER_TESTING_Trait -SYNC_TESTING_make_trait_account_pub - (unsigned int index, - const struct SYNC_AccountPublicKeyP *h) -{ - struct TALER_TESTING_Trait ret = { - .index = index, - .trait_name = SYNC_TESTING_TRAIT_ACCOUNT_PUB, - .ptr = (const void *) h - }; - return ret; -} - - -/* end of testing_api_trait_account_pub.c */ diff --git a/src/testing/testing_api_trait_hash.c b/src/testing/testing_api_trait_hash.c deleted file mode 100644 index 10b10d8..0000000 --- a/src/testing/testing_api_trait_hash.c +++ /dev/null @@ -1,73 +0,0 @@ -/* - This file is part of TALER - Copyright (C) 2019 Taler Systems SA - - TALER is free software; you can redistribute it and/or modify it - under the terms of the GNU General Public License as published - by the Free Software Foundation; either version 3, or (at your - option) any later version. 
- - TALER is distributed in the hope that it will be useful, but - WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - General Public License for more details. - - You should have received a copy of the GNU General Public - License along with TALER; see the file COPYING. If not, see - <http://www.gnu.org/licenses/> -*/ -/** - * @file lib/testing_api_trait_hash.c - * @brief traits to offer a hash - * @author Christian Grothoff - */ -#include "platform.h" -#include "sync_service.h" -#include "sync_testing_lib.h" - -#define SYNC_TESTING_TRAIT_HASH "sync-hash" - - -/** - * Obtain a hash from @a cmd. - * - * @param cmd command to extract the number from. - * @param index the number's index number. - * @param n[out] set to the number coming from @a cmd. - * @return #GNUNET_OK on success. - */ -int -SYNC_TESTING_get_trait_hash - (const struct TALER_TESTING_Command *cmd, - unsigned int index, - const struct GNUNET_HashCode **h) -{ - return cmd->traits (cmd->cls, - (const void **) h, - SYNC_TESTING_TRAIT_HASH, - index); -} - - -/** - * Offer a hash. - * - * @param index the number's index number. - * @param h the hash to offer. - * @return #GNUNET_OK on success. 
- */ -struct TALER_TESTING_Trait -SYNC_TESTING_make_trait_hash - (unsigned int index, - const struct GNUNET_HashCode *h) -{ - struct TALER_TESTING_Trait ret = { - .index = index, - .trait_name = SYNC_TESTING_TRAIT_HASH, - .ptr = (const void *) h - }; - return ret; -} - - -/* end of testing_api_trait_hash.c */ diff --git a/src/testing/testing_api_traits.c b/src/testing/testing_api_traits.c new file mode 100644 index 0000000..808cfd9 --- /dev/null +++ b/src/testing/testing_api_traits.c @@ -0,0 +1,31 @@ +/* + This file is part of TALER + Copyright (C) 2024 Taler Systems SA + + TALER is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as + published by the Free Software Foundation; either version 3, or + (at your option) any later version. + + TALER is distributed in the hope that it will be useful, but + WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public + License along with TALER; see the file COPYING. 
If not, see + <http://www.gnu.org/licenses/> +*/ +/** + * @file testing/testing_api_traits.c + * @brief trait implementation + * @author Christian Grothoff + */ +#include "platform.h" +#include "sync_testing_lib.h" + + +SYNC_TESTING_SIMPLE_TRAITS (TALER_TESTING_MAKE_IMPL_SIMPLE_TRAIT) + + +/* end of testing_api_traits.c */ diff --git a/src/util/Makefile.am b/src/util/Makefile.am index 6bc2447..4dfb823 100644 --- a/src/util/Makefile.am +++ b/src/util/Makefile.am @@ -9,11 +9,16 @@ endif pkgcfgdir = $(prefix)/share/sync/config.d/ +pkgcfg_DATA = \ + paths.conf + + bin_SCRIPTS = \ sync-config EXTRA_DIST = \ $(bin_SCRIPTS) \ + $(pkgcfg_DATA) \ sync-config.in edit_script = $(SED) -e 's,%libdir%,$(libdir),'g $(NULL) diff --git a/src/util/paths.conf b/src/util/paths.conf new file mode 100644 index 0000000..7fc5394 --- /dev/null +++ b/src/util/paths.conf @@ -0,0 +1,34 @@ +[PATHS] +# The PATHS section is special, as filenames including $-expression are +# expanded using the values from PATHS or the system environment (PATHS +# is checked first). libgnunetutil supports expanding $-expressions using +# defaults with the syntax "${VAR:-default}". Here, "default" can again +# be a $-expression. +# +# We usually want $HOME for $SYNC_HOME +# +SYNC_HOME = ${HOME:-${USERPROFILE}} + +# see XDG Base Directory Specification at +# http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html +# for how these should be used. + +# Persistent data storage +SYNC_DATA_HOME = ${XDG_DATA_HOME:-$SYNC_HOME/.local/share}/sync/ + +# Configuration files +SYNC_CONFIG_HOME = ${XDG_CONFIG_HOME:-$SYNC_HOME/.config}/sync/ + +# Cached data, no big deal if lost +SYNC_CACHE_HOME = ${XDG_CACHE_HOME:-$SYNC_HOME/.cache}/sync/ + +# Runtime data (i.e UNIX domain sockets, locks, always lost on system boot) +SYNC_RUNTIME_DIR = ${TMPDIR:-${TMP:-/tmp}}/sync-runtime/ + +# Directory to use for temporary files. 
+SYNC_TMP = ${TMPDIR:-${TMP:-/tmp}}/sync/ + +# DEFAULTCONFIG = /etc/sync.conf +# If 'DEFAULTCONFIG' is not defined, the current +# configuration file is assumed to be the default, +# which is what we want by default... diff --git a/src/util/sync-config.c b/src/util/sync-config.c index 0e432f8..fd462f2 100644 --- a/src/util/sync-config.c +++ b/src/util/sync-config.c @@ -19,7 +19,7 @@ */ /** - * @file util/taler-config.c + * @file util/sync-config.c * @brief tool to access and manipulate Taler configuration files * @author Christian Grothoff */ diff --git a/src/util/sync-config.in b/src/util/sync-config.in index b1c4971..6a74245 100644 --- a/src/util/sync-config.in +++ b/src/util/sync-config.in @@ -7,7 +7,7 @@ if ! type gnunet-config >/dev/null; then exit 1 fi -GC=`which gnunet-config` -SO=`ls %libdir%/libsyncutil.so.* | sort -n | tail -n1` +GC=$(which gnunet-config) +SO=$(ls %libdir%/libsyncutil.so.* | sort -n | tail -n1) export LD_PRELOAD=${LD_PRELOAD:-}:${SO} exec gnunet-config "$@" |