summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--README.md2
-rwxr-xr-xaptly/aptly-cleanup.sh6
-rwxr-xr-xaptly/aptly-publish.sh18
-rwxr-xr-xaptly/entr.sh6
-rwxr-xr-xbin/taler-deployment838
-rwxr-xr-xbin/taler-deployment-arm11
-rwxr-xr-xbin/taler-deployment-auditor23
-rwxr-xr-xbin/taler-deployment-auth-token36
-rwxr-xr-xbin/taler-deployment-config-generate285
-rwxr-xr-xbin/taler-deployment-config-generate-sepa281
-rwxr-xr-xbin/taler-deployment-config-instances237
-rwxr-xr-xbin/taler-deployment-config-instances-iban163
-rwxr-xr-xbin/taler-deployment-config-tips24
-rwxr-xr-xbin/taler-deployment-dbstart24
-rwxr-xr-xbin/taler-deployment-prepare271
-rwxr-xr-xbin/taler-deployment-prepare-with-eufin421
-rwxr-xr-xbin/taler-deployment-restart19
-rwxr-xr-xbin/taler-deployment-restart-with-eufin19
-rwxr-xr-xbin/taler-deployment-start43
-rwxr-xr-xbin/taler-deployment-start-with-eufin44
-rwxr-xr-xbin/taler-deployment-stop12
-rwxr-xr-xbin/taler-log-adapter66
-rw-r--r--bin/taler_urls.py56
-rw-r--r--bootstrap-docker/README2
-rwxr-xr-xbootstrap-docker/bootstrap-docker.sh7
-rwxr-xr-xbootstrap-docker/install-plugin-buildx.sh8
-rwxr-xr-xbootstrap-docker/install-plugin-compose.sh8
-rwxr-xr-xbootstrap-docker/install-rootless-docker.sh252
-rwxr-xr-xbuildbot/bootstrap-scripts/bootstrap-codespell2
-rwxr-xr-xbuildbot/bootstrap-scripts/bootstrap-walletbuilder38
-rwxr-xr-xbuildbot/build.sh20
-rwxr-xr-xbuildbot/checks.sh69
-rwxr-xr-xbuildbot/create_instances.sh7
-rwxr-xr-xbuildbot/create_tip_reserve.sh7
-rwxr-xr-xbuildbot/doxygen.sh2
-rw-r--r--buildbot/linkchecker.Containerfile10
-rwxr-xr-xbuildbot/linkchecker.sh30
-rw-r--r--buildbot/linkcheckerrc306
-rw-r--r--buildbot/master.cfg1243
-rwxr-xr-xbuildbot/restart.sh14
-rwxr-xr-xbuildbot/run-coverage-helper.sh2
-rwxr-xr-xbuildbot/update-sources.sh2
-rwxr-xr-xbuildbot/with-postgres.sh1
-rw-r--r--codespell/dictionary.txt57
-rw-r--r--depo/.config/systemd/user/bitcoind.env2
-rw-r--r--depo/.config/systemd/user/bitcoind.service12
-rw-r--r--depo/.config/systemd/user/btc-wire.service14
l---------depo/.config/systemd/user/multi-user.target.wants/bitcoind.service1
l---------depo/.config/systemd/user/multi-user.target.wants/btc-wire.service1
l---------depo/.config/systemd/user/multi-user.target.wants/nginx.service1
l---------depo/.config/systemd/user/multi-user.target.wants/taler-local-blog.service1
l---------depo/.config/systemd/user/multi-user.target.wants/taler-local-donations.service1
l---------depo/.config/systemd/user/multi-user.target.wants/taler-local-exchange.target1
l---------depo/.config/systemd/user/multi-user.target.wants/taler-local-landing.service1
l---------depo/.config/systemd/user/multi-user.target.wants/taler-local-merchant-backend.service1
l---------depo/.config/systemd/user/multi-user.target.wants/wire-gateway.service1
-rw-r--r--depo/.config/systemd/user/nginx.service28
-rw-r--r--depo/.config/systemd/user/taler-local-blog.service11
-rw-r--r--depo/.config/systemd/user/taler-local-donations.service11
-rw-r--r--depo/.config/systemd/user/taler-local-exchange-aggregator.service11
-rw-r--r--depo/.config/systemd/user/taler-local-exchange-closer.service11
-rw-r--r--depo/.config/systemd/user/taler-local-exchange-httpd.service19
-rw-r--r--depo/.config/systemd/user/taler-local-exchange-httpd.socket15
-rw-r--r--depo/.config/systemd/user/taler-local-exchange-secmod-cs.service11
-rw-r--r--depo/.config/systemd/user/taler-local-exchange-secmod-eddsa.service11
-rw-r--r--depo/.config/systemd/user/taler-local-exchange-secmod-rsa.service11
-rw-r--r--depo/.config/systemd/user/taler-local-exchange-transfer.service11
-rw-r--r--depo/.config/systemd/user/taler-local-exchange-wirewatch.service11
-rw-r--r--depo/.config/systemd/user/taler-local-exchange.target11
-rw-r--r--depo/.config/systemd/user/taler-local-frontends.env7
-rw-r--r--depo/.config/systemd/user/taler-local-landing.service11
-rw-r--r--depo/.config/systemd/user/taler-local-merchant-backend-token.service8
-rw-r--r--depo/.config/systemd/user/taler-local-merchant-backend.service10
-rw-r--r--depo/.config/systemd/user/taler-local-port-redirect.service6
-rw-r--r--depo/.config/systemd/user/wire-gateway.service11
-rw-r--r--depo/.taler/config/nginx.conf20
-rw-r--r--depo/.taler/config/taler.conf344
-rw-r--r--depo/.taler/data/.exists0
-rw-r--r--depo/.taler/sockets/.exists0
-rw-r--r--docker/compile-and-check/README26
-rw-r--r--docker/compile-and-check/base/Dockerfile40
-rwxr-xr-xdocker/compile-and-check/base/compile_and_check.sh94
-rw-r--r--docker/compile-and-check/base/util.sh34
-rwxr-xr-xdocker/compile-and-check/build.sh8
-rw-r--r--docker/compile-and-check/config/tags.sh7
-rwxr-xr-xdocker/compile-and-check/interactive.sh8
-rwxr-xr-xdocker/compile-and-check/run.sh8
-rw-r--r--docker/docs-build/Dockerfile27
-rwxr-xr-xdocker/docs-build/build.sh5
-rwxr-xr-xdocker/docs-build/run.sh14
-rw-r--r--docker/dpkg-build/Dockerfile32
-rwxr-xr-x[-rw-r--r--]docker/dpkg-build/build-ubuntu.sh (renamed from dpkg-build/build-ubuntu.sh)1
-rw-r--r--docker/sites-build/Dockerfile48
-rwxr-xr-xdocker/sites-build/build.sh3
-rwxr-xr-xdocker/sites-build/run.sh6
-rw-r--r--envcfg.py.template16
-rw-r--r--envcfg/envcfg-demo-2019-11-02-01.py15
-rw-r--r--envcfg/envcfg-demo-2019-12-03-01.py15
-rw-r--r--envcfg/envcfg-demo-2019-12-09-01.py15
-rw-r--r--envcfg/envcfg-demo-2020-11-14.py22
-rw-r--r--envcfg/envcfg-demo-2021-08-18.py15
-rw-r--r--envcfg/envcfg.py.template17
-rw-r--r--envcfg/talerconf/euro.taler.conf333
-rw-r--r--gnunet.conf1
-rwxr-xr-xhead.taler.net/entr.sh6
-rw-r--r--head.taler.net/rsyncd.conf13
-rw-r--r--head.taler.net/rsyncd.service12
-rwxr-xr-xhead.taler.net/update-head-deployment.sh12
-rw-r--r--mypy/mypy.ini3
-rwxr-xr-xnetjail/netjail-init.sh10
-rwxr-xr-xnetjail/netjail.sh3
-rw-r--r--netzbon/.gitignore5
-rwxr-xr-xnetzbon/generate-letter.sh40
-rwxr-xr-xnetzbon/generate-qr.sh40
-rw-r--r--netzbon/qr.tex.j213
-rwxr-xr-xnetzbon/render.py49
-rwxr-xr-xnetzbon/setup-merchants.sh39
-rw-r--r--netzbon/template.tex.j279
-rw-r--r--netzbon/template_de.tex.j2103
-rw-r--r--netzbon/test.json7
-rw-r--r--nlnet/task1/Dockerfile32
-rwxr-xr-xnlnet/task1/start.sh15
-rw-r--r--nlnet/task2/Dockerfile29
-rw-r--r--nlnet/task3/Dockerfile15
-rwxr-xr-xnlnet/task3/keys.sh93
-rw-r--r--nlnet/task3/salted-incoming-payment-template.csv2
-rwxr-xr-xnlnet/task3/start.sh79
-rw-r--r--nlnet/task4/Dockerfile42
-rwxr-xr-xnlnet/task4/launch.sh7
-rwxr-xr-xnlnet/task4/start.sh27
-rw-r--r--nlnet/task5/date-range/Dockerfile15
-rw-r--r--nlnet/task5/date-range/start-libeufin.sh35
-rwxr-xr-xnlnet/task5/date-range/start.sh155
-rw-r--r--nlnet/task5/long-poll/Dockerfile14
-rwxr-xr-xnlnet/task5/long-poll/start.sh134
-rw-r--r--nlnet/task5/performance/Dockerfile70
-rwxr-xr-xnlnet/task5/performance/start.sh107
-rw-r--r--packaging/.gitignore2
-rw-r--r--packaging/debian-docker/Dockerfile56
-rw-r--r--packaging/debian-docker/README16
-rw-r--r--packaging/debian-docker/anastasis-build.sh27
-rw-r--r--packaging/debian-docker/exchange-build.sh24
-rw-r--r--packaging/debian-docker/gnunet-build.sh29
-rw-r--r--packaging/debian-docker/gnunet-gtk-build.sh27
-rw-r--r--packaging/debian-docker/libeufin-build.sh24
-rw-r--r--packaging/debian-docker/mdb-build.sh22
-rw-r--r--packaging/debian-docker/merchant-build.sh24
-rwxr-xr-xpackaging/debian-docker/run.sh25
-rw-r--r--packaging/debian-docker/sync-build.sh27
-rw-r--r--packaging/debian-docker/wallet-build.sh28
-rw-r--r--packaging/ng/.gitignore1
-rw-r--r--packaging/ng/Dockerfile.debian-bookworm37
-rw-r--r--packaging/ng/Dockerfile.ubuntu-kinetic32
-rw-r--r--packaging/ng/README.md22
-rwxr-xr-xpackaging/ng/build-debian-bookworm.sh25
-rwxr-xr-xpackaging/ng/build-ubuntu-kinetic.sh23
-rw-r--r--packaging/ng/buildscripts/anastasis-build.sh27
-rw-r--r--packaging/ng/buildscripts/build-gnunet-gtk.sh37
-rw-r--r--packaging/ng/buildscripts/build-gnunet.sh41
-rw-r--r--packaging/ng/buildscripts/exchange-build.sh24
-rw-r--r--packaging/ng/buildscripts/libeufin-build.sh23
-rw-r--r--packaging/ng/buildscripts/merchant-build.sh24
-rw-r--r--packaging/ng/buildscripts/sync-build.sh27
-rw-r--r--packaging/ng/buildscripts/wallet-build.sh31
-rw-r--r--packaging/ubuntu-docker/.gitignore1
-rw-r--r--packaging/ubuntu-docker/Dockerfile57
-rw-r--r--packaging/ubuntu-docker/README19
-rw-r--r--packaging/ubuntu-docker/anastasis-build.sh27
-rw-r--r--packaging/ubuntu-docker/exchange-build.sh24
-rw-r--r--packaging/ubuntu-docker/gnunet-build.sh29
-rw-r--r--packaging/ubuntu-docker/gnunet-gtk-build.sh27
-rw-r--r--packaging/ubuntu-docker/libeufin-build.sh24
-rw-r--r--packaging/ubuntu-docker/mdb-build.sh22
-rw-r--r--packaging/ubuntu-docker/merchant-build.sh24
-rwxr-xr-xpackaging/ubuntu-docker/run.sh21
-rw-r--r--packaging/ubuntu-docker/sync-build.sh27
-rw-r--r--packaging/ubuntu-docker/taler.list1
-rw-r--r--packaging/ubuntu-docker/wallet-build.sh28
-rw-r--r--packaging/ubuntu-mantic-docker/Dockerfile54
-rw-r--r--packaging/ubuntu-mantic-docker/README19
-rw-r--r--packaging/ubuntu-mantic-docker/anastasis-build.sh27
-rw-r--r--packaging/ubuntu-mantic-docker/exchange-build.sh25
-rw-r--r--packaging/ubuntu-mantic-docker/gnunet-build.sh29
-rw-r--r--packaging/ubuntu-mantic-docker/gnunet-gtk-build.sh27
-rw-r--r--packaging/ubuntu-mantic-docker/libeufin-build.sh22
-rw-r--r--packaging/ubuntu-mantic-docker/mdb-build.sh22
-rw-r--r--packaging/ubuntu-mantic-docker/merchant-build.sh24
-rwxr-xr-xpackaging/ubuntu-mantic-docker/run.sh21
-rw-r--r--packaging/ubuntu-mantic-docker/sync-build.sh27
-rw-r--r--packaging/ubuntu-mantic-docker/taler.list1
-rw-r--r--packaging/ubuntu-mantic-docker/wallet-build.sh28
-rw-r--r--packaging/ubuntu-numbat-docker/Dockerfile53
-rw-r--r--packaging/ubuntu-numbat-docker/README16
-rw-r--r--packaging/ubuntu-numbat-docker/anastasis-build.sh27
-rw-r--r--packaging/ubuntu-numbat-docker/exchange-build.sh25
-rw-r--r--packaging/ubuntu-numbat-docker/gnunet-build.sh29
-rw-r--r--packaging/ubuntu-numbat-docker/gnunet-gtk-build.sh27
-rw-r--r--packaging/ubuntu-numbat-docker/libeufin-build.sh22
-rw-r--r--packaging/ubuntu-numbat-docker/mdb-build.sh22
-rw-r--r--packaging/ubuntu-numbat-docker/merchant-build.sh24
-rwxr-xr-xpackaging/ubuntu-numbat-docker/run.sh21
-rw-r--r--packaging/ubuntu-numbat-docker/sync-build.sh27
-rw-r--r--packaging/ubuntu-numbat-docker/wallet-build.sh28
-rw-r--r--regional-currency/.gitignore2
-rw-r--r--regional-currency/.shellcheckrc1
-rw-r--r--regional-currency/ChangeLog7
-rw-r--r--regional-currency/README2
-rwxr-xr-xregional-currency/config.py478
-rwxr-xr-xregional-currency/config_nginx.sh40
-rwxr-xr-xregional-currency/diagnose.sh125
-rwxr-xr-xregional-currency/functions.sh36
-rwxr-xr-xregional-currency/install_packages.sh83
-rwxr-xr-xregional-currency/list-incoming.sh33
-rwxr-xr-xregional-currency/main.sh101
-rw-r--r--regional-currency/nginx-conf/backend.taler-nginx.conf19
-rw-r--r--regional-currency/nginx-conf/bank.taler-nginx.conf23
-rw-r--r--regional-currency/nginx-conf/exchange.taler-nginx.conf16
-rwxr-xr-xregional-currency/setup-exchange.sh242
-rwxr-xr-xregional-currency/setup-libeufin.sh138
-rwxr-xr-xregional-currency/setup-merchant.sh14
-rwxr-xr-xregional-currency/upgrade.sh30
-rw-r--r--regional-currency/vagrant/.gitignore1
-rw-r--r--regional-currency/vagrant/README2
-rw-r--r--regional-currency/vagrant/Vagrantfile77
-rwxr-xr-xregional-currency/withdraw.sh32
-rwxr-xr-xselenium/launch_selenium_test23
-rw-r--r--splitops/README.md128
-rwxr-xr-xsplitops/splitops143
-rw-r--r--systemd-services/buildbot-worker-codespell.service (renamed from systemd-services/buildbot-worker-wallet.service)6
-rw-r--r--systemd-services/buildbot-worker-compilecheck.service (renamed from systemd-services/buildbot-worker-auditor.service)2
-rw-r--r--systemd-services/buildbot-worker-container.service13
-rw-r--r--systemd-services/buildbot-worker-linkchecker.service13
-rw-r--r--systemd-services/buildbot-worker-packaging.service13
-rw-r--r--systemd-services/tips-checker.service13
-rw-r--r--taler-arm/anastasis.conf4
-rw-r--r--taler-arm/arm.conf19
-rw-r--r--taler-arm/defaults.conf20
-rw-r--r--taler-arm/libeufin-nexus.conf4
-rw-r--r--taler-arm/libeufin-sandbox.conf4
-rw-r--r--taler-arm/taler-aggregator.conf4
-rw-r--r--taler-arm/taler-auditor.conf4
-rw-r--r--taler-arm/taler-blog.conf4
-rw-r--r--taler-arm/taler-closer.conf4
-rw-r--r--taler-arm/taler-demobank.conf4
-rw-r--r--taler-arm/taler-donations.conf4
-rw-r--r--taler-arm/taler-exchange-secmod-eddsa.conf4
-rw-r--r--taler-arm/taler-exchange-secmod-rsa.conf4
-rw-r--r--taler-arm/taler-exchange-wirewatch.conf4
-rw-r--r--taler-arm/taler-exchange.conf4
-rw-r--r--taler-arm/taler-landing.conf4
-rw-r--r--taler-arm/taler-merchant.conf4
-rw-r--r--taler-arm/taler-postgres-standalone.conf8
-rw-r--r--taler-arm/taler-survey.conf4
-rw-r--r--taler-arm/taler-sync.conf4
-rw-r--r--taler-arm/taler-transfer.conf4
-rw-r--r--taler-arm/taler-twister-bank.conf4
-rw-r--r--taler-arm/taler-twister-exchange.conf4
-rw-r--r--taler-arm/taler-twister.conf4
-rwxr-xr-xtaler-sitesbuild/invalidate.sh2
-rwxr-xr-xtaler-sitesbuild/update_buywith_page.sh2
-rwxr-xr-xtaler-sitesbuild/update_stage.sh4
-rwxr-xr-xtaler-sitesbuild/update_twister_page.sh2
-rwxr-xr-xtaler-sitesbuild/update_www.sh4
263 files changed, 6891 insertions, 4526 deletions
diff --git a/README.md b/README.md
index 2e66f4c..6fd724a 100644
--- a/README.md
+++ b/README.md
@@ -20,7 +20,6 @@ surviving processes for unit tests that require ports that might conflict.
Deployment Layout
=================
-bin/ -- main scripts to manage a taler deployment (test/demo/int)
buildbot/ -- scripts and configuration for the buildmaster and jobs launched by it
envcfg/ -- Git tags for various deployments, specify which versions should be used
history/ -- directory for NOT MAINTAINED scripts
@@ -31,7 +30,6 @@ postmortem/ -- postmortem reports about major incidents we have had with lessons
private-keys/ -- deployment private keys so that keys do not change if we migrate elsewhere
sign-gnu-artefact -- shell script for making releases and preparing upload to ftp.gnu.org
systemd-services/ -- service definitions for buildbot build-slaves
-taler-arm/ -- gnunet-arm configurations for a deployment
taler-sitesbuild/ -- scripts for updating our Web sites
diff --git a/aptly/aptly-cleanup.sh b/aptly/aptly-cleanup.sh
new file mode 100755
index 0000000..85bcd5a
--- /dev/null
+++ b/aptly/aptly-cleanup.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+set -ex
+
+for i in $(aptly snapshot list | grep taler | cut -d'[' -f 2 | cut -d']' -f1) ; do
+ aptly snapshot drop $i
+done
diff --git a/aptly/aptly-publish.sh b/aptly/aptly-publish.sh
new file mode 100755
index 0000000..07a986f
--- /dev/null
+++ b/aptly/aptly-publish.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+set -exou pipefail
+
+REPOROOT="${HOME}"/.aptly/public
+DATESTAMP=$(date -Iseconds)
+
+rm -rf "${HOME}"/garbagecollect
+mv "${REPOROOT}"/pool "${HOME}"/garbagecollect || true
+aptly repo drop --force taler-ci-snapshots || true
+aptly db cleanup
+aptly repo create -distribution=bookworm -component=main taler-ci-snapshots
+aptly repo add taler-ci-snapshots /home/aptbuilder/incoming/bookworm-taler-ci || true
+aptly snapshot create "taler-${DATESTAMP}" from repo taler-ci-snapshots
+aptly -gpg-provider="gpg2" publish -architectures="amd64,arm64" switch bookworm "taler-${DATESTAMP}"
+
+# Publish a list of all packages in the repo
+aptly repo show -with-packages taler-ci-snapshots > "${REPOROOT}"/packages.txt
diff --git a/aptly/entr.sh b/aptly/entr.sh
new file mode 100755
index 0000000..6837508
--- /dev/null
+++ b/aptly/entr.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -xo
+
+while true ; do
+ echo "${HOME}/incoming/bookworm-taler-ci" | entr -n -d "${HOME}"/deployment/aptly/aptly-publish.sh ; sleep 1 || true
+done
diff --git a/bin/taler-deployment b/bin/taler-deployment
deleted file mode 100755
index c40a53a..0000000
--- a/bin/taler-deployment
+++ /dev/null
@@ -1,838 +0,0 @@
-#!/usr/bin/env python3
-
-# This file is part of GNU Taler.
-#
-# GNU Taler is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# GNU Taler is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with GNU Taler. If not, see <https://www.gnu.org/licenses/>.
-
-import click
-import types
-import os
-import sys
-import os.path
-import subprocess
-import time
-import random
-from pathlib import Path
-from dataclasses import dataclass
-from typing import List, Callable
-from shutil import copy
-from taler_urls import get_urls
-from string import ascii_letters, ascii_uppercase
-
-activate_template = """\
-#!/bin/bash
-
-# Generated by taler-deployment-bootstrap
-
-if ! echo $PATH | tr ":" '\\n' | grep "$HOME/deployment/bin" > /dev/null
-then
- export PATH="{curr_path}"
-fi
-export PYTHONUSERBASE=$HOME/local
-export TALER_BOOTSTRAP_TIMESTAMP={timestamp}
-export TALER_CONFIG_CURRENCY={currency}
-export TALER_ENV_NAME={envname}
-export TALER_ENV_URL_INTRO="{landing}"
-export TALER_ENV_URL_BANK="{bank}"
-export TALER_ENV_URL_MERCHANT_BLOG="{blog}"
-export TALER_ENV_URL_MERCHANT_DONATIONS="{donations}"
-export TALER_ENV_URL_MERCHANT_SURVEY="{survey}"
-export TALER_ENV_URL_AUDITOR="{auditor}"
-export TALER_ENV_URL_BACKOFFICE="{backoffice}"
-export TALER_ENV_URL_SYNC="{sync}"
-export TALER_ENV_MERCHANT_BACKEND="{merchant_backend}"
-export TALER_COVERAGE={coverage}
-export TALER_ENV_FRONTENDS_APITOKEN="$(cat ~/merchant_auth_token)"
-export LIBEUFIN_ENV_SANDBOX_ADMIN_PASSWORD="$(cat ~/libeufin_admin_password)"
-export LIBEUFIN_NEXUS_DB_CONNECTION="jdbc:sqlite:$HOME/nexus.sqlite"
-export LIBEUFIN_SANDBOX_DB_CONNECTION="jdbc:sqlite:$HOME/sandbox.sqlite"
-export LIBEUFIN_SANDBOX_HOSTNAME=bank.{envname}.taler.net/eufin/sandbox
-export LIBEUFIN_SANDBOX_CURRENCY={currency}
-"""
-
-@dataclass
-class Repo:
- name: str
- url: str
- deps: List[str]
- builder: Callable[["Repo", Path], None]
-
-
-class EnvInfo:
- def __init__(self, name, repos, cfg):
- self.name = name
- self.repos = []
- for r in repos:
- tag = getattr(cfg, "tag_" + r.name.replace("-", "_"))
- # This check skips all the components that are
- # expected to be already installed; typically via
- # a distribution package manager.
- if not tag:
- continue
- self.repos.append(r)
-
-@click.group()
-def cli():
- pass
-
-
-# map from environment name to currency
-currmap = {
- "test": "TESTKUDOS",
- "docs-builder": "TESTKUDOS",
- "coverage": "TESTKUDOS",
- "integrationtest": "TESTKUDOS",
- "demo": "KUDOS",
- "int": "INTKUDOS",
- "euro": "EUR",
- "chf": "CHF",
- "auditor-reporter-test": "TESTKUDOS",
- "auditor-reporter-demo": "KUDOS",
- "local": "LOCALKUDOS",
- "tanker": "SEK"
-}
-
-def generate_apitoken():
- return "secret-token:" + ''.join(random.choices(ascii_letters + ascii_uppercase, k=10))
-
-def generate_password():
- return ''.join(random.choices(ascii_letters + ascii_uppercase, k=10))
-
-
-def update_checkout(r: Repo, p: Path):
- """Clean the repository's working directory and
- update it to the match the latest version of the upstream branch
- that we are tracking."""
- subprocess.run(["git", "-C", str(p), "clean", "-fdx"], check=True)
- subprocess.run(["git", "-C", str(p), "fetch"], check=True)
- subprocess.run(["git", "-C", str(p), "reset"], check=True)
- res = subprocess.run(
- [
- "git",
- "-C",
- str(p),
- "rev-parse",
- "--abbrev-ref",
- "--symbolic-full-name",
- "@{u}",
- ],
- stderr=subprocess.DEVNULL,
- stdout=subprocess.PIPE,
- encoding="utf-8",
- )
- if res.returncode != 0:
- ref = "HEAD"
- else:
- ref = res.stdout.strip("\n ")
- print(f"resetting {r.name} to ref {ref}")
- subprocess.run(["git", "-C", str(p), "reset", "--hard", ref], check=True)
-
-
-def default_configure(*extra):
- pfx = Path.home() / "local"
- extra_list = list(extra)
- if int(os.environ.get("TALER_COVERAGE")):
- extra_list.append("--enable-coverage")
- subprocess.run(["./configure", f"--prefix={pfx}"] + extra_list, check=True)
-
-def default_configure_nc(*extra):
- """Variant of default_configure() that does NEVER add --enable-coverage"""
- pfx = Path.home() / "local"
- extra_list = list(extra)
- subprocess.run(["./configure", f"--prefix={pfx}"] + extra_list, check=True)
-
-
-def pyconfigure(*extra):
- """For python programs, --prefix doesn't work."""
- subprocess.run(["./configure"] + list(extra), check=True)
-
-
-def build_libeufin(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure_nc()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_libmicrohttpd(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- # Debian gnutls packages are too old ...
- default_configure("--with-gnutls=/usr/local")
- subprocess.run(["make"], check=True)
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_gnunet(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / "local"
- default_configure(
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- "--disable-documentation",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_exchange(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / "local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-gnunet={pfx}",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_wallet(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure_nc()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_twister(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / "local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-exchange={pfx}",
- f"--with-gnunet={pfx}",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_merchant(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / "local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-exchange={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_sync(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / "local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-exchange={pfx}",
- f"--with-merchant={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_anastasis(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / "local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-exchange={pfx}",
- f"--with-merchant={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_bank(r, p):
- update_checkout(r, p)
- subprocess.run(["pip3", "install", "poetry"], check=True)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / "local"
- pyconfigure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_demos(r, p):
- update_checkout(r, p)
- pfx = Path.home() / "local"
- pyconfigure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_backoffice(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"])
- subprocess.run(["./configure"])
- subprocess.run(["make", "build-single"])
- (p / "taler-buildstamp").touch()
-
-def build_docs(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / "local"
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def get_repos(envname):
- """Get a list of repos (topologically sorted) that should be build for the
- given environment"""
- print(f"Loading return repositories for {envname}.", file=sys.stderr)
- if envname in ("demochecker",):
- return []
- if envname in ("docs-builder",):
- return [
- Repo(
- "libmicrohttpd",
- "git://git.gnunet.org/libmicrohttpd.git",
- [],
- build_libmicrohttpd,
- ),
- Repo(
- "docs",
- "git://git.taler.net/docs",
- [],
- build_docs,
- ),
- Repo(
- "wallet-core",
- "git://git.taler.net/wallet-core",
- [],
- build_wallet,
- ),
- Repo(
- "exchange",
- "git://git.taler.net/exchange",
- ["gnunet", "libmicrohttpd"],
- build_exchange,
- ),
- Repo(
- "merchant",
- "git://git.taler.net/merchant",
- ["exchange","libmicrohttpd","gnunet"],
- build_merchant,
- ),
- Repo(
- "sync",
- "git://git.taler.net/sync",
- ["exchange", "merchant","gnunet","libmicrohttpd"],
- build_sync,
- ),
- Repo(
- "anastasis",
- "git://git.taler.net/anastasis",
- ["exchange", "merchant","libmicrohttpd","gnunet"],
- build_anastasis,
- ),
- ]
- if envname in ("int", "coverage", "integrationtest",):
- return [
- Repo(
- "libeufin",
- "git://git.taler.net/libeufin.git",
- [],
- build_libeufin,
- ),
- Repo(
- "libmicrohttpd",
- "git://git.gnunet.org/libmicrohttpd.git",
- [],
- build_libmicrohttpd,
- ),
- Repo(
- "gnunet",
- "git://git.gnunet.org/gnunet.git",
- ["libmicrohttpd"],
- build_gnunet),
- Repo(
- "bank",
- "git://git.taler.net/bank",
- [],
- build_bank
- ),
- Repo(
- "wallet-core",
- "git://git.taler.net/wallet-core",
- [],
- build_wallet,
- ),
- Repo(
- "exchange",
- "git://git.taler.net/exchange",
- ["gnunet", "libmicrohttpd", "wallet-core"],
- build_exchange,
- ),
- Repo(
- "merchant",
- "git://git.taler.net/merchant",
- ["exchange", "libmicrohttpd", "gnunet"],
- build_merchant,
- ),
- Repo(
- "sync",
- "git://git.taler.net/sync",
- ["exchange", "merchant", "gnunet", "libmicrohttpd"],
- build_sync,
- ),
- Repo(
- "anastasis",
- "git://git.taler.net/anastasis",
- ["exchange", "merchant", "gnunet", "libmicrohttpd"],
- build_anastasis,
- ),
- ]
-
- # Note: these are currently not in use!
- if envname in ("euro", "chf"):
- return [
- Repo(
- "libmicrohttpd",
- "git://git.gnunet.org/libmicrohttpd.git",
- [],
- build_libmicrohttpd,
- ),
- Repo(
- "bank",
- "git://git.taler.net/bank",
- [],
- build_bank,
- ),
- Repo(
- "gnunet",
- "git://git.gnunet.org/gnunet.git",
- ["libmicrohttpd"],
- build_gnunet,
- ),
- Repo(
- "exchange",
- "git://git.taler.net/exchange",
- ["gnunet", "libmicrohttpd"],
- build_exchange,
- ),
- Repo(
- "merchant",
- "git://git.taler.net/merchant",
- ["exchange", "libmicrohttpd"],
- build_merchant,
- ),
- Repo(
- "taler-merchant-demos",
- "git://git.taler.net/taler-merchant-demos",
- [],
- build_demos,
- ),
- ]
- if envname in ("tanker", "local", "demo", "int", "test", "auditor-reporter-test", "auditor-reporter-demo"):
- return [
- Repo(
- "wallet-core",
- "git://git.taler.net/wallet-core",
- [],
- build_wallet,
- ),
- Repo(
- "bank",
- "git://git.taler.net/bank",
- [],
- build_bank,
- ),
- Repo(
- "libmicrohttpd",
- "git://git.gnunet.org/libmicrohttpd.git",
- [],
- build_libmicrohttpd,
- ),
- Repo(
- "gnunet",
- "git://git.gnunet.org/gnunet.git",
- ["libmicrohttpd"],
- build_gnunet,
- ),
- Repo(
- "twister",
- "git://git.taler.net/twister",
- ["gnunet", "libmicrohttpd"],
- build_twister,
- ),
- Repo(
- "exchange",
- "git://git.taler.net/exchange",
- ["gnunet", "libmicrohttpd", "twister", "wallet-core"],
- build_exchange,
- ),
- Repo(
- "merchant",
- "git://git.taler.net/merchant",
- ["exchange", "libmicrohttpd", "gnunet"],
- build_merchant,
- ),
- Repo(
- "sync",
- "git://git.taler.net/sync",
- ["exchange", "merchant", "gnunet", "libmicrohttpd"],
- build_sync,
- ),
- Repo(
- "anastasis",
- "git://git.taler.net/anastasis",
- ["gnunet", "libmicrohttpd", "exchange", "merchant"],
- build_sync,
- ),
- Repo(
- "taler-merchant-demos",
- "git://git.taler.net/taler-merchant-demos",
- [],
- build_demos,
- ),
- ]
- raise Exception(f"no repos defined for envname {envname}")
-
-
-def ensure_activated():
- """Make sure that the environment variables have been
- loaded correctly via the ~/activate script"""
- ts = os.environ.get("TALER_BOOTSTRAP_TIMESTAMP")
- if ts is None:
- print("Please do 'source ~/activate' first.", file=sys.stderr)
- sys.exit(1)
- out = subprocess.check_output(
- ["bash", "-c", "source ~/activate; echo $TALER_BOOTSTRAP_TIMESTAMP"],
- encoding="utf-8",
- )
- out = out.strip(" \n")
- if out != ts:
- print(
- f"Please do 'source ~/activate'. Current ts={ts}, new ts={out}",
- file=sys.stderr,
- )
- sys.exit(1)
-
-
-def update_repos(repos: List[Repo]) -> None:
- for r in repos:
- r_dir = Path.home() / "sources" / r.name
- subprocess.run(["git", "-C", str(r_dir), "fetch"], check=True)
- res = subprocess.run(
- ["git", "-C", str(r_dir), "status", "-sb"],
- check=True,
- stdout=subprocess.PIPE,
- encoding="utf-8",
- )
- if "behind" in res.stdout:
- print(f"new commits in {r}")
- s = r_dir / "taler-buildstamp"
- if s.exists():
- s.unlink()
-
-
-def get_stale_repos(repos: List[Repo]) -> List[Repo]:
- timestamps = {}
- stale = []
- for r in repos:
- r_dir = Path.home() / "sources" / r.name
- s = r_dir / "taler-buildstamp"
- if not s.exists():
- timestamps[r.name] = time.time()
- stale.append(r)
- continue
- ts = timestamps[r.name] = s.stat().st_mtime
- for dep in r.deps:
- if timestamps[dep] > ts:
- stale.append(r)
- break
- return stale
-
-
-allowed_envs = (
- "test",
- "int",
- "demo",
- "auditor-reporter-test",
- "auditor-reporter-demo",
- "docs-builder",
- "euro",
- "chf",
- "coverage",
- "integrationtest",
- "local",
- "tanker"
-)
-
-def load_apitoken():
- apitoken_path = Path.home() / "envcfg.py"
- if not os.path.isfile(apitoken_path):
- return None
- with open(apitoken_path, "r") as f:
- return f.readline()
-
-def load_envcfg():
- cfg = types.ModuleType("taler_deployment_cfg")
- envcfg_path = Path.home() / "envcfg.py"
- if not os.path.isfile(envcfg_path):
- return None
- print(f"Loading configuration from {envcfg_path}.", file=sys.stderr)
- cfgtext = envcfg_path.read_text()
- exec(cfgtext, cfg.__dict__)
- return cfg
-
-
-def get_env_info(cfg):
- envname = getattr(cfg, "env")
- if envname not in allowed_envs:
- print(f"env '{envname}' not supported")
- sys.exit(1)
- repos = get_repos(envname)
- return EnvInfo(envname, repos, cfg)
-
-
-@cli.command()
-def build() -> None:
- """Build the deployment from source."""
- ensure_activated()
- cfg = load_envcfg()
- if not cfg:
- print("Please create ~/envcfg.py (template in deployment.git can help)")
- return 1
- env_info = get_env_info(cfg)
- update_repos(env_info.repos)
- stale = get_stale_repos(env_info.repos)
- print(f"found stale repos: {stale}")
- for r in stale:
- p = Path.home() / "sources" / r.name
- os.chdir(str(p))
- r.builder(r, p)
-
-
-@cli.command()
-@click.argument("color", metavar="COLOR", type=click.Choice(["blue", "green"]))
-def switch_demo(color) -> None:
- """Switch deployment color of demo."""
- if os.environ["USER"] != "demo":
- print("Command should be executed as the demo user only.")
- sys.exit(1)
- active_home = Path.home() / "active-home"
- try:
- active_home.unlink()
- except:
- pass
- active_home.symlink_to(f"/home/demo-{color}")
-
-
-# repos does not contain distro-installed components
-def checkout_repos(cfg, repos):
- """Check out repos to the version specified in envcfg.py"""
- home = Path.home()
- sources = home / "sources"
- for r in repos:
- r_dir = home / "sources" / r.name
- if not r_dir.exists():
- r_dir.mkdir(parents=True, exist_ok=True)
- subprocess.run(["git", "-C", str(sources), "clone", r.url], check=True)
- subprocess.run(["git", "-C", str(r_dir), "fetch"], check=True)
- tag = getattr(cfg, "tag_" + r.name.replace("-", "_"))
- subprocess.run(
- ["git", "-C", str(r_dir), "checkout", "-q", "-f", tag, "--"], check=True,
- )
-
-
-@cli.command()
-def sync_repos() -> None:
- """Sync repos with the envcfg.py file."""
- home = Path.home()
- cfg = load_envcfg()
- if not cfg:
- print("Please create ~/envcfg.py (template in deployment.git can help)")
- return 1
- env_info = get_env_info(cfg)
- repos = env_info.repos
- checkout_repos(cfg, repos)
- for r in repos:
- r_dir = home / "sources" / r.name
- subprocess.run(["git", "-C", str(r_dir), "clean", "-fdx"], check=True)
-
-@cli.command()
-def bootstrap() -> None:
- """Bootstrap a GNU Taler deployment."""
- home = Path.home()
- cfg = load_envcfg()
- if not cfg:
- print("Please create ~/envcfg.py (template in deployment.git can help)")
- return 1
- env_info = get_env_info(cfg)
- repos = env_info.repos
- envname = env_info.name
- checkout_repos(cfg,repos)
-
- # Generate $PATH variable that will be set in the activate script.
- local_path = str(Path.home() / "local" / "bin")
- deployment_path = str(Path.home() / "deployment" / "bin")
- path_list = os.environ["PATH"].split(":")
- if local_path not in path_list:
- path_list.insert(0, local_path)
- if deployment_path not in path_list:
- path_list.insert(0, deployment_path)
-
- token_file = Path.home() / "merchant_auth_token"
- if not token_file.is_file():
- with token_file.open("w") as f:
- f.write(generate_apitoken())
- print(f"Token file '{token_file}' created.")
-
- sandbox_admin_password_file = Path.home() / "libeufin_admin_password"
- if not sandbox_admin_password_file.is_file():
- with sandbox_admin_password_file.open("w") as f:
- f.write(generate_password())
- print(f"Libeufin Sandbox admin password file '{sandbox_admin_password_file}' created.")
-
- with (home / "activate").open("w") as f:
- f.write(
- activate_template.format(
- envname=envname,
- timestamp=str(time.time()),
- currency=currmap[envname],
- curr_path=":".join(path_list),
- coverage=1 if envname == "coverage" else 0,
- **get_urls(envname)
- )
- )
- if envname != "local":
- (home / "sockets").mkdir(parents=True, exist_ok=True)
-
- if envname in ("test", "int", "local"):
- (home / "taler-data").mkdir(parents=True, exist_ok=True)
- if envname == "demo":
- setup_service("config-tips.timer")
- create_bb_worker(
- "tips-checker.service", "tips-checker-dir",
- "tips-checker-worker", "tips-checker-pass"
- )
-
- if not (home / "taler-data").exists():
- (home / "taler-data").symlink_to("/home/demo/shared-data")
-
- if envname == "integrationtest":
- create_bb_worker("buildbot-worker-wallet.service", "worker", "wallet-worker", "wallet-pass")
-
- if envname == "test":
- create_bb_worker("buildbot-worker-taler.service", "bb-worker", "test-worker", "test-pass")
- setup_service("config-tips.timer")
-
- elif envname in ("auditor-reporter-test", "auditor-reporter-demo"):
- create_bb_worker("buildbot-worker-auditor.service", "worker", "auditor-worker", "auditor-pass")
- elif envname == "demo-checker":
- create_bb_worker("buildbot-worker-taler-healthcheck.service", "bb-worker", "demo-worker", "demo-pass")
- elif envname == "coverage":
- create_bb_worker("buildbot-worker-lcov.service", "worker", "lcov-worker", "lcov-pass")
-
- www_path = Path.home() / "www"
- www_path.mkdir(exist_ok=True)
- if not os.path.islink(www_path / "merchant"):
- os.symlink(
- Path.home() / "sources" / "merchant" / "coverage_report",
- www_path / "merchant",
- )
- if not os.path.islink(www_path / "exchange"):
- os.symlink(
- Path.home() / "sources" / "exchange" / "coverage_report",
- www_path / "exchange",
- )
- if not os.path.islink(www_path / "sync"):
- os.symlink(
- Path.home() / "sources" / "sync" / "coverage_report",
- www_path / "sync",
- )
-
- print("Bootstrap finished.")
- print("Please source the ~/activate file before proceeding.")
-
-
-def create_bb_worker(systemd_unit, dirname, workername, workerpw):
- home = Path.home()
- bb_dir = home / dirname
- if bb_dir.exists():
- return
- subprocess.run(
- [
- "buildbot-worker",
- "create-worker",
- "--umask=0o22",
- str(bb_dir),
- "localhost:9989",
- workername,
- workerpw,
- ],
- check=True,
- )
- setup_service (systemd_unit)
-
-def setup_service(systemd_unit):
- sc_path = Path.home() / ".config" / "systemd" / "user"
- sc_path.mkdir(exist_ok=True,parents=True)
- sc_unit = Path.home() / "deployment" / "systemd-services" / systemd_unit
- copy(sc_unit, sc_path)
-
- # If a timer got just installed, the related service
- # file needs to be installed now.
- split_filename = systemd_unit.split(".")
- if "timer" == split_filename[-1]:
- copy(Path.home() / "deployment" / "systemd-services" / f"{split_filename[0]}.service", sc_path)
-
- subprocess.run(
- [
- "systemctl",
- "--user",
- "daemon-reload",
- ],
- check=True,
- )
- subprocess.run(
- [
- "systemctl",
- "--user",
- "enable",
- systemd_unit
- ],
- check=True,
- )
- subprocess.run(
- [
- "systemctl",
- "--user",
- "start",
- systemd_unit
- ],
- check=True,
- )
-
-if __name__ == "__main__":
- cli()
diff --git a/bin/taler-deployment-arm b/bin/taler-deployment-arm
deleted file mode 100755
index e1c2112..0000000
--- a/bin/taler-deployment-arm
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-set -eu
-
-export GNUNET_FORCE_LOG="util;;;;WARNING/taler;;;;DEBUG/twister;;;;DEBUG"
-export GNUNET_BASE_CONFIG=$HOME/deployment/taler-arm
-export PATH="$HOME/local/bin":$PATH
-
-ulimit -c $((100 * 1024))
-mkdir -p $HOME/logs
-exec gnunet-arm -c $HOME/deployment/gnunet.conf "$@"
diff --git a/bin/taler-deployment-auditor b/bin/taler-deployment-auditor
deleted file mode 100755
index f90c7fb..0000000
--- a/bin/taler-deployment-auditor
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/bash
-
-# serve landing page via uwsgi
-
-base=$HOME
-
-export PATH="$base/deployment":$PATH
-
-ulimit -c $((100 * 1024))
-
-mkdir -p $HOME/sockets
-
-
-# redirect / to index.html,
-# serve static files from $HOME/auditor
-exec uwsgi \
- --mimefile /etc/mime.types \
- --socket $HOME/sockets/auditor.uwsgi \
- --chmod-socket=660 \
- --route "^/?$ redirect:index.html" \
- --route "^/(.+) addvar:FILE=$HOME/auditor/\$1" \
- --route-if "exists:\${FILE} static:\${FILE}" \
- --route "^/(.+) break:404 not found"
diff --git a/bin/taler-deployment-auth-token b/bin/taler-deployment-auth-token
deleted file mode 100755
index 03c0620..0000000
--- a/bin/taler-deployment-auth-token
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python3
-
-# This file is part of GNU Taler.
-#
-# GNU Taler is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# GNU Taler is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with GNU Taler. If not, see <https://www.gnu.org/licenses/>.
-
-import random
-import os
-import sys
-from pathlib import Path
-from string import ascii_letters, ascii_uppercase
-
-TOKEN_FILE = Path.home() / "merchant_auth_token"
-
-def generate_apitoken():
- return "secret-token:" + ''.join(random.choices(ascii_letters + ascii_uppercase, k=10))
-
-if TOKEN_FILE.is_file():
- print("~/merchant_auth_token exists already. Not overwriting it!")
- sys.exit(0)
-
-with TOKEN_FILE.open("w") as f:
- f.write(generate_apitoken())
-
-print(f"Token file '{TOKEN_FILE}' created")
diff --git a/bin/taler-deployment-config-generate b/bin/taler-deployment-config-generate
deleted file mode 100755
index 0101d78..0000000
--- a/bin/taler-deployment-config-generate
+++ /dev/null
@@ -1,285 +0,0 @@
-#!/usr/bin/env python3
-import click
-import sys
-from collections import OrderedDict
-import json
-import os
-import urllib.parse
-import stat
-from taler_urls import get_urls, get_port
-
-
-class ConfigFile:
- def __init__(self, envname, currency, exchange_pub, filename):
- self.sections = OrderedDict()
- self.envname = envname
- self.filename = filename
- self.currency = currency
- self.exchange_pub = exchange_pub
-
- def destroy(self):
- del self.sections
- self.sections = OrderedDict()
-
- def cfg_put(self, section_name, key, value):
- s = self.sections[section_name] = self.sections.get(section_name, OrderedDict())
- s[key] = value
-
- def cfg_write(self, outdir):
-
- if outdir:
- fstream = open(os.path.join(outdir, self.filename), "w")
- else:
- fstream = open(sys.stdout)
-
- for section_name, section in self.sections.items():
- fstream.write("[" + section_name + "]" + "\n")
- for key, value in section.items():
- fstream.write(key + " = " + value + "\n")
- fstream.write("\n")
- fstream.close()
-
-
-def coin(
- obj,
- name,
- value,
- d_withdraw="3 years",
- d_spend="5 years",
- d_legal="10 years",
- f_withdraw="0.01",
- f_deposit="0.01",
- f_refresh="0.01",
- f_refund="0.01",
- rsa_keysize="2048",
-):
- sec = "coin_" + obj.currency + "_" + name
- obj.cfg_put(sec, "value", obj.currency + ":" + value)
- obj.cfg_put(sec, "duration_withdraw", d_withdraw)
- obj.cfg_put(sec, "duration_spend", d_spend)
- obj.cfg_put(sec, "duration_legal", d_legal)
- obj.cfg_put(sec, "fee_withdraw", obj.currency + ":" + f_withdraw)
- obj.cfg_put(sec, "fee_refresh", obj.currency + ":" + f_refresh)
- obj.cfg_put(sec, "fee_refund", obj.currency + ":" + f_refund)
- obj.cfg_put(sec, "fee_deposit", obj.currency + ":" + f_deposit)
- obj.cfg_put(sec, "rsa_keysize", rsa_keysize)
-
-
-def config(obj):
- urls = get_urls(obj.envname)
- obj.cfg_put("paths", "TALER_DATA_HOME", "${HOME}/taler-data")
- obj.cfg_put("paths", "TALER_RUNTIME_DIR", "${HOME}/taler-runtime")
- obj.cfg_put("taler", "CURRENCY", obj.currency)
- obj.cfg_put("taler", "CURRENCY_ROUND_UNIT", f"{obj.currency}:0.01")
- if obj.envname != "local":
- obj.cfg_put("bank", "serve", "uwsgi")
- obj.cfg_put("bank", "uwsgi_serve", "unix")
- obj.cfg_put("bank", "uwsgi_unixpath", "$HOME/sockets/bank.uwsgi")
- obj.cfg_put("bank", "uwsgi_unixpath_mode", "660")
- else:
- obj.cfg_put("bank", "serve", "http")
- obj.cfg_put("bank", "http_port", get_port(urls["bank"]))
-
- obj.cfg_put("bank", "database", "taler" + obj.envname)
- obj.cfg_put("bank", "max_debt", "%s:500.0" % obj.currency)
- obj.cfg_put("bank", "max_debt_bank", "%s:1000000000.0" % obj.currency)
- obj.cfg_put("bank", "allow_registrations", "YES")
- obj.cfg_put("bank", "base_url", urls["bank"])
- obj.cfg_put("bank", "database", "postgres:///taler{}".format(obj.envname))
- obj.cfg_put("bank", "suggested_exchange", urls["exchange"])
-
- obj.cfg_put("bank-admin", "uwsgi_serve", "unix")
- obj.cfg_put("bank-admin", "uwsgi_unixpath", "$HOME/sockets/bank-admin.uwsgi")
- obj.cfg_put("bank-admin", "uwsgi_unixpath_mode", "660")
-
- obj.cfg_put("donations", "serve", "uwsgi")
- obj.cfg_put("donations", "uwsgi_serve", "unix")
- obj.cfg_put("donations", "uwsgi_unixpath", "$HOME/sockets/donations.uwsgi")
- obj.cfg_put("donations", "uwsgi_unixpath_mode", "660")
-
- obj.cfg_put("landing", "serve", "uwsgi")
- obj.cfg_put("landing", "uwsgi_serve", "unix")
- obj.cfg_put("landing", "uwsgi_unixpath", "$HOME/sockets/landing.uwsgi")
- obj.cfg_put("landing", "uwsgi_unixpath_mode", "660")
-
- obj.cfg_put("blog", "serve", "uwsgi")
- obj.cfg_put("blog", "uwsgi_serve", "unix")
- obj.cfg_put("blog", "uwsgi_unixpath", "$HOME/sockets/shop.uwsgi")
- obj.cfg_put("blog", "uwsgi_unixpath_mode", "660")
-
- obj.cfg_put("survey", "serve", "uwsgi")
- obj.cfg_put("survey", "uwsgi_serve", "unix")
- obj.cfg_put("survey", "uwsgi_unixpath", "$HOME/sockets/survey.uwsgi")
- obj.cfg_put("survey", "uwsgi_unixpath_mode", "660")
- obj.cfg_put("survey", "bank_password", "x")
-
- obj.cfg_put("backoffice-all", "backend", urls["merchant_backend"])
-
- # Keep only one back-office service for all instances, for simplicity.
- obj.cfg_put("backoffice-all", "uwsgi_serve", "unix")
- obj.cfg_put("backoffice-all", "uwsgi_unixpath_mode", "660")
- obj.cfg_put("backoffice-all", "uwsgi_unixpath", "$HOME/sockets/backoffice.uwsgi")
- obj.cfg_put("backoffice-all", "instances", "FSF default Tor")
-
- if obj.envname != "local":
- obj.cfg_put("merchant", "serve", "unix")
- obj.cfg_put("merchant", "unixpath", "$HOME/sockets/merchant.http")
- else:
- obj.cfg_put("merchant", "serve", "tcp")
- obj.cfg_put("merchant", "port", get_port(urls["merchant_backend"]))
-
- obj.cfg_put("merchant", "wire_transfer_delay", "0 s")
- obj.cfg_put("merchant", "default_max_wire_fee", obj.currency + ":" + "0.01")
- obj.cfg_put("merchant", "default_max_deposit_fee", obj.currency + ":" + "0.05")
- obj.cfg_put(
- "merchantdb-postgres", "config", "postgres:///taler{}".format(obj.envname)
- )
-
- obj.cfg_put("frontends", "backend", urls["merchant_backend"])
-
- obj.cfg_put(
- "merchant-exchange-{}".format(obj.currency), "master_key", obj.exchange_pub
- )
- obj.cfg_put("merchant-exchange-{}".format(obj.currency), "currency", obj.currency)
-
- obj.cfg_put(
- "merchant-exchange-{}".format(obj.currency),
- "exchange_base_url",
- urls["exchange"],
- )
-
- obj.cfg_put("auditor", "serve", "unix")
- obj.cfg_put("auditor", "base_url", urls["auditor"])
- obj.cfg_put("auditor", "auditor_url", urls["auditor"])
- obj.cfg_put("auditor", "unixpath", "$HOME/sockets/auditor.http")
- obj.cfg_put("auditor", "tiny_amount", obj.currency + ":0.01")
-
- obj.cfg_put("taler-exchange-secmod-eddsa", "unixpath", "$HOME/sockets/taler-exchange-secmod-eddsa.sock")
- obj.cfg_put("taler-exchange-secmod-rsa", "unixpath", "$HOME/sockets/taler-exchange-secmod-rsa.sock")
- obj.cfg_put("taler-exchange-secmod-rsa", "sm_priv_key", "${TALER_DATA_HOME}/taler-exchange-secmod-rsa/secmod-private-key")
-
- obj.cfg_put("exchange", "base_url", urls["exchange"])
-
- if obj.envname != "local":
- obj.cfg_put("exchange", "serve", "unix")
- obj.cfg_put("exchange", "unixpath", "$HOME/sockets/exchange.http")
- else:
- obj.cfg_put("exchange", "serve", "tcp")
- obj.cfg_put("exchange", "port", get_port(urls["exchange"]))
-
- obj.cfg_put("exchange", "master_public_key", obj.exchange_pub)
- obj.cfg_put("exchange", "terms_etag", "0")
- obj.cfg_put("exchange", "terms_dir", "$HOME/local/share/taler-exchange/tos")
- obj.cfg_put("exchange", "privacy_etag", "0")
- obj.cfg_put("exchange", "privacy_dir", "$HOME/local/share/taler-exchange/pp")
-
-
- obj.cfg_put(
- "exchangedb-postgres", "db_conn_str", "postgres:///taler{}".format(obj.envname)
- )
- obj.cfg_put(
- "exchangedb-postgres", "config", "postgres:///taler{}".format(obj.envname)
- )
- obj.cfg_put(
- "auditordb-postgres", "db_conn_str", "postgres:///taler{}".format(obj.envname)
- )
- obj.cfg_put(
- "auditordb-postgres", "config", "postgres:///taler{}".format(obj.envname)
- )
-
- bank_acct_url = "{}taler-wire-gateway/Exchange/".format(urls["bank"])
-
- obj.cfg_put(
- "exchange-account-1", "payto_uri", "{}Exchange".format(urls["talerbank_payto"])
- )
- obj.cfg_put("exchange-account-1", "enable_debit", "yes")
- obj.cfg_put("exchange-account-1", "enable_credit", "yes")
- obj.cfg_put("exchange-accountcredentials-1", "wire_gateway_auth_method", "basic")
- obj.cfg_put("exchange-accountcredentials-1", "wire_gateway_url", bank_acct_url)
- obj.cfg_put("exchange-accountcredentials-1", "username", "Exchange")
- obj.cfg_put("exchange-accountcredentials-1", "password", "x")
-
- obj.cfg_put(
- "merchant-account-merchant",
- "payto_uri",
- "{}Tutorial".format(urls["talerbank_payto"]),
- )
- obj.cfg_put(
- "merchant-account-merchant",
- "wire_response",
- "${TALER_DATA_HOME}/merchant/wire/merchant.json",
- )
- obj.cfg_put("merchant-account-merchant", "wire_file_mode", "770")
-
- # The following block should be obsoleted by the new API to configure instances.
- merchant_instance_names = ("default", "Tor", "GNUnet", "Taler", "FSF", "Tutorial")
- for mi in merchant_instance_names:
- obj.cfg_put("merchant-account-merchant", f"HONOR_{mi}", "YES")
- obj.cfg_put("merchant-account-merchant", f"ACTIVE_{mi}", "YES")
-
- coin(obj, "ct_10", "0.10")
- coin(obj, "1", "1")
- coin(obj, "2", "2")
- coin(obj, "5", "5")
- coin(obj, "10", "10")
- coin(obj, "1000", "1000")
-
-
-@click.command()
-@click.option("--currency", default="KUDOS")
-@click.option("--envname", default="demo")
-@click.option("--outdir", required=True)
-@click.option("--exchange-pub", required=True)
-# Expected to contain already the 'secret-token:' scheme.
-@click.option("--frontends-apitoken", required=True)
-def main(currency, envname, outdir, exchange_pub, frontends_apitoken):
-
- if envname not in ("tanker", "demo", "test", "int", "euro", "chf", "local"):
- print("envname (%s) not demo/test/int, aborting config generation" % envname)
- return
-
- config_files = []
-
- mc = ConfigFile(envname, currency, exchange_pub, "taler.conf")
- mc.cfg_put("frontends", "backend_apikey", f"{frontends_apitoken}")
- config(mc)
- config_files.append(mc)
-
- urls = get_urls(envname)
-
- sc = ConfigFile(envname, currency, exchange_pub, "sync.conf")
- sc.cfg_put("taler", "currency", currency)
- sc.cfg_put("sync", "serve", "unix")
- sc.cfg_put("sync", "unixpath", "$HOME/sockets/sync.http")
- sc.cfg_put("sync", "apikey", f"Bearer {frontends_apitoken}")
- sc.cfg_put("sync", "annual_fee", f"{currency}:0.1")
- sc.cfg_put("sync", "fulfillment_url", "taler://fulfillment-success/")
- sc.cfg_put("sync", "payment_backend_url", urls["merchant_backend"] + "instances/Taler/")
- sc.cfg_put("syncdb-postgres", "config", f"postgres:///taler{envname}")
- config_files.append(sc)
-
- ac = ConfigFile(envname, currency, exchange_pub, "anastasis.conf")
- ac.cfg_put("taler", "currency", currency)
- ac.cfg_put("anastasis", "serve", "unix")
- ac.cfg_put("anastasis", "business_name", f"GNU Taler Demo Anastasis Provider")
- ac.cfg_put("anastasis", "unixpath", "$HOME/sockets/anastasis.http")
- ac.cfg_put("anastasis", "annual_fee", f"{currency}:0")
- ac.cfg_put("anastasis", "question_cost", f"{currency}:0")
- ac.cfg_put("anastasis", "insurance", f"{currency}:0")
- ac.cfg_put("anastasis", "truth_upload_fee", f"{currency}:0")
- ac.cfg_put("anastasis", "fulfillment_url", "taler://fulfillment-success/")
- ac.cfg_put("anastasis", "server_salt", "kreb3ia9dmj43gfa")
- ac.cfg_put("stasis-postgres", "config", f"postgres:///taler{envname}")
- ac.cfg_put("anastasis-merchant-backend", "payment_backend_url", urls["merchant_backend"] + "instances/anastasis/")
- ac.cfg_put("anastasis-merchant-backend", "api_key", f"Bearer {frontends_apitoken}")
- ac.cfg_put("authorization-question", "cost", f"{currency}:0")
- ac.cfg_put("authorization-question", "enabled", "yes")
- config_files.append(ac)
-
- assert 0 < len(config_files)
- for obj in config_files:
- obj.cfg_write(outdir)
-
-
-if __name__ == "__main__":
- main()
diff --git a/bin/taler-deployment-config-generate-sepa b/bin/taler-deployment-config-generate-sepa
deleted file mode 100755
index 959c224..0000000
--- a/bin/taler-deployment-config-generate-sepa
+++ /dev/null
@@ -1,281 +0,0 @@
-#!/usr/bin/env python3
-import click
-import sys
-from collections import OrderedDict
-import json
-import os
-import urllib.parse
-import stat
-from taler_urls import get_urls, get_port
-
-
-class ConfigFile:
- def __init__(self, envname, currency, exchange_pub, filename):
- self.sections = OrderedDict()
- self.envname = envname
- self.filename = filename
- self.currency = currency
- self.exchange_pub = exchange_pub
-
- def destroy(self):
- del self.sections
- self.sections = OrderedDict()
-
- def cfg_put(self, section_name, key, value):
- s = self.sections[section_name] = self.sections.get(section_name, OrderedDict())
- s[key] = value
-
- def cfg_write(self, outdir):
-
- if outdir:
- fstream = open(os.path.join(outdir, self.filename), "w")
- else:
- fstream = open(sys.stdout)
-
- for section_name, section in self.sections.items():
- fstream.write("[" + section_name + "]" + "\n")
- for key, value in section.items():
- fstream.write(key + " = " + value + "\n")
- fstream.write("\n")
- fstream.close()
-
-
-def coin(
- obj,
- name,
- value,
- d_withdraw="3 years",
- d_spend="5 years",
- d_legal="10 years",
- f_withdraw="0.01",
- f_deposit="0.01",
- f_refresh="0.01",
- f_refund="0.01",
- rsa_keysize="2048",
-):
- sec = "coin_" + obj.currency + "_" + name
- obj.cfg_put(sec, "value", obj.currency + ":" + value)
- obj.cfg_put(sec, "duration_withdraw", d_withdraw)
- obj.cfg_put(sec, "duration_spend", d_spend)
- obj.cfg_put(sec, "duration_legal", d_legal)
- obj.cfg_put(sec, "fee_withdraw", obj.currency + ":" + f_withdraw)
- obj.cfg_put(sec, "fee_refresh", obj.currency + ":" + f_refresh)
- obj.cfg_put(sec, "fee_refund", obj.currency + ":" + f_refund)
- obj.cfg_put(sec, "fee_deposit", obj.currency + ":" + f_deposit)
- obj.cfg_put(sec, "rsa_keysize", rsa_keysize)
-
-
-def config(obj):
- urls = get_urls(obj.envname)
- obj.cfg_put("paths", "TALER_DATA_HOME", "${HOME}/taler-data")
- obj.cfg_put("paths", "TALER_RUNTIME_DIR", "${HOME}/taler-runtime")
- obj.cfg_put("taler", "CURRENCY", obj.currency)
- obj.cfg_put("taler", "CURRENCY_ROUND_UNIT", f"{obj.currency}:0.01")
- if obj.envname != "local":
- obj.cfg_put("bank", "serve", "uwsgi")
- obj.cfg_put("bank", "uwsgi_serve", "unix")
- obj.cfg_put("bank", "uwsgi_unixpath", "$HOME/sockets/bank.uwsgi")
- obj.cfg_put("bank", "uwsgi_unixpath_mode", "660")
- else:
- obj.cfg_put("bank", "serve", "http")
- obj.cfg_put("bank", "http_port", get_port(urls["bank"]))
-
- obj.cfg_put("bank", "database", "taler" + obj.envname)
- obj.cfg_put("bank", "max_debt", "%s:500.0" % obj.currency)
- obj.cfg_put("bank", "max_debt_bank", "%s:1000000000.0" % obj.currency)
- obj.cfg_put("bank", "allow_registrations", "YES")
- obj.cfg_put("bank", "base_url", urls["bank"])
- obj.cfg_put("bank", "database", "postgres:///taler{}".format(obj.envname))
- obj.cfg_put("bank", "suggested_exchange", urls["exchange"])
-
- obj.cfg_put("bank-admin", "uwsgi_serve", "unix")
- obj.cfg_put("bank-admin", "uwsgi_unixpath", "$HOME/sockets/bank-admin.uwsgi")
- obj.cfg_put("bank-admin", "uwsgi_unixpath_mode", "660")
-
- obj.cfg_put("donations", "serve", "uwsgi")
- obj.cfg_put("donations", "uwsgi_serve", "unix")
- obj.cfg_put("donations", "uwsgi_unixpath", "$HOME/sockets/donations.uwsgi")
- obj.cfg_put("donations", "uwsgi_unixpath_mode", "660")
-
- obj.cfg_put("landing", "serve", "uwsgi")
- obj.cfg_put("landing", "uwsgi_serve", "unix")
- obj.cfg_put("landing", "uwsgi_unixpath", "$HOME/sockets/landing.uwsgi")
- obj.cfg_put("landing", "uwsgi_unixpath_mode", "660")
-
- obj.cfg_put("blog", "serve", "uwsgi")
- obj.cfg_put("blog", "uwsgi_serve", "unix")
- obj.cfg_put("blog", "uwsgi_unixpath", "$HOME/sockets/shop.uwsgi")
- obj.cfg_put("blog", "uwsgi_unixpath_mode", "660")
-
- obj.cfg_put("survey", "serve", "uwsgi")
- obj.cfg_put("survey", "uwsgi_serve", "unix")
- obj.cfg_put("survey", "uwsgi_unixpath", "$HOME/sockets/survey.uwsgi")
- obj.cfg_put("survey", "uwsgi_unixpath_mode", "660")
- obj.cfg_put("survey", "bank_password", "x")
-
- obj.cfg_put("backoffice-all", "backend", urls["merchant_backend"])
-
- # Keep only one back-office service for all instances, for simplicity.
- obj.cfg_put("backoffice-all", "uwsgi_serve", "unix")
- obj.cfg_put("backoffice-all", "uwsgi_unixpath_mode", "660")
- obj.cfg_put("backoffice-all", "uwsgi_unixpath", "$HOME/sockets/backoffice.uwsgi")
- obj.cfg_put("backoffice-all", "instances", "FSF default Tor")
-
- if obj.envname != "local":
- obj.cfg_put("merchant", "serve", "unix")
- obj.cfg_put("merchant", "unixpath", "$HOME/sockets/merchant.http")
- else:
- obj.cfg_put("merchant", "serve", "tcp")
- obj.cfg_put("merchant", "port", get_port(urls["merchant_backend"]))
-
- obj.cfg_put("merchant", "wire_transfer_delay", "0 s")
- obj.cfg_put("merchant", "default_max_wire_fee", obj.currency + ":" + "0.01")
- obj.cfg_put("merchant", "default_max_deposit_fee", obj.currency + ":" + "0.05")
- obj.cfg_put(
- "merchantdb-postgres", "config", "postgres:///taler{}".format(obj.envname)
- )
-
- obj.cfg_put("frontends", "backend", urls["merchant_backend"])
-
- obj.cfg_put(
- "merchant-exchange-{}".format(obj.currency), "master_key", obj.exchange_pub
- )
- obj.cfg_put("merchant-exchange-{}".format(obj.currency), "currency", obj.currency)
-
- obj.cfg_put(
- "merchant-exchange-{}".format(obj.currency),
- "exchange_base_url",
- urls["exchange"],
- )
-
- obj.cfg_put("auditor", "serve", "unix")
- obj.cfg_put("auditor", "base_url", urls["auditor"])
- obj.cfg_put("auditor", "auditor_url", urls["auditor"])
- obj.cfg_put("auditor", "unixpath", "$HOME/sockets/auditor.http")
- obj.cfg_put("auditor", "tiny_amount", obj.currency + ":0.01")
-
- obj.cfg_put("taler-exchange-secmod-eddsa", "unixpath", "$HOME/sockets/taler-exchange-secmod-eddsa.sock")
- obj.cfg_put("taler-exchange-secmod-rsa", "unixpath", "$HOME/sockets/taler-exchange-secmod-rsa.sock")
- obj.cfg_put("taler-exchange-secmod-rsa", "sm_priv_key", "${TALER_DATA_HOME}/taler-exchange-secmod-rsa/secmod-private-key")
-
- obj.cfg_put("exchange", "base_url", urls["exchange"])
-
- if obj.envname != "local":
- obj.cfg_put("exchange", "serve", "unix")
- obj.cfg_put("exchange", "unixpath", "$HOME/sockets/exchange.http")
- else:
- obj.cfg_put("exchange", "serve", "tcp")
- obj.cfg_put("exchange", "port", get_port(urls["exchange"]))
-
- obj.cfg_put("exchange", "master_public_key", obj.exchange_pub)
- obj.cfg_put("exchange", "terms_etag", "0")
- obj.cfg_put("exchange", "terms_dir", "$HOME/local/share/taler-exchange/tos")
- obj.cfg_put("exchange", "privacy_etag", "0")
- obj.cfg_put("exchange", "privacy_dir", "$HOME/local/share/taler-exchange/pp")
-
-
- obj.cfg_put(
- "exchangedb-postgres", "db_conn_str", "postgres:///taler{}".format(obj.envname)
- )
- obj.cfg_put(
- "exchangedb-postgres", "config", "postgres:///taler{}".format(obj.envname)
- )
- obj.cfg_put(
- "auditordb-postgres", "db_conn_str", "postgres:///taler{}".format(obj.envname)
- )
- obj.cfg_put(
- "auditordb-postgres", "config", "postgres:///taler{}".format(obj.envname)
- )
-
- bank_acct_url = "{}taler-wire-gateway/Exchange/".format(urls["bank"])
-
- obj.cfg_put("exchange-account-1", "payto_uri",
- "{}/EX00000000000000000000".format(urls["sepa_payto"])
- )
- obj.cfg_put("exchange-account-1", "enable_debit", "yes")
- obj.cfg_put("exchange-account-1", "enable_credit", "yes")
- obj.cfg_put("exchange-accountcredentials-1", "wire_gateway_auth_method", "basic")
- obj.cfg_put("exchange-accountcredentials-1", "wire_gateway_url", bank_acct_url)
- obj.cfg_put("exchange-accountcredentials-1", "username", "Exchange")
- obj.cfg_put("exchange-accountcredentials-1", "password", "x")
-
- obj.cfg_put("merchant-account-merchant", "payto_uri",
- "{}/ME00000000000000000000".format(urls["sepa_payto"]),
- )
- obj.cfg_put("merchant-account-merchant", "wire_response",
- "${TALER_DATA_HOME}/merchant/wire/merchant.json",
- )
- obj.cfg_put("merchant-account-merchant", "wire_file_mode", "770")
-
- # The following block should be obsoleted by the new API to configure instances.
- merchant_instance_names = ("default", "Tor", "GNUnet", "Taler", "FSF", "Tutorial")
- for mi in merchant_instance_names:
- obj.cfg_put("merchant-account-merchant", f"HONOR_{mi}", "YES")
- obj.cfg_put("merchant-account-merchant", f"ACTIVE_{mi}", "YES")
-
- coin(obj, "ct_10", "0.10")
- coin(obj, "1", "1")
- coin(obj, "2", "2")
- coin(obj, "5", "5")
- coin(obj, "10", "10")
- coin(obj, "1000", "1000")
-
-
-@click.command()
-@click.option("--currency", default="KUDOS")
-@click.option("--envname", default="demo")
-@click.option("--outdir", required=True)
-@click.option("--exchange-pub", required=True)
-# Expected to contain already the 'secret-token:' scheme.
-@click.option("--frontends-apitoken", required=True)
-def main(currency, envname, outdir, exchange_pub, frontends_apitoken):
-
- if envname not in ("tanker", "demo", "test", "int", "euro", "chf", "local"):
- print("envname (%s) not demo/test/int, aborting config generation" % envname)
- return
-
- config_files = []
-
- mc = ConfigFile(envname, currency, exchange_pub, "taler.conf")
- mc.cfg_put("frontends", "backend_apikey", f"{frontends_apitoken}")
- config(mc)
- config_files.append(mc)
-
- urls = get_urls(envname)
-
- sc = ConfigFile(envname, currency, exchange_pub, "sync.conf")
- sc.cfg_put("taler", "currency", currency)
- sc.cfg_put("sync", "serve", "unix")
- sc.cfg_put("sync", "unixpath", "$HOME/sockets/sync.http")
- sc.cfg_put("sync", "apikey", f"Bearer {frontends_apitoken}")
- sc.cfg_put("sync", "annual_fee", f"{currency}:0.1")
- sc.cfg_put("sync", "fulfillment_url", "taler://fulfillment-success/")
- sc.cfg_put("sync", "payment_backend_url", urls["merchant_backend"] + "instances/Taler/")
- sc.cfg_put("syncdb-postgres", "config", f"postgres:///taler{envname}")
- config_files.append(sc)
-
- ac = ConfigFile(envname, currency, exchange_pub, "anastasis.conf")
- ac.cfg_put("taler", "currency", currency)
- ac.cfg_put("anastasis", "serve", "unix")
- ac.cfg_put("anastasis", "business_name", f"GNU Taler Demo Anastasis Provider")
- ac.cfg_put("anastasis", "unixpath", "$HOME/sockets/anastasis.http")
- ac.cfg_put("anastasis", "annual_fee", f"{currency}:0")
- ac.cfg_put("anastasis", "question_cost", f"{currency}:0")
- ac.cfg_put("anastasis", "insurance", f"{currency}:0")
- ac.cfg_put("anastasis", "truth_upload_fee", f"{currency}:0")
- ac.cfg_put("anastasis", "fulfillment_url", "taler://fulfillment-success/")
- ac.cfg_put("anastasis", "server_salt", "kreb3ia9dmj43gfa")
- ac.cfg_put("stasis-postgres", "config", f"postgres:///taler{envname}")
- ac.cfg_put("anastasis-merchant-backend", "payment_backend_url", urls["merchant_backend"] + "instances/anastasis/")
- ac.cfg_put("anastasis-merchant-backend", "api_key", f"Bearer {frontends_apitoken}")
- ac.cfg_put("authorization-question", "cost", f"{currency}:0")
- ac.cfg_put("authorization-question", "enabled", "yes")
- config_files.append(ac)
-
- assert 0 < len(config_files)
- for obj in config_files:
- obj.cfg_write(outdir)
-
-
-if __name__ == "__main__":
- main()
diff --git a/bin/taler-deployment-config-instances b/bin/taler-deployment-config-instances
deleted file mode 100755
index 9895737..0000000
--- a/bin/taler-deployment-config-instances
+++ /dev/null
@@ -1,237 +0,0 @@
-#!/usr/bin/env python3
-
-"""
-This script makes sure that the merchant backend instances used by the
-test/demo environment are created.
-
-We assume that the merchant backend is running, and that the "~/activate"
-file has been sourced to provide the right environment variables.
-"""
-
-import requests
-from os import environ, system
-from sys import exit
-from urllib.parse import urljoin
-from subprocess import Popen
-from time import sleep
-import psutil
-from getpass import getuser
-
-def expect_env(name):
- val = environ.get(name)
- if not val:
- print(f"{name} not defined. Please source the ~/activate file.")
- exit(1)
- return val
-
-def wait_merchant_up():
- # Check it started correctly and it is ready to serve requests.
- checks = 10
- url = urljoin(MERCHANT_BACKEND_BASE_URL, "/config")
- print("Check URL: {}".format(url))
- while checks > 0:
-
- try:
- resp = requests.get(url, timeout=5)
- except Exception:
- print("Merchant unreachable")
- sleep(1)
- checks -= 1
- continue
-
- if resp.status_code != 200:
- sleep(1)
- checks -= 1
- continue
-
- # Ready.
- print("Merchant is up and running")
- return True
-
- if checks == 0:
- print("Merchant is not correctly serving requests.")
- return False
-
-
-MERCHANT_BACKEND_BASE_URL = expect_env("TALER_ENV_MERCHANT_BACKEND")
-TALER_ENV_NAME = expect_env("TALER_ENV_NAME")
-TALER_CONFIG_CURRENCY = expect_env("TALER_CONFIG_CURRENCY")
-TALER_ENV_FRONTENDS_APITOKEN = expect_env("TALER_ENV_FRONTENDS_APITOKEN")
-authorization_header = {"Authorization": f"Bearer {TALER_ENV_FRONTENDS_APITOKEN}"}
-
-def ensure_instance(instance_id, name, payto_uris, auth):
-
- resp = requests.get(
- urljoin(MERCHANT_BACKEND_BASE_URL, f"management/instances/{instance_id}"),
- headers = authorization_header
- )
-
- # Instance exists, we PATCH the auth just in case it changed.
- if resp.status_code == 200:
- if instance_id != "Tutorial":
- print(f"Patching (auth of) instance '{instance_id}'")
- patch_resp = requests.post(
- urljoin(MERCHANT_BACKEND_BASE_URL,
- f"management/instances/{instance_id}/auth"),
- json=auth,
- headers = authorization_header
- )
- if patch_resp.status_code < 200 or patch_resp.status_code >= 300:
- print(f"Failed to update auth of '{instance_id}', backend responds: {patch_resp.status_code}/{patch_resp.text}")
- exit(1)
- return
-
- print(f"Instance '{instance_id}' not found, trying to create it.")
- req = dict(
- id=instance_id,
- name=name,
- payto_uris=payto_uris,
- address=dict(),
- jurisdiction=dict(),
- default_max_wire_fee=f"{TALER_CONFIG_CURRENCY}:1",
- default_wire_fee_amortization=3,
- default_max_deposit_fee=f"{TALER_CONFIG_CURRENCY}:1",
- default_wire_transfer_delay=dict(d_ms="forever"),
- default_pay_delay=dict(d_ms="forever"),
- auth=auth,
- )
- create_resp = requests.post(
- urljoin(MERCHANT_BACKEND_BASE_URL, "management/instances"),
- json=req,
- headers = authorization_header
- )
- if create_resp.status_code < 200 or create_resp.status_code >= 300:
- print(f"Could not create instance '{instance_id}', backend responds: {create_resp.status_code}/{create_resp.text}")
- exit(1)
-
-
-def is_merchant_running():
- for proc in psutil.process_iter():
- if proc.name() == "taler-merchant-httpd" and proc.username() == getuser():
- return True
- return False
-
-
-def ensure_default_instance():
- # Assumed is managed by ARM
- merchant_was_running = is_merchant_running()
- if merchant_was_running:
- print("Found running merchant, assuming is managed by ARM. Terminating it")
- system("taler-deployment-arm -k taler-merchant")
-
- checks = 10
- while checks > 0:
- if is_merchant_running():
- sleep(1)
- checks -= 1
- continue
- break
-
- if checks == 0:
- print("Could not stop the running merchant.")
- exit(1)
-
- print("Successfully terminating the merchant.")
- # ARM is _not_ running the merchant at this point.
- env_with_token = environ.copy()
- env_with_token["TALER_MERCHANT_TOKEN"] = TALER_ENV_FRONTENDS_APITOKEN
-
- print("Starting the merchant outside ARM, passing the token into the environment.")
- # Start the merchant natively.
- merchant = Popen(["taler-merchant-httpd"], env=env_with_token)
-
- if not wait_merchant_up():
- merchant.terminate()
- merchant.wait()
- exit(1)
-
- print("Merchant started successfully, creating the default instance now.")
- ensure_instance(
- "default",
- "default",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/Taler"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN)
- )
- # Native process can be terminated now.
- merchant.terminate()
- merchant.wait()
- print("Merchant terminated, restarting it via ARM now.")
-
- # Restarting the official ARM merchant.
- if merchant_was_running:
- system("taler-deployment-arm -i taler-merchant")
-
-ensure_default_instance()
-
-# Needs to wait here since the merchant got lastly restarted via ARM,
-# in the previous operation.
-if not wait_merchant_up():
- system("taler-deployment-arm -k taler-merchant")
- exit(1)
-
-ensure_instance(
- "blog",
- name="Blog",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/blog"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN),
-)
-
-ensure_instance(
- "donations",
- name="Donations",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/donations"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN),
-)
-
-ensure_instance(
- "survey",
- name="Survey",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/survey"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN),
-)
-
-ensure_instance(
- "pos",
- name="PoS",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/pos"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN),
-)
-
-ensure_instance(
- "GNUnet",
- name="GNUnet",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/GNUnet"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN),
-)
-
-# This instance relate to both the donation receiver and the sync service.
-ensure_instance(
- "Taler",
- name="Taler",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/Taler"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN),
-)
-
-ensure_instance(
- "Tor",
- name="Tor",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/Tor"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN),
-)
-
-ensure_instance(
- "anastasis",
- name="Tor",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/anastasis"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN),
-)
-
-# Note: this instance has a fixed secret-token, so as to allow anyone to easily
-# run their tutorial.
-ensure_instance(
- "Tutorial",
- name="Tutorial",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/Tutorial"],
- auth=dict(method="token", token="secret-token:sandbox")
-)
-
diff --git a/bin/taler-deployment-config-instances-iban b/bin/taler-deployment-config-instances-iban
deleted file mode 100755
index 2a5daef..0000000
--- a/bin/taler-deployment-config-instances-iban
+++ /dev/null
@@ -1,163 +0,0 @@
-#!/usr/bin/env python3
-
-"""
-This script makes sure that the merchant backend instances used by the
-test/demo environment are created.
-
-We assume that the merchant backend is running, and that the "~/activate"
-file has been sourced to provide the right environment variables.
-"""
-
-import requests
-from os import environ, system
-from sys import exit
-from urllib.parse import urljoin
-from subprocess import Popen
-from time import sleep
-import psutil
-from getpass import getuser
-
-ibans = dict(
- default = "ME00000000000000000000",
- # Must match the IBAN given in the prepare script, called IBAN_MERCHANT.
- blog = "ME00000000000000000001",
-)
-
-def expect_env(name):
- val = environ.get(name)
- if not val:
- print(f"{name} not defined. Please source the ~/activate file.")
- exit(1)
- return val
-
-def wait_merchant_up():
- # Check it started correctly and it is ready to serve requests.
- checks = 10
- url = urljoin(MERCHANT_BACKEND_BASE_URL, "/config")
- while checks > 0:
- try:
- resp = requests.get(url, timeout=5)
- except Exception:
- print("Merchant unreachable")
- sleep(1)
- checks -= 1
- continue
-
- if resp.status_code != 200:
- sleep(1)
- checks -= 1
- continue
-
- # Ready.
- return True
-
- print("Merchant is not correctly serving requests.")
- return False
-
-MERCHANT_BACKEND_BASE_URL = expect_env("TALER_ENV_MERCHANT_BACKEND")
-TALER_ENV_NAME = expect_env("TALER_ENV_NAME")
-TALER_CONFIG_CURRENCY = expect_env("TALER_CONFIG_CURRENCY")
-TALER_ENV_FRONTENDS_APITOKEN = expect_env("TALER_ENV_FRONTENDS_APITOKEN")
-authorization_header = {"Authorization": f"Bearer {TALER_ENV_FRONTENDS_APITOKEN}"}
-
-
-def ensure_instance(instance_id, name, payto_uris, auth):
- resp = requests.get(
- urljoin(MERCHANT_BACKEND_BASE_URL, f"management/instances/{instance_id}"),
- headers = authorization_header
- )
- req = dict(
- id=instance_id,
- name=name,
- payto_uris=payto_uris,
- address=dict(),
- jurisdiction=dict(),
- default_max_wire_fee=f"{TALER_CONFIG_CURRENCY}:1",
- default_wire_fee_amortization=3,
- default_max_deposit_fee=f"{TALER_CONFIG_CURRENCY}:1",
- default_wire_transfer_delay=dict(d_ms="forever"),
- default_pay_delay=dict(d_ms="forever"),
- auth=auth,
- )
- http_method = requests.post
- endpoint = "management/instances"
- # Instance exists, patching it.
- if resp.status_code == 200:
- if instance_id != "Tutorial":
- print(f"Patching instance '{instance_id}'")
- http_method = requests.patch
- endpoint = f"management/instances/{instance_id}"
- resp = http_method(
- urljoin(MERCHANT_BACKEND_BASE_URL, endpoint),
- json=req,
- headers = authorization_header
- )
- if resp.status_code < 200 or resp.status_code >= 300:
- print(f"Could not create (or patch) instance '{instance_id}', backend responds: {resp.status_code}/{resp.text}")
- exit(1)
-
-def is_merchant_running():
- for proc in psutil.process_iter():
- if proc.name() == "taler-merchant-httpd" and proc.username() == getuser():
- return True
- return False
-
-
-def ensure_default_instance():
- # Assumed is managed by ARM
- merchant_was_running = is_merchant_running()
- if merchant_was_running:
- print("Found running merchant, assuming is managed by ARM. Terminating it")
- system("taler-deployment-arm -k taler-merchant")
-
- checks = 10
- while checks > 0:
- if is_merchant_running():
- sleep(1)
- checks -= 1
- continue
- break
-
- if checks == 0:
- print("Could not stop the running merchant.")
- exit(1)
- # ARM is _not_ running the merchant at this point.
- env_with_token = environ.copy()
- env_with_token["TALER_MERCHANT_TOKEN"] = TALER_ENV_FRONTENDS_APITOKEN
-
- print("Starting the merchant outside ARM to pass the token into the environment.")
- # Start the merchant natively.
- merchant = Popen(["taler-merchant-httpd"], env=env_with_token)
-
- if not wait_merchant_up():
- merchant.terminate()
- merchant.wait()
- exit(1)
-
- print("Merchant started successfully, creating the default instance now.")
- ensure_instance(
- "default",
- "default",
- payto_uris=[f"payto://sepa/bank.{TALER_ENV_NAME}.taler.net/{ibans.get('default')}"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN)
- )
- # Native process can be terminated now.
- merchant.terminate()
- merchant.wait()
-
-ensure_default_instance()
-print("Restarting merchant _with_ ARM, to create other non-default instances.")
-system("taler-deployment-arm -s")
-system("taler-deployment-arm -i taler-merchant")
-wait_merchant_up()
-
-ensure_instance(
- "blog",
- name="Blog",
- payto_uris=[f"payto://sepa/bank.{TALER_ENV_NAME}.taler.net/{ibans.get('blog')}"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN),
-)
-print("Stopping the ARM merchant")
-system("taler-deployment-arm -k taler-merchant")
-# NOTE: ARM itself will be stopped by the main prepare script.
-# Stopping here will result in indefinite wait at the caller.
diff --git a/bin/taler-deployment-config-tips b/bin/taler-deployment-config-tips
deleted file mode 100755
index 7baff4a..0000000
--- a/bin/taler-deployment-config-tips
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/bash
-
-set -eu
-
-function join_no_double_slash {
- echo "$1$2" | sed -s 's/\([^:]\)\/\+/\1\//g'
-}
-
-BANK_URL=$(taler-config -s bank -o base_url)
-MERCHANT_URL=$(taler-config -s frontends -o backend)
-CURRENCY=$(taler-config -s taler -o currency)
-EXCHANGE_URL=$(taler-config -s exchange -o base_url)
-WIRE_METHOD="x-taler-bank"
-APIKEY=$(taler-config -s frontends -o backend_apikey)
-
-PAYTO_WITH_SUBJECT=$(taler-merchant-setup-reserve \
- --amount="${CURRENCY}:50" \
- --exchange-url=${EXCHANGE_URL} \
- --merchant-url=$(join_no_double_slash ${MERCHANT_URL} "/instances/survey/") \
- --wire-method=${WIRE_METHOD} \
- --apikey="Bearer ${APIKEY}")
-echo "Merchant generated the following tip-reserve: $PAYTO_WITH_SUBJECT"
-taler-bank-manage django wire_transfer_payto Survey x ${PAYTO_WITH_SUBJECT} "${CURRENCY}:50"
-echo Paid for tip reserve.
diff --git a/bin/taler-deployment-dbstart b/bin/taler-deployment-dbstart
deleted file mode 100755
index 2b740ee..0000000
--- a/bin/taler-deployment-dbstart
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env bash
-
-# Start the local database used for Taler if necessary (because we're a
-# standalone environment) and possible.
-
-set -eu
-
-base=$HOME
-
-export PATH="$base/deployment":$PATH
-
-ulimit -c $((100 * 1024))
-
-cd $HOME
-
-if [[ ! -e ~/local/bin/gnunet-arm ]]; then
- echo "not starting database, since gnunet-arm is not installed"
- exit
-fi
-
-if [ "${TALER_CONFIG_STANDALONE:-0}" = 1 ]; then
- taler-deployment-arm -s
- taler-deployment-arm -i taler-postgres-standalone
-fi
diff --git a/bin/taler-deployment-prepare b/bin/taler-deployment-prepare
deleted file mode 100755
index d40291e..0000000
--- a/bin/taler-deployment-prepare
+++ /dev/null
@@ -1,271 +0,0 @@
-#!/bin/bash
-
-# Prepare a deployment for execution:
-# * generate the configuration and setup database
-# * put keys in the right place
-# * set bank password for the exchange
-# * sign the exchange's wire response
-# * run some sanity checks (FIXME: not done yet!)
-
-set -eu
-
-source "$HOME/activate"
-
-# $1 = {yes,no} indicates WITH_DB_RESET. Defaults to no.
-# Helps avoiding color Y destroying the DB while color X is in
-# production.
-WITH_DB_RESET=${1-no}
-
-if [[ -z ${TALER_ENV_NAME+x} ]]; then
- echo "TALER_ENV_NAME not set"
- exit 1
-fi
-
-if [[ -z ${TALER_CONFIG_CURRENCY+x} ]]; then
- echo "TALER_CONFIG_CURRENCY not set"
- exit 1
-fi
-
-# The script stops what started along the flow.
-# This function should help against processes left
-# somehow running.
-function stop_running() {
- taler-deployment-stop
- for n in `jobs -p`
- do
- kill $n 2> /dev/null || true
- done
- wait
-}
-
-trap "stop_running" EXIT
-
-function generate_config() {
- EXCHANGE_PUB=$(gnunet-ecc -p "$HOME/deployment/private-keys/${TALER_ENV_NAME}-exchange-master.priv")
-
- mkdir -p "$HOME/.config"
-
- taler-deployment-config-generate \
- --exchange-pub "$EXCHANGE_PUB" \
- --currency "$TALER_CONFIG_CURRENCY" \
- --outdir "$HOME/.config" \
- --envname "$TALER_ENV_NAME" \
- --frontends-apitoken "$TALER_ENV_FRONTENDS_APITOKEN"
-}
-
-##
-## Step 1: Generate config
-##
-
-case $TALER_ENV_NAME in
- tanker|demo|test|int|local)
- generate_config
- ;;
- *)
- echo "Not generating config for env $TALER_ENV_NAME"
- ;;
-esac
-
-##
-## Step 1b: initialize database
-##
-
-if test $WITH_DB_RESET = yes; then
- taler-exchange-dbinit --reset
-fi
-
-##
-## Step 2: Copy key material and update denom keys
-##
-
-# For demo, make sure the link to shared data between demo-blue and demo-green is
-# set up properly.
-case $TALER_ENV_NAME in
- demo)
- echo "linking taler-data"
- ln -sfT ~demo/shared-data ~/taler-data
- # Check if we won't mess up permissions later
- if [[ ! -g ~/taler-data ]]; then
- echo "the shared-data directory should have the set-group-id bit set"
- exit 1
- fi
- ;;
-esac
-
-case $TALER_ENV_NAME in
- demo|test|int|local)
- EXCHANGE_PUB=$(gnunet-ecc -p "$HOME/deployment/private-keys/${TALER_ENV_NAME}-exchange-master.priv")
- EXCHANGE_PRIV_FILE=$(taler-config -f -s exchange-offline -o master_priv_file)
- if [[ -e "$EXCHANGE_PRIV_FILE" ]]; then
- EXCHANGE_PUB2=$(gnunet-ecc -p "$EXCHANGE_PRIV_FILE")
- if [[ "$EXCHANGE_PUB" != "$EXCHANGE_PUB2" ]]; then
- echo "Warning: Different exchange private key already exists, not copying"
- fi
- else
- mkdir -p "$(dirname "$EXCHANGE_PRIV_FILE")"
- cp "$HOME/deployment/private-keys/${TALER_ENV_NAME}-exchange-master.priv" "$EXCHANGE_PRIV_FILE"
- fi
- ;;
- *)
- echo "Not copying key material for env $TALER_ENV_NAME"
- ;;
-esac
-
-EXCHANGE_MASTER_PUB=$(taler-config -s exchange -o master_public_key)
-taler-auditor-exchange \
- -m "$EXCHANGE_MASTER_PUB" \
- -u "$(taler-config -s exchange -o base_url)" || true
-
-# Make configuration accessible to auditor
-chmod 750 "$HOME/.config"
-
-
-##
-## Step 3: Set up the exchange key material
-##
-
-taler-deployment-arm -s
-
-# Quickly start+shutdown exchange httpd and crypto SM helpers
-taler-deployment-arm -i taler-exchange
-taler-deployment-arm -i taler-exchange-secmod-rsa
-taler-deployment-arm -i taler-exchange-secmod-eddsa
-
-sleep 2 # FIXME: poll keys?
-if ! taler-deployment-arm -I | grep "^taler-exchange" | grep "status=started" > /dev/null; then
- echo "Exchange didn't start, cannot set up keys"
- exit 1
-fi
-if ! taler-deployment-arm -I | grep "^taler-exchange-secmod-rsa" | grep "status=started" > /dev/null; then
- echo "Exchange (RSA module) didn't start, cannot set up keys."
- exit 1
-fi
-
-if ! taler-deployment-arm -I | grep "^taler-exchange-secmod-eddsa" | grep "status=started" > /dev/null; then
- echo "Exchange (EDDSA module) didn't start, cannot set up keys."
- exit 1
-fi
-
-taler-exchange-offline download sign upload
-
-payto_uri=$(taler-config -s exchange-account-1 -o payto_uri)
-taler-exchange-offline enable-account "$payto_uri" upload
-
-# Set up wire fees for next 5 years
-year=$(date +%Y)
-curr=$TALER_CONFIG_CURRENCY
-for y in $(seq $year $((year + 5))); do
- taler-exchange-offline wire-fee $y x-taler-bank "$curr:0.01" "$curr:0.01" upload
-done
-
-taler-deployment-arm -k taler-exchange
-taler-deployment-arm -k taler-exchange-secmod-rsa
-taler-deployment-arm -k taler-exchange-secmod-eddsa
-
-# Give time to store to disk.
-sleep 5
-
-##
-## Step 4: Set up the bank
-##
-
-# Delete existing data from bank.
-if test $WITH_DB_RESET = yes; then
- echo "yes" | taler-bank-manage django flush
-fi
-
-case $TALER_ENV_NAME in
- demo|test|int|local|tanker)
- taler-bank-manage django provide_accounts
- taler-bank-manage django changepassword_unsafe Exchange x
- taler-bank-manage django changepassword_unsafe Survey x
- ;;
- *)
- echo "Not setting unsafe Exchange bank account password for env $TALER_ENV_NAME"
- ;;
-esac
-
-
-##
-## Step 5: Adjust some permissions
-##
-
-case $TALER_ENV_NAME in
- demo|test|int)
- # Make sure the web server can read ~/local
- chmod og+rx ~/local
-
- # Make sure that shared files created by this user
- # are group writable and readable.
- find ~/taler-data/ -user "$USER" -exec chmod g+rw {} \;
- ;;
- *)
- ;;
-esac
-
-##
-## Step 6: Set up merchant
-##
-
-if test $WITH_DB_RESET = yes; then
- taler-merchant-dbinit --reset
-else
- taler-merchant-dbinit
-fi
-
-# Making sure ARM is not running yet.
-taler-deployment-arm -e
-
-# Need the following services to config instances and tip reserve:
-taler-deployment-arm -s
-taler-deployment-arm -i taler-merchant
-taler-deployment-arm -i taler-demobank
-
-taler-deployment-arm -i taler-exchange
-taler-deployment-arm -i taler-exchange-secmod-rsa
-taler-deployment-arm -i taler-exchange-secmod-eddsa
-sleep 5
-
-if ! taler-deployment-arm -I | grep "^taler-merchant" | grep "status=started" > /dev/null; then
- echo "Merchant didn't start, cannot configure instances / create tip reserve."
- exit 1
-fi
-
-if ! taler-deployment-arm -I | grep "^taler-demobank" | grep "status=started" > /dev/null; then
- echo "Bank didn't start, cannot create tip reserve."
- exit 1
-fi
-
-if ! taler-deployment-arm -I | grep "^taler-exchange" | grep "status=started" > /dev/null; then
- echo "Exchange didn't start, cannot create tip reserve."
- exit 1
-fi
-
-if ! taler-deployment-arm -I | grep "^taler-exchange-secmod-rsa" | grep "status=started" > /dev/null; then
- echo "Exchange (RSA module) didn't start, cannot create tip reserve."
- exit 1
-fi
-
-if ! taler-deployment-arm -I | grep "^taler-exchange-secmod-eddsa" | grep "status=started" > /dev/null; then
- echo "Exchange (EDDSA module) didn't start, cannot create tip reserve."
- exit 1
-fi
-
-echo "Configuring instances"
-taler-deployment-config-instances
-
-echo "Creating tip reserve"
-taler-deployment-config-tips
-
-taler-deployment-arm -k taler-merchant
-taler-deployment-arm -k taler-demobank
-taler-deployment-arm -k taler-exchange
-taler-deployment-arm -k taler-exchange-secmod-rsa
-taler-deployment-arm -k taler-exchange-secmod-eddsa
-taler-deployment-arm -e
-
-##
-## Step 7: Set up anastasis
-##
-
-anastasis-dbinit
diff --git a/bin/taler-deployment-prepare-with-eufin b/bin/taler-deployment-prepare-with-eufin
deleted file mode 100755
index 358029e..0000000
--- a/bin/taler-deployment-prepare-with-eufin
+++ /dev/null
@@ -1,421 +0,0 @@
-#!/bin/bash
-
-# Prepare a deployment for execution:
-# * generate the configuration and setup database
-# * put keys in the right place
-# * set bank password for the exchange
-# * sign the exchange's wire response
-# * run some sanity checks (FIXME: not done yet!)
-
-set -eu
-
-source "$HOME/activate"
-
-# $1 = {yes,no} indicates WITH_DB_RESET. Defaults to no.
-# Helps avoiding color Y destroying the DB while color X is in
-# production.
-WITH_DB_RESET=${1-no}
-
-if [[ -z ${TALER_ENV_NAME+x} ]]; then
- echo "TALER_ENV_NAME not set"
- exit 1
-fi
-
-if [[ -z ${TALER_CONFIG_CURRENCY+x} ]]; then
- echo "TALER_CONFIG_CURRENCY not set"
- exit 1
-fi
-
-# The script stops what started along the flow.
-# This function should help against processes left
-# somehow running.
-function stop_running() {
- taler-deployment-stop
- for n in `jobs -p`
- do
- kill $n 2> /dev/null || true
- done
- wait
-}
-
-trap "stop_running" EXIT
-
-
-export IBAN_EXCHANGE="EX00000000000000000000"
-function generate_config() {
- EXCHANGE_PUB=$(gnunet-ecc -p "$HOME/deployment/private-keys/${TALER_ENV_NAME}-exchange-master.priv")
-
- mkdir -p "$HOME/.config"
-
- taler-deployment-config-generate \
- --exchange-pub "$EXCHANGE_PUB" \
- --currency "$TALER_CONFIG_CURRENCY" \
- --outdir "$HOME/.config" \
- --envname "$TALER_ENV_NAME" \
- --frontends-apitoken "$TALER_ENV_FRONTENDS_APITOKEN"
-
-taler-config -s exchange-account-1 \
- -o PAYTO_URI \
- -V "payto://sepa/bank.${TALER_ENV_NAME}.taler.net/eufin/sandbox/$IBAN_EXCHANGE"
-}
-
-##
-## Step 1: Generate config
-##
-
-echo -n "Generating configuration.."
-case $TALER_ENV_NAME in
- tanker|demo|test|int|local)
- generate_config
- ;;
- *)
- echo "Not generating config for env $TALER_ENV_NAME"
- ;;
-esac
-echo " OK"
-##
-## Step 1b: initialize database
-##
-if test $WITH_DB_RESET = resetDb; then
- echo -n "Reset and init exchange DB.."
- taler-exchange-dbinit --reset
- echo " OK"
-fi
-##
-## Step 2: Copy key material and update denom keys
-##
-
-# For demo, make sure the link to shared data between demo-blue and demo-green is
-# set up properly.
-case $TALER_ENV_NAME in
- demo)
- echo -n "Syminking demo's taler-data/ to the color's home directory.."
- ln -sfT ~demo/shared-data ~/taler-data
- # Check if we won't mess up permissions later
- if [[ ! -g ~/taler-data ]]; then
- echo "the shared-data directory should have the set-group-id bit set"
- exit 1
- fi
- echo " OK"
- ;;
-esac
-
-echo -n "Trying to copy the exchange private key from deployment.git.."
-case $TALER_ENV_NAME in
- demo|test|int|local)
- EXCHANGE_PUB=$(gnunet-ecc -p "$HOME/deployment/private-keys/${TALER_ENV_NAME}-exchange-master.priv")
- EXCHANGE_PRIV_FILE=$(taler-config -f -s exchange-offline -o master_priv_file)
- if [[ -e "$EXCHANGE_PRIV_FILE" ]]; then
- EXCHANGE_PUB2=$(gnunet-ecc -p "$EXCHANGE_PRIV_FILE")
- if [[ "$EXCHANGE_PUB" != "$EXCHANGE_PUB2" ]]; then
- echo "Warning: Different exchange private key already exists, not copying"
- fi
- else
- mkdir -p "$(dirname "$EXCHANGE_PRIV_FILE")"
- cp "$HOME/deployment/private-keys/${TALER_ENV_NAME}-exchange-master.priv" "$EXCHANGE_PRIV_FILE"
- fi
- ;;
- *)
- echo "Not copying key material for env $TALER_ENV_NAME"
- ;;
-esac
-echo " OK"
-
-echo -n "Add this exchange to the auditor..."
-EXCHANGE_MASTER_PUB=$(taler-config -s exchange -o master_public_key)
-taler-auditor-exchange \
- -m "$EXCHANGE_MASTER_PUB" \
- -u "$(taler-config -s exchange -o base_url)" || true
-# Make configuration accessible to auditor
-chmod 750 "$HOME/.config"
-echo " OK"
-
-##
-## Step 3: Set up the exchange key material
-##
-
-echo -n "Setup exchange's key material.."
-taler-deployment-arm -s
-
-# Quickly start+shutdown exchange httpd and crypto SM helpers
-taler-deployment-arm -i taler-exchange
-taler-deployment-arm -i taler-exchange-secmod-rsa
-taler-deployment-arm -i taler-exchange-secmod-eddsa
-
-sleep 2 # FIXME: poll keys?
-if ! taler-deployment-arm -I | grep "^taler-exchange" | grep "status=started" > /dev/null; then
- echo "Exchange didn't start, cannot set up keys"
- exit 1
-fi
-if ! taler-deployment-arm -I | grep "^taler-exchange-secmod-rsa" | grep "status=started" > /dev/null; then
- echo "Exchange (RSA module) didn't start, cannot set up keys."
- exit 1
-fi
-
-if ! taler-deployment-arm -I | grep "^taler-exchange-secmod-eddsa" | grep "status=started" > /dev/null; then
- echo "Exchange (EDDSA module) didn't start, cannot set up keys."
- exit 1
-fi
-
-taler-exchange-offline download sign upload
-
-payto_uri=$(taler-config -s exchange-account-1 -o payto_uri)
-taler-exchange-offline enable-account "$payto_uri" upload
-
-# Set up wire fees for next 5 years
-year=$(date +%Y)
-curr=$TALER_CONFIG_CURRENCY
-for y in $(seq $year $((year + 5))); do
- taler-exchange-offline wire-fee $y x-taler-bank "$curr:0.01" "$curr:0.01" upload
-done
-
-taler-deployment-arm -k taler-exchange
-taler-deployment-arm -k taler-exchange-secmod-rsa
-taler-deployment-arm -k taler-exchange-secmod-eddsa
-echo " OK"
-# Give time to store to disk.
-sleep 5
-
-##
-## Step 4: Set up euFin
-##
-
-if test $WITH_DB_RESET = resetDb; then
- echo -n "Resetting euFin databases.."
- # NOTE/FIXME: those values _could_ be extracted from
- # the environment, as this one contains already the DB
- # connection strings.
- rm ~/nexus.sqlite
- rm ~/sandbox.sqlite
- echo " OK"
-fi
-export LIBEUFIN_SANDBOX_USERNAME="admin"
-export LIBEUFIN_SANDBOX_PASSWORD=${LIBEUFIN_ENV_SANDBOX_ADMIN_PASSWORD}
-# $1 = ebics user id, $2 = ebics partner, $3 = bank connection name
-# $4 = bank account name local to Nexus, $5 = bank account name as known
-# by Sandbox
-function prepare_nexus_account() {
- echo -n "Making bank connection $3 ..."
- libeufin-cli connections new-ebics-connection \
- --ebics-url="${SANDBOX_URL}ebicsweb" \
- --host-id=$EBICS_HOST \
- --partner-id=$2 \
- --ebics-user-id=$1 \
- $3 > /dev/null
- echo " OK"
- echo -n "Connecting $3 ..."
- libeufin-cli connections connect $3 > /dev/null
- echo " OK"
- echo -n "Importing Sandbox bank account ($5) to Nexus ($4) ..."
- libeufin-cli connections download-bank-accounts $3 > /dev/null
- libeufin-cli connections import-bank-account \
- --offered-account-id=$5 --nexus-bank-account-id=$4 $3 > /dev/null
- echo " OK"
- # Set how often the automatic routing must fetch the bank account.
- echo -n "Setting background payment initiator.."
- libeufin-cli accounts task-schedule $4 \
- --task-type="submit" \
- --task-name='submit-payments-every-second' \
- --task-cronspec='* * *'
- echo " OK"
- echo -n "Setting background history fetch.."
- libeufin-cli accounts task-schedule $4 \
- --task-type="fetch" \
- --task-name='fetch-reports-every-second' \
- --task-cronspec='* * *' \
- --task-param-level=report \
- --task-param-range-type=latest
- echo " OK"
-}
-
-# $1=ebics username, $2=ebics partner name,
-# $3=person name, $4=sandbox bank account name, $5=iban
-function prepare_sandbox_account() {
- echo -n "Activating ebics subscriber $1 at the sandbox ..."
- libeufin-cli \
- sandbox --sandbox-url=$SANDBOX_URL \
- ebicssubscriber create \
- --host-id=$EBICS_HOST \
- --partner-id=$2 \
- --user-id=$1
- echo " OK"
- echo -n "Giving a bank account ($4) to $1 ..."
- libeufin-cli \
- sandbox --sandbox-url=$SANDBOX_URL \
- ebicsbankaccount create \
- --iban=$5 \
- --bic="BCMAESM1XXX"\
- --person-name="$3" \
- --account-name=$4 \
- --ebics-user-id=$1 \
- --ebics-host-id=$EBICS_HOST \
- --ebics-partner-id=$2 \
- --currency=$TALER_CONFIG_CURRENCY
- echo " OK"
-}
-
-NEXUS_URL="http://localhost:5222/"
-SANDBOX_URL="http://localhost:5111/"
-
-echo -n "Making Sandbox superuser..."
-libeufin-sandbox superuser admin --password=${LIBEUFIN_ENV_SANDBOX_ADMIN_PASSWORD}
-echo " OK"
-
-echo -n "Lunching Sandbox..."
-taler-deployment-arm -i libeufin-sandbox
-
-if ! curl -s --retry 5 --retry-connrefused $SANDBOX_URL > /dev/null; then
- echo "Could not launch Sandbox"
- stop_running
- exit 1
-fi
-echo " OK"
-
-echo -n "Launching Nexus..."
-taler-deployment-arm -i libeufin-nexus
-if ! curl -s --retry 5 --retry-connrefused $NEXUS_URL > /dev/null; then
- echo "Could not launch Nexus"
- stop_running
- exit 1
-fi
-echo " OK"
-
-EBICS_HOST="ebicsDeployedHost"
-
-echo -n "Make Sandbox EBICS host..."
-libeufin-cli \
- sandbox --sandbox-url=$SANDBOX_URL \
- ebicshost create \
- --host-id=$EBICS_HOST
-echo " OK"
-
-export IBAN_MERCHANT="ME00000000000000000001"
-export IBAN_CUSTOMER="WA00000000000000000000"
-
-# note: Ebisc schema doesn't allow dashed names.
-prepare_sandbox_account \
- ebicsuserExchange \
- ebicspartnerExchange \
- "Person Exchange" \
- sandbox-account-exchange \
- $IBAN_EXCHANGE
-prepare_sandbox_account \
- ebicsuserMerchant \
- ebicspartnerMerchant \
- "Person Merchant" \
- sandbox-account-merchant \
- $IBAN_MERCHANT
-prepare_sandbox_account \
- ebicsuserCustomer \
- ebicspartnerCustomer \
- "Person Customer" \
- sandbox-account-customer \
- $IBAN_CUSTOMER
-
-# Only the exchange needs Nexus.
-EXCHANGE_NEXUS_USERNAME=exchange-nexus-user
-EXCHANGE_NEXUS_PASSWORD=exchange-nexus-password
-echo -n "Make Nexus superuser ..."
-libeufin-nexus superuser $EXCHANGE_NEXUS_USERNAME --password=$EXCHANGE_NEXUS_PASSWORD
-echo " OK"
-export LIBEUFIN_NEXUS_URL=$NEXUS_URL
-export LIBEUFIN_NEXUS_USERNAME=$EXCHANGE_NEXUS_USERNAME
-export LIBEUFIN_NEXUS_PASSWORD=$EXCHANGE_NEXUS_PASSWORD
-
-# FIXME: this command below likely not needed. Please
-# remove, run the test, and commit+push if it still works!
-prepare_nexus_account \
- ebicsuserExchange \
- ebicspartnerExchange \
- bankconnection-exchange \
- nexus-bankaccount-exchange \
- sandbox-account-exchange
-
-echo -n "Create Taler facade ..."
-libeufin-cli facades new-taler-wire-gateway-facade \
- --currency=$TALER_CONFIG_CURRENCY \
- --facade-name=facade-exchange \
- bankconnection-exchange nexus-bankaccount-exchange
-echo " OK"
-FACADE_URL=$(libeufin-cli facades list | jq .facades[0].baseUrl | tr -d \")
-
-taler-deployment-arm -k libeufin-nexus
-taler-deployment-arm -k libeufin-sandbox
-
-# Point the exchange to the facade.
-taler-config -s exchange-accountcredentials-1 \
- -o WIRE_GATEWAY_URL \
- -V "${FACADE_URL}"
-
-taler-config -s exchange-accountcredentials-1 \
- -o USERNAME \
- -V "${EXCHANGE_NEXUS_USERNAME}"
-
-taler-config -s exchange-accountcredentials-1 \
- -o PASSWORD \
- -V "${EXCHANGE_NEXUS_PASSWORD}"
-
-
-##
-## Step 5: Adjust some permissions
-##
-
-case $TALER_ENV_NAME in
- demo|test|int)
- # Make sure the web server can read ~/local
- chmod og+rx ~/local
-
- # Make sure that shared files created by this user
- # are group writable and readable.
- find ~/taler-data/ -user "$USER" -exec chmod g+rw {} \;
- ;;
- *)
- ;;
-esac
-
-##
-## Step 6: Set up merchant
-##
-
-if test $WITH_DB_RESET = resetDb; then
- echo -n "Reset and init merchant database.."
- taler-merchant-dbinit --reset
- echo " OK"
-fi
-
-# Making sure ARM is not running yet.
-taler-deployment-arm -e
-
-# Need the following services to config instances and tip reserve:
-taler-deployment-arm -s
-
-taler-deployment-arm -i taler-exchange
-taler-deployment-arm -i taler-exchange-secmod-rsa
-taler-deployment-arm -i taler-exchange-secmod-eddsa
-sleep 5
-
-if ! taler-deployment-arm -I | grep "^taler-exchange" | grep "status=started" > /dev/null; then
- echo "Exchange didn't start, cannot create tip reserve."
- exit 1
-fi
-
-if ! taler-deployment-arm -I | grep "^taler-exchange-secmod-rsa" | grep "status=started" > /dev/null; then
- echo "Exchange (RSA module) didn't start, cannot create tip reserve."
- exit 1
-fi
-
-if ! taler-deployment-arm -I | grep "^taler-exchange-secmod-eddsa" | grep "status=started" > /dev/null; then
- echo "Exchange (EDDSA module) didn't start, cannot create tip reserve."
- exit 1
-fi
-
-echo "Configuring instances"
-taler-deployment-config-instances-iban
-echo "Stopping all the services"
-# The following three commands should be removed,
-# because the last one is already supposed to stop
-# all the running ones.
-taler-deployment-arm -k taler-exchange
-taler-deployment-arm -k taler-exchange-secmod-rsa
-taler-deployment-arm -k taler-exchange-secmod-eddsa
-taler-deployment-arm -e
diff --git a/bin/taler-deployment-restart b/bin/taler-deployment-restart
deleted file mode 100755
index 88eed4e..0000000
--- a/bin/taler-deployment-restart
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/bash
-
-set -eu
-
-base=$HOME
-
-export PATH="$base/deployment/bin":$PATH
-
-# might fail if invoked from another script with ulimit
-ulimit -c $((100 * 1024)) &>/dev/null || true
-
-cd $HOME
-
-if taler-deployment-arm -T 300ms -I &>/dev/null; then
- # looks like deployment is running, stop it
- taler-deployment-arm -e -T 10s &>/dev/null
-fi
-
-exec taler-deployment-start
diff --git a/bin/taler-deployment-restart-with-eufin b/bin/taler-deployment-restart-with-eufin
deleted file mode 100755
index 0f945bd..0000000
--- a/bin/taler-deployment-restart-with-eufin
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/bash
-
-set -eu
-
-base=$HOME
-
-export PATH="$base/deployment/bin":$PATH
-
-# might fail if invoked from another script with ulimit
-ulimit -c $((100 * 1024)) &>/dev/null || true
-
-cd $HOME
-
-if taler-deployment-arm -T 300ms -I &>/dev/null; then
- # looks like deployment is running, stop it
- taler-deployment-arm -e -T 10s &>/dev/null
-fi
-
-exec taler-deployment-start-with-eufin
diff --git a/bin/taler-deployment-start b/bin/taler-deployment-start
deleted file mode 100755
index 271a7e9..0000000
--- a/bin/taler-deployment-start
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/bin/bash
-
-set -eu
-
-export PATH="$HOME/deployment":$PATH
-
-# might fail if invoked from another script with ulimit
-ulimit -c $((100 * 1024)) &>/dev/null || true
-
-cd $HOME
-
-taler_config_file=$HOME/.config/taler.conf
-
-if [[ ! -e "$taler_config_file" ]]; then
- echo "taler config file ($taler_config_file) missing"
- exit 1
-fi
-
-taler-deployment-arm -s
-
-taler-deployment-arm -i taler-exchange
-taler-deployment-arm -i taler-exchange-secmod-eddsa
-taler-deployment-arm -i taler-exchange-secmod-rsa
-taler-deployment-arm -i taler-auditor
-taler-deployment-arm -i taler-merchant
-taler-deployment-arm -i taler-demobank
-taler-deployment-arm -i taler-donations
-taler-deployment-arm -i taler-blog
-taler-deployment-arm -i taler-landing
-taler-deployment-arm -i taler-survey
-taler-deployment-arm -i taler-aggregator
-taler-deployment-arm -i taler-exchange-wirewatch
-taler-deployment-arm -i taler-sync
-taler-deployment-arm -i taler-transfer
-taler-deployment-arm -i anastasis
-
-if $(taler-config -s twister -o taler_deploy >& /dev/null); then
- taler-deployment-arm -i taler-twister
- taler-deployment-arm -i taler-twister-exchange
- taler-deployment-arm -i taler-twister-bank
-fi
-
-exit 0
diff --git a/bin/taler-deployment-start-with-eufin b/bin/taler-deployment-start-with-eufin
deleted file mode 100755
index ea97734..0000000
--- a/bin/taler-deployment-start-with-eufin
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/bin/bash
-
-set -eu
-
-export PATH="$HOME/deployment":$PATH
-
-# might fail if invoked from another script with ulimit
-ulimit -c $((100 * 1024)) &>/dev/null || true
-
-cd $HOME
-
-taler_config_file=$HOME/.config/taler.conf
-
-if [[ ! -e "$taler_config_file" ]]; then
- echo "taler config file ($taler_config_file) missing"
- exit 1
-fi
-
-taler-deployment-arm -s
-
-taler-deployment-arm -i taler-exchange
-taler-deployment-arm -i taler-exchange-secmod-eddsa
-taler-deployment-arm -i taler-exchange-secmod-rsa
-taler-deployment-arm -i taler-auditor
-taler-deployment-arm -i taler-merchant
-taler-deployment-arm -i libeufin-sandbox
-taler-deployment-arm -i libeufin-nexus
-taler-deployment-arm -i taler-donations
-taler-deployment-arm -i taler-blog
-taler-deployment-arm -i taler-landing
-taler-deployment-arm -i taler-survey
-taler-deployment-arm -i taler-aggregator
-taler-deployment-arm -i taler-exchange-wirewatch
-taler-deployment-arm -i taler-sync
-taler-deployment-arm -i taler-transfer
-taler-deployment-arm -i anastasis
-
-if $(taler-config -s twister -o taler_deploy >& /dev/null); then
- taler-deployment-arm -i taler-twister
- taler-deployment-arm -i taler-twister-exchange
- taler-deployment-arm -i taler-twister-bank
-fi
-
-exit 0
diff --git a/bin/taler-deployment-stop b/bin/taler-deployment-stop
deleted file mode 100755
index e08ee71..0000000
--- a/bin/taler-deployment-stop
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-
-base=$HOME
-
-export PATH="$base/deployment":$PATH
-
-# might fail if invoked from another script
-ulimit -c $((100 * 1024)) &>/dev/null
-
-cd $HOME
-
-taler-deployment-arm -e -T 10s &>/dev/null
diff --git a/bin/taler-log-adapter b/bin/taler-log-adapter
deleted file mode 100755
index 07321fa..0000000
--- a/bin/taler-log-adapter
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python3
-# This file is part of GNU TALER.
-# Copyright (C) 2018 INRIA
-#
-# TALER is free software; you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free Software
-# Foundation; either version 2.1, or (at your option) any later version.
-#
-# TALER is distributed in the hope that it will be useful, but WITHOUT ANY
-# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
-# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License along with
-# GNU TALER; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
-#
-# @author Florian Dold
-
-"""
-Wrapper for programs that log to stderr. Redirects logs to a file specified by
-a path with strfmt-style placeholders in it.
-"""
-
-from subprocess import Popen, PIPE
-import sys
-import os
-import os.path
-import signal
-import time
-
-def handler(signum, frame):
- if p:
- os.kill(p.pid, signum)
- else:
- sys.exit(-1)
-
-def touchp(path):
- dir = os.path.dirname(path)
- if dir:
- os.makedirs(dir, exist_ok=True)
-
-if len(sys.argv) < 3:
- print("Usage: {} logfile prog_and_args...".format(sys.argv[0]), file=sys.stderr)
- sys.exit(-1)
-
-p = None
-catchable_sigs = set(signal.Signals) - {signal.SIGKILL, signal.SIGSTOP}
-for sig in catchable_sigs:
- signal.signal(sig, handler)
-p = Popen(sys.argv[2:], stderr=PIPE, shell=False)
-
-log = sys.argv[1]
-last_name = None
-
-while p.poll() is None:
- full_name = time.strftime(log)
- if full_name != last_name:
- touchp(full_name)
- last_name = full_name
- last_read = p.stderr.readline()
- if last_read == '':
- break
- with open(full_name, "ab") as f:
- f.write(last_read)
-
-status = p.wait()
-sys.exit(status)
diff --git a/bin/taler_urls.py b/bin/taler_urls.py
deleted file mode 100644
index 8c81e38..0000000
--- a/bin/taler_urls.py
+++ /dev/null
@@ -1,56 +0,0 @@
-from urllib.parse import urlparse
-
-taler_urls = dict(
- online = dict(
- donations = "https://donations.{}.taler.net/",
- blog = "https://shop.{}.taler.net/",
- bank = "https://bank.{}.taler.net/",
- backoffice = "https://backoffice.{}.taler.net/",
- exchange = "https://exchange.{}.taler.net/",
- merchant_backend = "https://backend.{}.taler.net/",
- landing = "https://{}.taler.net/",
- survey = "https://survey.{}.taler.net/",
- auditor = "https://auditor.{}.taler.net/",
- sync = "https://sync.{}.taler.net/",
- talerbank_payto = "payto://x-taler-bank/bank.{}.taler.net/"
- ),
- offline = dict(
- donations = "http://localhost:5880/",
- blog = "http://localhost:5881/",
- bank = "http://localhost:5882/",
- backoffice = "http://localhost:5883/",
- exchange = "http://localhost:5884/",
- merchant_backend = "http://localhost:5885/",
- landing = "http://localhost:5886/",
- survey = "http://localhost:5887/",
- auditor = "http://localhost:5888/",
- sync = "http://localhost:5889/",
- talerbank_payto = "payto://x-taler-bank/localhost:5882/"
- )
-)
-
-def get_urls(envname):
- if envname == "tanker":
- return dict(
- donations = "https://donations.grumla.se/",
- blog = "https://shop.grumla.se/",
- auditor = "#",
- bank = "https://bank.grumla.se/",
- backoffice = "https://backoffice.grumla.se/",
- exchange = "https://exchange.grumla.se/",
- merchant_backend = "https://merchant-backend.grumla.se/",
- landing = "https://grumla.se/",
- survey = "https://survey.grumla.se/",
- sync = "https://sync.grumla.se/",
- talerbank_payto = "payto://x-taler-bank/bank.grumla.se/"
- )
- if envname == "local":
- return taler_urls["offline"]
- return dict((k, v.format(envname))
- for k, v in taler_urls["online"].items()
- )
-
-def get_port(localhost_url):
- parsed = urlparse(localhost_url)
- assert(parsed.port)
- return str(parsed.port)
diff --git a/bootstrap-docker/README b/bootstrap-docker/README
new file mode 100644
index 0000000..85a3e98
--- /dev/null
+++ b/bootstrap-docker/README
@@ -0,0 +1,2 @@
+Scripts to install rootless docker together with docker-compose and buildx
+plugins.
diff --git a/bootstrap-docker/bootstrap-docker.sh b/bootstrap-docker/bootstrap-docker.sh
new file mode 100755
index 0000000..601e121
--- /dev/null
+++ b/bootstrap-docker/bootstrap-docker.sh
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+
+set -eu
+
+./install-rootless-docker.sh
+./install-plugin-compose.sh
+./install-plugin-buildx.sh
diff --git a/bootstrap-docker/install-plugin-buildx.sh b/bootstrap-docker/install-plugin-buildx.sh
new file mode 100755
index 0000000..aee66ad
--- /dev/null
+++ b/bootstrap-docker/install-plugin-buildx.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+
+set -eu
+
+DOCKER_CONFIG=${DOCKER_CONFIG:-$HOME/.docker}
+mkdir -p $DOCKER_CONFIG/cli-plugins
+curl -SL https://github.com/docker/buildx/releases/download/v0.10.5/buildx-v0.10.5.linux-amd64 -o $DOCKER_CONFIG/cli-plugins/docker-buildx
+chmod +x $DOCKER_CONFIG/cli-plugins/docker-buildx
diff --git a/bootstrap-docker/install-plugin-compose.sh b/bootstrap-docker/install-plugin-compose.sh
new file mode 100755
index 0000000..bb8c8a0
--- /dev/null
+++ b/bootstrap-docker/install-plugin-compose.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+
+set -eu
+
+DOCKER_CONFIG=${DOCKER_CONFIG:-$HOME/.docker}
+mkdir -p $DOCKER_CONFIG/cli-plugins
+curl -SL https://github.com/docker/compose/releases/download/v2.18.1/docker-compose-linux-x86_64 -o $DOCKER_CONFIG/cli-plugins/docker-compose
+chmod +x $DOCKER_CONFIG/cli-plugins/docker-compose
diff --git a/bootstrap-docker/install-rootless-docker.sh b/bootstrap-docker/install-rootless-docker.sh
new file mode 100755
index 0000000..c597186
--- /dev/null
+++ b/bootstrap-docker/install-rootless-docker.sh
@@ -0,0 +1,252 @@
+#!/bin/sh
+set -e
+# Docker CE for Linux installation script (Rootless mode)
+#
+# See https://docs.docker.com/go/rootless/ for the
+# installation steps.
+#
+# This script is meant for quick & easy install via:
+# $ curl -fsSL https://get.docker.com/rootless -o get-docker.sh
+# $ sh get-docker.sh
+#
+# NOTE: Make sure to verify the contents of the script
+# you downloaded matches the contents of install.sh
+# located at https://github.com/docker/docker-install
+# before executing.
+#
+# Git commit from https://github.com/docker/docker-install when
+# the script was uploaded (Should only be modified by upload job):
+SCRIPT_COMMIT_SHA=c2de081
+
+# This script should be run with an unprivileged user and install/setup Docker under $HOME/bin/.
+
+# The channel to install from:
+# * nightly
+# * test
+# * stable
+DEFAULT_CHANNEL_VALUE="stable"
+if [ -z "$CHANNEL" ]; then
+ CHANNEL=$DEFAULT_CHANNEL_VALUE
+fi
+# The latest release is currently hard-coded.
+STABLE_LATEST="24.0.1"
+TEST_LATEST="24.0.1"
+STATIC_RELEASE_URL=
+STATIC_RELEASE_ROOTLESS_URL=
+case "$CHANNEL" in
+ "stable")
+ echo "# Installing stable version ${STABLE_LATEST}"
+ STATIC_RELEASE_URL="https://download.docker.com/linux/static/$CHANNEL/$(uname -m)/docker-${STABLE_LATEST}.tgz"
+ STATIC_RELEASE_ROOTLESS_URL="https://download.docker.com/linux/static/$CHANNEL/$(uname -m)/docker-rootless-extras-${STABLE_LATEST}.tgz"
+ ;;
+ "test")
+ echo "# Installing test version ${TEST_LATEST}"
+ STATIC_RELEASE_URL="https://download.docker.com/linux/static/$CHANNEL/$(uname -m)/docker-${TEST_LATEST}.tgz"
+ STATIC_RELEASE_ROOTLESS_URL="https://download.docker.com/linux/static/$CHANNEL/$(uname -m)/docker-rootless-extras-${TEST_LATEST}.tgz"
+ ;;
+ "nightly")
+ echo "# Installing nightly"
+ STATIC_RELEASE_URL="https://master.dockerproject.org/linux/$(uname -m)/docker.tgz"
+ STATIC_RELEASE_ROOTLESS_URL="https://master.dockerproject.org/linux/$(uname -m)/docker-rootless-extras.tgz"
+ ;;
+ *)
+ >&2 echo "Aborting because of unknown CHANNEL \"$CHANNEL\". Set \$CHANNEL to either \"stable\", \"test\", or \"nightly\"."; exit 1
+ ;;
+esac
+
+init_vars() {
+ BIN="${DOCKER_BIN:-$HOME/bin}"
+
+ DAEMON=dockerd
+ SYSTEMD=
+ if systemctl --user daemon-reload >/dev/null 2>&1; then
+ SYSTEMD=1
+ fi
+}
+
+checks() {
+ # OS verification: Linux only, point osx/win to helpful locations
+ case "$(uname)" in
+ Linux)
+ ;;
+ *)
+ >&2 echo "Rootless Docker cannot be installed on $(uname)"; exit 1
+ ;;
+ esac
+
+ # User verification: deny running as root (unless forced?)
+ if [ "$(id -u)" = "0" ] && [ -z "$FORCE_ROOTLESS_INSTALL" ]; then
+ >&2 echo "Refusing to install rootless Docker as the root user"; exit 1
+ fi
+
+ # HOME verification
+ if [ ! -d "$HOME" ]; then
+ >&2 echo "Aborting because HOME directory $HOME does not exist"; exit 1
+ fi
+
+ if [ -d "$BIN" ]; then
+ if [ ! -w "$BIN" ]; then
+ >&2 echo "Aborting because $BIN is not writable"; exit 1
+ fi
+ else
+ if [ ! -w "$HOME" ]; then
+ >&2 echo "Aborting because HOME (\"$HOME\") is not writable"; exit 1
+ fi
+ fi
+
+ # Existing rootful docker verification
+ if [ -w /var/run/docker.sock ] && [ -z "$FORCE_ROOTLESS_INSTALL" ]; then
+ >&2 echo "Aborting because rootful Docker is running and accessible. Set FORCE_ROOTLESS_INSTALL=1 to ignore."; exit 1
+ fi
+
+ # Validate XDG_RUNTIME_DIR
+ if [ ! -w "$XDG_RUNTIME_DIR" ]; then
+ if [ -n "$SYSTEMD" ]; then
+ >&2 echo "Aborting because systemd was detected but XDG_RUNTIME_DIR (\"$XDG_RUNTIME_DIR\") does not exist or is not writable"
+ >&2 echo "Hint: this could happen if you changed users with 'su' or 'sudo'. To work around this:"
+ >&2 echo "- try again by first running with root privileges 'loginctl enable-linger <user>' where <user> is the unprivileged user and export XDG_RUNTIME_DIR to the value of RuntimePath as shown by 'loginctl show-user <user>'"
+ >&2 echo "- or simply log back in as the desired unprivileged user (ssh works for remote machines)"
+ exit 1
+ fi
+ fi
+
+ # Already installed verification (unless force?). Only having docker cli binary previously shouldn't fail the build.
+ if [ -x "$BIN/$DAEMON" ]; then
+ # If rootless installation is detected print out the modified PATH and DOCKER_HOST that needs to be set.
+ echo "# Existing rootless Docker detected at $BIN/$DAEMON"
+ echo
+ echo "# To reinstall or upgrade rootless Docker, run the following commands and then rerun the installation script:"
+ echo "systemctl --user stop docker"
+ echo "rm -f $BIN/$DAEMON"
+ echo
+ echo "# Alternatively, install the docker-ce-rootless-extras RPM/deb package for ease of package management (requires root)."
+ echo "# See https://docs.docker.com/go/rootless/ for details."
+ exit 0
+ fi
+
+ INSTRUCTIONS=
+
+ # uidmap dependency check
+ if ! command -v newuidmap >/dev/null 2>&1; then
+ if command -v apt-get >/dev/null 2>&1; then
+ INSTRUCTIONS="apt-get install -y uidmap"
+ elif command -v dnf >/dev/null 2>&1; then
+ INSTRUCTIONS="dnf install -y shadow-utils"
+ elif command -v yum >/dev/null 2>&1; then
+ INSTRUCTIONS="curl -o /etc/yum.repos.d/vbatts-shadow-utils-newxidmap-epel-7.repo https://copr.fedorainfracloud.org/coprs/vbatts/shadow-utils-newxidmap/repo/epel-7/vbatts-shadow-utils-newxidmap-epel-7.repo
+yum install -y shadow-utils46-newxidmap"
+ else
+ echo "newuidmap binary not found. Please install with a package manager."
+ exit 1
+ fi
+ fi
+
+ # iptables dependency check
+ if [ -z "$SKIP_IPTABLES" ] && ! command -v iptables >/dev/null 2>&1 && [ ! -f /sbin/iptables ] && [ ! -f /usr/sbin/iptables ]; then
+ if command -v apt-get >/dev/null 2>&1; then
+ INSTRUCTIONS="${INSTRUCTIONS}
+apt-get install -y iptables"
+ elif command -v dnf >/dev/null 2>&1; then
+ INSTRUCTIONS="${INSTRUCTIONS}
+dnf install -y iptables"
+ else
+ echo "iptables binary not found. Please install with a package manager."
+ exit 1
+ fi
+ fi
+
+ # ip_tables module dependency check
+ if [ -z "$SKIP_IPTABLES" ] && ! lsmod | grep ip_tables >/dev/null 2>&1 && ! grep -q ip_tables "/lib/modules/$(uname -r)/modules.builtin"; then
+ INSTRUCTIONS="${INSTRUCTIONS}
+modprobe ip_tables"
+ fi
+
+ # debian requires setting unprivileged_userns_clone
+ if [ -f /proc/sys/kernel/unprivileged_userns_clone ]; then
+ if [ "1" != "$(cat /proc/sys/kernel/unprivileged_userns_clone)" ]; then
+ INSTRUCTIONS="${INSTRUCTIONS}
+cat <<EOT > /etc/sysctl.d/50-rootless.conf
+kernel.unprivileged_userns_clone = 1
+EOT
+sysctl --system"
+ fi
+ fi
+
+ # centos requires setting max_user_namespaces
+ if [ -f /proc/sys/user/max_user_namespaces ]; then
+ if [ "0" = "$(cat /proc/sys/user/max_user_namespaces)" ]; then
+ INSTRUCTIONS="${INSTRUCTIONS}
+cat <<EOT > /etc/sysctl.d/51-rootless.conf
+user.max_user_namespaces = 28633
+EOT
+sysctl --system"
+ fi
+ fi
+
+ if [ -n "$INSTRUCTIONS" ]; then
+ echo "# Missing system requirements. Please run following commands to
+# install the requirements and run this installer again.
+# Alternatively iptables checks can be disabled with SKIP_IPTABLES=1"
+
+ echo
+ echo "cat <<EOF | sudo sh -x"
+ echo "$INSTRUCTIONS"
+ echo "EOF"
+ echo
+ exit 1
+ fi
+
+ # validate subuid/subgid files for current user
+ if ! grep "^$(id -un):\|^$(id -u):" /etc/subuid >/dev/null 2>&1; then
+ >&2 echo "Could not find records for the current user $(id -un) from /etc/subuid . Please make sure valid subuid range is set there.
+For example:
+echo \"$(id -un):100000:65536\" >> /etc/subuid"
+ exit 1
+ fi
+ if ! grep "^$(id -un):\|^$(id -u):" /etc/subgid >/dev/null 2>&1; then
+ >&2 echo "Could not find records for the current user $(id -un) from /etc/subgid . Please make sure valid subuid range is set there.
+For example:
+echo \"$(id -un):100000:65536\" >> /etc/subgid"
+ exit 1
+ fi
+}
+
+exec_setuptool() {
+ if [ -n "$FORCE_ROOTLESS_INSTALL" ]; then
+ set -- "$@" --force
+ fi
+ if [ -n "$SKIP_IPTABLES" ]; then
+ set -- "$@" --skip-iptables
+ fi
+ (
+ set -x
+ PATH="$BIN:$PATH" "$BIN/dockerd-rootless-setuptool.sh" install "$@"
+ )
+}
+
+do_install() {
+ echo "# Executing docker rootless install script, commit: $SCRIPT_COMMIT_SHA"
+
+ init_vars
+ checks
+
+ tmp=$(mktemp -d)
+ trap 'rm -rf "$tmp"' EXIT INT TERM
+  # Download tarballs docker-* and docker-rootless-extras-*
+ (
+ cd "$tmp"
+ curl -L -o docker.tgz "$STATIC_RELEASE_URL"
+ curl -L -o rootless.tgz "$STATIC_RELEASE_ROOTLESS_URL"
+ )
+ # Extract under $HOME/bin/
+ (
+ mkdir -p "$BIN"
+ cd "$BIN"
+ tar zxf "$tmp/docker.tgz" --strip-components=1
+ tar zxf "$tmp/rootless.tgz" --strip-components=1
+ )
+
+ exec_setuptool "$@"
+}
+
+do_install "$@"
diff --git a/buildbot/bootstrap-scripts/bootstrap-codespell b/buildbot/bootstrap-scripts/bootstrap-codespell
index 44d4db3..faeb6b5 100755
--- a/buildbot/bootstrap-scripts/bootstrap-codespell
+++ b/buildbot/bootstrap-scripts/bootstrap-codespell
@@ -25,7 +25,7 @@ for component in $REPOS; do
git -C $HOME/$component checkout $BRANCH
done
-REPOS="exchange merchant wallet-core sync anastasis bank twister"
+REPOS="exchange merchant wallet-core sync bank twister"
for component in $REPOS; do
if ! test -d $HOME/$component; then
diff --git a/buildbot/bootstrap-scripts/bootstrap-walletbuilder b/buildbot/bootstrap-scripts/bootstrap-walletbuilder
deleted file mode 100755
index 8a5304c..0000000
--- a/buildbot/bootstrap-scripts/bootstrap-walletbuilder
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/bin/bash
-
-# Bootstrap the Taler setup for the user account that
-# is currently logged in.
-
-# Generates a setup for a single user,
-# including a postgresql DB.
-
-set -eu
-
-BRANCH=master
-REPOS="wallet-core"
-
-cd $HOME
-
-for component in $REPOS; do
- if ! test -d $HOME/$component; then
- git clone git://localhost/$component.git
- fi
-done
-
-for component in $REPOS; do
- echo "Checking out $component to $BRANCH"
- git -C $HOME/$component checkout $BRANCH
-done
-
-if test ! -d worker ; then
- buildbot-worker create-worker --umask=0o22 ~/worker localhost:9989 wallet-worker wallet-pass
-fi
-
-
-mkdir -p ~/.config/systemd/user/
-cp systemd-services/buildbot-worker-wallet.service ~/.config/systemd/user/
-
-systemctl --user daemon-reload || echo "Please use 'machinectl shell walletbuilder@.host' to log in to use this script"
-
-systemctl --user enable buildbot-worker-wallet.service
-systemctl --user start buildbot-worker-wallet.service
diff --git a/buildbot/build.sh b/buildbot/build.sh
index 6d99cac..79f0e60 100755
--- a/buildbot/build.sh
+++ b/buildbot/build.sh
@@ -2,14 +2,16 @@
set -eu
-echo "Running taler-deployment bootstrap"
+echo "Building the Docker base image (taler_local/taler_base)."
+# INI file with global config entries; typically
+# URLs and secrets. Not automatically generated.
+export TALER_DEPLOYMENT_CONFIG=${HOME}/deployment.conf
+export DOCKER_HOST=unix://${XDG_RUNTIME_DIR}/docker.sock
+echo "Remove data from previous builds. Volumes will be removed before restarting."
+docker system prune -a -f
-# Cannot have this here, as the activate script
-# will be made by the 'bootstrap' command.
-# source "${HOME}/activate". Currently under test.
+${HOME}/deployment/docker/demo/build_base.sh
-${HOME}/deployment/bin/taler-deployment bootstrap
-
-echo "Running taler-deployment build"
-source "${HOME}/activate"
-taler-deployment build
+echo "Building each service's image."
+cd ${HOME}/deployment/docker/demo
+docker-compose build
diff --git a/buildbot/checks.sh b/buildbot/checks.sh
index 02e2b93..7eb331e 100755
--- a/buildbot/checks.sh
+++ b/buildbot/checks.sh
@@ -25,6 +25,7 @@ error_stringify ()
error_fmt="%s (http status code: %s)/(curl condition: %s - %s)\n"
+echo -n "Check exchange..."
URL="https://exchange.${DEPLOYMENT}.${DOMAIN}/"
http_status_code=$(curl \
-s "$URL" -o /dev/null \
@@ -36,11 +37,12 @@ if ! test 200 = $http_status_code; then
$http_status_code $? "$(error_stringify $?)"
exit 1
fi
+echo OK
-URL="http://backend.${DEPLOYMENT}.${DOMAIN}/"
+echo -n "Check merchant backend..."
+URL="https://backend.${DEPLOYMENT}.${DOMAIN}/"
http_status_code=$(curl \
-s $URL \
- --header "Authorization: ApiKey sandbox" \
-o /dev/null \
-w "%{http_code}")
if ! test 200 = $http_status_code; then
@@ -50,8 +52,9 @@ if ! test 200 = $http_status_code; then
$http_status_code $? "$(error_stringify $?)"
exit 1
fi
+echo OK
-
+echo -n "Check blog..."
URL="https://shop.${DEPLOYMENT}.${DOMAIN}/"
http_status_code=$(curl \
-s $URL -o /dev/null \
@@ -63,7 +66,9 @@ if ! test 302 = $http_status_code; then
$http_status_code $? "$(error_stringify $?)"
exit 1
fi
+echo OK
+echo -n "Check survey..."
URL="https://survey.${DEPLOYMENT}.${DOMAIN}/"
http_status_code=$(curl \
-s $URL -o /dev/null \
@@ -75,7 +80,9 @@ if ! test 302 = $http_status_code; then
$http_status_code $? "$(error_stringify $?)"
exit 1
fi
+echo OK
+echo -n "Check donations..."
URL="https://donations.${DEPLOYMENT}.${DOMAIN}/"
http_status_code=$(curl \
-s $URL -o /dev/null \
@@ -87,19 +94,37 @@ if ! test 302 = $http_status_code; then
$http_status_code $? "$(error_stringify $?)"
exit 1
fi
+echo OK
-URL="https://bank.${DEPLOYMENT}.${DOMAIN}/"
+echo -n "Check bank Web UI..."
+URL="https://bank.${DEPLOYMENT}.${DOMAIN}/webui/"
http_status_code=$(curl \
-s $URL -o /dev/null \
-w "%{http_code}")
-if ! test 302 = $http_status_code; then
+if ! test 200 = $http_status_code; then
printf "%s failed\n" $URL
printf "$error_fmt" \
"Bank did not restart correctly" \
$http_status_code $? "$(error_stringify $?)"
exit 1
fi
+echo OK
+echo -n "Check libEufin (Sandbox)..."
+URL="https://bank.${DEPLOYMENT}.${DOMAIN}/demobanks/default/integration-api/config"
+http_status_code=$(curl \
+ -s $URL -o /dev/null \
+ -w "%{http_code}")
+if ! test 200 = $http_status_code; then
+ printf "%s failed\n" $URL
+ printf "$error_fmt" \
+ "Bank did not restart correctly" \
+ $http_status_code $? "$(error_stringify $?)"
+ exit 1
+fi
+echo OK
+
+echo -n "Check landing page..."
URL="https://${DEPLOYMENT}.${DOMAIN}/en/index.html"
http_status_code=$(curl \
-s $URL -o /dev/null \
@@ -111,36 +136,4 @@ if ! test 200 = $http_status_code; then
$http_status_code $? "$(error_stringify $?)"
exit 1
fi
-
-
-
-if $(taler-config -s twister -o twister_deploy >& /dev/null); then
-
- for twister_url in "https://twister-backend.wild.gv.taler.net" \
- "https://twister-bank.wild.gv.taler.net" \
- "https://twister-exchange.wild.gv.taler.net"; do
- http_status_code=$(curl \
- -H "Authorization: ApiKey sandbox" \
- -s $twister_url -o /dev/null \
- -w "%{http_code}")
- if ! test 200 = $http_status_code; then
-
- if test 503 = $http_status_code; then
- printf "%s %s\n" \
- "Hit a '503 Service Unavailable' from Twister." \
- "Assuming all is correct."
- exit 0
- fi
-
- # Real failure here.
- printf "%s failed\n" $twister_url
- printf "$error_fmt" \
- "Twister did not restart correctly" \
- $http_status_code $? "$(error_stringify $?)"
- exit 1
- fi
- done
-fi
-
-
-printf "All services correctly restarted!\n"
+echo OK
diff --git a/buildbot/create_instances.sh b/buildbot/create_instances.sh
deleted file mode 100755
index c67cff6..0000000
--- a/buildbot/create_instances.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env bash
-
-# Step for the BUILD_FACTORY running the 'test.taler.net' site.
-set -eu
-
-source "${HOME}/activate"
-taler-deployment-config-instances
diff --git a/buildbot/create_tip_reserve.sh b/buildbot/create_tip_reserve.sh
deleted file mode 100755
index df756d3..0000000
--- a/buildbot/create_tip_reserve.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env bash
-
-# Step for the BUILD_FACTORY running the 'test.taler.net' site.
-set -eu
-
-source "${HOME}/activate"
-taler-deployment-config-tips
diff --git a/buildbot/doxygen.sh b/buildbot/doxygen.sh
index 23db715..9c04fbf 100755
--- a/buildbot/doxygen.sh
+++ b/buildbot/doxygen.sh
@@ -3,7 +3,7 @@
set -eu
doxygen $1 2> doxygen.err.raw
-grep -v "has multiple @param" doxygen.err.raw | grep -v "too many nodes" > doxygen.err || true
+cat doxygen.err.raw | grep -v "too many nodes" > doxygen.err || true
cat doxygen.err
LC=`cat doxygen.err | wc -l`
if [[ $LC = 0 ]]
diff --git a/buildbot/linkchecker.Containerfile b/buildbot/linkchecker.Containerfile
new file mode 100644
index 0000000..d80693c
--- /dev/null
+++ b/buildbot/linkchecker.Containerfile
@@ -0,0 +1,10 @@
+FROM docker.io/library/debian:bookworm-slim
+
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN apt-get update && \
+ apt-get install -yqq \
+ linkchecker \
+&& rm -rf /var/lib/apt/lists/*
+
+COPY linkcheckerrc /root/.config/linkchecker/linkcheckerrc
diff --git a/buildbot/linkchecker.sh b/buildbot/linkchecker.sh
index 2b24f7c..24c90a0 100755
--- a/buildbot/linkchecker.sh
+++ b/buildbot/linkchecker.sh
@@ -1,11 +1,14 @@
#!/bin/bash
-
+#set -v
# Removed because wget errors with error 8 (Server issued an error response.)
#set -e
## This script will scan www.taler.net for broken links and e-mail a copy of the log if any are found.
-logfile="linkchecker.log"
+logfile="$HOME/linkchecker.log"
+wait_time="1"
+recurse_level="1"
+ignore_list="(.*)demo.taler.net(.*)\/orders\/(.*)" # appears to do *nothing*
# Remove old log
@@ -17,20 +20,21 @@ if [ -f "$logfile" ]
echo "Info: existing log file '$logfile' not found."
fi
-# Use wget to scan www.taler.net and save output
-
-echo
-echo "Running this command:"
-echo " wget --spider -r -nd -nv -H -l 1 -w 2 -o $logfile https://www.taler.net/"
-wget --spider -r -nd -nv -H -l 1 -w 2 -o $logfile https://www.taler.net/
+podman build -t linkchecker:latest -f "$HOME/deployment/buildbot/linkchecker.Containerfile" "$HOME/deployment/buildbot"
-# display logfile
-echo
-echo "Displaying contents of logfile"
-cat $logfile
+# Use linkchecker (in a podman container) to scan hosts and save output
+for url in "https://www.taler.net/" "https://docs.taler.net/" "https://taler-systems.com/" "https://demo.taler.net/" "https://bank.demo.taler.net/" "https://shop.demo.taler.net/" "https://donations.demo.taler.net/" ; do
+ echo -e "\n\n#############################\n## Starting check on ${url}\n#############################\n"
+ podman run --rm localhost/linkchecker:latest \
+ linkchecker \
+ --no-robots \
+ --check-extern \
+ --recursion-level="$recurse_level" \
+ "$url" | tee --append "$logfile"
+done
# Search the log for the phrase "broken link" as this is what wget will report
-if grep -iRl 'broken link!!' $logfile
+if grep -Rl 'Error' $logfile
then
echo "Found broken links. Build should fail (exit 1), triggering e-mail notification."
exit 1
diff --git a/buildbot/linkcheckerrc b/buildbot/linkcheckerrc
new file mode 100644
index 0000000..27337c1
--- /dev/null
+++ b/buildbot/linkcheckerrc
@@ -0,0 +1,306 @@
+# Sample configuration file; see the linkcheckerrc(5) man page or
+# execute linkchecker -h for help on these options.
+# Commandline options override these settings.
+
+##################### output configuration ##########################
+[output]
+# enable debug messages; see 'linkchecker -h' for valid debug names, example:
+#debug=all
+# print status output
+#status=1
+# change the logging type
+#log=text
+# turn on/off --verbose
+#verbose=0
+# turn on/off --warnings
+#warnings=1
+# turn on/off --quiet
+#quiet=0
+# additional file output, example:
+#fileoutput = text, html, gml, sql
+# errors to ignore (URL regular expression, message regular expression)
+ignoreerrors=
+ ^mailto
+ .*orders.*
+ ^https://donations.demo.taler.net/en/checkout
+ ^https://web.archive.org/web/20120118201902/http://www.gnu.org/
+ ^https://www.researchgate.net/publication/4980956_The_Case_Against_Intellectual_Property
+ ^https://shop.fsf.org/.*
+ ^https://twitter.com.*
+# ignore all errors for broken.example.com:
+# ^https?://broken.example.com/
+# ignore SSL errors for dev.example.com:
+# ^https://dev.example.com/ ^SSLError .*
+
+
+##################### logger configuration ##########################
+# logger output part names:
+# all For all parts
+# realurl The full url link
+# result Valid or invalid, with messages
+# extern 1 or 0, only in some logger types reported
+# base <base href=...>
+# name <a href=...>name</a> and <img alt="name">
+# parenturl The referrer URL if there is any
+# info Some additional info, e.g. FTP welcome messages
+# warning Warnings
+# dltime Download time
+# checktime Check time
+# url The original url name, can be relative
+# intro The blurb at the beginning, "starting at ..."
+# outro The blurb at the end, "found x errors ..."
+# stats Statistics including URL lengths and contents.
+
+# each Logger can have separate configuration parameters
+
+# standard text logger
+[text]
+#filename=linkchecker-out.txt
+#parts=all
+# colors for the various parts, syntax is <color> or <type>;<color>
+# type can be bold, light, blink, invert
+# color can be default, black, red, green, yellow, blue, purple, cyan, white,
+# Black, Red, Green, Yellow, Blue, Purple, Cyan, White
+#colorparent=default
+#colorurl=default
+#colorname=default
+#colorreal=cyan
+#colorbase=purple
+#colorvalid=bold;green
+#colorinvalid=bold;red
+#colorinfo=default
+#colorwarning=bold;yellow
+#colordltime=default
+#colorreset=default
+
+# GML logger
+[gml]
+#filename=linkchecker-out.gml
+#parts=all
+# valid encodings are listed in http://docs.python.org/library/codecs.html#standard-encodings
+# example:
+#encoding=utf_16
+
+# DOT logger
+[dot]
+#filename=linkchecker-out.dot
+#parts=all
+# default encoding is ascii since the original DOT format does not
+# support other charsets, example:
+#encoding=iso-8859-15
+
+# CSV logger
+[csv]
+#filename=linkchecker-out.csv
+#separator=;
+#quotechar="
+#dialect=excel
+#parts=all
+
+# SQL logger
+[sql]
+#filename=linkchecker-out.sql
+#dbname=linksdb
+#separator=;
+#parts=all
+
+# HTML logger
+[html]
+#filename=linkchecker-out.html
+# colors for the various parts
+#colorbackground=#fff7e5
+#colorurl=#dcd5cf
+#colorborder=#000000
+#colorlink=#191c83
+#colorwarning=#e0954e
+#colorerror=#db4930
+#colorok=#3ba557
+#parts=all
+
+# failures logger
+[failures]
+#filename=$XDG_DATA_HOME/linkchecker/failures
+
+# custom xml logger
+[xml]
+#filename=linkchecker-out.xml
+# system encoding is used by default. Example:
+#encoding=iso-8859-1
+
+# GraphXML logger
+[gxml]
+#filename=linkchecker-out.gxml
+# system encoding is used by default. Example:
+#encoding=iso-8859-1
+
+# Sitemap logger
+[sitemap]
+#filename=linkchecker-out.sitemap.xml
+#encoding=utf-8
+#priority=0.5
+#frequency=daily
+
+
+##################### checking options ##########################
+[checking]
+# number of threads
+#threads=10
+# connection timeout in seconds
+#timeout=60
+# Time to wait for checks to finish after the user aborts the first time
+# (with Ctrl-C or the abort button).
+#aborttimeout=300
+# The recursion level determines how many times links inside pages are followed.
+#recursionlevel=-1
+# Basic NNTP server. Overrides NNTP_SERVER environment variable.
+#nntpserver=
+# parse a cookiefile for initial cookie data, example:
+#cookiefile=/path/to/cookies.txt
+# User-Agent header string to send to HTTP web servers
+# Note that robots.txt are always checked with the original User-Agent. Example:
+#useragent=Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)
+# When checking finishes, write a memory dump to a temporary file.
+# The memory dump is written both when checking finishes normally
+# and when checking gets canceled.
+# The memory dump only works if the python-meliae package is installed.
+# Otherwise a warning is printed to install it.
+#debugmemory=0
+# When checking absolute URLs inside local files, the given root directory
+# is used as base URL.
+# Note that the given directory must have URL syntax, so it must use a slash
+# to join directories instead of a backslash.
+# And the given directory must end with a slash.
+# Unix example:
+#localwebroot=/var/www/
+# Windows example:
+#localwebroot=/C|/public_html/
+# Check SSL certificates. Set to an absolute pathname for a custom
+# CA cert bundle to use. Set to zero to disable SSL certificate verification.
+#sslverify=1
+# Stop checking new URLs after the given number of seconds. Same as if the
+# user hits Ctrl-C after X seconds. Example:
+#maxrunseconds=600
+# Don't download files larger than the given number of bytes
+#maxfilesizedownload=5242880
+# Don't parse files larger than the given number of bytes
+#maxfilesizeparse=1048576
+# Maximum number of URLs to check. New URLs will not be queued after the
+# given number of URLs is checked. Example:
+#maxnumurls=153
+# Maximum number of requests per second to one host.
+#maxrequestspersecond=10
+# Respect the instructions in any robots.txt files
+#robotstxt=1
+# Allowed URL schemes as a comma-separated list. Example:
+#allowedschemes=http,https
+# Size of the result cache. Checking more urls might increase memory usage during runtime
+#resultcachesize=100000
+
+##################### filtering options ##########################
+[filtering]
+#ignore=
+# ignore everything with 'lconline' in the URL name
+# lconline
+# and ignore everything with 'bookmark' in the URL name
+# bookmark
+# and ignore all mailto: URLs
+# ^mailto:
+# do not recurse into the following URLs
+
+#nofollow=
+# just an example
+# http://www\.example\.com/bla
+
+# Ignore specified warnings (see linkchecker -h for the list of
+# recognized warnings). Add a comma-separated list of warnings here
+# that prevent a valid URL from being logged. Note that the warning
+# will be logged for invalid URLs. Example:
+#ignorewarnings=url-unicode-domain
+# Regular expression to add more URLs recognized as internal links.
+# Default is that URLs given on the command line are internal.
+#internlinks=^http://www\.example\.net/
+# Check external links
+#checkextern=0
+
+
+##################### password authentication ##########################
+[authentication]
+# WARNING: if you store passwords in this configuration entry, make sure the
+# configuration file is not readable by other users.
+# Different user/password pairs for different URLs can be provided.
+# Entries are a triple (URL regular expression, username, password),
+# separated by whitespace.
+# If the regular expression matches, the given user/password pair is used
+# for authentication. The commandline options -u,-p match every link
+# and therefore override the entries given here. The first match wins.
+# At the moment, authentication is used for http[s] and ftp links.
+#entry=
+# Note that passwords are optional. If any passwords are stored here,
+# this file should not be readable by other users.
+# ^https?://www\.example\.com/~calvin/ calvin mypass
+# ^ftp://www\.example\.com/secret/ calvin
+
+# if the website requires a login via a page with an HTML form the URL of the
+# page and optionally the username and password input element name attributes
+# can be provided.
+#loginurl=http://www.example.com/
+
+# The name attributes of the username and password HTML input elements
+#loginuserfield=login
+#loginpasswordfield=password
+# Optionally the name attributes of any additional input elements and the values
+# to populate them with. Note that these are submitted without checking
+# whether matching input elements exist in the HTML form. Example:
+#loginextrafields=
+# name1:value1
+# name 2:value 2
+
+############################ Plugins ###################################
+#
+# uncomment sections to enable plugins
+
+# Check HTML anchors
+#[AnchorCheck]
+
+# Print HTTP header info
+#[HttpHeaderInfo]
+# Comma separated list of header prefixes to print.
+# The names are case insensitive.
+# The default list is empty, so it should be non-empty when activating
+# this plugin. Example:
+#prefixes=Server,X-
+
+# Add country info to URLs
+#[LocationInfo]
+
+# Run W3C syntax checks
+#[CssSyntaxCheck]
+#[HtmlSyntaxCheck]
+
+# Search for regular expression in page contents
+#[RegexCheck]
+# Example:
+#warningregex=Oracle Error
+
+# Search for viruses in page contents
+#[VirusCheck]
+#clamavconf=/etc/clamav/clamd.conf
+
+# Check that SSL certificates have at least the given number of days validity.
+#[SslCertificateCheck]
+#sslcertwarndays=30
+
+# Parse and check links in PDF files
+#[PdfParser]
+
+# Parse and check links in Word files
+#[WordParser]
+
+# Parse and check links in Markdown files.
+# Supported links are:
+# <http://autolink.com>
+# [name](http://link.com "Optional title")
+# [id]: http://link.com "Optional title"
+#[MarkdownCheck]
+# Regexp of filename
+#filename_re=.*\.(markdown|md(own)?|mkdn?)$
diff --git a/buildbot/master.cfg b/buildbot/master.cfg
index c93fac9..7d5ff7a 100644
--- a/buildbot/master.cfg
+++ b/buildbot/master.cfg
@@ -1,6 +1,9 @@
+# -*- python -*-
+# ex: set syntax=python:
+
##
# This file is part of TALER
-# (C) 2016-2021 Taler Systems SA
+# (C) 2016-2023 Taler Systems SA
#
# TALER is free software; you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public
@@ -20,12 +23,28 @@
# @author Marcello Stanisci
# @author ng0
# @author Christian Grothoff
+# @author Devan Carpenter
+import ast
+import configparser
+import glob
+import os
+import pathlib
+import pwd
import re
-from getpass import getuser
+import subprocess
+
+from buildbot.changes.pb import PBChangeSource
from buildbot.steps.source.git import Git
from buildbot.steps.shell import ShellCommand
-from buildbot.plugins import *
+from buildbot.plugins import changes
+from buildbot.plugins import reporters
+from buildbot.plugins import schedulers
+from buildbot.plugins import steps
+from buildbot.plugins import util
+from buildbot.process import buildstep, logobserver
from buildbot.reporters.generators.build import BuildStatusGenerator
+from buildbot.worker import Worker
+from twisted.internet import defer
# This is a sample buildmaster config file. It must be
# installed as 'master.cfg' in your buildmaster's base
@@ -68,19 +87,20 @@ class MessageFormatterWithStdout(reporters.MessageFormatter):
stdout.append(line[1:])
ctx.update(dict(stdout="\n".join(stdout)))
-tipReserveEmails = reporters.MailNotifier(
- fromaddr="buildbot@taler.net", # to be sent to a dedicate alias
- sendToInterestedUsers=False,
- mode=("all"),
- builders=["check-tips-builder"],
- extraRecipients=["tips@taler.net"],
- dumpMailsToLog=True, # debug, to remove
- messageFormatter=MessageFormatterWithStdout(
- wantSteps=True,
- wantLogs=True,
- template="{{ stdout }}",
- subject="tips availability on demo")
-)
+# tipReserveEmails = reporters.MailNotifier(
+#     fromaddr="buildbot@taler.net", # to be sent to a dedicated alias
+# sendToInterestedUsers=False,
+# mode=("all"),
+# builders=["check-tips-builder"], # This builder has been removed - Javisep.
+# extraRecipients=["tips@taler.net"],
+# dumpMailsToLog=True, # debug, to remove
+# messageFormatter=MessageFormatterWithStdout(
+# wantSteps=True,
+# wantLogs=True,
+# template="{{ stdout }}",
+# subject="tips availability on demo")
+# )
+
SERVICES = []
@@ -98,27 +118,32 @@ SCHEDULERS = []
NIGHTLY_TRIGGERS=[]
# Array of builders to be scheduled whenever any of the code Git repos change
-CODECHANGE_TRIGGERS=[]
+CODECHANGE_TRIGGERS = []
-# Array of builders to be scheduled whenever the wallet-core or deployment change
-WALLETCHANGE_TRIGGERS=[]
+# Array of builders to be scheduled whenever the wallet-core or
+# deployment change
+WALLETCHANGE_TRIGGERS = []
-# Array of builder names for which build status reports should be sent via e-mail
-EMAIL_ALERTS=[]
+# Array of builder names for which build status reports should be sent
+# via e-mail
+EMAIL_ALERTS = []
+# Array of email addresses for which build status reports should be sent
+BUILDER_EMAIL_ADDRESSES = []
############ Convenience functions #################
# Create a FACTORY with a deployment.git checkout as the first step.
def create_factory_with_deployment():
f = util.BuildFactory()
- update_deployment (f)
+ update_deployment(f)
return f
+
# Convenience function that checks out a Git repository.
# First argument is the URL of the Git to clone, second
# the desired branch. Default is 'master'.
-def git_step(repo,target_branch="master"):
+def git_step(repo, target_branch="master"):
return Git(
repourl=repo,
mode="full",
@@ -129,17 +154,20 @@ def git_step(repo,target_branch="master"):
branch=target_branch
)
+
# Convenience function that runs 'make check' in a
# directory of the code inside of a netjail.
-def jailed_check(package,srcdirs):
+def jailed_check(package, srcdirs):
return steps.ShellSequence(
name="Tests of " + package,
description="Testing " + package,
descriptionDone="Pass",
- commands=map(lambda srcdir: util.ShellArg(command=["sudo", "/usr/local/bin/netjail.sh", "/home/integrationtest/deployment/buildbot/with-postgres.sh", "bash", "-c", "'cd src/"+srcdir+" make check'"]), srcdirs),
+ commands=map(lambda srcdir: util.ShellArg(command=["sudo", "/usr/local/bin/netjail.sh", "/home/integrationtest/deployment/buildbot/with-postgres.sh", "bash", "-c", "'cd src/"+srcdir+" make install check'"]), srcdirs),
+ env={'PATH': "${HOME}/local/bin:${PATH}"},
workdir="../../sources/" + package
)
+
# Convenience function that checks out the deployment.
def update_deployment(factory):
factory.addStep(steps.ShellSequence(
@@ -155,6 +183,69 @@ def update_deployment(factory):
))
+# Convenience function that builds and runs a container.
+def container_add_step(HALT_ON_FAILURE,
+ WARN_ON_FAILURE,
+ CONTAINER_BUILD,
+ CONTAINER_NAME,
+ factory,
+ WORK_DIR,
+ stepName,
+ CONTAINER_ARCH="amd64",
+ jobCmd="/workdir/contrib/ci/ci.sh",
+ containerFile="contrib/ci/Containerfile"):
+ print(f"HALT_ON_FAILURE: {HALT_ON_FAILURE}, WARN_ON_FAILURE: {WARN_ON_FAILURE}, CONTAINER_BUILD: {CONTAINER_BUILD}, CONTAINER_NAME: {CONTAINER_NAME}")
+ if not CONTAINER_BUILD:
+ return steps.ShellSequence(
+ name=stepName,
+ commands=[
+ util.ShellArg(command=["podman", "run", "--rm",
+ "--arch", CONTAINER_ARCH,
+ "--add-host", "taler.host.internal:10.0.2.2",
+ "--network", "slirp4netns:allow_host_loopback=true",
+ "--env", util.Interpolate("CI_COMMIT_REF=%(prop:got_revision:-%(src::revision:-unknown)s)s"),
+ "--env", util.Interpolate("CI_GIT_BRANCH=%(src::branch)s"),
+ "--volume", f"{WORK_DIR}:/workdir",
+ "--volume", "/home/container-worker/container_artifacts:/artifacts",
+ "--volume", "/home/container-worker/mounted_files/ci_container_id_ed25519:/root/.ssh/id_ed25519:ro",
+ "--volume", "/home/container-worker/mounted_files/container_known_hosts:/root/.ssh/known_hosts:ro",
+ "--workdir", "/workdir",
+ CONTAINER_NAME, jobCmd],
+ logname='run inside container',
+ haltOnFailure=HALT_ON_FAILURE),
+ ],
+ haltOnFailure=HALT_ON_FAILURE,
+ workdir=WORK_DIR
+ )
+ else:
+ return steps.ShellSequence(
+ name=stepName,
+ commands=[
+ util.ShellArg(command=["podman", "build", "-t", CONTAINER_NAME,
+ "--arch", CONTAINER_ARCH,
+ "-f", containerFile, "."],
+ logname='build container', haltOnFailure=True),
+ util.ShellArg(command=["podman", "run", "--rm",
+ "--arch", CONTAINER_ARCH,
+ "--add-host", "taler.host.internal:10.0.2.2",
+ "--network", "slirp4netns:allow_host_loopback=true",
+ "--env", util.Interpolate("CI_COMMIT_REF=%(prop:got_revision:-%(src::revision:-unknown)s)s"),
+ "--env", util.Interpolate("CI_GIT_BRANCH=%(src::branch)s"),
+ "--volume", f"{WORK_DIR}:/workdir",
+ "--volume", "/home/container-worker/container_artifacts:/artifacts",
+ "--volume", f"/run/user/{pwd.getpwnam('container-worker').pw_uid}/podman/podman.sock:/run/podman/podman.sock",
+ "--volume", "/home/container-worker/mounted_files/ci_container_id_ed25519:/root/.ssh/id_ed25519:ro",
+ "--volume", "/home/container-worker/mounted_files/container_known_hosts:/root/.ssh/known_hosts:ro",
+ "--security-opt", "label=disable",
+ "--workdir", "/workdir",
+ CONTAINER_NAME, jobCmd],
+ logname='run inside container',
+ haltOnFailure=HALT_ON_FAILURE),
+ ],
+ haltOnFailure=HALT_ON_FAILURE,
+ workdir=WORK_DIR
+ )
+
##################################################################
######################## JOBS ####################################
##################################################################
@@ -170,11 +261,12 @@ def update_deployment(factory):
################ 1: BUILDMASTER JOB ###################################
+
##
# This worker restarts the buildmaster itself on
# changes to this file.
# Location: /home/buildbot-master @ taler.net
-WORKERS.append(worker.Worker("buildmaster-worker", "buildmaster-pass"))
+WORKERS.append(Worker("buildmaster-worker", "buildmaster-pass"))
BUILDMASTER_FACTORY = create_factory_with_deployment()
BUILDMASTER_FACTORY.addStep(
@@ -207,139 +299,6 @@ SCHEDULERS.append(schedulers.SingleBranchScheduler(
))
-################ 2: DOCUMENTATION JOB ###################################
-
-##
-# This worker builds manuals / API docs / tutorials.
-# Location: /home/docbuilder @ taler.net
-WORKERS.append(worker.Worker("doc-worker", "doc-pass"))
-
-DOC_FACTORY = create_factory_with_deployment()
-DOC_FACTORY.addStep(
- ShellCommand(
- name="build docs",
- description="Building documentation",
- descriptionDone="Documentation built.",
- command=["./build-docs.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True
- )
-)
-DOC_FACTORY.addStep(
- steps.ShellSequence(
- name="prepare exchange",
- description="Running bootstrap and configure for exchange",
- descriptionDone="exchange ready for doxygen",
- commands=[
- util.ShellArg(command=["./bootstrap"], logname='bootstrap'),
- util.ShellArg(command=["./configure", "--enable-only-doc"], logname='configure'),
- ],
- workdir="../../sources/exchange/",
- haltOnFailure=True,
- )
-)
-DOC_FACTORY.addStep(
- ShellCommand(
- name="doxygen::exchange",
- description="building exchange doxygen documentation",
- descriptionDone="doxygen on exchange finished",
- command=["make", "full" ],
- want_stderr=False,
- workdir="../../sources/exchange/doc/doxygen",
- haltOnFailure=True,
- )
-)
-DOC_FACTORY.addStep(
- steps.ShellSequence(
- name="prepare merchant",
- description="prepare merchant",
- descriptionDone="merchant prepared",
- commands=[
- util.ShellArg(command=["./bootstrap"], logname='bootstrap'),
- util.ShellArg(command=["./configure", "--enable-only-doc"], logname='configure'),
- util.ShellArg(command=["cp", "../exchange/doc/doxygen/taler-exchange.tag", "doc/doxygen/taler-exchange.tag"]),
- ],
- workdir="../../sources/merchant/"
- )
-)
-DOC_FACTORY.addStep(
- ShellCommand(
- name="doxygen::merchant",
- description="building merchant doxygen documentation",
- descriptionDone="doxygen on merchant finished",
- command=["make", "full" ],
- want_stderr=False,
- workdir="../../sources/merchant/doc/doxygen"
- )
-)
-DOC_FACTORY.addStep(
- steps.ShellSequence(
- name="prepare anastasis",
- description="prepare anastasis",
- descriptionDone="doxygen on anastasis finished",
- commands=[
- util.ShellArg(command=["./bootstrap"], logname='bootstrap'),
- util.ShellArg(command=["./configure", "--enable-only-doc"], logname='configure'),
- util.ShellArg(command=["cp", "../exchange/doc/doxygen/taler-exchange.tag", "doc/doxygen/taler-exchange.tag"], logname="cp-e"),
- util.ShellArg(command=["cp", "../merchant/doc/doxygen/taler-merchant.tag", "doc/doxygen/taler-merchant.tag"], logname="cp-m"),
- ],
- workdir="../../sources/anastasis/"
- )
-)
-DOC_FACTORY.addStep(
- ShellCommand(
- name="doxygen::anastasis",
- description="building anastasis doxygen documentation",
- descriptionDone="doxygen on anastasis finished",
- command=["make", "full" ],
- want_stderr=False,
- workdir="../../sources/anastasis/doc/doxygen"
- )
-)
-DOC_FACTORY.addStep(
- steps.ShellSequence(
- name="doxygen::wallet",
- description="building wallet typescript documentation",
- descriptionDone="typedoc on taler-wallet-core finished",
- commands=[
- util.ShellArg(command=["./bootstrap"], logname="bootstrap"),
- util.ShellArg(command=["./configure"], logname="configure"),
- util.ShellArg(command=["make"], logname="make"),
- util.ShellArg(command=["pnpm", "install", "-W", "typedoc"], logname="pnpm"),
- util.ShellArg(command=["./node_modules/typedoc/bin/typedoc", "--out", "dist/typedoc", "--tsconfig", "tsconfig.build.json", "packages/taler-util/src/index.ts", "packages/taler-wallet-cli/src/index.ts", "packages/taler-wallet-android/src/index.ts", "packages/taler-wallet-core/src/index.ts" ], logname="typedoc"),
- ],
- workdir="../../sources/wallet-core/"
- )
-)
-
-
-BUILDERS.append(util.BuilderConfig(
- name="doc-builder", workernames=["doc-worker"], factory=DOC_FACTORY
-))
-
-EMAIL_ALERTS.append("doc-builder")
-
-#sphinxErrorNotifier = reporters.MailNotifier(
-# fromaddr="bb@taler.net",
-# sendToInterestedUsers=False,
-# addLogs=['build docs.stdio',],
-# useTls=True,
-# # notify if sphinx exits with error (command line option in Makefile turns warnings into exit 1)
-# mode=('failing'),
-# builders=('doc-builder',),
-# extraRecipients=['sphinxerrors@taler.net']
-#)
-
-
-# Docs run if master or stable branch of 'docs' (or deployment) changed.
-SCHEDULERS.append(schedulers.SingleBranchScheduler(
- name="periodic-doc-scheduler",
- builderNames=["doc-builder"],
- change_filter=util.ChangeFilter(
- branch_re="(master|stable)", project_re="(docs|deployment)"
- ),
- treeStableTimer=None,
-))
################ 3: WEBSITE JOB ###################################
@@ -347,7 +306,7 @@ SCHEDULERS.append(schedulers.SingleBranchScheduler(
##
# This worker builds Websites: www and stage.
#
-WORKERS.append(worker.Worker("sites-worker", "sites-pass"))
+WORKERS.append(Worker("sites-worker", "sites-pass"))
SITES_FACTORY = create_factory_with_deployment()
SITES_FACTORY.addStep(
@@ -365,7 +324,8 @@ BUILDERS.append(util.BuilderConfig(
name="sites-builder", workernames=["sites-worker"], factory=SITES_FACTORY
))
-EMAIL_ALERTS.append("sites-builder")
+#EMAIL_ALERTS.append("sites-builder")
+
# The web page changed if 'www' changed OR if 'web' in the 'twister' repo changed:
def twister_web_page(change):
@@ -381,6 +341,7 @@ def twister_web_page(change):
return True
return False
+
# Sites are re-build whenever deployment, www buywith, or twister changes.
SCHEDULERS.append(schedulers.SingleBranchScheduler(
name="sites-scheduler",
@@ -393,351 +354,12 @@ SCHEDULERS.append(schedulers.SingleBranchScheduler(
))
-################ 4: LCOV JOB ###################################
-
-##
-# This worker makes the code coverage and publishes it
-# under the "lcov" Website.
-WORKERS.append(worker.Worker("lcov-worker", "lcov-pass"))
-
-LCOV_FACTORY = create_factory_with_deployment()
-LCOV_FACTORY.addStep(git_step("git://git.taler.net/wallet-core.git"))
-LCOV_FACTORY.addStep(
- ShellCommand(
- name="fetch",
- description="Running yarn install of wallet",
- descriptionDone="Correctly installed",
- command=["npm", "install", "-g", "--prefix", "$HOME", "@gnu-taler/taler-wallet-cli"],
- workdir="build/",
- haltOnFailure=True,
- )
-)
-LCOV_FACTORY.addStep(
- ShellCommand(
- name="build",
- description="Building other Taler components",
- descriptionDone="Taler built",
- command=["./build.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- env={'TALER_COVERAGE': "1"},
- )
-)
-LCOV_FACTORY.addStep(
- ShellCommand(
- name="coverage generation",
- description="running tests",
- descriptionDone="generating HTML report",
- command=["/usr/bin/sudo", "/usr/local/bin/netjail.sh", "/home/lcovworker/deployment/buildbot/coverage.sh"],
- workdir="../../deployment/buildbot",
- env={'PATH': "${HOME}/local/bin:${PATH}"}
- )
-)
-
-BUILDERS.append(util.BuilderConfig(
- name="lcov-builder",
- workernames=["lcov-worker"],
- factory=LCOV_FACTORY
-))
-
-EMAIL_ALERTS.append("lcov-builder")
-NIGHTLY_TRIGGERS.append("lcov-builder")
-NIGHTLY_TRIGGERS.append("check-tips-builder")
-NIGHTLY_TRIGGERS.append("taler-demo-healthcheck-builder")
-
-################ 5: UNIT TEST JOB ###################################
-
-##
-# This worker builds everything and runs our 'make check'
-# test suite against 'everything'.
-WORKERS.append(worker.Worker("checker-worker", "checker-pass"))
-
-INTEGRATIONTEST_FACTORY = create_factory_with_deployment()
-INTEGRATIONTEST_FACTORY.addStep(
- ShellCommand(
- name="fetch gnunet.org sources",
- description="fetching latest deployment repositories from git.gnunet.org",
- descriptionDone="GNUnet code base updated",
- command=["./update-sources.sh", "git://git.gnunet.org/", "libmicrohttpd", "gnunet"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- )
-)
-INTEGRATIONTEST_FACTORY.addStep(
- ShellCommand(
- name="fetch taler.net sources",
- description="fetching latest deployment repositories from git.taler.net",
- descriptionDone="Taler code base updated",
- command=["./update-sources.sh", "git://git.taler.net/", "exchange", "merchant", "wallet-core", "sync", "anastasis", "bank", "twister"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- )
-)
-INTEGRATIONTEST_FACTORY.addStep(git_step("git://git.taler.net/wallet-core.git"))
-INTEGRATIONTEST_FACTORY.addStep(
- ShellCommand(
- name="fetch",
- description="Running yarn install of wallet",
- descriptionDone="Correctly installed",
- command=["npm", "install", "-g", "--prefix", "$HOME", "@gnu-taler/taler-wallet-cli"],
- workdir="../../sources/wallet-core",
- haltOnFailure=True,
- )
-)
-INTEGRATIONTEST_FACTORY.addStep(
- steps.ShellSequence(
- name="fetch",
- description="Running local install of wallet",
- descriptionDone="local wallet install done",
- commands=[
- util.ShellArg(command=["./bootstrap"]),
- util.ShellArg(command=["./configure", "--prefix=$HOME/local/"]),
- util.ShellArg(command=["make"]),
- ],
- workdir="../../sources/wallet-core",
- haltOnFailure=True,
- )
-)
-INTEGRATIONTEST_FACTORY.addStep(
- ShellCommand(
- name="build",
- description="Building other Taler components",
- descriptionDone="Taler built",
- command=["./build.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=False
- )
-)
-INTEGRATIONTEST_FACTORY.addStep(jailed_check ("libmicrohttpd", [
- "microhttpd",
- "testcurl",
- "testzzuf",
-]))
-INTEGRATIONTEST_FACTORY.addStep(jailed_check ("gnunet", [
- "util",
- "pq",
- "curl",
- "json",
-]))
-INTEGRATIONTEST_FACTORY.addStep(jailed_check ("twister", [
- "test",
-]))
-INTEGRATIONTEST_FACTORY.addStep(jailed_check ("exchange", [
- "util",
- "curl",
- "mhd",
- "pq",
- "json",
- "bank-lib",
- "exchangedb",
- "auditordb",
- "exchange",
- "auditor",
- "lib",
- "exchange-tools",
- "testing",
- "benchmark",
-]))
-INTEGRATIONTEST_FACTORY.addStep(jailed_check ("merchant", [
- "mustach",
- "backenddb",
- "backend",
- "lib",
- "testing",
- "merchant-tools",
-]))
-INTEGRATIONTEST_FACTORY.addStep(jailed_check ("sync", [
- "util",
- "syncdb",
- "sync",
- "lib"
-]))
-INTEGRATIONTEST_FACTORY.addStep(jailed_check ("anastasis", [
- "util",
- "stasis",
- "testing",
- "reducer",
-]))
-INTEGRATIONTEST_FACTORY.addStep(
- ShellCommand(
- name="bank check",
- description="Testing Taler Python bank",
- descriptionDone="Done",
- command=["sudo", "/usr/local/bin/netjail.sh", "/home/integrationtest/deployment/buildbot/with-postgres.sh", "make", "check"],
- workdir="../../sources/bank",
- haltOnFailure=False,
- env={'PYTHONUSERBASE': "$HOME/local" }
- )
-)
-INTEGRATIONTEST_FACTORY.addStep(
- ShellCommand(
- name="wallet check",
- description="Testing wallet-core",
- descriptionDone="Done",
- command=["make", "check"],
- workdir="../../sources/wallet-core",
- haltOnFailure=False
- )
-)
-
-BUILDERS.append(util.BuilderConfig(
- name="checker-builder",
- workernames=["checker-worker"],
- factory=INTEGRATIONTEST_FACTORY
-))
-
-EMAIL_ALERTS.append("checker-builder")
-CODECHANGE_TRIGGERS.append("checker-builder")
-
-
-################ 6: 'test.taler.net' deployment JOB ###################################
-
-##
-# This worker builds Taler for the 'test' deployment.
-WORKERS.append(worker.Worker("test-worker", "test-pass"))
-
-# buildslavetest FACTORY
-BUILDSLAVETEST_FACTORY = create_factory_with_deployment()
-BUILDSLAVETEST_FACTORY.addStep(
- ShellCommand(
- name="buildslavetest script (for testing purposes)",
- description="Build Slave Test",
- descriptionDone="buildslavetest: Done",
- command=["./buildslavetest.sh"],
- workdir="/home/buildslavetest/"
- )
-)
-
-# buildslavetest BUILDER
-BUILDERS.append(util.BuilderConfig(
- name="buildslavetest-builder",
- workernames=["buildslavetest-worker"],
- factory=BUILDSLAVETEST_FACTORY
-))
-
-EMAIL_ALERTS.append("buildslavetest-builder")
-
-# buildslavetest SCHEDULER
-SCHEDULERS.append(schedulers.SingleBranchScheduler(
- name="buildslavetest-scheduler",
- builderNames=["buildslavetest-builder"],
- change_filter=util.ChangeFilter(
- branch_re="(master|stable)", project_re="(help|deployment)"
- ),
- treeStableTimer=None,
-))
-
-################ 7: 'test-auditor' deployment JOB ###################################
-
-
-##
-# This worker compiles the auditor reports for the "test"
-# demo deployment.
-WORKERS.append(worker.Worker("test-auditor-worker", "test-auditor-pass"))
-
-AUDITOR_FACTORY_TEST = create_factory_with_deployment()
-AUDITOR_FACTORY_TEST.addStep(
- ShellCommand(
- name="Auditor reports generator",
- description="Generating auditor reports.",
- descriptionDone="Auditor reports correctly generated.",
- command=["./make_auditor_reports.sh"],
- workdir="../../deployment/buildbot",
- env={'TALER_HOME': "/home/taler-test/"}
- )
-)
-
-BUILDERS.append(util.BuilderConfig(
- name="auditor-builder-test",
- workernames=["test-auditor-worker"],
- factory=AUDITOR_FACTORY_TEST
-))
-NIGHTLY_TRIGGERS.append("auditor-builder-test")
-
-
-################ 8: 'demo-auditor' deployment JOB ###################################
-##
-# This worker compiles the auditor reports for the "green"
-# demo deployment.
-WORKERS.append(worker.Worker("demo-auditor-worker", "demo-auditor-pass"))
-
-AUDITOR_FACTORY_DEMO = create_factory_with_deployment()
-AUDITOR_FACTORY_DEMO.addStep(
- ShellCommand(
- name="Auditor reports generator",
- description="Generating auditor reports.",
- descriptionDone="Auditor reports correctly generated.",
- command=["./make_auditor_reports.sh"],
- workdir="../../deployment/buildbot",
- env={'TALER_HOME': "/home/demo/active-home/"}
- )
-)
-
-BUILDERS.append(util.BuilderConfig(
- name="auditor-builder-demo",
- workernames=["demo-auditor-worker"],
- factory=AUDITOR_FACTORY_DEMO
-))
-
-NIGHTLY_TRIGGERS.append("auditor-builder-demo")
-
-
-################ 8: 'build wallet-core' JOB ###################################
-
-##
-# This worker builds wallet-core.
-WORKERS.append(worker.Worker("wallet-worker", "wallet-pass"))
-
-WALLET_FACTORY = create_factory_with_deployment()
-WALLET_FACTORY.addStep(
- ShellCommand(
- name="build",
- description="Building all Taler codebase.",
- descriptionDone="Taler built.",
- command=["./build.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True
- )
-)
-WALLET_FACTORY.addStep(
- ShellCommand(
- name="test",
- description="Running wallet tests",
- descriptionDone="Test correctly run",
- command=["timeout", "--preserve-status", "5m",
- "taler-wallet-cli", "testing", "run-integrationtests",
- "--suites", "wallet,merchant,libeufin,wallet-backup,wallet-tipping"],
- workdir="../../",
- env={'PATH': "${HOME}/local/bin:${PATH}"}
- )
-)
-
-BUILDERS.append(util.BuilderConfig(
- name="wallet-builder",
- workernames=["wallet-worker"],
- factory=WALLET_FACTORY
-))
-
-EMAIL_ALERTS.append("wallet-builder")
-
-# Wallet is re-build whenever wallet-core or deployment changes
-SCHEDULERS.append(schedulers.SingleBranchScheduler(
- name="wallet-scheduler",
- change_filter=util.ChangeFilter(
- branch="master",
- project_re="(wallet-core|deployment|libeufin)"
- ),
- treeStableTimer=None,
- builderNames=["wallet-builder"]
-))
-
-
################ 9: 'check links' JOB ###################################
##
# linkchecker worker checks for dead links in the Website
# Location: /home/linkchecker @ taler.net
-WORKERS.append(worker.Worker("linkchecker-worker", "linkchecker-pass"))
+WORKERS.append(Worker("linkchecker-worker", "linkchecker-pass"))
# linkchecker FACTORY
LINKCHECKER_FACTORY = create_factory_with_deployment()
@@ -749,6 +371,7 @@ LINKCHECKER_FACTORY.addStep(
command=["/home/linkchecker/deployment/buildbot/linkchecker.sh"],
workdir="/home/linkchecker",
haltOnFailure=True,
+ timeout=7200 # 2 hours
)
)
@@ -761,7 +384,7 @@ BUILDERS.append(util.BuilderConfig(
))
docs_generator = BuildStatusGenerator(
- mode=('change','problem','failing','exception',),
+ mode=('change', 'problem', 'failing', 'exception',),
builders=[
'linkchecker-builder',
],
@@ -770,387 +393,261 @@ docs_generator = BuildStatusGenerator(
wantSteps=True,
wantLogs=True
),
- add_logs=True
+ add_logs=True
)
+
SERVICES.append(reporters.MailNotifier(
fromaddr="bb@taler.net",
generators=[docs_generator],
sendToInterestedUsers=False,
- useTls=True,
+ useTls=False,
+ relayhost="localhost",
+ smtpPort=25,
+ dumpMailsToLog=True,
extraRecipients=['linkcheck@taler.net']
))
-SERVICES.append(tipReserveEmails)
-
-NIGHTLY_TRIGGERS.append("linkchecker-builder")
-
-
-################ 10: 'check spelling' JOB ###################################
-##
-# codespell worker checks for spelling mistakes in code
-# Location: /home/codespell @ taler.net
-WORKERS.append(worker.Worker("codespell-worker", "codespell-pass"))
-
-CODESPELL_FACTORY = create_factory_with_deployment()
-CODESPELL_FACTORY.addStep(
- ShellCommand(
- name="clean old deployment",
- description="cleaning previous doxygen runs",
- descriptionDone="Doxygen cleaned",
- command=["rm", "-rf", "exchange/doc/doxygen/html/", "merchant/doc/doxygen/html/" ],
- workdir="/home/codespell/sources/"
- )
-)
-CODESPELL_FACTORY.addStep(
- ShellCommand(
- name="fetch gnunet.org sources",
- description="fetching latest deployment repositories from git.gnunet.org",
- descriptionDone="GNUnet code base updated",
- command=["./update-sources.sh", "git://git.gnunet.org/", "libmicrohttpd", "gnunet"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- )
-)
-CODESPELL_FACTORY.addStep(
- ShellCommand(
- name="fetch taler.net sources",
- description="fetching latest deployment repositories from git.taler.net",
- descriptionDone="Taler code base updated",
- command=["./update-sources.sh", "git://git.taler.net/", "exchange", "merchant", "wallet-core", "sync", "anastasis", "bank", "twister"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- )
-)
-CODESPELL_FACTORY.addStep(git_step("git://git.taler.net/wallet-core.git"))
-CODESPELL_FACTORY.addStep(
- ShellCommand(
- name="codespell",
- description="spell checking repositories",
- descriptionDone="Spell check complete",
- command=["/home/codespell/.local/bin/codespell", "-I", "/home/codespell/deployment/codespell/dictionary.txt", "-S", "*.bib,*.bst,*.cls,*.json,*.png,*.svg,*.wav,*.gz,*/mustach/**,*.fees,*key,*.tag,*.info,*.latexmkrc,*.ecc,*.jpg,*.zkey,*.sqlite,*/contrib/hellos/**,*/vpn/tests/**,*.priv,*.file,*.tgz,*.woff,*.gif,*.odt,*.fee,*.deflate,*.dat,*.jpeg,*.eps,*.odg,*/m4/ax_lib_postgresql.m4,*/m4/libgcrypt.m4,*.rpath,config.status,ABOUT-NLS,*/doc/texinfo.tex,*.PNG,*.??.json,*.docx,*.ods,*.doc,*.docx,*.xcf,*.xlsx,*.ecc,*.ttf,*.woff2,*.eot,*.ttf,*.eot,*.mp4,*.pptx,*.epgz,*.min.js,**/*.map,**/fonts/**,*.pack.js,*.po,*.bbl,*/afl-tests/*,*/.git/**,*.pdf,*.epub,**/signing-key.asc,**/pnpm-lock.yaml,**/*.svg,**/*.cls,**/rfc.bib,**/*.bst", "anastasis", "bank", "exchange", "gnunet", "libmicrohttpd", "merchant", "sync", "twister", "wallet-core"],
- workdir="/home/codespell/sources/"
- )
-)
-CODESPELL_FACTORY.addStep(
- ShellCommand(
- name="gana",
- description="fetch GANA",
- descriptionDone="GANA obtained",
- command=["contrib/gana.sh" ],
- workdir="/home/codespell/sources/exchange/"
- )
-)
-CODESPELL_FACTORY.addStep(
- ShellCommand(
- name="doxygen::exchange",
- description="checking for doxygen mistakes in exchange",
- descriptionDone="doxygen on exchange finished",
- command=["/home/codespell/deployment/buildbot/doxygen.sh", "taler.doxy" ],
- workdir="/home/codespell/sources/exchange/doc/doxygen"
- )
-)
-CODESPELL_FACTORY.addStep(
- steps.ShellSequence(
- name="tag",
- description="prepare merchant",
- descriptionDone="directory created",
- commands=[
- util.ShellArg(command=["mkdir", "-p", "merchant/doc/doxygen/"]),
- util.ShellArg(command=["cp", "exchange/doc/doxygen/taler-exchange.tag", "merchant/doc/doxygen/taler-exchange.tag"]),
- ],
- workdir="/home/codespell/sources/"
- )
-)
-CODESPELL_FACTORY.addStep(
- ShellCommand(
- name="doxygen::merchant",
- description="checking for doxygen mistakes in merchant",
- descriptionDone="doxygen on merchant finished",
- command=["/home/codespell/deployment/buildbot/doxygen.sh", "taler.doxy" ],
- workdir="/home/codespell/sources/merchant/doc/doxygen"
- )
-)
-
-BUILDERS.append(util.BuilderConfig(
- name="codespell-builder",
- workernames=["codespell-worker"],
- factory=CODESPELL_FACTORY
-))
-
-EMAIL_ALERTS.append("codespell-builder")
-
-CODECHANGE_TRIGGERS.append("codespell-builder")
+# SERVICES.append(tipReserveEmails)
+NIGHTLY_TRIGGERS.append("linkchecker-builder")
-################ 11: 'demo checks' JOB ###################################
+#############################################
+# 19: CONTAINER FACTORY #####################
+#############################################
##
-# This worker checks that all the services run under the
-# 'demo' deployment are up&running.
-
-WORKERS.append(worker.Worker("tips-checker-worker", "tips-checker-pass"))
-DEMO_CHECK_TIPS_FACTORY = create_factory_with_deployment()
-DEMO_CHECK_TIPS_FACTORY.addStep(
- ShellCommand(
- name="demo tip reserves checker",
- description="Checking that demo allocated tip money",
- descriptionDone="Demo can tip visitors!.",
- command=["./check_tip_reserve.sh"],
- workdir="../../deployment/buildbot",
+# These factories uses the standard container worker.
+WORKERS.append(Worker("container-worker", "container-pass"))
+
+
+# Container Job Generator Functions
+# Parse config file and save values in a dict
+def ingest_job_config(configPath, jobName):
+ configDict = {jobName: {}}
+ print(configDict)
+ ini.read_string(configPath)
+ for key in ini["build"]:
+ value = ini['build'][key]
+ configDict[jobName][key] = value
+ print(configDict)
+ configDict.update(configDict)
+ print(configDict)
+ return configDict
+
+
+# Search for configs, and ingest
+def handle_job_config(jobDirPath, jobName, repoName, configPath, configExist):
+ print(configPath)
+ if configExist == 0:
+ print(f"Ingesting Job Config: {configPath}")
+ configDict = ingest_job_config(configPath, jobName)
+ print(configDict)
+ return configDict
+ else:
+ print("No job config; Using default params")
+ # Set default job config parameters
+ configDict = {jobName: {"HALT_ON_FAILURE": True,
+ "WARN_ON_FAILURE": False,
+ "CONTAINER_BUILD": True,
+ "CONTAINER_NAME": repoName,
+ "CONTAINER_ARCH": "amd64"}}
+ return configDict
+
+
+class GenerateStagesCommand(buildstep.ShellMixin, steps.BuildStep):
+
+ def __init__(self, REPO_NAME, **kwargs):
+ self.REPO_NAME = REPO_NAME
+ kwargs = self.setupShellMixin(kwargs)
+ super().__init__(**kwargs)
+ self.observer = logobserver.BufferLogObserver()
+ self.addLogObserver('stdio', self.observer)
+
+ def extract_stages(self, stdout):
+ stages = []
+ for line in stdout.split('\n'):
+ stage = str(line.strip())
+ if stage:
+ stages.append(stage)
+ return stages
+
+ @defer.inlineCallbacks
+ def run(self):
+ CONTAINER_WORKDIR = f"/home/container-worker/workspace/{self.REPO_NAME}"
+ CI_JOBS_PATH = f"{CONTAINER_WORKDIR}/contrib/ci/jobs"
+ # run 'ls <project_root>/contrib/ci/jobs/' to get the list of stages
+ cmd = yield self.makeRemoteShellCommand()
+ yield self.runCommand(cmd)
+ jobDirs = []
+
+ # if the command passes extract the list of stages
+ result = cmd.results()
+ if result == util.SUCCESS:
+ jobDirs = self.extract_stages(self.observer.getStdout())
+ print(f"this is jobDirs list: {jobDirs}")
+ self.configDict = {}
+ print(f"Remote cmd stdout: {self.observer.getStdout()}")
+ print(f"cmd.results: {cmd.results()}")
+ for stage in jobDirs:
+ jobDirPath = f"{CI_JOBS_PATH}/{stage}"
+ observer = logobserver.BufferLogObserver()
+ self.addLogObserver('stdio', observer)
+ cmd1 = yield self.makeRemoteShellCommand(
+ command=["cat", f"{jobDirPath}/config.ini"])
+ yield self.runCommand(cmd1)
+ print(f"cmd1.results: {cmd1.results()}")
+ print(f"Second command stdout: {observer.getStdout()}")
+ print(f"Current stage: {stage}")
+ print(jobDirPath)
+ self.configDict.update(
+ handle_job_config(
+ jobDirPath, stage, self.REPO_NAME,
+ observer.getStdout(), cmd1.results()))
+ print(self.configDict)
+ # create a container step for each stage and
+ # add them to the build
+ convstr2bool = ast.literal_eval
+ self.build.addStepsAfterCurrentStep([
+ container_add_step(
+ convstr2bool(
+ str(self.configDict[stage]["HALT_ON_FAILURE"])),
+ convstr2bool(
+ str(self.configDict[stage]["WARN_ON_FAILURE"])),
+ convstr2bool(
+ str(self.configDict[stage]["CONTAINER_BUILD"])),
+ self.configDict[stage]["CONTAINER_NAME"],
+ container_factory,
+ CONTAINER_WORKDIR,
+ stage,
+ self.configDict[stage]["CONTAINER_ARCH"],
+ f"contrib/ci/jobs/{stage}/job.sh")
+ for stage in jobDirs
+ ])
+
+ return result
+
+
+# List of repos to add to container factory.
+container_repos = ["git.taler.net/wallet-core",
+ "git.taler.net/libeufin",
+ "git.taler.net/merchant",
+ "git.taler.net/exchange",
+ "git.taler.net/docs",
+ "git.taler.net/taler-ops-www",
+ "git.taler.net/taler-systems-www",
+ "git.taler.net/anastasis-www",
+ "git.taler.net/tutorials",
+ "git.taler.net/sync",
+ "git.taler.net/challenger",
+ "git.taler.net/sandcastle-ng",
+ "git.gnunet.org/gnunet"]
+
+for repo in container_repos:
+
+ # Prepare to read job configs
+ ini = configparser.ConfigParser()
+ ini.optionxform = str
+
+ # Factory-wide variables
+ REPO_NAME = repo.rsplit('/', 1)[1]
+ REPO_URL = "https://" + repo + ".git"
+ CONTAINER_WORKDIR = f"/home/container-worker/workspace/{REPO_NAME}"
+ CI_JOBS_PATH = f"{CONTAINER_WORKDIR}/contrib/ci/jobs"
+
+ # Create a factory
+ container_factory = util.BuildFactory()
+ container_factory.workdir = CONTAINER_WORKDIR
+
+ # Setup workspace
+ container_factory.addStep(ShellCommand(
+ name="workspace",
+ descriptionDone="Workspace directory check",
+ command=f"test -d {CONTAINER_WORKDIR} && podman run --rm --volume {CONTAINER_WORKDIR}:/workdir docker.io/library/debian:bookworm-slim chmod -R 777 /workdir || mkdir -p {CONTAINER_WORKDIR}",
haltOnFailure=True,
- # Needed to test the 'demo' deployment.
- env={"DEPLOYMENT": "demo"}
- )
-)
+ ))
-WORKERS.append(worker.Worker("demo-worker", "demo-pass"))
-DEMO_SERVICES_INTEGRATIONTEST_FACTORY = create_factory_with_deployment()
-DEMO_SERVICES_INTEGRATIONTEST_FACTORY.addStep(
- ShellCommand(
- name="demo services checker",
- description="Checking demo services are online",
- descriptionDone="Demo services are online!.",
- command=["./checks.sh"],
- workdir="../../deployment/buildbot",
+ # Ensure repo is cloned or clean.
+ # Git() will clone repo if it doesn't exist.
+ # Method clobber removes directory and makes a fresh clone.
+ # Shallow set to "True" defaults to a depth of 1.
+ # Will checkout value of "branch" property from job properties.
+ # https://docs.buildbot.net/latest/manual/configuration/steps/source_git.html
+ container_factory.addStep(Git(
+ name="git",
+ repourl=REPO_URL,
+ branch=util.Interpolate('%(src::branch)s'),
+ mode='full',
+ method='clobber',
+ shallow=True,
+ submodules=True,
haltOnFailure=True,
- # Needed to test the 'demo' deployment.
- env={"DEPLOYMENT": "demo"}
- )
-)
-BUILDERS.append(util.BuilderConfig(
- name="demo-services-checker-builder",
- workernames="demo-worker",
- factory=DEMO_SERVICES_INTEGRATIONTEST_FACTORY
-))
-BUILDERS.append(util.BuilderConfig(
- name="check-tips-builder",
- workernames="tips-checker-worker",
- factory=DEMO_CHECK_TIPS_FACTORY
-))
-EMAIL_ALERTS.append("demo-services-checker-builder")
-
-# We check demo once per hour.
-SCHEDULERS.append(schedulers.Periodic(
- name="demo-services-checker-scheduler",
- periodicBuildTimer=60 * 60, # 1 hour
- builderNames=["demo-services-checker-builder"]
-))
-
-
-################ 12: 'demo health wallet-cli check' JOB ###################################
-
-
-##
-# health checks performed by wallet-cli for demo
-WORKERS.append(worker.Worker("taler-demo-healthcheck", "taler-demo-healthcheck-pass"))
-
-TALER_DEMO_HEALTHCHECK_FACTORY = create_factory_with_deployment()
-# For the moment, the health-check uses whatever wallet
-# was (possibly manually) installed beforehand. This is
-# not necessarily a problem, since demo-deployments do never
-# update code automatically.
-TALER_DEMO_HEALTHCHECK_FACTORY.addStep(
- ShellCommand(
- name="test-withdraw-and-spend",
- description="Running wallet spend tests",
- descriptionDone="Test correctly run",
- workdir="../../deployment/buildbot",
- command=["./demo-healthchecks.sh"],
- )
-)
-BUILDERS.append(util.BuilderConfig(
- name="taler-demo-healthcheck-builder",
- workernames=["taler-demo-healthcheck"],
- factory=TALER_DEMO_HEALTHCHECK_FACTORY
-))
-
-################ 13: 'test health wallet-cli check' JOB ###################################
+ ))
-##
-# health checks performed by wallet-cli for test
-WORKERS.append(worker.Worker("taler-test-healthcheck", "taler-test-healthcheck-pass"))
-
-TALER_TEST_HEALTHCHECK_FACTORY = create_factory_with_deployment()
-TALER_TEST_HEALTHCHECK_FACTORY.addStep(git_step("git://git.taler.net/wallet-core.git"))
-TALER_TEST_HEALTHCHECK_FACTORY.addStep(
- ShellCommand(
- name="fetch",
- description="Running yarn install",
- descriptionDone="Correctly installed",
- command=["npm", "install", "-g", "--prefix", "$HOME", "@gnu-taler/taler-wallet-cli"],
- workdir="build/"
- )
-)
-TALER_TEST_HEALTHCHECK_FACTORY.addStep(
- ShellCommand(
- name="test-withdraw",
- description="Running wallet withdraw tests",
- descriptionDone="Test correctly run",
- command=["timeout", "--preserve-status", "5m", "./$HOME/bin/taler-wallet-cli", "integrationtest", "--verbose", "-b", "https://bank.test.taler.net", "-w", "TESTKUDOS:10"],
- workdir="build/",
- )
-)
-TALER_TEST_HEALTHCHECK_FACTORY.addStep(
- ShellCommand(
- name="test-spend",
- description="Running wallet spend tests",
- descriptionDone="Test correctly run",
- command=["timeout", "--preserve-status", "5m", "./$HOME/bin/taler-wallet-cli", "integrationtest", "--verbose", "-b", "https://bank.test.taler.net", "-s", "TESTKUDOS:4"],
- workdir="build/",
- )
-)
-BUILDERS.append(util.BuilderConfig(
- name="taler-test-healthcheck-builder",
- workernames=["taler-test-healthcheck"],
- factory=TALER_TEST_HEALTHCHECK_FACTORY
-))
+ container_factory.addStep(GenerateStagesCommand(
+ REPO_NAME,
+ name="Generate build stages",
+ command=["ls", CI_JOBS_PATH],
+ haltOnFailure=True))
+ BUILDERS.append(util.BuilderConfig(
+ name=f"{REPO_NAME}-builder",
+ workernames=["container-worker"],
+ factory=container_factory
+ ))
-WALLETCHANGE_TRIGGERS.append("taler-test-healthcheck-builder")
+ # Only enable this scheduler for debugging!
+ # Will run builders with 1 minute of waiting inbetween builds
+ # SCHEDULERS.append(schedulers.Periodic(
+ # name=f"{REPO_NAME}-minutely",
+ # builderNames=[f"{REPO_NAME}-builder"],
+ # periodicBuildTimer=60
+ # ))
+
+ SCHEDULERS.append(schedulers.SingleBranchScheduler(
+ name=f"{REPO_NAME}-container-scheduler",
+ change_filter=util.ChangeFilter(
+ branch="master",
+ project_re=f"({REPO_NAME})"
+ ),
+ treeStableTimer=30,
+ builderNames=[f"{REPO_NAME}-builder"]
+ ))
+ SERVICES.append(reporters.MailNotifier(
+ fromaddr="buildbot@taler.net",
+ # notify from pass to fail, and viceversa.
+ generators=[BuildStatusGenerator(
+ mode=('change','problem','failing','exception',),
+ builders=[f"{REPO_NAME}-builder",],
+ message_formatter=reporters.MessageFormatter(
+ template_type='plain',
+ wantSteps=True,
+ wantLogs=True
+ ),
+ add_logs=True,
+ )],
+ sendToInterestedUsers=False,
+ useTls=False,
+ relayhost="localhost",
+ smtpPort=25,
+ dumpMailsToLog=True,
+ extraRecipients=[f"ci-{REPO_NAME}@taler.net"]
+ ))
-################ 14: upgrade test deployment JOB ###################################
-##
-# testing buildbot using the "buildslavetest" user (for no specific reason except it exists)
-# Location: /home/buidlslavetest @ taler.net
-WORKERS.append(worker.Worker("buildslavetest-worker", "Gei8naiyox4uuhoo"))
+############## sandcastle-ng Scheduler #################################
-BUILD_FACTORY = create_factory_with_deployment()
-BUILD_FACTORY.addStep(
- ShellCommand(
- name="build",
- description="Building all Taler codebase.",
- descriptionDone="Taler built.",
- command=["./build.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True
- )
-)
-BUILD_FACTORY.addStep(
- ShellCommand(
- name="keys generation and sign",
- description="Generating exchange keys, and auditor-sign them.",
- descriptionDone="Exchange keys generated, and auditor-signed.",
- command=["./keys.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- env={'BRANCH': util.Property("branch")}
- )
-)
-BUILD_FACTORY.addStep(
- ShellCommand(
- name="restart services",
- description="Restarting inactive blue-green party.",
- descriptionDone="Restarting Taler.",
- command=["./restart.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- env={'BRANCH': util.Property("branch")}
- )
-)
-BUILD_FACTORY.addStep(
- ShellCommand(
- name="check services correctly restarted",
- description="Checking services are correctly restarted.",
- descriptionDone="All services are correctly restarted.",
- command=["./checks.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- env={'DEPLOYMENT': "test"}
- )
-)
-BUILD_FACTORY.addStep(
- ShellCommand(
- name="create instances",
- description="Create merchant instances.",
- descriptionDone="All the instances got created.",
- command=["./create_instances.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- env={'DEPLOYMENT': "test"}
- )
-)
-
-BUILD_FACTORY.addStep(
- ShellCommand(
- name="activate tip reserve",
- description="Instruct the merchant to pay and authorize the tip reserve.",
- descriptionDone="The tip reserve got payed and authorized.",
- command=["./create_tip_reserve.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- env={'DEPLOYMENT': "test"}
- )
-)
-
-BUILDERS.append(util.BuilderConfig(
- name="test-builder", workernames=["test-worker"], factory=BUILD_FACTORY
+# Periodic scheduler for sandcastle-ng.
+# Runs every 2 hours (60 seconds * 60 * 2)
+SCHEDULERS.append(schedulers.Periodic(
+ name="sandcastle-ng-periodic-scheduler",
+ builderNames=["sandcastle-ng-builder"],
+ change_filter=util.ChangeFilter(branch="master"),
+ periodicBuildTimer=60*60*2
))
-EMAIL_ALERTS.append("test-builder")
-
-# Scheduler that triggers if anything changes
-CODECHANGE_TRIGGERS.append("test-builder")
-CODECHANGE_TRIGGERS.append("wallet-builder")
-
-################ 15: Python linting JOB ###################################
-
-# This job is noat active / complete yet!
-def lint_dispatcher(project):
- return "./lint_%s.sh" % project
-
-LINT_FACTORY = util.BuildFactory()
-LINT_FACTORY.addStep(
- ShellCommand(
- name="Python linter",
- description="linting Python",
- descriptionDone="linting done",
- command=util.Transform(lint_dispatcher, util.Property("project")),
- workdir="../../deployment/taler-build"
- )
-)
-# This builder is NOT ACTIVE!
-#BUILDERS.append(util.BuilderConfig(
-# name="lint-builder",
-# workernames=["lint-worker"],
-# factory=LINT_FACTORY
-#))
-
-# Consider adding other Python parts, like the various frontends.
-# NOTE: scheduler is NOT active! (commented out below)
-#SCHEDULERS.append(schedulers.SingleBranchScheduler(
-# name="lint-scheduler",
-# change_filter=util.ChangeFilter(
-# branch="master", project_re="(bank|donations|survey|blog)"
-# ),
-# treeStableTimer=None,
-# builderNames=["lint-builder"]
-#))
-
-
-################ 16: Selenium JOB ###################################
-
-# This job is noat active!
-SELENIUM_FACTORY = create_factory_with_deployment()
-SELENIUM_FACTORY.addStep(
- ShellCommand(
- name="selenium",
- description="Headless browser test",
- descriptionDone="Test finished",
- command=["launch_selenium_test"],
- env={'PATH': "${HOME}/local/bin:/usr/lib/chromium:${PATH}"}
- )
-)
-
-#BUILDERS.append(util.BuilderConfig(
-# name="selenium-builder",
-# workernames=["selenium-worker"],
-# factory=SELENIUM_FACTORY
-#))
-
################ 99: debug stuff JOB ###################################
@@ -1175,21 +672,6 @@ BUILDER_LIST = map(lambda builder: builder.name, BUILDERS)
####### GENERAL PURPOSE BUILDBOT SERVICES #######################
-SERVICES.append(reporters.IRC(
- "irc.eu.libera.net",
- "taler-bb",
- useColors=False,
- channels=[{
- "channel": "#taler"
- }],
- password="taler-bb-pass19",
- notify_events={
- 'exception': 1,
- 'successToFailure': 1,
- 'failureToSuccess': 1
- }
-))
-
SERVICES.append(reporters.MailNotifier(
fromaddr="testbuild@taler.net",
# notify from pass to fail, and viceversa.
@@ -1204,7 +686,11 @@ SERVICES.append(reporters.MailNotifier(
add_logs=True,
)],
sendToInterestedUsers=False,
- extraRecipients=["buildfailures@taler.net"]
+ useTls=False,
+ relayhost="localhost",
+ smtpPort=25,
+ dumpMailsToLog=True,
+ extraRecipients=BUILDER_EMAIL_ADDRESSES
))
@@ -1276,7 +762,16 @@ c["db"] = {
# the 'change_source' setting tells the buildmaster how it should
# find out about source code changes.
-c["change_source"] = [changes.PBChangeSource(user="allcs", passwd="allcs")]
+pbSource = PBChangeSource(user="allcs", passwd="allcs")
+
+
+pollGnunetSource = changes.GitPoller(repourl='https://git.gnunet.org/gnunet.git',
+ branches=True,
+ pollInterval=300,
+ pollAtLaunch=True,
+ project="gnunet")
+
+c["change_source"] = [pollGnunetSource, pbSource]
# 'protocols' contains information about protocols which master
# will use for communicating with workers. You must define at
diff --git a/buildbot/restart.sh b/buildbot/restart.sh
index 03d55a3..c6c4b25 100755
--- a/buildbot/restart.sh
+++ b/buildbot/restart.sh
@@ -2,10 +2,12 @@
set -eu
-DATE=`date`
-echo "Restarting Taler deployment at $DATE"
-source "${HOME}/activate"
-taler-deployment-restart
+cd ${HOME}/deployment/docker/demo
-echo "Deployment ready"
-exit 0
+export DOCKER_HOST=unix://${XDG_RUNTIME_DIR}/docker.sock
+export TALER_DEPLOYMENT_CONFIG=${HOME}/deployment.conf
+docker-compose stop
+docker-compose down -v
+docker-compose up --remove-orphans -d
+# Make early errors shown on the Web console:
+timeout 5m docker-compose logs --follow || true
diff --git a/buildbot/run-coverage-helper.sh b/buildbot/run-coverage-helper.sh
index 7448514..e85544a 100755
--- a/buildbot/run-coverage-helper.sh
+++ b/buildbot/run-coverage-helper.sh
@@ -7,7 +7,7 @@ set -eu
ORIG=`pwd`
-for codebase in exchange merchant sync anastasis; do
+for codebase in exchange merchant sync ; do
TOP="$HOME/sources/${codebase}/"
REPORT_DIR="$HOME/sources/${codebase}/coverage_report"
lcov --no-external --initial -d $TOP -z
diff --git a/buildbot/update-sources.sh b/buildbot/update-sources.sh
index 8c95340..9fc7b80 100755
--- a/buildbot/update-sources.sh
+++ b/buildbot/update-sources.sh
@@ -22,10 +22,12 @@ do
git -C $P clean -fdx
git -C $P fetch
git -C $P reset --hard origin/master
+ git submodule sync --recursive
git submodule update --init
else
cd $HOME/sources
git clone ${BASE_URL}$n
+ git submodule sync --recursive
git submodule update --init
cd -
fi
diff --git a/buildbot/with-postgres.sh b/buildbot/with-postgres.sh
index a4552d9..ba0d83a 100755
--- a/buildbot/with-postgres.sh
+++ b/buildbot/with-postgres.sh
@@ -14,7 +14,6 @@ export PGHOST=localhost
$PG_DIR/pg_ctl -D $TMP_DB_DIR/ -o "-c unix_socket_directories=$HOME" -l logfile start
$PG_DIR/createdb talercheck
$PG_DIR/createdb synccheck
-$PG_DIR/createdb anastasischeck
# Run the commands
"$@"
diff --git a/codespell/dictionary.txt b/codespell/dictionary.txt
index e42fe3c..702ac5c 100644
--- a/codespell/dictionary.txt
+++ b/codespell/dictionary.txt
@@ -1,32 +1,47 @@
# List of "words" that codespell should ignore in our sources.
-ect
-ba
-ifset
-openin
-fo
-complet
-ist
+#
+# Note: The word sensitivity depends on how the to-be-ignored word is
+# spelled in codespell_lib/data/dictionary.txt. F.e. if there is a word
+# 'foo' and you add 'Foo' _here_, codespell will continue to complain
+# about 'Foo'.
+#
+Nam
+BRE
+ND
Nd
+TE
TEH
-onl
UPDATEing
-BRE
-TE
-te
-ND
+WAN
aci
-doas
acn
-tha
-ths
-nd
-WAN
-wan
+ba
+bre
+cant
+clen
+complet
+doas
+ect
+ehr
+fo
hel
-te
+ifset
+ist
keypair
-sie
+nd
+onl
+openin
ot
-bre
+ser
+sie
+som
+sover
+te
+te
teh
+tha
+ths
updateing
+vie
+wan
+wih
diff --git a/depo/.config/systemd/user/bitcoind.env b/depo/.config/systemd/user/bitcoind.env
new file mode 100644
index 0000000..8236eb2
--- /dev/null
+++ b/depo/.config/systemd/user/bitcoind.env
@@ -0,0 +1,2 @@
+PASSWORD=password
+PATH=/home/depolymerization/.local/bin:/usr/bin:/bin
diff --git a/depo/.config/systemd/user/bitcoind.service b/depo/.config/systemd/user/bitcoind.service
new file mode 100644
index 0000000..fe825ad
--- /dev/null
+++ b/depo/.config/systemd/user/bitcoind.service
@@ -0,0 +1,12 @@
+[Unit]
+Description=Bitcoind
+
+[Service]
+ExecStart=/home/depolymerization/bitcoin/bin/bitcoind -rpcservertimeout=0
+StandardOutput=append:/home/depolymerization/.taler/logs/bitcoind.log
+StandardError=append:/home/depolymerization/.taler/logs/bitcoind.log
+EnvironmentFile=/home/depolymerization/.config/systemd/user/bitcoind.env
+
+
+[Install]
+WantedBy=multi-user.target
diff --git a/depo/.config/systemd/user/btc-wire.service b/depo/.config/systemd/user/btc-wire.service
new file mode 100644
index 0000000..046cbd3
--- /dev/null
+++ b/depo/.config/systemd/user/btc-wire.service
@@ -0,0 +1,14 @@
+[Unit]
+Description=Bitcoin wire adapter
+After=bitcoind.service
+
+
+
+[Service]
+ExecStart=/home/depolymerization/.cargo/bin/btc-wire -c /home/depolymerization/.taler/config/taler.conf
+StandardOutput=append:/home/depolymerization/.taler/logs/btc-wire.log
+StandardError=append:/home/depolymerization/.taler/logs/btc-wire.log
+EnvironmentFile=/home/depolymerization/.config/systemd/user/bitcoind.env
+
+[Install]
+WantedBy=multi-user.target
diff --git a/depo/.config/systemd/user/multi-user.target.wants/bitcoind.service b/depo/.config/systemd/user/multi-user.target.wants/bitcoind.service
new file mode 120000
index 0000000..a315587
--- /dev/null
+++ b/depo/.config/systemd/user/multi-user.target.wants/bitcoind.service
@@ -0,0 +1 @@
+/home/depolymerization/.config/systemd/user/bitcoind.service \ No newline at end of file
diff --git a/depo/.config/systemd/user/multi-user.target.wants/btc-wire.service b/depo/.config/systemd/user/multi-user.target.wants/btc-wire.service
new file mode 120000
index 0000000..2449eec
--- /dev/null
+++ b/depo/.config/systemd/user/multi-user.target.wants/btc-wire.service
@@ -0,0 +1 @@
+/home/depolymerization/.config/systemd/user/btc-wire.service \ No newline at end of file
diff --git a/depo/.config/systemd/user/multi-user.target.wants/nginx.service b/depo/.config/systemd/user/multi-user.target.wants/nginx.service
new file mode 120000
index 0000000..8a64ca4
--- /dev/null
+++ b/depo/.config/systemd/user/multi-user.target.wants/nginx.service
@@ -0,0 +1 @@
+/home/depolymerization/.config/systemd/user/nginx.service \ No newline at end of file
diff --git a/depo/.config/systemd/user/multi-user.target.wants/taler-local-blog.service b/depo/.config/systemd/user/multi-user.target.wants/taler-local-blog.service
new file mode 120000
index 0000000..f4d8ffe
--- /dev/null
+++ b/depo/.config/systemd/user/multi-user.target.wants/taler-local-blog.service
@@ -0,0 +1 @@
+/home/depolymerization/.config/systemd/user/taler-local-blog.service \ No newline at end of file
diff --git a/depo/.config/systemd/user/multi-user.target.wants/taler-local-donations.service b/depo/.config/systemd/user/multi-user.target.wants/taler-local-donations.service
new file mode 120000
index 0000000..e461950
--- /dev/null
+++ b/depo/.config/systemd/user/multi-user.target.wants/taler-local-donations.service
@@ -0,0 +1 @@
+/home/depolymerization/.config/systemd/user/taler-local-donations.service \ No newline at end of file
diff --git a/depo/.config/systemd/user/multi-user.target.wants/taler-local-exchange.target b/depo/.config/systemd/user/multi-user.target.wants/taler-local-exchange.target
new file mode 120000
index 0000000..3a950c3
--- /dev/null
+++ b/depo/.config/systemd/user/multi-user.target.wants/taler-local-exchange.target
@@ -0,0 +1 @@
+/home/depolymerization/.config/systemd/user/taler-local-exchange.target \ No newline at end of file
diff --git a/depo/.config/systemd/user/multi-user.target.wants/taler-local-landing.service b/depo/.config/systemd/user/multi-user.target.wants/taler-local-landing.service
new file mode 120000
index 0000000..83b3cab
--- /dev/null
+++ b/depo/.config/systemd/user/multi-user.target.wants/taler-local-landing.service
@@ -0,0 +1 @@
+/home/depolymerization/.config/systemd/user/taler-local-landing.service \ No newline at end of file
diff --git a/depo/.config/systemd/user/multi-user.target.wants/taler-local-merchant-backend.service b/depo/.config/systemd/user/multi-user.target.wants/taler-local-merchant-backend.service
new file mode 120000
index 0000000..ed207fb
--- /dev/null
+++ b/depo/.config/systemd/user/multi-user.target.wants/taler-local-merchant-backend.service
@@ -0,0 +1 @@
+/home/depolymerization/.config/systemd/user/taler-local-merchant-backend.service \ No newline at end of file
diff --git a/depo/.config/systemd/user/multi-user.target.wants/wire-gateway.service b/depo/.config/systemd/user/multi-user.target.wants/wire-gateway.service
new file mode 120000
index 0000000..9bf47cf
--- /dev/null
+++ b/depo/.config/systemd/user/multi-user.target.wants/wire-gateway.service
@@ -0,0 +1 @@
+/home/depolymerization/.config/systemd/user/wire-gateway.service \ No newline at end of file
diff --git a/depo/.config/systemd/user/nginx.service b/depo/.config/systemd/user/nginx.service
new file mode 100644
index 0000000..f7165de
--- /dev/null
+++ b/depo/.config/systemd/user/nginx.service
@@ -0,0 +1,28 @@
+# Stop dance for nginx
+# =======================
+#
+# ExecStop sends SIGSTOP (graceful stop) to the nginx process.
+# If, after 5s (--retry QUIT/5) nginx is still running, systemd takes control
+# and sends SIGTERM (fast shutdown) to the main process.
+# After another 5s (TimeoutStopSec=5), and if nginx is alive, systemd sends
+# SIGKILL to all the remaining processes in the process group (KillMode=mixed).
+#
+# nginx signals reference doc:
+# http://nginx.org/en/docs/control.html
+#
+[Unit]
+Description=nginx proxy for depolymerization
+Documentation=man:nginx(8)
+
+[Service]
+Type=forking
+PIDFile=/run/nginx.pid
+ExecStartPre=/usr/sbin/nginx -c /home/depolymerization/.taler/config/nginx.conf -t -q -g 'daemon on; master_process on;'
+ExecStart=/usr/sbin/nginx -c /home/depolymerization/.taler/config/nginx.conf -g 'daemon on; master_process on;'
+ExecReload=/usr/sbin/nginx -c /home/depolymerization/.taler/config/nginx.conf -g 'daemon on; master_process on;' -s reload
+ExecStop=-/sbin/start-stop-daemon -c /home/depolymerization/.taler/config/nginx.conf --quiet --stop --retry QUIT/5 --pidfile /run/nginx.pid
+TimeoutStopSec=5
+KillMode=mixed
+
+[Install]
+WantedBy=multi-user.target
diff --git a/depo/.config/systemd/user/taler-local-blog.service b/depo/.config/systemd/user/taler-local-blog.service
new file mode 100644
index 0000000..5d6401e
--- /dev/null
+++ b/depo/.config/systemd/user/taler-local-blog.service
@@ -0,0 +1,11 @@
+[Unit]
+Description=Blog that accepts Taler payments.
+
+[Service]
+ExecStart=/home/depolymerization/.local/bin/taler-merchant-demos blog -c /home/depolymerization/.taler/config/taler.conf
+StandardOutput=append:/home/depolymerization/.taler/logs/taler-merchant-demos.log
+StandardError=append:/home/depolymerization/.taler/logs/taler-merchant-demos.log
+EnvironmentFile=/home/depolymerization/.config/systemd/user/taler-local-frontends.env
+
+[Install]
+WantedBy=multi-user.target \ No newline at end of file
diff --git a/depo/.config/systemd/user/taler-local-donations.service b/depo/.config/systemd/user/taler-local-donations.service
new file mode 100644
index 0000000..e2c2ad2
--- /dev/null
+++ b/depo/.config/systemd/user/taler-local-donations.service
@@ -0,0 +1,11 @@
+[Unit]
+Description=Donation Website that accepts Taler payments.
+
+[Service]
+ExecStart=/home/depolymerization/.local/bin/taler-merchant-demos donations -c /home/depolymerization/.taler/config/taler.conf
+StandardOutput=append:/home/depolymerization/.taler/logs/taler-merchant-demos.log
+StandardError=append:/home/depolymerization/.taler/logs/taler-merchant-demos.log
+EnvironmentFile=/home/depolymerization/.config/systemd/user/taler-local-frontends.env
+
+[Install]
+WantedBy=multi-user.target \ No newline at end of file
diff --git a/depo/.config/systemd/user/taler-local-exchange-aggregator.service b/depo/.config/systemd/user/taler-local-exchange-aggregator.service
new file mode 100644
index 0000000..9e961f1
--- /dev/null
+++ b/depo/.config/systemd/user/taler-local-exchange-aggregator.service
@@ -0,0 +1,11 @@
+[Unit]
+Description=Taler Exchange Aggregator
+PartOf=taler-local-exchange.target
+
+
+[Service]
+ExecStart=/home/depolymerization/.local/bin/taler-exchange-aggregator --kyc-off -L DEBUG -c /home/depolymerization/.taler/config/taler.conf
+StandardOutput=append:/home/depolymerization/.taler/logs/taler-exchange-aggregator.log
+StandardError=append:/home/depolymerization/.taler/logs/taler-exchange-aggregator.log
+Restart=always
+RestartSec=100ms
diff --git a/depo/.config/systemd/user/taler-local-exchange-closer.service b/depo/.config/systemd/user/taler-local-exchange-closer.service
new file mode 100644
index 0000000..47c3ab9
--- /dev/null
+++ b/depo/.config/systemd/user/taler-local-exchange-closer.service
@@ -0,0 +1,11 @@
+[Unit]
+Description=Taler Exchange Closer
+PartOf=taler-local-exchange.target
+
+[Service]
+ExecStart=/home/depolymerization/.local/bin/taler-exchange-closer -L DEBUG -c /home/depolymerization/.taler/config/taler.conf
+StandardOutput=append:/home/depolymerization/.taler/logs/taler-exchange-closer.log
+StandardError=append:/home/depolymerization/.taler/logs/taler-exchange-closer.log
+
+Restart=always
+RestartSec=100ms
diff --git a/depo/.config/systemd/user/taler-local-exchange-httpd.service b/depo/.config/systemd/user/taler-local-exchange-httpd.service
new file mode 100644
index 0000000..fa19bb1
--- /dev/null
+++ b/depo/.config/systemd/user/taler-local-exchange-httpd.service
@@ -0,0 +1,19 @@
+[Unit]
+Description=Taler Exchange HTTP daemon
+Requires=taler-local-exchange-httpd.socket taler-local-exchange-secmod-cs.service taler-local-exchange-secmod-rsa.service taler-local-exchange-secmod-eddsa.service
+After=taler-local-exchange-secmod-cs.service taler-local-exchange-secmod-rsa.service taler-local-exchange-secmod-eddsa.service
+PartOf=taler-local-exchange.target
+
+
+[Service]
+ExecStart=/home/depolymerization/.local/bin/taler-exchange-httpd -L DEBUG -c /home/depolymerization/.taler/config/taler.conf
+StandardOutput=append:/home/depolymerization/.taler/logs/taler-exchange-httpd.log
+StandardError=append:/home/depolymerization/.taler/logs/taler-exchange-httpd.log
+
+Restart=always
+# Do not dally on restarts.
+RestartSec=1ms
+
+[Install]
+WantedBy=multi-user.target
+
diff --git a/depo/.config/systemd/user/taler-local-exchange-httpd.socket b/depo/.config/systemd/user/taler-local-exchange-httpd.socket
new file mode 100644
index 0000000..96ac2c0
--- /dev/null
+++ b/depo/.config/systemd/user/taler-local-exchange-httpd.socket
@@ -0,0 +1,15 @@
+[Unit]
+Description=Taler Exchange Socket
+PartOf=taler-local-exchange-httpd.service
+
+[Socket]
+ListenStream=/home/depolymerization/.taler/sockets/exchange.sock
+Accept=no
+Service=taler-local-exchange-httpd.service
+SocketMode=0660
+SocketUser=depolymerization
+SocketGroup=depolymerization
+
+
+[Install]
+WantedBy=sockets.target
diff --git a/depo/.config/systemd/user/taler-local-exchange-secmod-cs.service b/depo/.config/systemd/user/taler-local-exchange-secmod-cs.service
new file mode 100644
index 0000000..1e2e64c
--- /dev/null
+++ b/depo/.config/systemd/user/taler-local-exchange-secmod-cs.service
@@ -0,0 +1,11 @@
+[Unit]
+Description=Taler Exchange CS security module
+PartOf=taler-local-exchange.target
+
+
+[Service]
+ExecStart=/home/depolymerization/.local/bin/taler-exchange-secmod-cs -L DEBUG -c /home/depolymerization/.taler/config/taler.conf
+StandardOutput=append:/home/depolymerization/.taler/logs/taler-exchange-secmod-cs.log
+StandardError=append:/home/depolymerization/.taler/logs/taler-exchange-secmod-cs.log
+Restart=always
+RestartSec=100ms
diff --git a/depo/.config/systemd/user/taler-local-exchange-secmod-eddsa.service b/depo/.config/systemd/user/taler-local-exchange-secmod-eddsa.service
new file mode 100644
index 0000000..dc8f9f6
--- /dev/null
+++ b/depo/.config/systemd/user/taler-local-exchange-secmod-eddsa.service
@@ -0,0 +1,11 @@
+[Unit]
+Description=Taler Exchange EDDSA security module
+PartOf=taler-local-exchange.target
+
+
+[Service]
+ExecStart=/home/depolymerization/.local/bin/taler-exchange-secmod-eddsa -L DEBUG -c /home/depolymerization/.taler/config/taler.conf
+StandardOutput=append:/home/depolymerization/.taler/logs/taler-exchange-secmod-eddsa.log
+StandardError=append:/home/depolymerization/.taler/logs/taler-exchange-secmod-eddsa.log
+Restart=always
+RestartSec=100ms
diff --git a/depo/.config/systemd/user/taler-local-exchange-secmod-rsa.service b/depo/.config/systemd/user/taler-local-exchange-secmod-rsa.service
new file mode 100644
index 0000000..e7df100
--- /dev/null
+++ b/depo/.config/systemd/user/taler-local-exchange-secmod-rsa.service
@@ -0,0 +1,11 @@
+[Unit]
+Description=Taler Exchange RSA security module
+PartOf=taler-local-exchange.target
+
+
+[Service]
+ExecStart=/home/depolymerization/.local/bin/taler-exchange-secmod-rsa -L DEBUG -c /home/depolymerization/.taler/config/taler.conf
+StandardOutput=append:/home/depolymerization/.taler/logs/taler-exchange-secmod-rsa.log
+StandardError=append:/home/depolymerization/.taler/logs/taler-exchange-secmod-rsa.log
+Restart=always
+RestartSec=100ms
diff --git a/depo/.config/systemd/user/taler-local-exchange-transfer.service b/depo/.config/systemd/user/taler-local-exchange-transfer.service
new file mode 100644
index 0000000..9aeef61
--- /dev/null
+++ b/depo/.config/systemd/user/taler-local-exchange-transfer.service
@@ -0,0 +1,11 @@
+[Unit]
+Description=Taler Exchange Transfer
+PartOf=taler-local-exchange.target
+
+
+[Service]
+ExecStart=/home/depolymerization/.local/bin/taler-exchange-transfer -L DEBUG -c /home/depolymerization/.taler/config/taler.conf
+StandardOutput=append:/home/depolymerization/.taler/logs/taler-exchange-transfer.log
+StandardError=append:/home/depolymerization/.taler/logs/taler-exchange-transfer.log
+Restart=always
+RestartSec=100ms
diff --git a/depo/.config/systemd/user/taler-local-exchange-wirewatch.service b/depo/.config/systemd/user/taler-local-exchange-wirewatch.service
new file mode 100644
index 0000000..1b1098b
--- /dev/null
+++ b/depo/.config/systemd/user/taler-local-exchange-wirewatch.service
@@ -0,0 +1,11 @@
+[Unit]
+Description=Taler Exchange Wirewatch
+PartOf=taler-local-exchange.target
+
+[Service]
+ExecStart=/home/depolymerization/.local/bin/taler-exchange-wirewatch -L DEBUG -c /home/depolymerization/.taler/config/taler.conf
+StandardOutput=append:/home/depolymerization/.taler/logs/taler-exchange-wirewatch.log
+StandardError=append:/home/depolymerization/.taler/logs/taler-exchange-wirewatch.log
+
+Restart=always
+RestartSec=100ms
diff --git a/depo/.config/systemd/user/taler-local-exchange.target b/depo/.config/systemd/user/taler-local-exchange.target
new file mode 100644
index 0000000..2235564
--- /dev/null
+++ b/depo/.config/systemd/user/taler-local-exchange.target
@@ -0,0 +1,11 @@
+[Unit]
+Description=GNU Taler exchange
+
+Wants=taler-local-exchange-httpd.service
+Wants=taler-local-exchange-wirewatch.service
+Wants=taler-local-exchange-aggregator.service
+Wants=taler-local-exchange-closer.service
+Wants=taler-local-exchange-transfer.service
+
+[Install]
+WantedBy=multi-user.target
diff --git a/depo/.config/systemd/user/taler-local-frontends.env b/depo/.config/systemd/user/taler-local-frontends.env
new file mode 100644
index 0000000..c884b6c
--- /dev/null
+++ b/depo/.config/systemd/user/taler-local-frontends.env
@@ -0,0 +1,7 @@
+PATH=/home/depolymerization/geth:/home/depolymerization/bitcoin/bin:/usr/lib/postgresql/13/bin:/home/depolymerization/.vscode-server/bin/c722ca6c7eed3d7987c0d5c3df5c45f6b15e77d1/bin/remote-cli:/home/depolymerization/.local/bin:/home/depolymerization/geth:/home/depolymerization/bitcoin/bin:/usr/lib/postgresql/13/bin:/home/depolymerization/.cargo/bin:/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games
+TALER_CONFIG_FILE=/home/depolymerization/.taler/config/taler.conf
+TALER_ENV_URL_INTRO=http://localhost:8080/landing/
+TALER_ENV_URL_BANK=http://localhost:8080/btc-wire/
+TALER_ENV_URL_MERCHANT_BLOG=http://localhost:8080/blog/
+TALER_ENV_URL_MERCHANT_DONATIONS=http://localhost:8080/donations/
+TALER_ENV_URL_MERCHANT_SURVEY=http://localhost:8080/survey/
diff --git a/depo/.config/systemd/user/taler-local-landing.service b/depo/.config/systemd/user/taler-local-landing.service
new file mode 100644
index 0000000..4239e4c
--- /dev/null
+++ b/depo/.config/systemd/user/taler-local-landing.service
@@ -0,0 +1,11 @@
+[Unit]
+Description=Landing Website of Taler demo.
+
+[Service]
+ExecStart=/home/depolymerization/.local/bin/taler-merchant-demos landing -c /home/depolymerization/.taler/config/taler.conf
+StandardOutput=append:/home/depolymerization/.taler/logs/taler-merchant-demos.log
+StandardError=append:/home/depolymerization/.taler/logs/taler-merchant-demos.log
+EnvironmentFile=/home/depolymerization/.config/systemd/user/taler-local-frontends.env
+
+[Install]
+WantedBy=multi-user.target \ No newline at end of file
diff --git a/depo/.config/systemd/user/taler-local-merchant-backend-token.service b/depo/.config/systemd/user/taler-local-merchant-backend-token.service
new file mode 100644
index 0000000..e96551d
--- /dev/null
+++ b/depo/.config/systemd/user/taler-local-merchant-backend-token.service
@@ -0,0 +1,8 @@
+[Unit]
+Description=Taler Merchant backend with auth token to allow default instance creation.
+
+[Service]
+ExecStart=/home/depolymerization/.local/bin/taler-merchant-httpd -a secret-token:secret -L DEBUG -c /home/depolymerization/.taler/config/taler.conf
+StandardOutput=append:/home/depolymerization/.taler/logs/taler-merchant-httpd.log
+StandardError=append:/home/depolymerization/.taler/logs/taler-merchant-httpd.log
+
diff --git a/depo/.config/systemd/user/taler-local-merchant-backend.service b/depo/.config/systemd/user/taler-local-merchant-backend.service
new file mode 100644
index 0000000..df12179
--- /dev/null
+++ b/depo/.config/systemd/user/taler-local-merchant-backend.service
@@ -0,0 +1,10 @@
+[Unit]
+Description=Taler Merchant backend
+
+[Service]
+ExecStart=/home/depolymerization/.local/bin/taler-merchant-httpd -L DEBUG -c /home/depolymerization/.taler/config/taler.conf
+StandardOutput=append:/home/depolymerization/.taler/logs/taler-merchant-httpd.log
+StandardError=append:/home/depolymerization/.taler/logs/taler-merchant-httpd.log
+
+[Install]
+WantedBy=multi-user.target \ No newline at end of file
diff --git a/depo/.config/systemd/user/taler-local-port-redirect.service b/depo/.config/systemd/user/taler-local-port-redirect.service
new file mode 100644
index 0000000..21fa037
--- /dev/null
+++ b/depo/.config/systemd/user/taler-local-port-redirect.service
@@ -0,0 +1,6 @@
+[Unit]
+Description=Port redirect allowing configuration at X-Forwarded-Host
+[Service]
+ExecStart=true
+StandardOutput=append:/home/depolymerization/.taler/logs/true.log
+StandardError=append:/home/depolymerization/.taler/logs/true.log \ No newline at end of file
diff --git a/depo/.config/systemd/user/wire-gateway.service b/depo/.config/systemd/user/wire-gateway.service
new file mode 100644
index 0000000..1a8b4b8
--- /dev/null
+++ b/depo/.config/systemd/user/wire-gateway.service
@@ -0,0 +1,11 @@
+[Unit]
+Description=Generic wire gateway
+
+[Service]
+ExecStart=/home/depolymerization/.cargo/bin/wire-gateway -c /home/depolymerization/.taler/config/taler.conf
+StandardOutput=append:/home/depolymerization/.taler/logs/wire-gateway.log
+StandardError=append:/home/depolymerization/.taler/logs/wire-gateway.log
+EnvironmentFile=/home/depolymerization/.config/systemd/user/bitcoind.env
+
+[Install]
+WantedBy=multi-user.target
diff --git a/depo/.taler/config/nginx.conf b/depo/.taler/config/nginx.conf
new file mode 100644
index 0000000..12932db
--- /dev/null
+++ b/depo/.taler/config/nginx.conf
@@ -0,0 +1,20 @@
+error_log /home/depolymerization/.taler/logs/nginx.log;
+pid /home/depolymerization/.taler/nginx.pid;
+events {}
+http {
+ access_log /home/depolymerization/.taler/logs/nginx.log;
+ server {
+ listen 8980;
+ location / {
+ return 200 'Hello, I am Nginx - proxying taler-local';
+ }
+ location ~* ^/(?<component>[a-z\-]+)(/(?<taler_uri>.*))? {
+ proxy_pass http://unix:/home/depolymerization/.taler/sockets/$component.sock:/$taler_uri?$args;
+ proxy_redirect off;
+ proxy_set_header X-Forwarded-Prefix /$component;
+ proxy_set_header X-Forwarded-Host localhost:8080;
+ proxy_set_header X-Forwarded-Proto http;
+ client_body_temp_path /tmp/taler-local-nginx;
+ }
+ }
+}
diff --git a/depo/.taler/config/taler.conf b/depo/.taler/config/taler.conf
new file mode 100644
index 0000000..7c2e146
--- /dev/null
+++ b/depo/.taler/config/taler.conf
@@ -0,0 +1,344 @@
+[coin_TESTBTC_ct_00000001]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.00000001
+cipher = RSA
+
+[coin_TESTBTC_ct_00000002]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.00000002
+cipher = RSA
+
+[coin_TESTBTC_ct_00000004]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.00000004
+cipher = RSA
+
+[coin_TESTBTC_ct_00000008]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.00000008
+cipher = RSA
+
+[coin_TESTBTC_ct_00000016]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.00000016
+cipher = RSA
+
+[coin_TESTBTC_ct_00000032]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.00000032
+cipher = RSA
+
+[coin_TESTBTC_ct_00000064]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.00000064
+cipher = RSA
+
+[coin_TESTBTC_ct_00000128]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.00000128
+cipher = RSA
+
+[coin_TESTBTC_ct_00000512]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.00000512
+cipher = RSA
+
+[coin_TESTBTC_ct_00001024]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.00001024
+cipher = RSA
+
+[coin_TESTBTC_ct_00002048]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.00002048
+cipher = RSA
+
+[coin_TESTBTC_ct_00004096]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.00004096
+cipher = RSA
+
+[coin_TESTBTC_ct_00008192]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.00008192
+cipher = RSA
+
+[coin_TESTBTC_ct_00016384]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.00016384
+cipher = RSA
+
+[coin_TESTBTC_ct_00032768]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.00032768
+cipher = RSA
+
+[coin_TESTBTC_ct_00065536]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.00065536
+cipher = RSA
+
+[coin_TESTBTC_ct_00131072]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.00131072
+cipher = RSA
+
+[coin_TESTBTC_ct_00262144]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.00262144
+cipher = RSA
+
+[coin_TESTBTC_ct_00524288]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.00524288
+cipher = RSA
+
+[coin_TESTBTC_ct_01048576]
+rsa_keysize = 2048
+fee_deposit = TESTBTC:0.00000001
+fee_refund = TESTBTC:0.00000001
+fee_refresh = TESTBTC:0.00000001
+fee_withdraw = TESTBTC:0.00000001
+duration_legal = 2 years
+duration_spend = 6 months
+duration_withdraw = 3 months
+value = TESTBTC:0.01048576
+cipher = RSA
+
+
+
+
+[merchant-account-merchant]
+wire_file_mode = 770
+wire_response = ${TALER_DATA_HOME}/merchant/wire/merchant.json
+
+[merchant-exchange-TESTBTC]
+master_key = 86HV6CHWAVQSFR53939ZXXKDRND6MXYY51EEJJCBEY1SE5ANTDY0
+currency = TESTBTC
+exchange_base_url = https://exchange.btc.taler.net/
+
+[frontends]
+backend_apikey = secret-token:secret
+backend = https://backend.btc.taler.net/
+
+[blog]
+http_unixpath_mode = 660
+http_unixpath = /home/depolymerization/.taler/sockets/blog.sock
+http_serve = unix
+serve = http
+
+[landing]
+http_unixpath_mode = 660
+http_unixpath = /home/depolymerization/.taler/sockets/landing.sock
+http_serve = unix
+serve = http
+
+[donations]
+http_unixpath_mode = 660
+http_unixpath = /home/depolymerization/.taler/sockets/donations.sock
+http_serve = unix
+serve = http
+
+
+[taler]
+CURRENCY_ROUND_UNIT = TESTBTC:0.00000001
+CURRENCY = TESTBTC
+
+[taler-exchange-secmod-rsa]
+SM_PRIV_KEY = ${TALER_DATA_HOME}/taler-exchange-secmod-rsa/secmod-private-key
+UNIXPATH = /home/depolymerization/.taler/sockets/exchange-secmod-rsa.sock
+
+[taler-exchange-secmod-eddsa]
+SM_PRIV_KEY = ${TALER_DATA_HOME}/taler-exchange-secmod-eddsa/secmod-private-key
+UNIXPATH = /home/depolymerization/.taler/sockets/exchange-secmod-eddsa.sock
+
+[taler-exchange-secmod-cs]
+SM_PRIV_KEY = ${TALER_DATA_HOME}/taler-exchange-secmod-cs/secmod-private-key
+UNIXPATH = /home/depolymerization/.taler/sockets/exchange-secmod-cs.sock
+
+[merchantdb-postgres]
+CONFIG = postgres:///depo-taler
+
+[merchant]
+default_max_deposit_fee = TESTBTC:0.05
+default_max_wire_fee = TESTBTC:0.0078125
+WIRE_TRANSFER_DELAY = 1000 s
+UNIXPATH = /home/depolymerization/.taler/sockets/merchant.sock
+SERVE = unix
+
+[exchangedb-postgres]
+db_conn_str = postgres:///depo-taler
+CONFIG = postgres:///depo-taler
+
+[exchange]
+master_public_key = 86HV6CHWAVQSFR53939ZXXKDRND6MXYY51EEJJCBEY1SE5ANTDY0
+PRIVACY_DIR = $HOME/.local/share/taler-exchange/pp
+TERMS_DIR = $HOME/.local/share/taler-exchange/tos
+BASE_URL = http://localhost:8980/exchange/
+UNIXPATH = /home/depolymerization/.taler/sockets/exchange.sock
+SERVE = unix
+
+[auditordb-postgres]
+db_conn_str = postgres:///depo-taler
+CONFIG = postgres:///depo-taler
+
+[auditor]
+tiny_amount = TESTBTC:0.00000001
+auditor_url = http://localhost:8980/auditor
+base_url = http://localhost:8980/auditor
+UNIXPATH = /home/depolymerization/.taler/sockets/auditor.sock
+SERVE = unix
+
+[PATHS]
+TALER_RUNTIME_DIR = /home/depolymerization/.taler/runtime
+TALER_DATA_HOME = /home/depolymerization/.taler/data
+
+
+[depolymerizer-bitcoin]
+UNIXPATH = /home/depolymerization/.taler/sockets/btc-wire.sock
+DB_URL = postgres://%2Fvar%2Frun%2Fpostgresql/btc-wire?user=depolymerization
+AUTH_METHOD = none
+PAYTO = payto://bitcoin/tb1qhxrhccqexg0dv4nltgkuw4fg2ce7muplmjsn0v
+
+
+[exchange-accountcredentials-1]
+wire_gateway_url = http://localhost:8980/btc-wire/
+wire_gateway_auth_method = none
+
+[exchange-account-1]
+PAYTO_URI = payto://bitcoin/tb1qhxrhccqexg0dv4nltgkuw4fg2ce7muplmjsn0v
+ENABLE_CREDIT = YES
+ENABLE_DEBIT = YES
diff --git a/depo/.taler/data/.exists b/depo/.taler/data/.exists
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/depo/.taler/data/.exists
diff --git a/depo/.taler/sockets/.exists b/depo/.taler/sockets/.exists
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/depo/.taler/sockets/.exists
diff --git a/docker/compile-and-check/README b/docker/compile-and-check/README
new file mode 100644
index 0000000..e70c571
--- /dev/null
+++ b/docker/compile-and-check/README
@@ -0,0 +1,26 @@
+This Docker image compiles and checks all the Taler code
+along its entrypoint. In case of build failures, it optionally
+offers a debug shell to inspect the running container.
+
+Build Toolchain Image
+---------------------
+
+To build the image with the dependencies needed to build Taler, run ./build.sh
+
+Run
+---
+
+To start the build process, run
+
+$ ./build.sh
+
+Interactive Debugging
+---------------------
+
+To interactively debug the build process, run
+
+$ ./interactive.sh
+
+Then inside the container, run the entry point manually
+
+[container]$ /home/talercheck/compile_and_check.sh
diff --git a/docker/compile-and-check/base/Dockerfile b/docker/compile-and-check/base/Dockerfile
new file mode 100644
index 0000000..8ce899b
--- /dev/null
+++ b/docker/compile-and-check/base/Dockerfile
@@ -0,0 +1,40 @@
+FROM debian:bookworm
+
+
+# This image provides base dependencies needed to compile and run
+# GNU Taler components
+
+RUN apt-get update
+
+# Explanations for weirder dependencies:
+# - texlive-* is required by the exchange test cases
+RUN apt-get install -y autoconf autopoint libtool texinfo \
+ libgcrypt-dev libidn11-dev zlib1g-dev libunistring-dev \
+ libjansson-dev python3-pip git recutils libsqlite3-dev \
+ libpq-dev postgresql libcurl4-openssl-dev libsodium-dev git \
+ libqrencode-dev zip jq npm openjdk-17-jre nginx procps \
+ curl python3-jinja2 wget curl python3-sphinx socat apache2-utils \
+ python3-sphinx-rtd-theme sqlite3 vim emacs faketime \
+ texlive-latex-base texlive-latex-extra
+
+RUN useradd -m talercheck
+
+USER talercheck
+WORKDIR /home/talercheck
+
+# pnpm likes to have the tmp directory
+RUN mkdir -p tmp
+
+# Make pip3 happy by running as a non-root user
+# and setting PATH correctly
+ENV PATH="/home/talercheck/.local/bin:$PATH"
+
+RUN pip3 install --break-system-packages requests click poetry uwsgi htmlark
+
+RUN npm config set prefix $HOME/.npm-global
+RUN npm install -g pnpm
+
+COPY ./base/util.sh ./base/compile_and_check.sh /home/talercheck/
+COPY ./config/tags.sh /home/talercheck/tags.sh
+
+ENTRYPOINT /home/talercheck/compile_and_check.sh
diff --git a/docker/compile-and-check/base/compile_and_check.sh b/docker/compile-and-check/base/compile_and_check.sh
new file mode 100755
index 0000000..59e16cb
--- /dev/null
+++ b/docker/compile-and-check/base/compile_and_check.sh
@@ -0,0 +1,94 @@
+#!/bin/bash
+
+set -eu
+set -x
+
+start_debug_shell () {
+ if test "${SANDCASTLE_DEBUG_SHELL:-no}" = yes; then
+ bash --init-file <(echo \
+ "echo 'Taler build failed, press enter to get the debug shell..'; read || exit $?"
+ ) -i
+ fi
+}
+trap start_debug_shell ERR
+
+source ~/util.sh
+
+echo -n Exporting the tags environment..
+set -a
+. tags.sh
+set +a
+echo DONE
+echo Exported tags:
+{ env | grep TAG_; } || echo NONE
+
+export LD_LIBRARY_PATH=$HOME/local/lib
+
+num_processors=$(getconf _NPROCESSORS_ONLN)
+JFLAG="-j$num_processors"
+PREFIX=$HOME/local
+
+git clone --depth=1 git://git.gnunet.org/libmicrohttpd --branch ${TAG_LIBMHD:-master}
+git clone --depth=1 git://git.gnunet.org/gnunet --branch ${TAG_GNUNET:-master}
+git clone --depth=1 git://git.taler.net/exchange --branch ${TAG_EXCHANGE:-master}
+git clone --depth=1 git://git.taler.net/merchant --branch ${TAG_MERCHANT:-master}
+git clone --depth=1 git://git.taler.net/libeufin --branch ${TAG_LIBEUFIN:-master}
+git clone --depth=1 git://git.taler.net/taler-merchant-demos --branch ${TAG_MERCHANT_DEMOS:-master}
+git clone --depth=1 git://git.taler.net/wallet-core --branch ${TAG_WALLET:-master}
+git clone --depth=1 git://git.taler.net/sync --branch ${TAG_SYNC:-master}
+
+cd ~/libmicrohttpd
+./bootstrap
+./configure --disable-doc --prefix=$PREFIX
+make $JFLAG install
+
+cd ~/gnunet
+./bootstrap
+./configure --enable-logging=verbose --disable-documentation --prefix=$PREFIX --with-microhttpd=$PREFIX
+make $JFLAG install
+
+cd ~/exchange
+./bootstrap
+./configure CFLAGS="-ggdb -O0" --enable-logging=verbose --disable-doc --prefix=$PREFIX --with-gnunet=$PREFIX
+make $JFLAG install
+
+cd ~/merchant
+./bootstrap
+./configure CFLAGS="-ggdb -O0" --enable-logging=verbose --disable-doc --prefix=$PREFIX --with-gnunet=$PREFIX --with-exchange=$PREFIX
+make $JFLAG install
+
+cd ~/libeufin
+./bootstrap
+./configure --prefix=$PREFIX
+make install
+
+cd ~/taler-merchant-demos
+./bootstrap
+./configure --destination=local
+make install
+
+cd ~/sync
+./bootstrap
+./configure CFLAGS="-ggdb -O0" --enable-logging=verbose --disable-doc --prefix=$PREFIX --with-gnunet=$PREFIX --with-exchange=$PREFIX
+make $JFLAG install
+
+cd ~/wallet-core
+./bootstrap
+cd ~/wallet-core/packages/taler-wallet-cli
+./configure --prefix=$HOME/local
+make install
+cd ~/wallet-core/packages/taler-harness
+./configure --prefix=$HOME/local
+make install
+
+db_start
+createdb talercheck
+
+cd ~/exchange
+make check
+
+cd ~/merchant
+make check
+
+cd ~
+taler-harness run-integrationtests
diff --git a/docker/compile-and-check/base/util.sh b/docker/compile-and-check/base/util.sh
new file mode 100644
index 0000000..9ff8984
--- /dev/null
+++ b/docker/compile-and-check/base/util.sh
@@ -0,0 +1,34 @@
+export PATH=$HOME/.npm-global/bin:$PATH
+export PATH=$HOME/local/bin:$PATH
+export PATH=/lib/postgresql/15/bin/:$PATH
+
+export DBDIR=$HOME/talerdb
+export LOGDIR=$HOME/logs
+export TMPDIR=$HOME/tmp
+export SOCKDIR=$HOME/sockets
+export PGHOST=$SOCKDIR
+
+function db_start() {
+ mkdir -p $SOCKDIR
+ mkdir -p $LOGDIR
+ mkdir -p $TMPDIR
+ initdb --no-sync --auth=trust -D $DBDIR # > /logs/postgres-dbinit.log 2> /logs/postgres-dbinit.er
+ echo "Launching Postgres"
+ cat - > $DBDIR/postgresql.conf <<EOF
+unix_socket_directories='$SOCKDIR'
+fsync=off
+max_wal_senders=0
+synchronous_commit=off
+wal_level=minimal
+listen_addresses=''
+EOF
+ cat $DBDIR/pg_hba.conf | grep -v host > $DBDIR/pg_hba.conf.new
+ mv $DBDIR/pg_hba.conf.new $DBDIR/pg_hba.conf
+ pg_ctl -D $DBDIR -l /dev/null start > $LOGDIR/postgres-start.log 2> $LOGDIR/postgres-start.err
+ echo " DONE"
+}
+
+db_destroy() {
+ pg_ctl -D $DBDIR -l /dev/null stop
+ rm -rf $DBDIR
+}
diff --git a/docker/compile-and-check/build.sh b/docker/compile-and-check/build.sh
new file mode 100755
index 0000000..c5164ed
--- /dev/null
+++ b/docker/compile-and-check/build.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+# Build the image and tag it
+
+mydir="$(dirname -- "$0")"
+cd $mydir
+
+exec docker build -f base/Dockerfile -t sandcastle-checker .
diff --git a/docker/compile-and-check/config/tags.sh b/docker/compile-and-check/config/tags.sh
new file mode 100644
index 0000000..ce2ed9c
--- /dev/null
+++ b/docker/compile-and-check/config/tags.sh
@@ -0,0 +1,7 @@
+TAG_LIBMHD=v0.9.75
+TAG_GNUNET=v0.19.3
+TAG_EXCHANGE=master
+TAG_MERCHANT=master
+TAG_LIBEUFIN=master
+TAG_SYNC=master
+TAG_WALLET=master
diff --git a/docker/compile-and-check/interactive.sh b/docker/compile-and-check/interactive.sh
new file mode 100755
index 0000000..94fb404
--- /dev/null
+++ b/docker/compile-and-check/interactive.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+# Run the image in interactive mode
+
+mydir="$(dirname -- "$0")"
+cd $mydir
+
+exec docker run -it --entrypoint /bin/bash sandcastle-checker
diff --git a/docker/compile-and-check/run.sh b/docker/compile-and-check/run.sh
new file mode 100755
index 0000000..e5dc0fe
--- /dev/null
+++ b/docker/compile-and-check/run.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+# Run compilation and tests in the image
+
+mydir="$(dirname -- "$0")"
+cd $mydir
+
+exec docker run sandcastle-checker
diff --git a/docker/docs-build/Dockerfile b/docker/docs-build/Dockerfile
new file mode 100644
index 0000000..5bcddce
--- /dev/null
+++ b/docker/docs-build/Dockerfile
@@ -0,0 +1,27 @@
+# Stage 1
+
+FROM debian:bookworm-slim AS repo
+
+RUN apt update \
+ && apt install --no-install-recommends -y ca-certificates git
+
+RUN git clone --branch=master --depth=1 https://git.taler.net/docs.git
+
+# Final image
+
+FROM sphinxdoc/sphinx-latexpdf
+
+# Copy content from one container to the other
+
+WORKDIR /docs
+
+COPY --from=repo /docs .
+
+# Install dependencies
+
+RUN python3 -m pip install --no-cache-dir recommonmark
+
+# Compile /docs/*
+
+ENTRYPOINT ["make", "BUILDDIR=/output", "-C", "/docs", "html", "latexpdf"]
+
diff --git a/docker/docs-build/build.sh b/docker/docs-build/build.sh
new file mode 100755
index 0000000..0289932
--- /dev/null
+++ b/docker/docs-build/build.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+set -eu
+
+docker build . -t taler-docs-image
diff --git a/docker/docs-build/run.sh b/docker/docs-build/run.sh
new file mode 100755
index 0000000..9f2eba6
--- /dev/null
+++ b/docker/docs-build/run.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+set -eu
+
+# Execute the dockerfile, and leave output in local system
+
+docker run -v /home/docbuilder/docs:/output taler-docs-image
+
+# COPY data from volume to local
+
+cp -r /home/docbuilder/docs/html/* /home/docbuilder/build/docs/html
+cp -r /home/docbuilder/docs/latex/*.pdf /home/docbuilder/build/docs/pdf
+
+
diff --git a/docker/dpkg-build/Dockerfile b/docker/dpkg-build/Dockerfile
new file mode 100644
index 0000000..f745c0c
--- /dev/null
+++ b/docker/dpkg-build/Dockerfile
@@ -0,0 +1,32 @@
+FROM docker.io/debian:bullseye
+
+RUN apt-get update -y
+RUN apt-get upgrade -y
+
+# Install essential build dependencies
+RUN apt-get -y install build-essential devscripts debhelper equivs sudo
+
+# Allow everyone to sudo
+RUN echo 'ALL ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers
+
+RUN useradd -m builduser
+
+USER builduser
+
+WORKDIR /home/builduser
+
+# Clone our repos
+RUN git clone --depth=1 git://git.gnunet.org/gnunet.git
+RUN git clone --depth=1 git://git.taler.net/exchange.git
+RUN git clone --depth=1 git://git.taler.net/merchant.git
+
+#
+## Build GNUnet
+#
+
+# Install build dependencies
+RUN cd ~/gnunet && sudo mk-build-deps -t "apt-get -o Debug::pkgProblemResolver=yes --no-install-recommends -y" --install debian/control
+
+RUN cd ~/gnunet && ./bootstrap
+RUN cd ~/gnunet && debuild --no-sign -i -B
+
diff --git a/dpkg-build/build-ubuntu.sh b/docker/dpkg-build/build-ubuntu.sh
index c225b4a..2ce4855 100644..100755
--- a/dpkg-build/build-ubuntu.sh
+++ b/docker/dpkg-build/build-ubuntu.sh
@@ -1,3 +1,4 @@
+#!/bin/sh
# Install essential build dependencies
sudo apt install build-essential devscripts debhelper equivs
diff --git a/docker/sites-build/Dockerfile b/docker/sites-build/Dockerfile
new file mode 100644
index 0000000..8c541b5
--- /dev/null
+++ b/docker/sites-build/Dockerfile
@@ -0,0 +1,48 @@
+FROM debian:bookworm-slim
+
+# Install dependencies
+
+RUN apt update
+
+RUN apt install --no-install-recommends -y ca-certificates git make python3-pip gettext
+
+RUN pip install Jinja2 ruamel.yaml Babel beautifulsoup4 lxml
+
+# User and folder
+
+RUN useradd -m taler-websites
+
+USER taler-websites
+
+WORKDIR /home/taler-websites
+
+# Get the bash files which do the sites' compilation - step 1
+
+RUN git clone https://git.taler.net/deployment.git
+
+# Get the compilation Make files - step 2
+
+RUN git clone https://git.taler.net/www.git
+
+# Get twister code
+
+RUN git clone https://git.taler.net/twister.git
+
+# Get buywith code
+
+RUN git clone https://git.taler.net/buywith.git
+
+# Create needed directories requested by .sh files of step 1
+
+RUN mkdir buywith.taler.net stage.taler.net twister.taler.net stamps
+
+# Copy needed files to stamps directory
+
+RUN cp ~/deployment/taler-sitesbuild/invalidate.sh ~/stamps \
+ && cp ~/deployment/taler-sitesbuild/Makefile ~/stamps
+
+WORKDIR deployment/buildbot
+
+# Compile Taler websites
+
+ENTRYPOINT ["./build-sites.sh"]
diff --git a/docker/sites-build/build.sh b/docker/sites-build/build.sh
new file mode 100755
index 0000000..41e8dfd
--- /dev/null
+++ b/docker/sites-build/build.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+docker build . -t docker_image_taler_websites \ No newline at end of file
diff --git a/docker/sites-build/run.sh b/docker/sites-build/run.sh
new file mode 100755
index 0000000..37d1a79
--- /dev/null
+++ b/docker/sites-build/run.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+# Execute "Docker: sites-builder-image"
+
+
+docker run -v $HOME/taler-websites:/home/taler-websites docker_image_taler_websites
diff --git a/envcfg.py.template b/envcfg.py.template
deleted file mode 100644
index 3e12ea4..0000000
--- a/envcfg.py.template
+++ /dev/null
@@ -1,16 +0,0 @@
-# Name of the environment (test, demo, int, coverage, demo-checker, auditor-reporter, ...)
-env = "..."
-
-tag = "master"
-
-tag_gnunet = tag
-tag_libmicrohttpd = tag
-tag_exchange = tag
-tag_merchant = tag
-tag_bank = tag
-tag_twister = tag
-tag_landing = tag
-tag_backoffice = tag
-tag_taler_merchant_demos = tag
-tag_sync = tag
-tag_wallet_core = tag
diff --git a/envcfg/envcfg-demo-2019-11-02-01.py b/envcfg/envcfg-demo-2019-11-02-01.py
deleted file mode 100644
index e02becf..0000000
--- a/envcfg/envcfg-demo-2019-11-02-01.py
+++ /dev/null
@@ -1,15 +0,0 @@
-env = "demo"
-
-tag = "demo-2019-11-02-00"
-
-tag_gnunet = "v0.11.8"
-tag_libmicrohttpd = "v0.9.68"
-tag_exchange = tag
-tag_merchant = tag
-tag_bank = tag
-tag_twister = tag
-tag_landing = tag
-tag_donations = tag
-tag_blog = tag
-tag_survey = tag
-tag_backoffice = tag
diff --git a/envcfg/envcfg-demo-2019-12-03-01.py b/envcfg/envcfg-demo-2019-12-03-01.py
deleted file mode 100644
index 4c77213..0000000
--- a/envcfg/envcfg-demo-2019-12-03-01.py
+++ /dev/null
@@ -1,15 +0,0 @@
-env = "demo"
-
-tag = "demo-2019-12-03-01"
-
-tag_gnunet = "taler-new-crypto"
-tag_libmicrohttpd = "v0.9.68"
-tag_exchange = tag
-tag_merchant = tag
-tag_bank = tag
-tag_twister = tag
-tag_landing = "demo-2019-08-31-00"
-tag_donations = tag
-tag_blog = tag
-tag_survey = tag
-tag_backoffice = tag
diff --git a/envcfg/envcfg-demo-2019-12-09-01.py b/envcfg/envcfg-demo-2019-12-09-01.py
deleted file mode 100644
index faa85a3..0000000
--- a/envcfg/envcfg-demo-2019-12-09-01.py
+++ /dev/null
@@ -1,15 +0,0 @@
-env = "demo"
-
-tag = "demo-2019-12-03-01"
-
-tag_gnunet = "taler-new-crypto"
-tag_libmicrohttpd = "v0.9.68"
-tag_exchange = "demo-2019-12-09-01"
-tag_merchant = tag
-tag_bank = tag
-tag_twister = tag
-tag_landing = "demo-2019-08-31-00"
-tag_donations = tag
-tag_blog = tag
-tag_survey = tag
-tag_backoffice = tag
diff --git a/envcfg/envcfg-demo-2020-11-14.py b/envcfg/envcfg-demo-2020-11-14.py
deleted file mode 100644
index 189d508..0000000
--- a/envcfg/envcfg-demo-2020-11-14.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Name of the environment
-# (test, demo, int, coverage, demo-checker, auditor-reporter, ...)
-env = "demo"
-
-# Such tag is only used to make the builder happy.
-unused_codebase_tag = "master"
-
-tag_gnunet = "v0.14.0"
-tag_libmicrohttpd = "ad8a3e3fde50de45d075dbb6971ed52003200ee2"
-tag_exchange = "v0.8.1"
-tag_merchant = "v0.8.0"
-tag_bank = "v0.8.1"
-tag_sync = "v0.8.1"
-tag_taler_merchant_demos = "71193537361e0f230214137f7f5211117d35277e"
-tag_wallet_core = "cdf5cc583cd7fc938f38137da25aaee2aeaf28a9"
-
-# The following repositories do not really take part
-# in any demo, for now. Some of them are old, some are
-# not really needed (like Twister, for example.)
-tag_backoffice = unused_codebase_tag
-tag_twister = unused_codebase_tag
-
diff --git a/envcfg/envcfg-demo-2021-08-18.py b/envcfg/envcfg-demo-2021-08-18.py
deleted file mode 100644
index a64113d..0000000
--- a/envcfg/envcfg-demo-2021-08-18.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# Name of the environment
-# (test, demo, int, coverage, demo-checker, auditor-reporter, ...)
-env = "demo"
-
-tag_gnunet = "v0.15.0"
-tag_libmicrohttpd = "3db35a4ca6192cd26770ee69f1c48e353535b70d"
-# previous at exchange: "35b232642bc831e8c9759f7ae6180bb2deabed7e"
-tag_exchange = "v0.8.3"
-tag_merchant = "v0.8.2"
-tag_bank = "v0.8.2"
-tag_sync = "v0.8.2"
-tag_taler_merchant_demos = "1d66634cd8f4b5c089be58d62615fc48d3f7163b"
-tag_wallet_core = "daf9dc507ec16c34cecb7f423be8935b6816eede"
-tag_twister = "v0.8.1"
-tag_anastasis = "v0.0.0"
diff --git a/envcfg/envcfg.py.template b/envcfg/envcfg.py.template
deleted file mode 100644
index 7153874..0000000
--- a/envcfg/envcfg.py.template
+++ /dev/null
@@ -1,17 +0,0 @@
-# Name of the environment (test, demo, int, ...)
-env = "..."
-
-tag = "master"
-
-tag_gnunet = tag
-tag_libmicrohttpd = tag
-tag_exchange = tag
-tag_merchant = tag
-tag_bank = tag
-tag_twister = tag
-tag_landing = tag
-tag_donations = tag
-tag_blog = tag
-tag_survey = tag
-tag_backoffice = tag
-tag_sync = tag
diff --git a/envcfg/talerconf/euro.taler.conf b/envcfg/talerconf/euro.taler.conf
deleted file mode 100644
index ad3ee78..0000000
--- a/envcfg/talerconf/euro.taler.conf
+++ /dev/null
@@ -1,333 +0,0 @@
-[paths]
-TALER_DEPLOYMENT_DATA = ${HOME}/taler-data
-
-[taler]
-CURRENCY = EUR
-
-[bank]
-serve = uwsgi
-uwsgi_serve = unix
-uwsgi_unixpath = $HOME/sockets/bank.uwsgi
-uwsgi_unixpath_mode = 660
-database = postgres:///talereuro
-max_debt = EUR:20.0
-max_debt_bank = EUR:0.0
-suggested_exchange = https://exchange.euro.taler.net/
-suggested_exchange_payto = payto://x-taler-bank/bank.euro.taler.net/2
-
-[bank-admin]
-uwsgi_serve = unix
-uwsgi_unixpath = $HOME/sockets/bank-admin.uwsgi
-uwsgi_unixpath_mode = 660
-
-[donations]
-uwsgi_serve = unix
-uwsgi_unixpath = $HOME/sockets/donations.uwsgi
-uwsgi_unixpath_mode = 660
-
-[survey]
-uwsgi_serve = unix
-uwsgi_unixpath = $HOME/sockets/survey.uwsgi
-uwsgi_unixpath_mode = 660
-
-[blog]
-uwsgi_serve = unix
-uwsgi_unixpath = $HOME/sockets/shop.uwsgi
-uwsgi_unixpath_mode = 660
-instance = FSF
-
-[backoffice-all]
-backend = https://backend.euro.taler.net/
-uwsgi_serve = unix
-uwsgi_unixpath_mode = 660
-uwsgi_unixpath = $HOME/sockets/backoffice.uwsgi
-instances = FSF default Tor
-
-[merchant]
-wireformat = test
-serve = unix
-unixpath = $HOME/sockets/merchant.http
-wire_transfer_delay = 0 s
-default_max_wire_fee = EUR:0.01
-default_max_deposit_fee = EUR:0.05
-
-[merchantdb-postgres]
-config = postgres:///talereuro
-
-[merchant-exchange-test]
-url = https://exchange.euro.taler.net/
-master_key = J3QPEAEDKWZ22VQQTXE5EW1MAC6RFRWC7DHFEC4M74V8NR2109TG
-
-[frontends]
-backend_apikey = sandbox
-backend = https://backend.euro.taler.net/
-
-[exchange-EUR]
-master_key = J3QPEAEDKWZ22VQQTXE5EW1MAC6RFRWC7DHFEC4M74V8NR2109TG
-currency = EUR
-base_url = https://exchange.euro.taler.net/
-
-[auditor]
-auditor_priv_file = ${TALER_DEPLOYMENT_DATA}/auditor/offline-keys/auditor.priv
-serve = unix
-auditor_url = https://auditor.euro.taler.net/service/
-unixpath = $HOME/sockets/auditor.http
-reports = ${TALER_DEPLOYMENT_DATA}/auditor/reports
-
-[exchange]
-base_url = https://exchange.euro.taler.net/
-serve = unix
-unixpath = $HOME/sockets/exchange.http
-master_public_key = J3QPEAEDKWZ22VQQTXE5EW1MAC6RFRWC7DHFEC4M74V8NR2109TG
-master_priv_file = ${TALER_DEPLOYMENT_DATA}/exchange/offline-keys/master.priv
-keydir = ${TALER_DEPLOYMENT_DATA}/exchange/live-keys/
-
-[exchangedb]
-auditor_base_dir = ${TALER_DEPLOYMENT_DATA}/exchange/auditors/
-wirefee_base_dir = ${TALER_DEPLOYMENT_DATA}/exchange/wirefees/
-auditor_inputs = ${TALER_DEPLOYMENT_DATA}/exchange/auditor-inputs/
-
-[exchangedb-postgres]
-db_conn_str = postgres:///talereuro
-config = postgres:///talereuro
-
-[auditordb-postgres]
-db_conn_str = postgres:///talereuro
-config = postgres:///talereuro
-
-[account-1]
-url = payto://x-taler-bank/bank.euro.taler.net/2
-wire_response = ${TALER_DEPLOYMENT_DATA}/exchange/wire/test.json
-plugin = taler_bank
-taler_bank_auth_method = basic
-username = Exchange
-password = x
-enable_debit = yes
-enable_credit = yes
-
-[fees-x-taler-bank]
-wire-fee-2018 = EUR:0.02
-wire-fee-2019 = EUR:0.03
-wire-fee-2020 = EUR:0.04
-wire-fee-2021 = EUR:0.04
-wire-fee-2022 = EUR:0.05
-wire-fee-2023 = EUR:0.06
-wire-fee-2024 = EUR:0.07
-wire-fee-2025 = EUR:0.08
-closing-fee-2018 = EUR:0.01
-closing-fee-2019 = EUR:0.01
-closing-fee-2020 = EUR:0.01
-closing-fee-2021 = EUR:0.01
-closing-fee-2022 = EUR:0.01
-closing-fee-2023 = EUR:0.01
-closing-fee-2024 = EUR:0.01
-closing-fee-2025 = EUR:0.01
-
-[exchange_keys]
-signkey_duration = 18 weeks
-legal_duration = 2 years
-lookahead_sign = 32 weeks 1 day
-lookahead_provide = 4 weeks 1 day
-
-[instance-FSF]
-keyfile = ${TALER_DEPLOYMENT_DATA}/merchant/fsf.priv
-name = Free Software Foundation
-
-[merchant-location-FSF-address]
-street = 51 Franklin Street, Fifth Floor.
-city = Boston
-country = USA
-
-[instance-Tor]
-keyfile = ${TALER_DEPLOYMENT_DATA}/merchant/tor.priv
-name = The Tor Project
-
-[instance-GNUnet]
-keyfile = ${TALER_DEPLOYMENT_DATA}/merchant/gnunet.priv
-name = GNUnet Project
-
-[instance-Taler]
-keyfile = ${TALER_DEPLOYMENT_DATA}/merchant/taler.priv
-name = Taler
-
-[instance-default]
-keyfile = ${TALER_DEPLOYMENT_DATA}/merchant/default.priv
-name = Kudos Inc.
-tip_reserve_priv_filename = ${TALER_DEPLOYMENT_DATA}/merchant/default-tip.priv
-tip_exchange = https://exchange.euro.taler.net/
-
-[merchant-location-default-address]
-country = Kudosland
-
-[instance-Tutorial]
-keyfile = ${TALER_DEPLOYMENT_DATA}/merchant/tutorial.priv
-name = Tutorial
-
-[account-merchant]
-url = payto://x-taler-bank/bank.euro.taler.net/3
-plugin = taler_bank
-taler_bank_auth_method = basic
-username = user
-password = pass
-wire_response = ${TALER_DEPLOYMENT_DATA}/merchant/wire/merchant.json
-wire_file_mode = 770
-HONOR_default = YES
-HONOR_Tor = YES
-HONOR_GNUnet = YES
-HONOR_Taler = YES
-HONOR_FSF = YES
-HONOR_Tutorial = YES
-
-[coin_EUR_decimilli_25]
-value = EUR:0.0025
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.0001
-fee_refresh = EUR:0.0001
-fee_refund = EUR:0.0001
-fee_deposit = EUR:0.0001
-rsa_keysize = 2048
-
-[coin_EUR_milli_5]
-value = EUR:0.005
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.0001
-fee_refresh = EUR:0.0001
-fee_refund = EUR:0.0001
-fee_deposit = EUR:0.0001
-rsa_keysize = 2048
-
-[coin_EUR_cent_1]
-value = EUR:0.01
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.001
-fee_refresh = EUR:0.001
-fee_refund = EUR:0.001
-fee_deposit = EUR:0.001
-rsa_keysize = 2048
-
-[coin_EUR_cent_2]
-value = EUR:0.02
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.001
-fee_refresh = EUR:0.001
-fee_refund = EUR:0.001
-fee_deposit = EUR:0.001
-rsa_keysize = 2048
-
-[coin_EUR_cent_4]
-value = EUR:0.04
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.001
-fee_refresh = EUR:0.001
-fee_refund = EUR:0.001
-fee_deposit = EUR:0.001
-rsa_keysize = 2048
-
-[coin_EUR_cent_8]
-value = EUR:0.08
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.001
-fee_refresh = EUR:0.001
-fee_refund = EUR:0.001
-fee_deposit = EUR:0.001
-rsa_keysize = 2048
-
-[coin_EUR_cent_16]
-value = EUR:0.16
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.001
-fee_refresh = EUR:0.001
-fee_refund = EUR:0.001
-fee_deposit = EUR:0.001
-rsa_keysize = 2048
-
-[coin_EUR_cent_32]
-value = EUR:0.32
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.01
-fee_refresh = EUR:0.01
-fee_refund = EUR:0.01
-fee_deposit = EUR:0.01
-rsa_keysize = 2048
-
-[coin_EUR_cent_64]
-value = EUR:0.64
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.03
-fee_refresh = EUR:0.03
-fee_refund = EUR:0.03
-fee_deposit = EUR:0.03
-rsa_keysize = 2048
-
-[coin_EUR_1_cent_28]
-value = EUR:1.28
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.03
-fee_refresh = EUR:0.03
-fee_refund = EUR:0.03
-fee_deposit = EUR:0.03
-rsa_keysize = 2048
-
-[coin_EUR_2_cent_56]
-value = EUR:2.56
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.03
-fee_refresh = EUR:0.03
-fee_refund = EUR:0.03
-fee_deposit = EUR:0.03
-rsa_keysize = 2048
-
-[coin_EUR_5_cent_12]
-value = EUR:5.12
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.05
-fee_refresh = EUR:0.05
-fee_refund = EUR:0.05
-fee_deposit = EUR:0.05
-rsa_keysize = 2048
-
-[coin_EUR_10_cent_24]
-value = EUR:10.24
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.05
-fee_refresh = EUR:0.05
-fee_refund = EUR:0.05
-fee_deposit = EUR:0.05
-rsa_keysize = 2048
-
-[coin_EUR_20_cent_48]
-value = EUR:20.48
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.10
-fee_refresh = EUR:0.10
-fee_refund = EUR:0.10
-fee_deposit = EUR:0.10
-rsa_keysize = 2048
diff --git a/gnunet.conf b/gnunet.conf
deleted file mode 100644
index 00cd536..0000000
--- a/gnunet.conf
+++ /dev/null
@@ -1 +0,0 @@
-# Empty configuration file used for gnunet-arm / taler-deployment-arm.
diff --git a/head.taler.net/entr.sh b/head.taler.net/entr.sh
new file mode 100755
index 0000000..b44d826
--- /dev/null
+++ b/head.taler.net/entr.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -xo
+
+while true ; do
+ echo "${HOME}/incoming" | entr -n -d "${HOME}"/deployment/head.taler.net/update-head-deployment.sh ; sleep 1 || true
+done
diff --git a/head.taler.net/rsyncd.conf b/head.taler.net/rsyncd.conf
new file mode 100644
index 0000000..613dea6
--- /dev/null
+++ b/head.taler.net/rsyncd.conf
@@ -0,0 +1,13 @@
+max connections = 4
+log file = /home/head/.local/var/log/rsync.log
+lock file = /home/head/.local/var/run/rsyncd.lock
+timeout = 300
+use chroot = no
+
+[incoming]
+ comment = Inbox for head.taler.net images
+ path = /home/head/incoming
+ read only = no
+ write only = no
+ #uid = head
+ #gid = head
diff --git a/head.taler.net/rsyncd.service b/head.taler.net/rsyncd.service
new file mode 100644
index 0000000..cf3791b
--- /dev/null
+++ b/head.taler.net/rsyncd.service
@@ -0,0 +1,12 @@
+[Unit]
+Description=fast remote file copy program daemon
+ConditionPathExists=/home/head/.config/rsyncd.conf
+Documentation=man:rsync(1) man:rsyncd.conf(5)
+
+[Service]
+ExecStart=/usr/bin/rsync --daemon --no-detach --address 127.0.0.1 --port 424240 --config=/home/head/.config/rsyncd.conf
+RestartSec=1
+Restart=on-failure
+
+[Install]
+WantedBy=default.target
diff --git a/head.taler.net/update-head-deployment.sh b/head.taler.net/update-head-deployment.sh
new file mode 100755
index 0000000..09f7fd2
--- /dev/null
+++ b/head.taler.net/update-head-deployment.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -ex
+
+pushd "${HOME}/sandcastle-ng"
+git pull
+popd
+
+podman load -i "${HOME}/incoming/taler-base-all-head.tar"
+podman tag taler-base-all-head:latest taler-base-all:latest
+rm -f "${HOME}/incoming/taler-base-all-head.tar"
+
+exec systemctl --user restart container-taler-sandcastle-head.service
diff --git a/mypy/mypy.ini b/mypy/mypy.ini
deleted file mode 100644
index 924c128..0000000
--- a/mypy/mypy.ini
+++ /dev/null
@@ -1,3 +0,0 @@
-[mypy]
-ignore_missing_imports = True
-python_version = 3.5
diff --git a/netjail/netjail-init.sh b/netjail/netjail-init.sh
index 7fd0dd5..9b28a37 100755
--- a/netjail/netjail-init.sh
+++ b/netjail/netjail-init.sh
@@ -17,13 +17,17 @@ export PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
BRIDGE=builderbridge
-brctl addbr $BRIDGE
-brctl stp $BRIDGE off
+# This is deprecated
+# brctl addbr $BRIDGE
+# brctl stp $BRIDGE off
+ip link add dev $BRIDGE type bridge stp_state 0
+
ip link set dev $BRIDGE up
# Connect bridge to host network
ip link add tap0 type veth peer name br-tap0
-brctl addif $BRIDGE br-tap0
+#brctl addif $BRIDGE br-tap0
+ip link set dev br-tap0 master $BRIDGE
ip link set dev tap0 up
ip link set dev br-tap0 up
diff --git a/netjail/netjail.sh b/netjail/netjail.sh
index 136390b..e445245 100755
--- a/netjail/netjail.sh
+++ b/netjail/netjail.sh
@@ -31,7 +31,8 @@ BRTAP=br-tap-$NSUID
# Setup link to our bridge
ip link add "$TAP" type veth peer name "$BRTAP"
-brctl addif "$BRIDGE" "$BRTAP"
+#brctl addif "$BRIDGE" "$BRTAP"
+ip link set dev $BRTAP master $BRIDGE
ip link set "$TAP" netns "$NSNAME"
ip link set dev "$BRTAP" up
diff --git a/netzbon/.gitignore b/netzbon/.gitignore
new file mode 100644
index 0000000..8a9cd7b
--- /dev/null
+++ b/netzbon/.gitignore
@@ -0,0 +1,5 @@
+export/
+tmp/
+result.pdf
+qre/
+qr.pdf
diff --git a/netzbon/generate-letter.sh b/netzbon/generate-letter.sh
new file mode 100755
index 0000000..272463c
--- /dev/null
+++ b/netzbon/generate-letter.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+# This script is in the public domain.
+#
+# Call with the JSON file (like test.json) with
+# an array of merchants to generate letters for!
+#
+# You must export
+#
+# export BASE_URL=https://e.netzbon-basel.ch/
+#
+# before running this script!
+#
+
+set -eu
+LENGTH=$(jq length < $1)
+echo "Generating $LENGTH letters for ${BASE_URL}"
+DOMAIN=$( echo "${BASE_URL}" | sed -e "s/https:\/\///" | sed -e "s/\/$//")
+mkdir -p export
+mkdir -p tmp
+
+for n in $(seq 1 $LENGTH)
+do
+ echo "Processing merchant $n"
+ INDEX=$(expr $n - 1 || true)
+ ID=$(jq -r .[$INDEX].id < $1)
+
+ jq ".[$INDEX]" < $1 | jq '.domain="'"${DOMAIN}"'"' > "tmp/${ID}.json"
+ cd tmp
+ ../render.py "${ID}.json" < ../template_de.tex.j2 > "${ID}.tex"
+ pdflatex "${ID}.tex" < /dev/null &> /dev/null || true
+ pdflatex "${ID}.tex" < /dev/null &> /dev/null || true
+ pdflatex "${ID}.tex" < /dev/null
+ mv "${ID}.pdf" ../export/
+ cd ..
+
+ echo "Done with ${ID}"
+done
+
+pdftk export/*.pdf cat output result.pdf
+echo "Combined letters are in 'result.pdf'"
diff --git a/netzbon/generate-qr.sh b/netzbon/generate-qr.sh
new file mode 100755
index 0000000..e5c01c6
--- /dev/null
+++ b/netzbon/generate-qr.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+# This script is in the public domain.
+#
+# Call with the JSON file (like test.json) with
+# an array of merchants to generate QR codes for!
+#
+# You must export
+#
+# export BASE_URL=https://e.netzbon-basel.ch/
+#
+# before running this script!
+#
+
+set -eu
+LENGTH=$(jq length < $1)
+echo "Generating $LENGTH QR codes for ${BASE_URL}"
+DOMAIN=$( echo "${BASE_URL}" | sed -e "s/https:\/\///" | sed -e "s/\/$//")
+mkdir -p qre
+mkdir -p tmp
+
+for n in $(seq 1 $LENGTH)
+do
+ echo "Processing merchant $n"
+ INDEX=$(expr $n - 1 || true)
+ ID=$(jq -r .[$INDEX].id < $1)
+
+ jq ".[$INDEX]" < $1 | jq '.domain="'"${DOMAIN}"'"' > "tmp/${ID}.json"
+ cd tmp
+ ../render.py "${ID}.json" < ../qr.tex.j2 > "${ID}.tex"
+ pdflatex "${ID}.tex" < /dev/null &> /dev/null || true
+ pdflatex "${ID}.tex" < /dev/null &> /dev/null || true
+ pdflatex "${ID}.tex" < /dev/null
+ mv "${ID}.pdf" ../qre/
+ cd ..
+
+ echo "Done with ${ID}"
+done
+
+pdftk qre/*.pdf cat output qr.pdf
+echo "Combined QR codes are in 'qr.pdf'"
diff --git a/netzbon/qr.tex.j2 b/netzbon/qr.tex.j2
new file mode 100644
index 0000000..ff7b52d
--- /dev/null
+++ b/netzbon/qr.tex.j2
@@ -0,0 +1,13 @@
+\documentclass[a4paper]{minimal}
+\usepackage[
+ paperwidth=53mm,
+ paperheight=53mm,
+ total={53mm,53mm}]{geometry}
+\usepackage[nolinks,final,forget]{qrcode}
+
+\begin{document}
+\vspace*{0.6mm}
+\begin{center}
+\qrcode[hyperlink,level=M,height=45mm]{taler://pay-template/backend.{{data.domain}}/instances/{{data.id}}/default}
+\end{center}
+\end{document}
diff --git a/netzbon/render.py b/netzbon/render.py
new file mode 100755
index 0000000..8bce600
--- /dev/null
+++ b/netzbon/render.py
@@ -0,0 +1,49 @@
+#!/usr/bin/python3
+# This file is in the public domain.
+
+"""Expand Jinja2 templates based on JSON input.
+
+The tool then reads the template from stdin and writes the expanded
+output to stdout.
+
+TODO: proper installation, man page, error handling, --help option.
+
+@author Christian Grothoff
+
+"""
+
+import sys
+import json
+import jinja2
+from jinja2 import BaseLoader
+
+
+class StdinLoader(BaseLoader):
+ def __init__ (self):
+ self.path = '-'
+ def get_source(self, environment, template):
+ source = sys.stdin.read()
+ return source, self.path, lambda: false
+
+
+jsonFile1 = open (sys.argv[1], 'r')
+jsonData1 = json.load(jsonFile1)
+
+jinjaEnv = jinja2.Environment(loader=StdinLoader(),
+ lstrip_blocks=True,
+ trim_blocks=True,
+ undefined=jinja2.StrictUndefined,
+ autoescape=False)
+tmpl = jinjaEnv.get_template('stdin');
+
+try:
+ print(tmpl.render(data = jsonData1))
+except jinja2.TemplateSyntaxError as error:
+ print("Template syntax error: {error.message} on line {error.lineno}.".format(error=error))
+ exit(1)
+except jinja2.UndefinedError as error:
+ print("Template undefined error: {error.message}.".format(error=error))
+ exit(1)
+except TypeError as error:
+ print("Template type error: {0}.".format(error.args[0]))
+ exit(1)
diff --git a/netzbon/setup-merchants.sh b/netzbon/setup-merchants.sh
new file mode 100755
index 0000000..7fa1d3b
--- /dev/null
+++ b/netzbon/setup-merchants.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+# This script is in the public domain.
+#
+# You must export
+#
+# export BASE_URL=e.netzbon-basel.ch
+# export MERCHANT_TOKEN=password
+# export BANK_ADMIN_TOKEN=password
+#
+# before running this script!
+#
+# Call with the JSON file (like test.json) with
+# an array of merchants to set up as the first argument!
+#
+# FIXME: nice error handling is non-existent...
+#
+set -eu
+LENGTH=$(jq length < $1)
+echo "Setting up $LENGTH merchants at ${BASE_URL}"
+
+for n in $(seq 1 $LENGTH)
+do
+ echo "Processing merchant $n"
+ INDEX=$(expr $n - 1 || true)
+ NAME=$(jq ".[$INDEX].name" < $1)
+ ID=$(jq .[$INDEX].id < $1)
+ PW=$(jq .[$INDEX].pass < $1)
+
+ taler-harness deployment provision-bank-and-merchant \
+ "merchant.${BASE_URL}" \
+ "bank.${BASE_URL}" \
+ "--merchant-management-token=${MERCHANT_TOKEN}" \
+ "--bank-admin-token=${BANK_ADMIN_TOKEN}" \
+ "--id=${ID}" \
+ "--legal-name=${NAME}" \
+ "--password=${PW}"
+
+ echo "Done with ${ID}"
+done
diff --git a/netzbon/template.tex.j2 b/netzbon/template.tex.j2
new file mode 100644
index 0000000..81ec978
--- /dev/null
+++ b/netzbon/template.tex.j2
@@ -0,0 +1,79 @@
+\documentclass[12pt,a4paper]{letter}
+\usepackage[utf8]{inputenc}
+\usepackage[english]{babel}
+\usepackage[nolinks,final,forget]{qrcode}
+\usepackage[top=2cm,
+bottom=2cm,
+includefoot,
+left=2.5cm,
+right=2cm,
+footskip=1cm]{geometry}
+\usepackage{url}
+\usepackage[colorlinks=true, allcolors=black]{hyperref}
+\IfFileExists{lmodern.sty}{\usepackage{lmodern}}{}
+\date{\today}
+%
+\selectlanguage{english}
+
+\signature{Isidor}
+\begin{document}
+%
+\begin{letter}{To \\ {{data.name}}}
+
+\opening{Dear {{data.name}},}
+
+We are excited to introduce you to the new digital Netzbon {\bf eNetzBon} using GNU Taler.
+In the enclosed brochure, you will find some introduction on how you can
+set up your business to accept payments in eNetzBon.
+
+This letter provides you with your personalized credentials to access your
+{\bf eNetzBon bank account} and {\bf merchant backend}. Please keep the password
+confidential as otherwise others might gain control over your eNetzBon! You
+are encouraged to set up second-factor authentication (via SMS or email)
+before using the system.
+
+Your initial password is {\bf {{data.pass}}}.
+
+Using this password and the username {\tt {{data.id}}} you can log into
+your eNetzBon bank account at {\url{https://bank.{{data.domain}}/}}.
+
+Furthermore, we are happy to provide you with a GNU Taler merchant
+backend at {\url{https://backend.{{data.domain}}/instances/{{data.id}}/}}.
+The backend is already configured to use your eNetzBon bank account
+and uses the same password.
+
+You are encouraged to change the password (separately) in both systems.
+
+If you want to use a GNU Taler wallet (from {\url{https://wallet.taler.net/}})
+you need to add eNetzBon as a payment service provider before you can use it to
+invoice your customers. To do so, please scan the following QR code with your Taler wallet:
+\begin{center}
+{\qrcode[hyperlink,level=M,height=3cm]{\tt taler://exchange/exchange.{{data.domain}}/}}
+
+{\tt taler://exchange/exchange.{{data.domain}}/}
+\end{center}
+
+This welcome package includes five identical stickers with a QR code which is
+pre-configured to link all your customers' payments into your eNetzBon bank account. Your
+specific QR code looks like this:
+\begin{center}
+{\qrcode[hyperlink,level=M,height=3cm]{\tt taler://pay-template/backend.{{data.domain}}/instances/{{data.id}}/default}}
+
+{\tt taler://pay-template/backend.{{data.domain}}/instances/{{data.id}}/default}
+\end{center}
+You are free to create additional QR codes or change the contents for this QR code
+in the merchant backend.
+
+Please sign the included Terms of service on the attached paper slip and return it to us.
+If you want us to set up the Taler Point-of-sale app, please complete the form in the
+introduction brochure and return that form to us as well.
+
+We hope your customers enjoy paying you with eNetzBon!
+
+
+\closing{Best regards}
+\encl{Five similar QR code stickers, \\ Introduction to GNU Taler for merchants, \\
+eNetzBon Terms of service (to sign), \\ Return envelope}
+
+\end{letter}
+\end{document}
diff --git a/netzbon/template_de.tex.j2 b/netzbon/template_de.tex.j2
new file mode 100644
index 0000000..400942c
--- /dev/null
+++ b/netzbon/template_de.tex.j2
@@ -0,0 +1,103 @@
+\documentclass[12pt,a4paper]{letter}
+\usepackage[utf8]{inputenc}
+\usepackage[ngerman]{babel}
+\usepackage[nolinks,final,forget]{qrcode}
+\usepackage[top=2cm,
+bottom=2cm,
+includefoot,
+left=2.5cm,
+right=2cm,
+footskip=1cm]{geometry}
+\usepackage{url}
+\usepackage[colorlinks=true, allcolors=black]{hyperref}
+\IfFileExists{lmodern.sty}{\usepackage{lmodern}}{}
+\date{\today}
+%
+\selectlanguage{german}
+\address{Verein Soziale \"Okonomie \\Klybeckstrasse 95 \\4057 Basel}
+\signature{Isidor}
+\begin{document}
+%
+\begin{letter}{An \\ {{data.name}}}
+\opening{Liebe(r) {{data.name}},}
+
+Wir freuen uns, dir heute die digitale NetzBon-Variante {\bf eNetzBon} vorstellen zu
+d\"urfen. Der Verein Soziale \"Okonomie betreibt dieses Bezahlsystem basierend auf der
+Technik von {\bf GNU Taler} und l\"adt dich ein, es gleich einmal praktisch kennenzulernen.
+
+Die beiliegende Brosch\"ure erkl\"art, wie du die Software ausprobierst und so einstellst,
+dass dein Betrieb Zahlungen von Kunden und anderen Betrieben in eNetzBon auf deinem
+internen Konto beim Verein empfangen kann. Die {\bf pers\"onlichen Zugangsdaten} gelten
+sowohl f\"ur das {\bf eNetzBon-Konto} als auch f\"ur das {\bf Verwaltungsprogramm GNU Taler
+Merchant}, mit dem du deine Artikelstammdaten anlegen und Buchungen in eNetzBon verwalten
+kannst.
+
+Um Zugang zu deinem {\bf eNetzBon-Konto} zu erhalten, rufst du in deinem Browser die Seite
+\begin{center}
+{\url{https://bank.{{data.domain}}/}}
+\end{center}
+auf und gibst den Benutzernamen {\tt {{data.id}}} und das Passwort {\bf {{data.pass}}} ein.
+
+Dein Passwort musst du nach dem ersten Besuch \"andern und es dauerhaft vor dem Zugriff
+Unbefugter sch\"utzen, weil diese sonst Kontrolle \"uber die eNetzBon erlangen k\"onnten!
+Wir empfehlen daf\"ur eine Zwei-Faktor-Authentifizierung (mittels SMS oder E-Mail), bevor
+das System in Betrieb genommen wird.
+
+Das {\bf Verwaltungsprogramm GNU Taler Merchant} ist zug\"anglich unter
+\begin{center}
+{\url{https://backend.{{data.domain}}/instances/{{data.id}}/}}.
+\end{center}
+Es ist bereits mit deinem eNetzBon-Konto verbunden und verwendet {\bf dasselbe
+Passwort}.
+
+Wir empfehlen zugunsten h\"oherer Sicherheit die beiden Passw\"orter unabh\"angig
+voneinander in beiden Systemen zu \"andern.
+
+Wenn du die {\bf elektronische Geldb\"orse GNU Taler Wallet} verwenden willst, um von
+deinen Kunden eNetzBon an dieses gezahlt zu bekommen bzw. um selbst mit eNetzBon zu
+bezahlen, besuchst du
+\begin{center}
+{\url{https://wallet.taler.net/}}
+\end{center}
+und installierst das passende Wallet f\"ur dein Smartphone (Android oder iOS).
+\newpage
+
+Bevor du {\bf Rechnungen an deine Kunden stellen} kannst, musst du im Wallet zuerst
+eNetzBon als Zahlungsdienst hinzuf\"ugen. Um dies zu tun, aktiviere bitte dein GNU Taler
+Wallet und scanne folgenden QR-Code:
+\begin{center}
+{\qrcode[hyperlink,level=M,height=3cm]{taler://exchange/exchange.{{data.domain}}/}}
+
+{\tt taler://exchange/exchange.{{data.domain}}/}
+\end{center}
+
+Anbei erh\"altst du {\bf f\"unf gleichartige Aufkleber mit einem QR-Code}, der den Verweis
+auf dein eNetzBon-Konto enth\"alt bzw. deinen Betrieb eindeutig bezeichnet. Die Kunden
+m\"ussen diesen QR-Code beim Bezahlen mit ihren GNU Taler Wallets scannen, damit ihre
+Zahlungen auf dein eNetzBon-Konto gelenkt werden. So sieht dein QR-Code aus:
+\begin{center}
+{\qrcode[hyperlink,level=M,height=3cm]{taler://pay-template/backend.{{data.domain}}/instances/{{data.id}}/default}}
+
+{\tt taler://pay-template/backend.{{data.domain}}/instances/{{data.id}}/default}
+\end{center}
+Im Verwaltungsprogramm GNU Taler Merchant besteht die M\"oglichkeit, weitere QR-Codes zu
+erzeugen und zus\"atzliche Daten des QR-Codes festzulegen, z.B. QR-Codes mit festen
+oder variablen Preisen f\"ur deine angebotenen Waren oder Dienstleistungen.
+
+Eine Bitte haben wir noch, bevor es losgehen kann:
+
+Wir ben\"otigen die {\bf Allgemeinen Geschäftsbedingungen (AGB)} zur eNetzBon-Nutzung
+unterschrieben an den Verein Soziale \"Okonomie zur\"uckgesendet.
+
+F\"ur den Fall deines Interesses, dass wir dir die Anwendung {\bf Taler
+Point-of-sale App} aufsetzen und in Betrieb nehmen sollen, f\"ulle bitte den Antrag in der
+Anleitungsbrosch\"ure aus und sende auch diesen an uns zur\"uck.
+
+Und nun w\"unschen wir dir gutes Gelingen und viel Freude beim Entdecken des eNetzBon!
+\closing{Herzliche Gr\"usse}
+\encl{F\"unf identische Aufkleber mit dem QR-Code deines eNetzBon-Kontos, \\
+Anleitungsbrosch\"ure GNU Taler f\"ur NetzBon-Betriebe, \\
+eNetzBon-AGB (bitte mit Unterschrift zur\"ucksenden), \\ Antwortcouvert}
+
+\end{letter}
+\end{document}
diff --git a/netzbon/test.json b/netzbon/test.json
new file mode 100644
index 0000000..9a47fe6
--- /dev/null
+++ b/netzbon/test.json
@@ -0,0 +1,7 @@
+[
+ {
+ "name": "Test shop",
+ "id": "test",
+ "pass": "password"
+ }
+]
diff --git a/nlnet/task1/Dockerfile b/nlnet/task1/Dockerfile
new file mode 100644
index 0000000..498d54a
--- /dev/null
+++ b/nlnet/task1/Dockerfile
@@ -0,0 +1,32 @@
+FROM debian:stable
+
+RUN apt-get update
+RUN apt-get install -y openjdk-17-jre git python3-pip curl jq sqlite3
+RUN pip3 install click requests
+
+# Installation
+RUN git clone git://git.taler.net/libeufin
+WORKDIR /libeufin
+RUN git fetch && git checkout 9c7079e5323eed4d16e24c1c4245d6586cecac53 # amounts zero-check fixed.
+RUN ./bootstrap
+RUN ./configure --prefix=/usr/local
+RUN make install
+
+# Reverse proxy
+RUN apt-get install -y nginx
+
+# Importing the UI.
+RUN git clone -b prebuilt git://git.taler.net/wallet-core
+RUN git -C wallet-core checkout 75af013b348b08b8fb9e65cc9270f2fde964979b # checkout rates fixed.
+RUN cp /libeufin/debian/etc/nginx/sites-available/libeufin-sandbox /etc/nginx/sites-enabled/
+RUN mkdir -p /usr/share/libeufin/demobank-ui/
+RUN mkdir -p /etc/libeufin/
+RUN cp /libeufin/debian/usr/share/libeufin/demobank-ui/demobank-ui-settings.js /etc/libeufin/
+RUN cp wallet-core/demobank/* /usr/share/libeufin/demobank-ui/
+
+# Default place for the database.
+RUN mkdir /libeufin-data
+
+COPY start.sh /
+# ENTRYPOINT /start.sh
+CMD /start.sh
diff --git a/nlnet/task1/start.sh b/nlnet/task1/start.sh
new file mode 100755
index 0000000..18bf9b8
--- /dev/null
+++ b/nlnet/task1/start.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+set -eu
+
+export LIBEUFIN_SANDBOX_ADMIN_PASSWORD=${LIBEUFIN_SANDBOX_ADMIN_PASSWORD:-admin}
+export LIBEUFIN_SANDBOX_DB_CONNECTION="jdbc:sqlite:/libeufin-data/libeufin.sqlite"
+libeufin-sandbox config --without-registrations --currency ${CURRENCY:-EUR} default
+if test -z "${LIBEUFIN_EXPOSED_PORT:-}"; then
+ echo ERROR: LIBEUFIN_EXPOSED_PORT is an empty string.
+ exit 1
+fi
+
+sed -i "s/localhost/localhost:$LIBEUFIN_EXPOSED_PORT/" /etc/libeufin/demobank-ui-settings.js
+service nginx start
+libeufin-sandbox serve --port 5016 --no-localhost-only
diff --git a/nlnet/task2/Dockerfile b/nlnet/task2/Dockerfile
new file mode 100644
index 0000000..e7cc048
--- /dev/null
+++ b/nlnet/task2/Dockerfile
@@ -0,0 +1,29 @@
+FROM debian:stable
+
+RUN apt-get update
+RUN apt-get install -y git
+
+ # python3-pip
+# Libeufin Dependencies
+RUN apt-get install -y openjdk-17-jre
+# Installation
+RUN git clone git://git.taler.net/libeufin
+WORKDIR /libeufin
+RUN git fetch && git checkout a52cf289234683c4ff492cd8b508cfb6c85ca1e8
+RUN ./bootstrap
+RUN apt-get install -y python3-venv
+RUN apt-get install -y make
+RUN ./configure --prefix=/usr/local
+RUN make install
+# FIXME: move to the deps block.
+RUN apt-get install -y postgresql sudo
+RUN grep -v ^host.*all /etc/postgresql/13/main/pg_hba.conf > /tmp/pg_hba_buf.txt
+RUN echo "host libeufincheck all 127.0.0.1/32 trust" >> /tmp/pg_hba_buf.txt
+RUN echo "host libeufincheck all ::1/128 trust" >> /tmp/pg_hba_buf.txt
+RUN cp /tmp/pg_hba_buf.txt /etc/postgresql/13/main/pg_hba.conf
+# CMD bash
+RUN apt-get install -y jq curl
+CMD service postgresql start && \
+ sudo -u postgres createuser -s root && \
+ createdb -h /var/run/postgresql libeufincheck && \
+ make check
diff --git a/nlnet/task3/Dockerfile b/nlnet/task3/Dockerfile
new file mode 100644
index 0000000..52e7978
--- /dev/null
+++ b/nlnet/task3/Dockerfile
@@ -0,0 +1,15 @@
+FROM debian:stable
+
+RUN apt-get update
+RUN apt-get install -y openjdk-17-jre git python3-pip curl jq sqlite3 postgresql python3-requests python3-click sudo
+
+# Installation
+RUN git clone git://git.taler.net/libeufin
+WORKDIR /libeufin
+RUN git fetch && git checkout 4bc5f38f571a45d427f73813ec3846bf59413afa
+RUN ./bootstrap
+RUN ./configure --prefix=/usr/local
+RUN make install
+COPY keys.sh /
+COPY start.sh /
+ENTRYPOINT ["/start.sh"]
diff --git a/nlnet/task3/keys.sh b/nlnet/task3/keys.sh
new file mode 100755
index 0000000..d1fff07
--- /dev/null
+++ b/nlnet/task3/keys.sh
@@ -0,0 +1,93 @@
+#!/bin/bash
+
+set -eu
+
+# This script prepares the EBICS keys for one subscriber
+# at the PostFinance test platform.
+
+export LIBEUFIN_NEXUS_DB_CONNECTION="jdbc:sqlite:/tmp/postfinance-nexusdb.sqlite3"
+
+NEXUS_USERNAME="netzbon-bridge"
+NEXUS_PASSWORD="secret"
+NEXUS_PORT="5001"
+
+function exit_cleanup()
+{
+ for n in `jobs -p`
+ do
+ kill $n 2> /dev/null || true
+ done
+ wait || true
+}
+
+trap "exit_cleanup" EXIT
+
+echo "Creating the $NEXUS_USERNAME Nexus user..."
+libeufin-nexus superuser $NEXUS_USERNAME --password $NEXUS_PASSWORD
+echo "Launching Nexus on port $NEXUS_PORT..."
+libeufin-nexus \
+ serve --ipv4-only \
+ --log-level debug \
+ --no-localhost-only \
+ --port $NEXUS_PORT > nexus-postfinance.log 2>&1 &
+
+echo -n "Checking Nexus is serving..."
+for i in `seq 1 10`; do
+ echo -n .
+  if test "$i" = 10; then
+ echo Nexus is unreachable
+ exit 1
+ fi
+  if curl "http://localhost:$NEXUS_PORT/" &> /dev/null; then
+ break
+ fi
+ sleep 1
+done
+echo OK
+
+export LIBEUFIN_NEXUS_URL="http://localhost:5001/"
+export LIBEUFIN_NEXUS_USERNAME=$NEXUS_USERNAME
+export LIBEUFIN_NEXUS_PASSWORD=$NEXUS_PASSWORD
+
+# FIXME: make connection creation idempotent.
+echo "Creating a EBICS connection at Nexus..."
+libeufin-cli connections new-ebics-connection \
+ --ebics-url https://isotest.postfinance.ch/ebicsweb/ebicsweb \
+ --host-id PFEBICS \
+ --partner-id $EBICS_PARTNER_ID \
+ --ebics-user-id $EBICS_USER_ID \
+ --dialect pf \
+ postfinanceconn || true
+
+# 1, send the keys (INI, HIA)
+# NOTE: these keys will ONLY be considered if the user
+# is in a NEW state, any previous uploaded keys should be reset.
+echo "If that is the case, reset any previous keys via the bank Web UI. Press Enter to continue.. "
+read -s
+echo -n "Sending the new keys to the bank..."
+libeufin-cli connections connect postfinanceconn
+echo DONE
+
+# 2, invite the user to unblock them in the Web UI
+echo "Please enable the new client keys via the bank Web UI, then press Enter.. "
+read -s
+
+# 3, download the bank keys (HPB).
+# That's achieved with another 'connect' action (#7880).
+echo -n "Downloading the bank keys..."
+libeufin-cli connections connect postfinanceconn
+echo DONE
+echo "Found the following bank keys:"
+libeufin-cli connections show-connection postfinanceconn | jq -r '.details | "Auth: \(.bankAuthKeyHash)\nEnc: \(.bankEncKeyHash)"'
+
+echo
+echo "If any bank keys showed up, please check in the bank Web UI if they match."
+echo "If they match, press Enter to continue, otherwise CTRL-C to end."
+read -s
+
+echo -n "Preparing the local keys bundle.."
+libeufin-cli connections export-backup \
+ --passphrase secret \
+ --output-file /tmp/pofi.json \
+ postfinanceconn > /dev/null
+echo DONE
diff --git a/nlnet/task3/salted-incoming-payment-template.csv b/nlnet/task3/salted-incoming-payment-template.csv
new file mode 100644
index 0000000..c539939
--- /dev/null
+++ b/nlnet/task3/salted-incoming-payment-template.csv
@@ -0,0 +1,2 @@
+Product;Channel;Account;Currency;Amount;Reference;Name;Street;Number;Postcode;City;Country;DebtorAddressLine;DebtorAddressLine;DebtorAccount;ReferenceType;UltimateDebtorName;UltimateDebtorStreet;UltimateDebtorNumber;UltimateDebtorPostcode;UltimateDebtorTownName;UltimateDebtorCountry;UltimateDebtorAddressLine;UltimateDebtorAddressLine;RemittanceInformationText
+ QRR;PO;__PAYEE_IBAN__;CHF;33;;D009;Musterstrasse;1;1111;Musterstadt;CH;;;;NON;D009;Musterstrasse;1;1111;Musterstadt;CH;;;__PAYMENT_SALT__
diff --git a/nlnet/task3/start.sh b/nlnet/task3/start.sh
new file mode 100755
index 0000000..2f8b2a2
--- /dev/null
+++ b/nlnet/task3/start.sh
@@ -0,0 +1,79 @@
+#!/bin/bash
+
+set -eu
+
+function finish() {
+ exit 1
+}
+
+trap finish SIGINT
+
+# Expected arguments are:
+#
+# $1 := EBICS user ID
+# $2 := EBICS partner ID (a.k.a. customer ID)
+# $3 := IBAN as assigned by the PostFinance test platform.
+
+# Suggested invocation via 'docker':
+#
+# docker run -it $IMAGE_TAG $EBICS_USER_ID $EBICS_PARTNER_ID
+service postgresql start
+sudo -u postgres createuser -s root
+createdb libeufincheck
+# This script conducts the key exchange with the bank
+# and guides the user to download and upload documents
+# to the bank. It pauses the execution to let the user
+# check and set the Web UI as a double-check mean.
+
+# Setting the EBICS keys. It'll place them in the container's
+# /tmp/pofi.json, where Kotlin expects them.
+export EBICS_USER_ID=$1
+export EBICS_PARTNER_ID=$2
+/keys.sh
+
+# If the keys are ready, it proceeds to invoke the uploading
+# and downloading logic.
+
+# Upload test.
+
+# The test runner will upload one pain.001 document to
+# the bank. Thereafter, the user can check the existence
+# of such document via the bank Web UI. Moreover, the user
+# is offered the possibility to specify a custom payment
+# subject.
+
+MY_IBAN=$3
+PAIN_SALT=$RANDOM
+echo
+echo "Now preparing the pain.001 to upload to the bank via LibEuFin."
+echo "This document instructs the bank to send money to an arbitrary"
+echo "IBAN by debiting the test platform bank account."
+echo "The outgoing payment defaults to have this subject: $PAIN_SALT".
+echo "Please enter any value in this prompt, in case you want to"
+echo -n "change the default subject: "
+read MAYBE_PAIN_SALT
+
+if ! test "x" = "x$MAYBE_PAIN_SALT"; then
+ PAIN_SALT=$MAYBE_PAIN_SALT
+fi
+
+echo "The pain.001 will have this subject: '$PAIN_SALT', now calling"
+echo "LibEuFin to upload it via EBICS.."
+cd /libeufin; ./gradlew -q :nexus:pofi --args="--my-iban \"$MY_IBAN\" upload --subject \"$PAIN_SALT\""; cd -
+echo DONE
+
+echo
+echo "Please check the bank Web UI to find the pain.001 document"
+echo "whose subject is '$PAIN_SALT'. If that is found, then LibEuFin"
+echo "has successfully uploaded it. In the next step, LibEuFin"
+echo "will download the new banking records. If '$PAIN_SALT' is found"
+echo "in the logs, then it succeeded. Press Enter to continue.. "
+read -s
+
+# Download test.
+
+# The test runner proceeds with downloading the banking
+# records that witness the payment that was uploaded shortly
+# ago. If the logs show the payment subject that belongs
+# to such payment, then the download went through.
+cd /libeufin; ./gradlew -q :nexus:pofi --args="--my-iban \"$MY_IBAN\" download"; cd -
diff --git a/nlnet/task4/Dockerfile b/nlnet/task4/Dockerfile
new file mode 100644
index 0000000..0a3be9a
--- /dev/null
+++ b/nlnet/task4/Dockerfile
@@ -0,0 +1,42 @@
+FROM debian:stable
+
+RUN apt-get update
+RUN apt-get install -y \
+ git \
+ openjdk-17-jre \
+ python3-pip \
+ curl \
+ jq \
+ postgresql \
+ python3-requests \
+ python3-click \
+ sudo \
+ time \
+ autoconf \
+ autopoint \
+ libtool \
+ texinfo \
+ libgcrypt-dev \
+ libidn11-dev \
+ zlib1g-dev \
+ libunistring-dev \
+ libjansson-dev \
+ recutils \
+ libsqlite3-dev \
+ libpq-dev \
+ libcurl4-openssl-dev \
+ libsodium-dev \
+ libqrencode-dev \
+ zip
+
+# Installation
+RUN git clone git://git.taler.net/libeufin
+WORKDIR /libeufin
+RUN git fetch && git checkout 736c3998648ad249577f8930b616e1f27647f938
+RUN ./bootstrap
+RUN ./configure --prefix=/usr/local
+RUN make install
+RUN make install-nexus
+WORKDIR /
+COPY start.sh /
+ENTRYPOINT ["/start.sh"]
diff --git a/nlnet/task4/launch.sh b/nlnet/task4/launch.sh
new file mode 100755
index 0000000..bc1508e
--- /dev/null
+++ b/nlnet/task4/launch.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+# Binds the container internal port 8080 to the host's.
+
+set -eu
+
+docker run -p 8080:8080 -it monitor
diff --git a/nlnet/task4/start.sh b/nlnet/task4/start.sh
new file mode 100755
index 0000000..3b45d57
--- /dev/null
+++ b/nlnet/task4/start.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+set -eu
+
+service postgresql start
+sudo -u postgres createuser -s root
+createdb libeufinbank
+cat << EOF > /usr/bin/taler-config
+#!/bin/bash
+
+echo postgresql:///libeufinbank
+EOF
+chmod +x /usr/bin/taler-config
+sed -i 's/ALLOW_CONVERSION = no/ALLOW_CONVERSION = yes/' \
+ /libeufin/contrib/libeufin-bank.conf
+cat << EOF >> /libeufin/contrib/libeufin-bank.conf
+
+[nexus-ebics]
+currency = EUR
+[nexus-postgres]
+config = postgresql:///libeufinbank
+EOF
+libeufin-bank dbinit -c /libeufin/contrib/libeufin-bank.conf
+libeufin-nexus dbinit -c /libeufin/contrib/libeufin-bank.conf
+/libeufin/contrib/populate-stats.sh /libeufin/contrib/libeufin-bank.conf --one
+libeufin-bank passwd admin nlnet
+libeufin-bank serve -c /libeufin/contrib/libeufin-bank.conf
diff --git a/nlnet/task5/date-range/Dockerfile b/nlnet/task5/date-range/Dockerfile
new file mode 100644
index 0000000..8d1224f
--- /dev/null
+++ b/nlnet/task5/date-range/Dockerfile
@@ -0,0 +1,15 @@
+FROM debian:stable
+
+RUN apt-get update
+RUN apt-get install -y openjdk-17-jre git python3-pip curl jq sqlite3 postgresql python3-requests python3-click sudo
+
+# Installation
+RUN git clone git://git.taler.net/libeufin
+WORKDIR /libeufin
+RUN git fetch && git checkout a614d433a8307468f1074114086ae0a47b848472
+RUN ./bootstrap
+RUN ./configure --prefix=/usr/local
+RUN make install
+COPY start-libeufin.sh /
+COPY start.sh /
+ENTRYPOINT ["/start.sh"]
diff --git a/nlnet/task5/date-range/start-libeufin.sh b/nlnet/task5/date-range/start-libeufin.sh
new file mode 100644
index 0000000..8f000a4
--- /dev/null
+++ b/nlnet/task5/date-range/start-libeufin.sh
@@ -0,0 +1,35 @@
+DB_CONN="postgresql:///libeufincheck"
+export LIBEUFIN_SANDBOX_DB_CONNECTION=$DB_CONN
+export LIBEUFIN_NEXUS_DB_CONNECTION=$DB_CONN
+
+echo -n Delete previous data...
+libeufin-sandbox reset-tables
+libeufin-nexus reset-tables
+echo DONE
+echo -n Configure the default demobank with MANA...
+libeufin-sandbox config --with-signup-bonus --currency MANA default
+echo DONE
+echo -n Setting the default exchange at Sandbox...
+libeufin-sandbox \
+ default-exchange \
+ "https://exchange.example.com/" \
+ "payto://iban/NOTUSED"
+echo DONE
+echo -n Start the bank...
+export LIBEUFIN_SANDBOX_ADMIN_PASSWORD=foo
+libeufin-sandbox serve > sandbox.log 2>&1 &
+SANDBOX_PID=$!
+echo DONE
+echo -n Wait for the bank...
+curl --max-time 4 --retry-all-errors --retry-connrefused --retry-delay 1 --retry 10 http://localhost:5000/ &> /dev/null
+echo DONE
+echo -n Make one superuser at Nexus...
+libeufin-nexus superuser test-user --password x
+echo DONE
+echo -n Launching Nexus...
+libeufin-nexus serve &> nexus.log &
+NEXUS_PID=$!
+echo DONE
+echo -n Waiting for Nexus...
+curl --max-time 4 --retry-all-errors --retry-connrefused --retry-delay 1 --retry 10 http://localhost:5001/ &> /dev/null
+echo DONE
diff --git a/nlnet/task5/date-range/start.sh b/nlnet/task5/date-range/start.sh
new file mode 100755
index 0000000..c61cfee
--- /dev/null
+++ b/nlnet/task5/date-range/start.sh
@@ -0,0 +1,155 @@
+#!/bin/bash
+
+# This script shows how Nexus can request histories from
+# a particular time frame. Such request must succeed via
+# two connection types: EBICS and x-libeufin-bank. EBICS
+# ensures the fetching of fiat payments made to the regional
+# currency authority, whereas x-libeufin-bank does it for
+# the regional currency circuit. Note: the time-framed
+# request is exceptional: it's used only after a complaint
+# from a user where they didn't get their funds as expected.
+
+set -eu
+
+service postgresql start
+sudo -u postgres createuser -s root
+createdb libeufincheck
+
+echo -n Launching and configuring LibEuFin..
+source /start-libeufin.sh &> /dev/null
+# Register the Sandbox account.
+export LIBEUFIN_SANDBOX_USERNAME=sandbox-user
+export LIBEUFIN_SANDBOX_PASSWORD=foo
+libeufin-cli \
+ sandbox --sandbox-url http://localhost:5000/ \
+ demobank \
+ register
+# x-libeufin-bank connection.
+# Creating the x-libeufin-bank connection at Nexus.
+export LIBEUFIN_NEXUS_USERNAME=test-user
+export LIBEUFIN_NEXUS_PASSWORD=x
+export LIBEUFIN_NEXUS_URL=http://localhost:5001
+libeufin-cli connections new-xlibeufinbank-connection \
+ --bank-url "http://localhost:5000/demobanks/default/access-api" \
+ --username sandbox-user \
+ --password foo \
+ xlibeufinbankconn
+# Connecting the x-libeufin-bank connection...
+libeufin-cli connections connect xlibeufinbankconn
+# Importing the bank account under a local name at Nexus.
+# Importing the x-libeufin-bank account locally..
+libeufin-cli connections import-bank-account \
+ --offered-account-id sandbox-user \
+ --nexus-bank-account-id foo-at-nexus xlibeufinbankconn
+
+# EBICS connection.
+## Sandbox side.
+export LIBEUFIN_SANDBOX_USERNAME=admin
+# "Create EBICS host at Sandbox..."
+libeufin-cli sandbox \
+ --sandbox-url http://localhost:5000 \
+ ebicshost create --host-id wwwebics
+# Create nlnet EBICS subscriber at Sandbox
+libeufin-cli sandbox \
+ --sandbox-url http://localhost:5000 \
+ demobank new-ebicssubscriber --host-id wwwebics \
+ --user-id nlnet --partner-id nlnet \
+ --bank-account sandbox-user # that's a username _and_ a bank account name
+## Nexus side.
+export LIBEUFIN_NEXUS_USERNAME=test-user
+export LIBEUFIN_NEXUS_PASSWORD=x
+export LIBEUFIN_NEXUS_URL=http://localhost:5001
+# Creating the EBICS connection at Nexus...
+libeufin-cli connections new-ebics-connection \
+ --ebics-url "http://localhost:5000/ebicsweb" \
+ --host-id wwwebics \
+ --partner-id nlnet \
+ --ebics-user-id nlnet \
+ ebicsconn
+# Setup EBICS keying...
+libeufin-cli connections connect ebicsconn > /dev/null
+# Download bank account name from Sandbox...
+libeufin-cli connections download-bank-accounts ebicsconn
+# Importing bank account info into Nexus...
+libeufin-cli connections import-bank-account \
+ --offered-account-id sandbox-user \
+ --nexus-bank-account-id bar-at-nexus ebicsconn
+echo DONE
+
+FIRST_JAN_2020="1577833200000" # in milliseconds
+END_DEC_2019="2019-12-30"
+MID_JAN_2020="2020-01-15"
+
+echo Make sample transaction..
+# 0, setup and start services.
+libeufin-sandbox make-transaction \
+ --credit-account=admin \
+ --debit-account=sandbox-user MANA:2 \
+ "task5" # subject.
+echo DONE
+
+echo -n Artificially set the transaction date to $FIRST_JAN_2020..
+# 1, set artificial time for the transaction at January, 1st 2020.
+echo "UPDATE bankaccounttransactions SET date='$FIRST_JAN_2020' WHERE subject='task5'" | psql -q -d libeufincheck
+echo DONE
+
+# 2, retrieve the transaction via Nexus, for both
+# connections and by asking for a (narrow) time frame
+# that includes the 2020-01-01 payment.
+
+echo -n Nexus: syncing banking records for the time frame $END_DEC_2019-$MID_JAN_2020 via EBICS..
+# Fetch time-framed payments via EBICS.
+libeufin-cli \
+ accounts \
+ fetch-transactions \
+ --level=report \
+ --range-type=time-range \
+ --start=$END_DEC_2019 \
+ --end=$MID_JAN_2020 \
+ bar-at-nexus > /dev/null # EBICS
+echo DONE
+
+echo Showing the synced data..
+# Now checks if Nexus ingested and shows the
+# expected payment.
+libeufin-cli \
+ accounts \
+ transactions \
+ bar-at-nexus
+echo DONE
+
+echo Resetting the Nexus database..
+# Bring the database state so that Nexus does not hold any payment.
+echo "DELETE FROM nexusbanktransactions" | psql -d libeufincheck
+echo "DELETE FROM nexusbankmessages" | psql -d libeufincheck
+echo DONE
+
+echo Checking that no payment data appears after the reset..
+# Double-checking that the future steps start
+# without the previous transactions.
+libeufin-cli \
+ accounts \
+ transactions \
+ foo-at-nexus # FIXME: put a 204 No Content check?
+echo DONE
+
+# Fetch time-framed payments via x-libeufin-bank.
+echo Nexus: syncing banking records for the time frame ${END_DEC_2019}_${MID_JAN_2020} via x-libeufin-bank..
+libeufin-cli \
+ accounts \
+ fetch-transactions \
+ --level=statement \
+ --range-type=time-range \
+ --start=$END_DEC_2019 \
+ --end=$MID_JAN_2020 \
+ foo-at-nexus
+echo DONE
+
+# As in the previous case, now Nexus should show
+# the 2020-01-01 the payment.
+echo Showing the synced data..
+libeufin-cli \
+ accounts \
+ transactions \
+ foo-at-nexus # FIXME: put a 200 OK check?
+echo DONE
diff --git a/nlnet/task5/long-poll/Dockerfile b/nlnet/task5/long-poll/Dockerfile
new file mode 100644
index 0000000..713e47e
--- /dev/null
+++ b/nlnet/task5/long-poll/Dockerfile
@@ -0,0 +1,14 @@
+FROM debian:stable
+
+RUN apt-get update
+RUN apt-get install -y openjdk-17-jre git python3-pip curl jq sqlite3 postgresql python3-requests python3-click sudo libgnunet0.19
+
+# Installation
+RUN git clone git://git.taler.net/libeufin
+WORKDIR /libeufin
+RUN git fetch && git checkout 934a73b09b9e9abba348e15ddc058df5bb9cd6a3
+RUN ./bootstrap
+RUN ./configure --prefix=/usr/local
+RUN make install
+COPY start.sh /
+ENTRYPOINT ["/start.sh"]
diff --git a/nlnet/task5/long-poll/start.sh b/nlnet/task5/long-poll/start.sh
new file mode 100755
index 0000000..46a0af2
--- /dev/null
+++ b/nlnet/task5/long-poll/start.sh
@@ -0,0 +1,134 @@
+#!/bin/bash
+
+set -eu
+
+service postgresql start
+sudo -u postgres createuser -s root
+createdb libeufincheck
+
+wire_transfer () {
+ RESERVE_PUB=$(gnunet-ecc -g1 /tmp/www &> /dev/null && gnunet-ecc -p /tmp/www)
+ DB_CONN="postgresql:///libeufincheck"
+ libeufin-sandbox \
+ make-transaction \
+ --credit-account=sandbox-user \
+ --debit-account=admin MANA:2 \
+ $RESERVE_PUB
+}
+
+WITH_TASKS=1
+echo RUNNING SANDBOX-NEXUS EBICS PAIR
+jq --version &> /dev/null || (echo "'jq' command not found"; exit 77)
+curl --version &> /dev/null || (echo "'curl' command not found"; exit 77)
+
+DB_CONN="postgresql:///libeufincheck"
+export LIBEUFIN_SANDBOX_DB_CONNECTION=$DB_CONN
+export LIBEUFIN_NEXUS_DB_CONNECTION=$DB_CONN
+
+echo -n Delete previous data...
+libeufin-sandbox reset-tables
+libeufin-nexus reset-tables
+echo DONE
+echo -n Configure the default demobank with MANA...
+libeufin-sandbox config --with-signup-bonus --currency MANA default
+echo DONE
+echo -n Setting the default exchange at Sandbox...
+libeufin-sandbox \
+ default-exchange \
+ "https://exchange.example.com/" \
+ "payto://iban/NOTUSED"
+echo DONE
+echo -n Start the bank...
+export LIBEUFIN_SANDBOX_ADMIN_PASSWORD=foo
+libeufin-sandbox serve > sandbox.log 2>&1 &
+SANDBOX_PID=$!
+echo DONE
+echo -n Wait for the bank...
+curl --max-time 4 --retry-all-errors --retry-connrefused --retry-delay 1 --retry 10 http://localhost:5000/ &> /dev/null
+echo DONE
+echo -n Make one superuser at Nexus...
+libeufin-nexus superuser test-user --password x
+echo DONE
+echo -n Launching Nexus...
+libeufin-nexus serve &> nexus.log &
+NEXUS_PID=$!
+echo DONE
+echo -n Waiting for Nexus...
+curl --max-time 4 --retry-all-errors --retry-connrefused --retry-delay 1 --retry 10 http://localhost:5001/ &> /dev/null
+echo DONE
+
+echo -n "Register the Sandbox account..."
+export LIBEUFIN_SANDBOX_USERNAME=sandbox-user
+export LIBEUFIN_SANDBOX_PASSWORD=foo
+libeufin-cli \
+ sandbox --sandbox-url http://localhost:5000/ \
+ demobank \
+ register
+echo DONE
+echo -n Creating the x-libeufin-bank connection at Nexus...
+export LIBEUFIN_NEXUS_USERNAME=test-user
+export LIBEUFIN_NEXUS_PASSWORD=x
+export LIBEUFIN_NEXUS_URL=http://localhost:5001
+# echoing the password to STDIN, as that is a "prompt" option.
+libeufin-cli connections new-xlibeufinbank-connection \
+ --bank-url "http://localhost:5000/demobanks/default/access-api" \
+ --username sandbox-user \
+ --password foo \
+ wwwconn
+echo DONE
+echo -n Connecting the x-libeufin-bank connection...
+libeufin-cli connections connect wwwconn
+echo DONE
+# Importing the bank account under a local name at Nexus.
+echo -n Importing the x-libeufin-bank account locally..
+libeufin-cli connections import-bank-account \
+ --offered-account-id sandbox-user \
+ --nexus-bank-account-id foo-at-nexus wwwconn
+echo DONE
+echo -n Create the Taler facade at Nexus...
+libeufin-cli facades \
+ new-taler-wire-gateway-facade \
+ --currency TESTKUDOS --facade-name test-facade \
+ wwwconn foo-at-nexus
+echo DONE
+if test 1 = $WITH_TASKS; then
+ echo -n Creating submit transactions task..
+ libeufin-cli accounts task-schedule \
+ --task-type submit \
+ --task-name www-payments \
+ --task-cronspec "* * *" \
+ foo-at-nexus || true
+ # Tries every second. Ask C52
+ echo DONE
+ echo -n Creating fetch transactions task..
+ # Not idempotent, FIXME #7739
+ libeufin-cli accounts task-schedule \
+ --task-type fetch \
+ --task-name www-history \
+ --task-cronspec "* * *" \
+ --task-param-level statement \
+ --task-param-range-type since-last \
+ foo-at-nexus || true
+ echo DONE
+else
+ echo NOT creating background tasks!
+fi
+
+echo
+echo Services are online! The following shell offers a 'wire_transfer'
+echo command that wires money to Nexus 'test-user'. Give it after having
+echo connected an HTTP client that long-polls to Nexus. As an example, a
+echo 100 seconds long-poller to Nexus is the following command:
+echo curl -v -u test-user:x "'http://localhost:5001/facades/test-facade/taler-wire-gateway/history/incoming?delta=5&long_poll_ms=100000'"
+echo
+echo Hint: after having issued the previous command and having observed
+echo that it actually long-polls, press CTRL-Z to send it in the background,
+echo "then wire the funds to the long-poller with 'wire_transfer',"
+echo "and finally give 'fg 1' to bring the long-poller in the foreground."
+echo If the client now shows a response, then the long-polling mechanism
+echo worked.
+echo
+
+cd /
+export -f wire_transfer
+bash
diff --git a/nlnet/task5/performance/Dockerfile b/nlnet/task5/performance/Dockerfile
new file mode 100644
index 0000000..4daeaf0
--- /dev/null
+++ b/nlnet/task5/performance/Dockerfile
@@ -0,0 +1,70 @@
+FROM debian:stable
+
+RUN apt-get update
+RUN apt-get install -y \
+ git \
+ openjdk-17-jre \
+ python3-pip \
+ curl \
+ jq \
+ postgresql \
+ python3-requests \
+ python3-click \
+ sudo \
+ time \
+ autoconf \
+ autopoint \
+ libtool \
+ texinfo \
+ libgcrypt-dev \
+ libidn11-dev \
+ zlib1g-dev \
+ libunistring-dev \
+ libjansson-dev \
+ recutils \
+ libsqlite3-dev \
+ libpq-dev \
+ libcurl4-openssl-dev \
+ libsodium-dev \
+ libqrencode-dev \
+ zip
+
+# Installation
+RUN git clone git://git.taler.net/libeufin
+WORKDIR /libeufin
+RUN git fetch && git checkout 4bc5f38f571a45d427f73813ec3846bf59413afa
+RUN ./bootstrap
+RUN ./configure --prefix=/usr/local
+RUN make install
+WORKDIR /
+RUN git clone git://git.gnunet.org/libmicrohttpd
+WORKDIR /libmicrohttpd
+RUN ./bootstrap
+RUN ./configure --disable-doc
+RUN make install
+WORKDIR /
+RUN git clone git://git.gnunet.org/gnunet
+WORKDIR /gnunet
+RUN apt-get install -y python3-sphinx python3-sphinx-rtd-theme # Move up?
+RUN ./bootstrap
+RUN ./configure
+RUN pip3 install --break-system-packages htmlark
+RUN make install
+WORKDIR /
+RUN git clone git://git.taler.net/exchange
+WORKDIR /exchange
+RUN ./bootstrap
+RUN ./configure
+RUN make install
+WORKDIR /
+RUN git clone git://git.taler.net/merchant
+WORKDIR /merchant
+RUN ./bootstrap
+RUN ./configure
+RUN make install
+WORKDIR /
+
+COPY start.sh /
+RUN apt-get install -y wget
+RUN apt-get install -y bc
+ENTRYPOINT ["/start.sh"]
diff --git a/nlnet/task5/performance/start.sh b/nlnet/task5/performance/start.sh
new file mode 100755
index 0000000..2cc9175
--- /dev/null
+++ b/nlnet/task5/performance/start.sh
@@ -0,0 +1,107 @@
+#!/bin/bash
+
+# This script shows, via running the benchmark, how
+# the LibEuFin database connections are significantly
+# shorter than the benchmark total time.
+
+# For this reason, it can only be that LibEuFin opens
+# and closes many PostgreSQL connections, as it is required
+# by milestone #3.
+
+set -eu
+
+export HOW_MANY_WITHDRAWALS=100
+
+service postgresql start
+sudo -u postgres createuser -s root
+
+# Activating the disconnection logs.
+sudo -u postgres psql -q -c "ALTER SYSTEM SET log_disconnections = 'on'" -c "SELECT pg_reload_conf()" > /dev/null
+
+# Converts AA:BB:CC.DDD to milliseconds.
+convert_pg_time_to_ms () {
+ awk -F[.:] '{SECS=(60*60*$1)+(60*$2)+$3; MILLI=$4; TOTAL_MS=(SECS*1000)+MILLI; print TOTAL_MS}'
+}
+
+createdb talercheck
+export LD_LIBRARY_PATH=/usr/local/lib
+
+prepare_and_run () {
+ taler-unified-setup.sh \
+ -Wwemtns \
+ -c /exchange/src/benchmark/benchmark-cs.conf \
+ -u exchange-account-2 &> /check_ready.txt &
+ # Wait that the prep. went through.
+ echo -n Waiting the unified setup to complete..
+ READY="NO"
+ for i in `seq 100` true; do
+ if grep -q "<<READY>>" /check_ready.txt; then
+ READY="YES"
+ break
+ fi
+ echo -n "."; sleep 1
+ done
+
+ if test $READY = "YES"; then
+ echo "DONE"
+ else
+ cat /check_ready.txt
+ echo FAIL
+ exit 1
+ fi
+
+ echo Running the benchmark..
+ taler-exchange-benchmark \
+ -c /exchange/src/benchmark/benchmark-cs.conf.edited \
+ -u exchange-account-2 \
+ -L WARNING \
+ -n 1 \
+ -r $HOW_MANY_WITHDRAWALS
+}
+
+export -f prepare_and_run
+/usr/bin/time -o /benchmark-wall-clock-time.txt --format=%e bash -c "prepare_and_run"
+
+NEXUS_PID=$(cat /libeufin-nexus.pid)
+SANDBOX_PID=$(cat /libeufin-sandbox.pid)
+
+if test -z "$NEXUS_PID"; then
+ echo Could not find Nexus PID, failing.
+ exit 1
+fi
+
+if test -z "$SANDBOX_PID"; then
+ echo Could not find Sandbox PID, failing.
+ exit 1
+fi
+
+# Convert the wall clock time to milliseconds, to make
+# it compatible with the format as GREPped through Postgres logs.
+BENCHMARK_TOT_MS=$(awk -F. '{t=($1 * 1000 + $2 * 10)} END {print t}' /benchmark-wall-clock-time.txt)
+
+NEXUS_LONGEST_DB_SESSION_MS=$(grep disconnection < /var/log/postgresql/postgresql-15-main.log | grep $NEXUS_PID | grep -o "session time:.*$" | grep -o [0-9]:[0-9][0-9]:[0-9][0-9]\.[0-9][0-9][0-9] | convert_pg_time_to_ms | sort -n | tail -n 1)
+
+SANDBOX_LONGEST_DB_SESSION_MS=$(grep disconnection < /var/log/postgresql/postgresql-15-main.log | grep $SANDBOX_PID | grep -o "session time:.*$" | grep -o [0-9]:[0-9][0-9]:[0-9][0-9]\.[0-9][0-9][0-9] | convert_pg_time_to_ms | sort -n | tail -n 1)
+
+if test $NEXUS_LONGEST_DB_SESSION_MS -gt $BENCHMARK_TOT_MS; then
+ echo Nexus had a DB session longer than the benchmark itself, failing.
+ exit 1
+fi
+
+if test $SANDBOX_LONGEST_DB_SESSION_MS -gt $BENCHMARK_TOT_MS; then
+ echo Sandbox had a DB session longer than the benchmark itself, failing.
+ exit 1
+fi
+
+NEXUS_TIME_PORTION=$(echo "($NEXUS_LONGEST_DB_SESSION_MS / $BENCHMARK_TOT_MS) * 100" | bc -lq | sed 's/^\./0./')
+SANDBOX_TIME_PORTION=$(echo "($SANDBOX_LONGEST_DB_SESSION_MS / $BENCHMARK_TOT_MS) * 100" | bc -lq | sed 's/^\./0./')
+
+# Here: the further from 1 the better.
+echo Nexus longest DB session is $NEXUS_TIME_PORTION percent of the total benchmark time.
+echo Sandbox longest DB session is $SANDBOX_TIME_PORTION percent of the total benchmark time.
+
+# Now show the total space occupied by the database.
+# Although that's a _total_ estimate, it'll anyhow show
+# that _also_ libeufin has reasonable data usage.
+TOTAL_DB_SPACE=$(echo "SELECT pg_size_pretty(pg_database_size('talercheck'))" | psql -d talercheck | grep "^ [0-9]" | tr -d "[:blank:]")
+echo "The total space occupied by the database for $HOW_MANY_WITHDRAWALS withdrawals is $TOTAL_DB_SPACE"
diff --git a/packaging/.gitignore b/packaging/.gitignore
new file mode 100644
index 0000000..7b3eef0
--- /dev/null
+++ b/packaging/.gitignore
@@ -0,0 +1,2 @@
+debian-docker/dist
+ubuntu-mantic-docker/dist
diff --git a/packaging/debian-docker/Dockerfile b/packaging/debian-docker/Dockerfile
new file mode 100644
index 0000000..0d6c739
--- /dev/null
+++ b/packaging/debian-docker/Dockerfile
@@ -0,0 +1,56 @@
+FROM debian:bookworm
+# This file is in the public domain.
+#
+# Docker image to build Debian packages of
+# GNUnet, GNU Taler and GNU Anastasis.
+#
+# We need 'bookworm' for a reasonably recent NodeJS version.
+
+ARG DEBIAN_FRONTEND=noninteractive
+
+# Install dependencies
+RUN apt-get update
+RUN apt-get -y upgrade
+RUN apt-get update
+RUN apt-get -y dist-upgrade
+RUN apt-get update
+RUN apt-get -y install build-essential zip jq python3 python3-pip autoconf automake gcc make libtool libltdl-dev libmicrohttpd-dev libpq-dev libsqlite3-dev libunistring-dev libqrencode-dev libgcrypt-dev libsodium-dev libargon2-dev libjansson-dev recutils libgmp-dev texinfo pkgconf zlib1g-dev libopus-dev libextractor-dev libnss3-dev libcurl4-gnutls-dev autopoint linux-libc-dev python3-sphinx python3-sphinxcontrib.httpdomain policykit-1 libzbar-dev default-libmysqlclient-dev mandoc libpulse-dev libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev libbluetooth-dev iptables miniupnpc libpng-dev python3-jinja2 doxygen libjose-dev iproute2 sudo python3-sphinx-rtd-theme wget zile libogg-dev gettext net-tools po-debconf debhelper-compat dbconfig-pgsql nginx libgtk-3-dev libgladeui-dev libmagic-dev policykit-1 libnfc-dev python3-click python3-requests apt-utils nodejs npm openjdk-17-jdk-headless default-jre-headless pandoc groff
+RUN npm install -g node pnpm
+
+ADD "https://www.random.org/cgi-bin/randbyte?nbytes=10&format=h" skipcache
+
+COPY libeufin-build.sh /root/
+RUN chmod +x /root/libeufin-build.sh
+RUN /root/libeufin-build.sh master
+
+COPY gnunet-build.sh /root/
+RUN chmod +x /root/gnunet-build.sh
+RUN /root/gnunet-build.sh master
+
+COPY gnunet-gtk-build.sh /root/
+RUN chmod +x /root/gnunet-gtk-build.sh
+RUN /root/gnunet-gtk-build.sh master
+
+COPY libeufin-build.sh /root/
+RUN chmod +x /root/libeufin-build.sh
+RUN /root/libeufin-build.sh master
+
+COPY exchange-build.sh /root/
+RUN chmod +x /root/exchange-build.sh
+RUN /root/exchange-build.sh master
+
+COPY merchant-build.sh /root/
+RUN chmod +x /root/merchant-build.sh
+RUN /root/merchant-build.sh master
+
+COPY sync-build.sh /root/
+RUN chmod +x /root/sync-build.sh
+RUN /root/sync-build.sh master
+
+COPY anastasis-build.sh /root/
+RUN chmod +x /root/anastasis-build.sh
+RUN /root/anastasis-build.sh master
+
+COPY wallet-build.sh /root/
+RUN chmod +x /root/wallet-build.sh
+RUN /root/wallet-build.sh master
diff --git a/packaging/debian-docker/README b/packaging/debian-docker/README
new file mode 100644
index 0000000..0a092e5
--- /dev/null
+++ b/packaging/debian-docker/README
@@ -0,0 +1,16 @@
+Scripts to build Debian packages from source.
+
+TODO:
+- break up into separate build for GNUnet/Taler/Anastasis
+ => might be good to not run the entire pipeline only
+ because something changes in anastasis/wallet, as
+ that's not a good reason to re-build GNUnet ;-).
+- integrate with buildbot (integrationtests?)
+ to build-on-tag / build nightly and upload resulting
+ Deb packages to reprepro (fully automated for nightly,
+ but with explicit password-protected signature for tagged builds)
+- support other CPU architectures (by running in VM that emulates
+ other CPU architectures)
+- eventually: try to improve scripts to support older
+ Debian versions
+
diff --git a/packaging/debian-docker/anastasis-build.sh b/packaging/debian-docker/anastasis-build.sh
new file mode 100644
index 0000000..24643e1
--- /dev/null
+++ b/packaging/debian-docker/anastasis-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/anastasis
+cd /build/anastasis
+
+# Fetch source
+rm -rf *
+
+for n in anastasis anastasis-gtk
+do
+ git clone git://git.taler.net/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/debian-docker/exchange-build.sh b/packaging/debian-docker/exchange-build.sh
new file mode 100644
index 0000000..b4a0115
--- /dev/null
+++ b/packaging/debian-docker/exchange-build.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+pip3 install --break-system-packages htmlark
+
+git clone git://git.taler.net/exchange
+cd exchange
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/debian-docker/gnunet-build.sh b/packaging/debian-docker/gnunet-build.sh
new file mode 100644
index 0000000..614c5e6
--- /dev/null
+++ b/packaging/debian-docker/gnunet-build.sh
@@ -0,0 +1,29 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/gnunet
+cd /build/gnunet
+
+# Fetch source
+rm -rf *
+
+pip3 install --break-system-packages sphinx-book-theme sphinx-multiversion
+
+for n in gnunet
+do
+ git clone git://git.gnunet.org/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar cvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/debian-docker/gnunet-gtk-build.sh b/packaging/debian-docker/gnunet-gtk-build.sh
new file mode 100644
index 0000000..4414c3f
--- /dev/null
+++ b/packaging/debian-docker/gnunet-gtk-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/gnunet
+cd /build/gnunet
+
+# Fetch source
+rm -rf *
+
+for n in gnunet-gtk
+do
+ git clone git://git.gnunet.org/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/debian-docker/libeufin-build.sh b/packaging/debian-docker/libeufin-build.sh
new file mode 100644
index 0000000..eb440f2
--- /dev/null
+++ b/packaging/debian-docker/libeufin-build.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/libeufin
+cd /build/libeufin
+
+# Fetch source
+rm -rf *
+git clone git://git.taler.net/libeufin
+cd libeufin
+git checkout $1
+./bootstrap
+export JAVA_HOME=/usr
+./configure --prefix=/usr
+make install
+make deb
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/debian-docker/mdb-build.sh b/packaging/debian-docker/mdb-build.sh
new file mode 100644
index 0000000..d097240
--- /dev/null
+++ b/packaging/debian-docker/mdb-build.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+git clone git://git.taler.net/taler-mdb
+cd taler-mdb
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/debian-docker/merchant-build.sh b/packaging/debian-docker/merchant-build.sh
new file mode 100644
index 0000000..24f5f9d
--- /dev/null
+++ b/packaging/debian-docker/merchant-build.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+# pip3 install --break-system-packages htmlark
+
+git clone git://git.taler.net/merchant
+cd merchant
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/debian-docker/run.sh b/packaging/debian-docker/run.sh
new file mode 100755
index 0000000..2689a33
--- /dev/null
+++ b/packaging/debian-docker/run.sh
@@ -0,0 +1,25 @@
+#!/bin/sh
+set -eu
+CONTAINER=$USER/debbuilder:latest
+
+# If we don't specify the ulimit here, fakeroot is extremely slow.
+# See https://github.com/moby/moby/issues/45436
+docker build --ulimit "nofile=1024:1048576" -t $CONTAINER .
+
+rm -rf dist
+mkdir dist
+docker run --read-only $CONTAINER sleep 100 &
+sleep 1
+docker container ls
+ID=$(docker container ls | grep $CONTAINER | head -n1 | awk '{print $1}')
+echo "Extracting files from $ID"
+docker cp "$ID:/build/packages.tgz" .
+echo "Stopping $CONTAINER ($ID)"
+docker container stop $ID
+echo "Removing $CONTAINER"
+docker container rm $ID
+docker image rm $USER/debbuilder
+cd dist
+tar xvf ../packages.tgz
+cd ..
+rm packages.tgz
diff --git a/packaging/debian-docker/sync-build.sh b/packaging/debian-docker/sync-build.sh
new file mode 100644
index 0000000..2f5d9df
--- /dev/null
+++ b/packaging/debian-docker/sync-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+rm -rf /build/sync
+
+mkdir -p /build/sync
+cd /build/sync
+
+# Fetch source
+for n in sync
+do
+ git clone git://git.taler.net/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/debian-docker/wallet-build.sh b/packaging/debian-docker/wallet-build.sh
new file mode 100644
index 0000000..6d807be
--- /dev/null
+++ b/packaging/debian-docker/wallet-build.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/wallet
+cd /build/wallet
+
+# Fetch source
+rm -rf *
+git clone git://git.taler.net/wallet-core
+
+cd wallet-core
+git checkout $1
+./bootstrap
+
+cd packages/taler-wallet-cli
+
+dpkg-buildpackage -rfakeroot -b -uc -us
+
+cd ../taler-harness
+dpkg-buildpackage -rfakeroot -b -uc -us
+
+cd ../
+
+tar uvf ../../../packages.tgz *.deb
diff --git a/packaging/ng/.gitignore b/packaging/ng/.gitignore
new file mode 100644
index 0000000..23053de
--- /dev/null
+++ b/packaging/ng/.gitignore
@@ -0,0 +1 @@
+packages/
diff --git a/packaging/ng/Dockerfile.debian-bookworm b/packaging/ng/Dockerfile.debian-bookworm
new file mode 100644
index 0000000..b6aade0
--- /dev/null
+++ b/packaging/ng/Dockerfile.debian-bookworm
@@ -0,0 +1,37 @@
+FROM debian:bookworm
+# This file is in the public domain.
+#
+# Docker image to build Debian packages of
+# GNUnet, GNU Taler and GNU Anastasis.
+#
+ARG DEBIAN_FRONTEND=noninteractive
+
+# Install dependencies
+RUN apt-get update
+RUN apt-get -y upgrade
+RUN apt-get -y install build-essential zip jq python3 python3-pip nodejs npm
+RUN apt-get -y install autoconf automake gcc make libtool libltdl-dev libmicrohttpd-dev libpq-dev libsqlite3-dev libunistring-dev libqrencode-dev libgcrypt-dev libsodium-dev libargon2-dev libjansson-dev recutils libgmp-dev texinfo pkgconf zlib1g-dev libopus-dev libextractor-dev libnss3-dev libcurl4-gnutls-dev autopoint
+RUN apt-get -y install libzbar-dev libmariadb-dev-compat libmariadb-dev mandoc libpulse-dev libgstreamer1.0-dev libbluetooth-dev iptables miniupnpc libpng-dev
+RUN apt-get -y install python3-jinja2 doxygen libjose-dev iproute2 sudo
+RUN apt-get -y install wget zile
+RUN apt-get -y install libogg-dev gettext net-tools po-debconf debhelper-compat dbconfig-pgsql nginx
+RUN apt-get -y install libgtk-3-dev libgladeui-dev libmagic-dev policykit-1
+RUN apt-get -y install dbconfig-no-thanks
+RUN apt-get -y install devscripts equivs
+# For libeufin:
+RUN apt-get -y install python3-click python3-requests python3
+
+RUN apt-get -y install \
+ openjdk-17-jre-headless \
+ openjdk-17-jdk-headless \
+ default-jre-headless \
+ ;
+
+
+RUN pip install sphinx_rtd_theme --break-system-packages
+#RUN npm install -g npm
+RUN npm install -g pnpm
+
+RUN apt-get update
+RUN apt-get -y upgrade
+RUN apt-get -y dist-upgrade
diff --git a/packaging/ng/Dockerfile.ubuntu-kinetic b/packaging/ng/Dockerfile.ubuntu-kinetic
new file mode 100644
index 0000000..0a90fd7
--- /dev/null
+++ b/packaging/ng/Dockerfile.ubuntu-kinetic
@@ -0,0 +1,32 @@
+FROM ubuntu:kinetic
+# This file is in the public domain.
+#
+# Docker image to build Ubuntu packages of
+# GNUnet, GNU Taler and GNU Anastasis.
+#
+# We need 'kinetic' for a reasonably recent NodeJS version.
+
+ARG DEBIAN_FRONTEND=noninteractive
+
+# Install dependencies
+RUN apt-get update
+RUN apt-get -y upgrade
+RUN apt-get -y install build-essential zip jq python3 python3-pip nodejs npm
+RUN apt-get -y install autoconf automake gcc make libtool libltdl-dev libmicrohttpd-dev libpq-dev libsqlite3-dev libunistring-dev libqrencode-dev libgcrypt-dev libsodium-dev libargon2-dev libjansson-dev recutils libgmp-dev texinfo pkgconf zlib1g-dev libopus-dev libextractor-dev libnss3-dev libcurl4-gnutls-dev autopoint
+RUN apt-get -y install libzbar-dev libmysqlclient-dev mandoc libpulse-dev libgstreamer1.0-dev libgstreamer-plugins-good1.0-dev libbluetooth-dev iptables miniupnpc libpng-dev
+RUN apt-get -y install python3-jinja2 doxygen libjose-dev iproute2 sudo
+RUN apt-get -y install wget zile
+RUN apt-get -y install libogg-dev gettext net-tools po-debconf debhelper-compat dbconfig-pgsql nginx
+RUN apt-get -y install libgtk-3-dev libgladeui-dev libmagic-dev policykit-1
+RUN apt-get -y install dbconfig-no-thanks
+RUN apt-get -y install devscripts equivs
+# For libeufin:
+RUN apt-get -y install openjdk-17-jdk python3-click python3-requests python3
+
+RUN pip install sphinx_rtd_theme
+RUN npm install -g npm
+RUN /usr/local/bin/npm install -g npm pnpm node
+
+RUN apt-get update
+RUN apt-get -y upgrade
+RUN apt-get -y dist-upgrade
diff --git a/packaging/ng/README.md b/packaging/ng/README.md
new file mode 100644
index 0000000..f51745f
--- /dev/null
+++ b/packaging/ng/README.md
@@ -0,0 +1,22 @@
+# taler-packaging ng
+
+This directory contains the improved implementation of container-based
+packaging for GNU Taler and associated packages.
+
+The major improvement is that a component can be built *without* having to
+rebuild every single package.
+
+Instead, dependencies are only built on-demand. Each package is built in a
+fresh environment, with build dependencies pulled in via apt. Previously built
+packages are available via a file-based apt source.
+
+The packaging logic is also the same for Debian and Ubuntu.
+
+
+## Structure
+
+* `packages/$DISTRO-$DISTRO_VERNAME`: Output folder for debian packages.
+Also contains a `Packages.gz` metadata file generated by `dpkg-scanpackages`
+so that this folder can be directly consumed as a trusted package source.
+
+* `buildscripts/*`: Build scripts used during the package build steps.
diff --git a/packaging/ng/build-debian-bookworm.sh b/packaging/ng/build-debian-bookworm.sh
new file mode 100755
index 0000000..c259fe7
--- /dev/null
+++ b/packaging/ng/build-debian-bookworm.sh
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+
+# Build Debian packages.
+
+set -eu
+
+DISTRO=debian
+DISTRO_VERNAME=bookworm
+LABEL=$DISTRO-$DISTRO_VERNAME
+
+IMAGE_TAG=taler-packaging-$LABEL:latest
+DOCKERFILE=Dockerfile.$LABEL
+PKGDIR=packages/$LABEL
+
+echo "Building $IMAGE_TAG from $DOCKERFILE"
+
+# Build the base image. Usually fast because it's cached.
+docker build -t $IMAGE_TAG -f $DOCKERFILE .
+
+mkdir -p $PKGDIR
+
+# echo docker run -it --mount type=bind,source="$(pwd)"/buildscripts,target=/buildscripts,readonly --mount type=bind,source="$(pwd)"/$PKGDIR,target=/pkgdir $IMAGE_TAG /bin/bash
+#docker run -it --entrypoint=/bin/bash --mount type=bind,source="$(pwd)"/buildscripts,target=/buildscripts,readonly --mount type=bind,source="$(pwd)"/$PKGDIR,target=/pkgdir $IMAGE_TAG /buildscripts/build-gnunet.sh
+docker run -it --entrypoint=/bin/bash --mount type=bind,source="$(pwd)"/buildscripts,target=/buildscripts,readonly --mount type=bind,source="$(pwd)"/$PKGDIR,target=/pkgdir $IMAGE_TAG /buildscripts/libeufin-build.sh master
+docker run -it --entrypoint=/bin/bash --mount type=bind,source="$(pwd)"/buildscripts,target=/buildscripts,readonly --mount type=bind,source="$(pwd)"/$PKGDIR,target=/pkgdir $IMAGE_TAG /buildscripts/wallet-build.sh master
diff --git a/packaging/ng/build-ubuntu-kinetic.sh b/packaging/ng/build-ubuntu-kinetic.sh
new file mode 100755
index 0000000..a27bcac
--- /dev/null
+++ b/packaging/ng/build-ubuntu-kinetic.sh
@@ -0,0 +1,23 @@
+#!/usr/bin/env bash
+
+# Build Ubuntu packages.
+
+set -eu
+
+DISTRO=ubuntu
+DISTRO_VERNAME=kinetic
+LABEL=$DISTRO-$DISTRO_VERNAME
+
+IMAGE_TAG=taler-packaging-$LABEL:latest
+DOCKERFILE=Dockerfile.$LABEL
+PKGDIR=packages/$LABEL
+
+echo "Building $IMAGE_TAG from $DOCKERFILE"
+
+# Build the base image. Usually fast because it's cached.
+docker build -t $IMAGE_TAG -f $DOCKERFILE .
+
+mkdir -p $PKGDIR
+
+# echo docker run -it --mount type=bind,source="$(pwd)"/buildscripts,target=/buildscripts,readonly --mount type=bind,source="$(pwd)"/$PKGDIR,target=/pkgdir $IMAGE_TAG /bin/bash
+#docker run -it --entrypoint=/bin/bash --mount type=bind,source="$(pwd)"/buildscripts,target=/buildscripts,readonly --mount type=bind,source="$(pwd)"/$PKGDIR,target=/pkgdir $IMAGE_TAG /buildscripts/build-gnunet.sh
diff --git a/packaging/ng/buildscripts/anastasis-build.sh b/packaging/ng/buildscripts/anastasis-build.sh
new file mode 100644
index 0000000..24643e1
--- /dev/null
+++ b/packaging/ng/buildscripts/anastasis-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/anastasis
+cd /build/anastasis
+
+# Fetch source
+rm -rf *
+
+for n in anastasis anastasis-gtk
+do
+ git clone git://git.taler.net/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ng/buildscripts/build-gnunet-gtk.sh b/packaging/ng/buildscripts/build-gnunet-gtk.sh
new file mode 100644
index 0000000..924c638
--- /dev/null
+++ b/packaging/ng/buildscripts/build-gnunet-gtk.sh
@@ -0,0 +1,37 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+dpkg-scanpackages /pkgdir /dev/null | gzip -9c > /pkgdir/Packages.gz
+
+echo "deb [trusted=yes] file:/pkgdir ./" >/etc/apt/sources.list.d/taler-packaging-local.list
+
+rm -rf /build/gnunet
+mkdir -p /build/gnunet
+cd /build/gnunet
+
+TAG=${1:-master}
+
+for n in gnunet gnunet-gtk
+do
+ git clone --depth=1 git://git.gnunet.org/$n
+ cd $n
+ git checkout $TAG
+
+ # Install build-time dependencies.
+ mk-build-deps --install --tool='apt-get -o Debug::pkgProblemResolver=yes --no-install-recommends --yes' debian/control
+
+ # We do a sparse checkout, so we need to hint
+ # the version to the build system.
+ dpkg-parsechangelog -S Version > .version
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+done
+
+ls /build/gnunet
+
+cp /build/gnunet/*.deb /pkgdir/
diff --git a/packaging/ng/buildscripts/build-gnunet.sh b/packaging/ng/buildscripts/build-gnunet.sh
new file mode 100644
index 0000000..70fb1a8
--- /dev/null
+++ b/packaging/ng/buildscripts/build-gnunet.sh
@@ -0,0 +1,41 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+cd /pkgdir
+dpkg-scanpackages . /dev/null | gzip -9c > /pkgdir/Packages.gz
+echo "deb [trusted=yes] file:/pkgdir ./" >/etc/apt/sources.list.d/taler-packaging-local.list
+apt-get update
+
+
+mkdir -p /build
+cd /build
+
+TAG=${1:-master}
+
+git clone --depth=1 git://git.gnunet.org/gnunet
+cd gnunet
+git checkout $TAG
+
+# Get current version from debian/control file.
+DEB_VERSION=$(dpkg-parsechangelog -S Version)
+
+apt-cache show gnunet | grep "Version: $DEB_VERSION" >/dev/null && found=true || found=false
+if [ $found = true ]; then
+ echo "gnunet version $DEB_VERSION already built, skipping"
+ exit 0
+fi
+
+# Install build-time dependencies.
+mk-build-deps --install --tool='apt-get -o Debug::pkgProblemResolver=yes --no-install-recommends --yes' debian/control
+
+# We do a sparse checkout, so we need to hint
+# the version to the build system.
+echo $DEB_VERSION > .version
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+
+cp /build/*.deb /pkgdir/
diff --git a/packaging/ng/buildscripts/exchange-build.sh b/packaging/ng/buildscripts/exchange-build.sh
new file mode 100644
index 0000000..36b9ede
--- /dev/null
+++ b/packaging/ng/buildscripts/exchange-build.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+pip3 install htmlark
+
+git clone git://git.taler.net/exchange
+cd exchange
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ng/buildscripts/libeufin-build.sh b/packaging/ng/buildscripts/libeufin-build.sh
new file mode 100644
index 0000000..f355add
--- /dev/null
+++ b/packaging/ng/buildscripts/libeufin-build.sh
@@ -0,0 +1,23 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build
+cd /build
+
+# Fetch source
+rm -rf *
+git clone git://git.taler.net/libeufin
+cd libeufin
+git checkout $1
+./bootstrap
+./configure --prefix=/usr/local
+make deb
+
+cp /build/*.deb /pkgdir/
+
+dpkg -i /build/*.deb
+
+
diff --git a/packaging/ng/buildscripts/merchant-build.sh b/packaging/ng/buildscripts/merchant-build.sh
new file mode 100644
index 0000000..5625198
--- /dev/null
+++ b/packaging/ng/buildscripts/merchant-build.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+pip3 install htmlark
+
+git clone git://git.taler.net/merchant
+cd merchant
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ng/buildscripts/sync-build.sh b/packaging/ng/buildscripts/sync-build.sh
new file mode 100644
index 0000000..e38a0ee
--- /dev/null
+++ b/packaging/ng/buildscripts/sync-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/sync
+cd /build/sync
+
+# Fetch source
+rm -rf *
+
+for n in sync
+do
+ git clone git://git.taler.net/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ng/buildscripts/wallet-build.sh b/packaging/ng/buildscripts/wallet-build.sh
new file mode 100644
index 0000000..029d90a
--- /dev/null
+++ b/packaging/ng/buildscripts/wallet-build.sh
@@ -0,0 +1,31 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build
+cd /build
+
+# Fetch source
+rm -rf *
+git clone git://git.taler.net/wallet-core
+
+cd wallet-core
+git checkout $1
+./bootstrap
+./configure --prefix=/usr
+make
+cd packages/taler-wallet-cli
+./configure --prefix=/usr
+make install
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ../taler-harness
+./configure --prefix=/usr
+make install
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+
+
+cp /build/wallet-core/packages/*.deb /pkgdir/
diff --git a/packaging/ubuntu-docker/.gitignore b/packaging/ubuntu-docker/.gitignore
new file mode 100644
index 0000000..849ddff
--- /dev/null
+++ b/packaging/ubuntu-docker/.gitignore
@@ -0,0 +1 @@
+dist/
diff --git a/packaging/ubuntu-docker/Dockerfile b/packaging/ubuntu-docker/Dockerfile
new file mode 100644
index 0000000..f6e39c0
--- /dev/null
+++ b/packaging/ubuntu-docker/Dockerfile
@@ -0,0 +1,57 @@
+FROM ubuntu:lunar
+# This file is in the public domain.
+#
+# Docker image to build Ubuntu packages of
+# GNUnet, GNU Taler and GNU Anastasis.
+#
+# We need 'lunar' for a reasonably recent NodeJS version.
+
+ARG DEBIAN_FRONTEND=noninteractive
+
+# Install dependencies
+RUN apt-get update
+RUN apt-get -y upgrade
+RUN apt-get -y install build-essential zip jq python3 python3-pip nodejs npm autoconf automake gcc make libtool libltdl-dev libmicrohttpd-dev libpq-dev libsqlite3-dev libunistring-dev libqrencode-dev libgcrypt-dev libsodium-dev libargon2-dev libjansson-dev recutils libgmp-dev texinfo pkgconf zlib1g-dev libopus-dev libextractor-dev libnss3-dev libcurl4-gnutls-dev autopoint libzbar-dev libmysqlclient-dev mandoc libpulse-dev libgstreamer1.0-dev libgstreamer-plugins-good1.0-dev libbluetooth-dev iptables miniupnpc libpng-dev python3-jinja2 doxygen libjose-dev iproute2 sudo wget zile libogg-dev gettext net-tools po-debconf debhelper-compat dbconfig-pgsql nginx libgtk-3-dev libgladeui-dev libmagic-dev policykit-1 libnfc-dev python3-click python3-requests python3-sphinx-rtd-theme pandoc groff
+
+# For libeufin:
+RUN apt-get -y install openjdk-17-jdk default-jre-headless
+# For wallet-core:
+RUN npm install -g node pnpm
+
+RUN apt-get update
+RUN apt-get -y upgrade
+RUN apt-get -y dist-upgrade
+
+ADD "https://www.random.org/cgi-bin/randbyte?nbytes=10&format=h" skipcache
+
+COPY gnunet-build.sh /root/
+RUN chmod +x /root/gnunet-build.sh
+RUN /root/gnunet-build.sh master
+
+COPY gnunet-gtk-build.sh /root/
+RUN chmod +x /root/gnunet-gtk-build.sh
+RUN /root/gnunet-gtk-build.sh master
+
+COPY exchange-build.sh /root/
+RUN chmod +x /root/exchange-build.sh
+RUN /root/exchange-build.sh master
+
+COPY merchant-build.sh /root/
+RUN chmod +x /root/merchant-build.sh
+RUN /root/merchant-build.sh master
+
+COPY sync-build.sh /root/
+RUN chmod +x /root/sync-build.sh
+RUN /root/sync-build.sh master
+
+COPY anastasis-build.sh /root/
+RUN chmod +x /root/anastasis-build.sh
+RUN /root/anastasis-build.sh master
+
+COPY libeufin-build.sh /root/
+RUN chmod +x /root/libeufin-build.sh
+RUN /root/libeufin-build.sh master
+
+COPY wallet-build.sh /root/
+RUN chmod +x /root/wallet-build.sh
+RUN /root/wallet-build.sh master
diff --git a/packaging/ubuntu-docker/README b/packaging/ubuntu-docker/README
new file mode 100644
index 0000000..f4a4824
--- /dev/null
+++ b/packaging/ubuntu-docker/README
@@ -0,0 +1,19 @@
+Scripts to build Ubuntu packages from source.
+
+
+TODO:
+- check build for warnings/missing dependencies
+  (especially GNUnet!)
+- break up into separate build for GNUnet/Taler/Anastasis
+ => might be good to not run the entire pipeline only
+ because something changes in anastasis/wallet, as
+ that's not a good reason to re-build GNUnet ;-).
+- integrate with buildbot (integrationtests?)
+ to build-on-tag / build nightly and upload resulting
+ Deb packages to reprepro (fully automated for nightly,
+ but with explicit password-protected signature for tagged builds)
+- support other CPU architectures (by running in VM that emulates
+ other CPU architectures)
+- eventually: try to improve scripts to support older
+  Ubuntu versions
+
diff --git a/packaging/ubuntu-docker/anastasis-build.sh b/packaging/ubuntu-docker/anastasis-build.sh
new file mode 100644
index 0000000..24643e1
--- /dev/null
+++ b/packaging/ubuntu-docker/anastasis-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/anastasis
+cd /build/anastasis
+
+# Fetch source
+rm -rf *
+
+for n in anastasis anastasis-gtk
+do
+ git clone git://git.taler.net/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-docker/exchange-build.sh b/packaging/ubuntu-docker/exchange-build.sh
new file mode 100644
index 0000000..b4a0115
--- /dev/null
+++ b/packaging/ubuntu-docker/exchange-build.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+pip3 install --break-system-packages htmlark
+
+git clone git://git.taler.net/exchange
+cd exchange
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-docker/gnunet-build.sh b/packaging/ubuntu-docker/gnunet-build.sh
new file mode 100644
index 0000000..614c5e6
--- /dev/null
+++ b/packaging/ubuntu-docker/gnunet-build.sh
@@ -0,0 +1,29 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/gnunet
+cd /build/gnunet
+
+# Fetch source
+rm -rf *
+
+pip3 install --break-system-packages sphinx-book-theme sphinx-multiversion
+
+for n in gnunet
+do
+ git clone git://git.gnunet.org/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar cvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-docker/gnunet-gtk-build.sh b/packaging/ubuntu-docker/gnunet-gtk-build.sh
new file mode 100644
index 0000000..4414c3f
--- /dev/null
+++ b/packaging/ubuntu-docker/gnunet-gtk-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/gnunet
+cd /build/gnunet
+
+# Fetch source
+rm -rf *
+
+for n in gnunet-gtk
+do
+ git clone git://git.gnunet.org/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-docker/libeufin-build.sh b/packaging/ubuntu-docker/libeufin-build.sh
new file mode 100644
index 0000000..75713ab
--- /dev/null
+++ b/packaging/ubuntu-docker/libeufin-build.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/libeufin
+cd /build/libeufin
+
+# Fetch source
+rm -rf *
+git clone git://git.taler.net/libeufin
+cd libeufin
+git checkout $1
+./bootstrap
+export JAVA_HOME=/usr
+./configure --prefix=/usr
+make install
+make deb
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-docker/mdb-build.sh b/packaging/ubuntu-docker/mdb-build.sh
new file mode 100644
index 0000000..d097240
--- /dev/null
+++ b/packaging/ubuntu-docker/mdb-build.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+git clone git://git.taler.net/taler-mdb
+cd taler-mdb
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-docker/merchant-build.sh b/packaging/ubuntu-docker/merchant-build.sh
new file mode 100644
index 0000000..24f5f9d
--- /dev/null
+++ b/packaging/ubuntu-docker/merchant-build.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+# pip3 install --break-system-packages htmlark
+
+git clone git://git.taler.net/merchant
+cd merchant
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-docker/run.sh b/packaging/ubuntu-docker/run.sh
new file mode 100755
index 0000000..a6df3f0
--- /dev/null
+++ b/packaging/ubuntu-docker/run.sh
@@ -0,0 +1,21 @@
+#!/bin/sh
+set -eu
+CONTAINER=$USER/debbuilder:latest
+docker build -t $CONTAINER .
+rm -rf dist
+mkdir dist
+docker run --read-only $CONTAINER sleep 100 &
+sleep 1
+docker container ls
+ID=`docker container ls | grep $CONTAINER | head -n1 | awk '{print $1}'`
+echo "Extracting files from $ID"
+docker cp "$ID:/build/packages.tgz" .
+echo "Stopping $CONTAINER ($ID)"
+docker container stop $ID
+echo "Removing $CONTAINER"
+docker container rm $ID
+docker image rm $USER/debbuilder
+cd dist
+tar xvf ../packages.tgz
+cd ..
+rm packages.tgz
diff --git a/packaging/ubuntu-docker/sync-build.sh b/packaging/ubuntu-docker/sync-build.sh
new file mode 100644
index 0000000..e38a0ee
--- /dev/null
+++ b/packaging/ubuntu-docker/sync-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/sync
+cd /build/sync
+
+# Fetch source
+rm -rf *
+
+for n in sync
+do
+ git clone git://git.taler.net/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-docker/taler.list b/packaging/ubuntu-docker/taler.list
new file mode 100644
index 0000000..29cea42
--- /dev/null
+++ b/packaging/ubuntu-docker/taler.list
@@ -0,0 +1 @@
+deb https://deb.taler.net/apt/ubuntu jammy main
diff --git a/packaging/ubuntu-docker/wallet-build.sh b/packaging/ubuntu-docker/wallet-build.sh
new file mode 100644
index 0000000..6d807be
--- /dev/null
+++ b/packaging/ubuntu-docker/wallet-build.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/wallet
+cd /build/wallet
+
+# Fetch source
+rm -rf *
+git clone git://git.taler.net/wallet-core
+
+cd wallet-core
+git checkout $1
+./bootstrap
+
+cd packages/taler-wallet-cli
+
+dpkg-buildpackage -rfakeroot -b -uc -us
+
+cd ../taler-harness
+dpkg-buildpackage -rfakeroot -b -uc -us
+
+cd ../
+
+tar uvf ../../../packages.tgz *.deb
diff --git a/packaging/ubuntu-mantic-docker/Dockerfile b/packaging/ubuntu-mantic-docker/Dockerfile
new file mode 100644
index 0000000..6967b42
--- /dev/null
+++ b/packaging/ubuntu-mantic-docker/Dockerfile
@@ -0,0 +1,54 @@
+FROM ubuntu:mantic
+# This file is in the public domain.
+#
+# Docker image to build Ubuntu packages of
+# GNUnet, GNU Taler and GNU Anastasis.
+#
+# We need 'kinetic' for a reasonably recent NodeJS version.
+
+ARG DEBIAN_FRONTEND=noninteractive
+
+# Install dependencies
+RUN apt-get update
+RUN apt-get -y upgrade
+RUN apt-get update
+RUN apt-get -y dist-upgrade
+RUN apt-get update
+RUN apt-get -y install build-essential zip jq python3 python3-pip nodejs npm autoconf automake gcc-12 make libtool libltdl-dev libmicrohttpd-dev libpq-dev libsqlite3-dev libunistring-dev libqrencode-dev libgcrypt-dev libsodium-dev libargon2-dev libjansson-dev recutils libgmp-dev texinfo pkgconf zlib1g-dev libopus-dev libextractor-dev libnss3-dev libcurl4-gnutls-dev autopoint libzbar-dev libmysqlclient-dev mandoc libpulse-dev libgstreamer1.0-dev libgstreamer-plugins-good1.0-dev libbluetooth-dev iptables miniupnpc libpng-dev python3-jinja2 doxygen libjose-dev iproute2 sudo wget zile libogg-dev gettext net-tools po-debconf debhelper-compat dbconfig-pgsql nginx libgtk-3-dev libgladeui-dev libmagic-dev policykit-1 libnfc-dev python3-click python3-requests python3-sphinx-rtd-theme openjdk-17-jdk pandoc groff
+RUN npm install -g node pnpm
+
+RUN pip install --break-system-packages sphinx_multiversion
+
+# ADD "https://www.random.org/cgi-bin/randbyte?nbytes=10&format=h" skipcache
+COPY gnunet-build.sh /root/
+RUN chmod +x /root/gnunet-build.sh
+RUN /root/gnunet-build.sh master
+
+COPY gnunet-gtk-build.sh /root/
+RUN chmod +x /root/gnunet-gtk-build.sh
+RUN /root/gnunet-gtk-build.sh master
+
+COPY exchange-build.sh /root/
+RUN chmod +x /root/exchange-build.sh
+RUN /root/exchange-build.sh master
+
+COPY merchant-build.sh /root/
+RUN chmod +x /root/merchant-build.sh
+RUN /root/merchant-build.sh master
+
+COPY sync-build.sh /root/
+RUN chmod +x /root/sync-build.sh
+RUN /root/sync-build.sh master
+
+COPY anastasis-build.sh /root/
+RUN chmod +x /root/anastasis-build.sh
+RUN /root/anastasis-build.sh master
+
+# No wallet on Jammy
+COPY wallet-build.sh /root/
+RUN chmod +x /root/wallet-build.sh
+RUN /root/wallet-build.sh master
+
+COPY libeufin-build.sh /root/
+RUN chmod +x /root/libeufin-build.sh
+RUN /root/libeufin-build.sh master
diff --git a/packaging/ubuntu-mantic-docker/README b/packaging/ubuntu-mantic-docker/README
new file mode 100644
index 0000000..f4a4824
--- /dev/null
+++ b/packaging/ubuntu-mantic-docker/README
@@ -0,0 +1,19 @@
+Scripts to build Ubuntu packages from source.
+
+
+TODO:
+- check build for warnings/missing dependencies
+ (especially GNUnet!)
+- break up into separate build for GNUnet/Taler/Anastasis
+ => might be good to not run the entire pipeline only
+ because something changes in anastasis/wallet, as
+ that's not a good reason to re-build GNUnet ;-).
+- integrate with buildbot (integrationtests?)
+ to build-on-tag / build nightly and upload resulting
+ Deb packages to reprepro (fully automated for nightly,
+ but with explicit password-protected signature for tagged builds)
+- support other CPU architectures (by running in VM that emulates
+ other CPU architectures)
+- eventually: try to improve scripts to support older
+ Debian versions
+
diff --git a/packaging/ubuntu-mantic-docker/anastasis-build.sh b/packaging/ubuntu-mantic-docker/anastasis-build.sh
new file mode 100644
index 0000000..24643e1
--- /dev/null
+++ b/packaging/ubuntu-mantic-docker/anastasis-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/anastasis
+cd /build/anastasis
+
+# Fetch source
+rm -rf *
+
+for n in anastasis anastasis-gtk
+do
+ git clone git://git.taler.net/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-mantic-docker/exchange-build.sh b/packaging/ubuntu-mantic-docker/exchange-build.sh
new file mode 100644
index 0000000..a94a003
--- /dev/null
+++ b/packaging/ubuntu-mantic-docker/exchange-build.sh
@@ -0,0 +1,25 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+export CC=gcc-12
+# Fetch source
+rm -rf *
+
+pip3 install --break-system-packages htmlark
+
+git clone git://git.taler.net/exchange
+cd exchange
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-mantic-docker/gnunet-build.sh b/packaging/ubuntu-mantic-docker/gnunet-build.sh
new file mode 100644
index 0000000..614c5e6
--- /dev/null
+++ b/packaging/ubuntu-mantic-docker/gnunet-build.sh
@@ -0,0 +1,29 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/gnunet
+cd /build/gnunet
+
+# Fetch source
+rm -rf *
+
+pip3 install --break-system-packages sphinx-book-theme sphinx-multiversion
+
+for n in gnunet
+do
+ git clone git://git.gnunet.org/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar cvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-mantic-docker/gnunet-gtk-build.sh b/packaging/ubuntu-mantic-docker/gnunet-gtk-build.sh
new file mode 100644
index 0000000..4414c3f
--- /dev/null
+++ b/packaging/ubuntu-mantic-docker/gnunet-gtk-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/gnunet
+cd /build/gnunet
+
+# Fetch source
+rm -rf *
+
+for n in gnunet-gtk
+do
+ git clone git://git.gnunet.org/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-mantic-docker/libeufin-build.sh b/packaging/ubuntu-mantic-docker/libeufin-build.sh
new file mode 100644
index 0000000..7229221
--- /dev/null
+++ b/packaging/ubuntu-mantic-docker/libeufin-build.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/libeufin
+cd /build/libeufin
+
+# Fetch source
+rm -rf *
+git clone git://git.taler.net/libeufin
+cd libeufin
+git checkout $1
+./bootstrap
+./configure --prefix=/usr/local
+make deb
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-mantic-docker/mdb-build.sh b/packaging/ubuntu-mantic-docker/mdb-build.sh
new file mode 100644
index 0000000..d097240
--- /dev/null
+++ b/packaging/ubuntu-mantic-docker/mdb-build.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+git clone git://git.taler.net/taler-mdb
+cd taler-mdb
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-mantic-docker/merchant-build.sh b/packaging/ubuntu-mantic-docker/merchant-build.sh
new file mode 100644
index 0000000..24f5f9d
--- /dev/null
+++ b/packaging/ubuntu-mantic-docker/merchant-build.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+# pip3 install --break-system-packages htmlark
+
+git clone git://git.taler.net/merchant
+cd merchant
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-mantic-docker/run.sh b/packaging/ubuntu-mantic-docker/run.sh
new file mode 100755
index 0000000..a6df3f0
--- /dev/null
+++ b/packaging/ubuntu-mantic-docker/run.sh
@@ -0,0 +1,21 @@
+#!/bin/sh
+set -eu
+CONTAINER=$USER/debbuilder:latest
+docker build -t $CONTAINER .
+rm -rf dist
+mkdir dist
+docker run --read-only $CONTAINER sleep 100 &
+sleep 1
+docker container ls
+ID=`docker container ls | grep $CONTAINER | head -n1 | awk '{print $1}'`
+echo "Extracting files from $ID"
+docker cp "$ID:/build/packages.tgz" .
+echo "Stopping $CONTAINER ($ID)"
+docker container stop $ID
+echo "Removing $CONTAINER"
+docker container rm $ID
+docker image rm $USER/debbuilder
+cd dist
+tar xvf ../packages.tgz
+cd ..
+rm packages.tgz
diff --git a/packaging/ubuntu-mantic-docker/sync-build.sh b/packaging/ubuntu-mantic-docker/sync-build.sh
new file mode 100644
index 0000000..e38a0ee
--- /dev/null
+++ b/packaging/ubuntu-mantic-docker/sync-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/sync
+cd /build/sync
+
+# Fetch source
+rm -rf *
+
+for n in sync
+do
+ git clone git://git.taler.net/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-mantic-docker/taler.list b/packaging/ubuntu-mantic-docker/taler.list
new file mode 100644
index 0000000..29cea42
--- /dev/null
+++ b/packaging/ubuntu-mantic-docker/taler.list
@@ -0,0 +1 @@
+deb https://deb.taler.net/apt/ubuntu jammy main
diff --git a/packaging/ubuntu-mantic-docker/wallet-build.sh b/packaging/ubuntu-mantic-docker/wallet-build.sh
new file mode 100644
index 0000000..6d807be
--- /dev/null
+++ b/packaging/ubuntu-mantic-docker/wallet-build.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/wallet
+cd /build/wallet
+
+# Fetch source
+rm -rf *
+git clone git://git.taler.net/wallet-core
+
+cd wallet-core
+git checkout $1
+./bootstrap
+
+cd packages/taler-wallet-cli
+
+dpkg-buildpackage -rfakeroot -b -uc -us
+
+cd ../taler-harness
+dpkg-buildpackage -rfakeroot -b -uc -us
+
+cd ../
+
+tar uvf ../../../packages.tgz *.deb
diff --git a/packaging/ubuntu-numbat-docker/Dockerfile b/packaging/ubuntu-numbat-docker/Dockerfile
new file mode 100644
index 0000000..d59d238
--- /dev/null
+++ b/packaging/ubuntu-numbat-docker/Dockerfile
@@ -0,0 +1,53 @@
+FROM ubuntu:numbat
+# This file is in the public domain.
+#
+# Docker image to build Ubuntu packages of
+# GNUnet, GNU Taler and GNU Anastasis.
+#
+
+ARG DEBIAN_FRONTEND=noninteractive
+
+# Install dependencies
+RUN apt-get update
+RUN apt-get -y upgrade
+RUN apt-get update
+RUN apt-get -y dist-upgrade
+RUN apt-get update
+RUN apt-get -y install build-essential zip jq python3 python3-pip nodejs npm autoconf automake gcc-12 make libtool libltdl-dev libmicrohttpd-dev libpq-dev libsqlite3-dev libunistring-dev libqrencode-dev libgcrypt-dev libsodium-dev libargon2-dev libjansson-dev recutils libgmp-dev texinfo pkgconf zlib1g-dev libopus-dev libextractor-dev libnss3-dev libcurl4-gnutls-dev autopoint libzbar-dev libmysqlclient-dev mandoc libpulse-dev libgstreamer1.0-dev libgstreamer-plugins-good1.0-dev libbluetooth-dev iptables miniupnpc libpng-dev python3-jinja2 doxygen libjose-dev iproute2 sudo wget zile libogg-dev gettext net-tools po-debconf debhelper-compat dbconfig-pgsql nginx libgtk-3-dev libgladeui-dev libmagic-dev policykit-1 libnfc-dev python3-click python3-requests python3-sphinx-rtd-theme openjdk-17-jdk pandoc groff
+RUN npm install -g node pnpm
+
+RUN pip install --break-system-packages sphinx_multiversion
+
+# ADD "https://www.random.org/cgi-bin/randbyte?nbytes=10&format=h" skipcache
+COPY gnunet-build.sh /root/
+RUN chmod +x /root/gnunet-build.sh
+RUN /root/gnunet-build.sh master
+
+COPY gnunet-gtk-build.sh /root/
+RUN chmod +x /root/gnunet-gtk-build.sh
+RUN /root/gnunet-gtk-build.sh master
+
+COPY exchange-build.sh /root/
+RUN chmod +x /root/exchange-build.sh
+RUN /root/exchange-build.sh master
+
+COPY merchant-build.sh /root/
+RUN chmod +x /root/merchant-build.sh
+RUN /root/merchant-build.sh master
+
+COPY sync-build.sh /root/
+RUN chmod +x /root/sync-build.sh
+RUN /root/sync-build.sh master
+
+COPY anastasis-build.sh /root/
+RUN chmod +x /root/anastasis-build.sh
+RUN /root/anastasis-build.sh master
+
+# No wallet on Jammy
+COPY wallet-build.sh /root/
+RUN chmod +x /root/wallet-build.sh
+RUN /root/wallet-build.sh master
+
+COPY libeufin-build.sh /root/
+RUN chmod +x /root/libeufin-build.sh
+RUN /root/libeufin-build.sh master
diff --git a/packaging/ubuntu-numbat-docker/README b/packaging/ubuntu-numbat-docker/README
new file mode 100644
index 0000000..0f8c821
--- /dev/null
+++ b/packaging/ubuntu-numbat-docker/README
@@ -0,0 +1,16 @@
+Scripts to build Ubuntu packages from source.
+
+
+TODO:
+- check build for warnings/missing dependencies
+ (especially GNUnet!)
+- break up into separate build for GNUnet/Taler/Anastasis
+ => might be good to not run the entire pipeline only
+ because something changes in anastasis/wallet, as
+ that's not a good reason to re-build GNUnet ;-).
+- integrate with buildbot (integrationtests?)
+ to build-on-tag / build nightly and upload resulting
+ Deb packages to reprepro (fully automated for nightly,
+ but with explicit password-protected signature for tagged builds)
+- support other CPU architectures (by running in VM that emulates
+ other CPU architectures)
diff --git a/packaging/ubuntu-numbat-docker/anastasis-build.sh b/packaging/ubuntu-numbat-docker/anastasis-build.sh
new file mode 100644
index 0000000..24643e1
--- /dev/null
+++ b/packaging/ubuntu-numbat-docker/anastasis-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/anastasis
+cd /build/anastasis
+
+# Fetch source
+rm -rf *
+
+for n in anastasis anastasis-gtk
+do
+ git clone git://git.taler.net/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-numbat-docker/exchange-build.sh b/packaging/ubuntu-numbat-docker/exchange-build.sh
new file mode 100644
index 0000000..a94a003
--- /dev/null
+++ b/packaging/ubuntu-numbat-docker/exchange-build.sh
@@ -0,0 +1,25 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+export CC=gcc-12
+# Fetch source
+rm -rf *
+
+pip3 install --break-system-packages htmlark
+
+git clone git://git.taler.net/exchange
+cd exchange
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-numbat-docker/gnunet-build.sh b/packaging/ubuntu-numbat-docker/gnunet-build.sh
new file mode 100644
index 0000000..614c5e6
--- /dev/null
+++ b/packaging/ubuntu-numbat-docker/gnunet-build.sh
@@ -0,0 +1,29 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/gnunet
+cd /build/gnunet
+
+# Fetch source
+rm -rf *
+
+pip3 install --break-system-packages sphinx-book-theme sphinx-multiversion
+
+for n in gnunet
+do
+ git clone git://git.gnunet.org/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar cvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-numbat-docker/gnunet-gtk-build.sh b/packaging/ubuntu-numbat-docker/gnunet-gtk-build.sh
new file mode 100644
index 0000000..4414c3f
--- /dev/null
+++ b/packaging/ubuntu-numbat-docker/gnunet-gtk-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/gnunet
+cd /build/gnunet
+
+# Fetch source
+rm -rf *
+
+for n in gnunet-gtk
+do
+ git clone git://git.gnunet.org/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-numbat-docker/libeufin-build.sh b/packaging/ubuntu-numbat-docker/libeufin-build.sh
new file mode 100644
index 0000000..7229221
--- /dev/null
+++ b/packaging/ubuntu-numbat-docker/libeufin-build.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/libeufin
+cd /build/libeufin
+
+# Fetch source
+rm -rf *
+git clone git://git.taler.net/libeufin
+cd libeufin
+git checkout $1
+./bootstrap
+./configure --prefix=/usr/local
+make deb
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-numbat-docker/mdb-build.sh b/packaging/ubuntu-numbat-docker/mdb-build.sh
new file mode 100644
index 0000000..d097240
--- /dev/null
+++ b/packaging/ubuntu-numbat-docker/mdb-build.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+git clone git://git.taler.net/taler-mdb
+cd taler-mdb
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-numbat-docker/merchant-build.sh b/packaging/ubuntu-numbat-docker/merchant-build.sh
new file mode 100644
index 0000000..24f5f9d
--- /dev/null
+++ b/packaging/ubuntu-numbat-docker/merchant-build.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+# pip3 install --break-system-packages htmlark
+
+git clone git://git.taler.net/merchant
+cd merchant
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-numbat-docker/run.sh b/packaging/ubuntu-numbat-docker/run.sh
new file mode 100755
index 0000000..a6df3f0
--- /dev/null
+++ b/packaging/ubuntu-numbat-docker/run.sh
@@ -0,0 +1,21 @@
+#!/bin/sh
+set -eu
+CONTAINER=$USER/debbuilder:latest
+docker build -t $CONTAINER .
+rm -rf dist
+mkdir dist
+docker run --read-only $CONTAINER sleep 100 &
+sleep 1
+docker container ls
+ID=`docker container ls | grep $CONTAINER | head -n1 | awk '{print $1}'`
+echo "Extracting files from $ID"
+docker cp "$ID:/build/packages.tgz" .
+echo "Stopping $CONTAINER ($ID)"
+docker container stop $ID
+echo "Removing $CONTAINER"
+docker container rm $ID
+docker image rm $USER/debbuilder
+cd dist
+tar xvf ../packages.tgz
+cd ..
+rm packages.tgz
diff --git a/packaging/ubuntu-numbat-docker/sync-build.sh b/packaging/ubuntu-numbat-docker/sync-build.sh
new file mode 100644
index 0000000..e38a0ee
--- /dev/null
+++ b/packaging/ubuntu-numbat-docker/sync-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/sync
+cd /build/sync
+
+# Fetch source
+rm -rf *
+
+for n in sync
+do
+ git clone git://git.taler.net/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-numbat-docker/wallet-build.sh b/packaging/ubuntu-numbat-docker/wallet-build.sh
new file mode 100644
index 0000000..6d807be
--- /dev/null
+++ b/packaging/ubuntu-numbat-docker/wallet-build.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/wallet
+cd /build/wallet
+
+# Fetch source
+rm -rf *
+git clone git://git.taler.net/wallet-core
+
+cd wallet-core
+git checkout $1
+./bootstrap
+
+cd packages/taler-wallet-cli
+
+dpkg-buildpackage -rfakeroot -b -uc -us
+
+cd ../taler-harness
+dpkg-buildpackage -rfakeroot -b -uc -us
+
+cd ../
+
+tar uvf ../../../packages.tgz *.deb
diff --git a/regional-currency/.gitignore b/regional-currency/.gitignore
new file mode 100644
index 0000000..26790f8
--- /dev/null
+++ b/regional-currency/.gitignore
@@ -0,0 +1,2 @@
+config/
+setup.log \ No newline at end of file
diff --git a/regional-currency/.shellcheckrc b/regional-currency/.shellcheckrc
new file mode 100644
index 0000000..e170f39
--- /dev/null
+++ b/regional-currency/.shellcheckrc
@@ -0,0 +1 @@
+disable=SC2018,SC2019
diff --git a/regional-currency/ChangeLog b/regional-currency/ChangeLog
new file mode 100644
index 0000000..b2310dd
--- /dev/null
+++ b/regional-currency/ChangeLog
@@ -0,0 +1,7 @@
+Sun Mar 10 12:15:15 PM CET 2024
+ Changed the scripts to enable (!) taler-merchant.target
+ instead of just the taler-merchant-httpd service.
+
+ Added automatically setting the wire-fee for IBAN.
+
+ Added code to automatically run taler-exchange-offline daily (#8623).
diff --git a/regional-currency/README b/regional-currency/README
new file mode 100644
index 0000000..599336a
--- /dev/null
+++ b/regional-currency/README
@@ -0,0 +1,2 @@
+Refer to the following document:
+https://docs.taler.net/libeufin/regional-manual.html#guided-basic-setup
diff --git a/regional-currency/config.py b/regional-currency/config.py
new file mode 100755
index 0000000..107e535
--- /dev/null
+++ b/regional-currency/config.py
@@ -0,0 +1,478 @@
+#!/usr/bin/env python3
+"""Python script to ask questions using an interactive prompt"""
+
+import base64
+import os
+import re
+import subprocess
+import urllib.parse
+import uuid
+from base64 import b64decode, b64encode
+from typing import Callable, Dict, TypeVar
+
+import argon2
+from Crypto.Cipher import ChaCha20_Poly1305
+from Crypto.Hash import SHA512
+from Crypto.Protocol.KDF import PBKDF2
+from Crypto.Random import get_random_bytes
+
+# Early exit if already loaded
+if os.environ.get("CONFIG_LOADED") == "y":
+ exit(0)
+
+log = open("setup.log", "ab", buffering=0)
+CONFIG_FILE = "config/user.conf"
+BIC_PATTERN = re.compile("[A-Z0-9]{4}[A-Z]{2}[A-Z0-9]{2}(?:[A-Z0-9]{3})?")
+IBAN_PATTERN = re.compile("[A-Z]{2}[0-9]{2}[A-Z0-9]{,28}")
+
+
+def load_conf() -> Dict[str, str]:
+ """Load user configuration file"""
+ conf = {}
+ with open(CONFIG_FILE, "r") as f:
+ for kv in f.read().splitlines():
+ if len(kv) != 0:
+ [k, v] = [part.strip() for part in kv.split("=", 1)]
+ if v.startswith('"') and v.endswith('"'):
+ conf[k] = v.strip('"').replace('\\"', '"')
+ elif v.startswith("'") and v.endswith("'"):
+ conf[k] = v.strip("'").replace("'\\''", "'").replace("\\'", "'")
+ else:
+ conf[k] = v
+ return conf
+
+
+conf = load_conf()
+result_conf = {**conf, "CONFIG_LOADED": "y"}
+
+
+def add_conf(name: str, value: str):
+ """Update a user configuration value and update the configuration file"""
+ conf[name] = value
+ result_conf[name] = value
+ content = ""
+ for key, value in conf.items():
+ escaped = value.replace("'", "'\\''")
+ content += f"export {key}='{escaped}'\n"
+ with open(CONFIG_FILE, "w") as f:
+ f.write(content)
+
+
+def run_cmd(
+ cmd: list[str], input: str | None = None, env: Dict[str, str] | None = None
+) -> int:
+ """Run a command in a child process and return its exit code"""
+ result = subprocess.run(
+ cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ input=input.encode() if input is not None else None,
+ stdin=subprocess.DEVNULL if input is None else None,
+ env=env,
+ )
+ log.write(result.stdout)
+ if result.returncode != 0:
+ print(result.stdout.decode("utf-8"), end="")
+ return result.returncode
+
+
+def try_cmd(
+ cmd: list[str], input: str | None = None, env: Dict[str, str] | None = None
+) -> bool:
+ """Run a command in a child process and return if successful"""
+ return run_cmd(cmd, input, env) == 0
+
+
+A = TypeVar("A")
+T = TypeVar("T")
+
+
+def conf_value(
+ name: str | None,
+ action: Callable[[], str | None],
+ default: T | None = None,
+ check: Callable[[str], T | None] = lambda it: it,
+ fmt: Callable[[T], str] = lambda it: str(it),
+) -> T:
+ """
+ Logic to configure a value
+
+ :param name: if present will try to fetch the current value and will store the new value
+ :param action: how a value will be obtained
+ :param default: default value to use if no value is given
+ :param check: check and normalize the value
+ :param fmt: format value for storage
+ :return: the configuration value
+ """
+ value = None
+
+ # Fetch current value
+ if name is not None:
+ curr = conf.get(name)
+ if curr is not None:
+ # Check the current value and ask again if invalid
+ value = check(curr)
+
+ # Ask for a new value until we get a valid one
+ while value is None:
+ new = action()
+ # Use default if no value was provided else check the new value
+ value = check(new) if new is not None else default
+
+ # Store the new value
+ if name is not None:
+ add_conf(name, fmt(value))
+
+ return value
+
+
+def ask(
+ name: str | None,
+ msg: str,
+ default: T | None = None,
+ check: Callable[[str], T | None] = lambda it: it,
+ fmt: Callable[[T], str] = lambda it: str(it),
+) -> T:
+ """
+ Prompt the user to configure a value
+ :param name: if present will try to fetch the current value and will store the new value
+ :param msg: the message to prompt the user with
+ :param default: default value to use if no value is obtained
+ :param check: check and normalize the value
+ :param fmt: format value for storage
+ :return: the configuration value
+ """
+
+ def do_ask() -> str | None:
+ # Log the prompt
+ log.write(msg.encode() + "\n".encode())
+ # Actual prompt
+ raw = input(msg).strip()
+ if raw == "":
+ if default is None:
+ print("You must enter a value")
+ return None
+ return raw
+
+ return conf_value(name, do_ask, default, check, fmt)
+
+
+def ask_str(name: str | None, msg: str, default: str | None = None) -> str:
+ "Prompt the user to configure a string"
+ return ask(name, msg, default)
+
+
+def ask_bic(name: str | None, msg: str, default: str | None = None) -> str:
+ "Prompt the user to configure a BIC"
+
+ def check_bic(raw: str) -> str | None:
+ raw = raw.translate({ord(i): None for i in " -"})
+ if not BIC_PATTERN.fullmatch(raw):
+ print("Invalid BIC")
+ return None
+ else:
+ return raw
+
+ return ask(name, msg, default, check_bic)
+
+
+def ask_iban(name: str | None, msg: str, default: str | None = None) -> str:
+ "Prompt the user to configure an IBAN"
+
+ def check_iban(raw: str) -> str | None:
+ raw = raw.translate({ord(i): None for i in " -"})
+ if not IBAN_PATTERN.fullmatch(raw):
+ print("Invalid IBAN") # Checksum check ?
+ return None
+ else:
+ return raw
+
+ return ask(name, msg, default, check_iban)
+
+
+def ask_currency(name: str, msg: str, default: str | None = None) -> str:
+ "Prompt the user to configure a currency name"
+
+ def check_currency(currency: str) -> str | None:
+ currency = currency.upper()
+ if not all([c.isascii() and c.isalpha() for c in currency]):
+ print("The currency name must be an ASCII alphabetic string")
+ elif len(currency) < 3 or 11 < len(currency):
+ print("The currency name had to be between 3 and 11 characters long")
+ else:
+ return currency
+ return None
+
+ return ask(name, msg, default, check_currency)
+
+
+def ask_host(name: str, msg: str, default: str | None = None) -> str:
+ "Prompt the user to configure the installation hostname"
+
+ def check_host(host: str) -> str | None:
+ success = True
+ for subdomain in ["backend", "bank", "exchange"]:
+ success = try_cmd(["ping", "-c", "1", f"{subdomain}.{host}"]) and success
+ if success:
+ return host
+ else:
+ return None
+
+ return ask(name, msg, default, check_host)
+
+
+def ask_terms(name: str, msg: str, kind: str) -> str:
+ "Prompt the user to select a ToS/privacy policy"
+
+ # msg = "9.1. Enter the filename of the ToS. Some available options are:\n"
+ tos_msg = msg
+
+ # Recollect example ToS files
+ tos_path = "/usr/share/taler/terms"
+ for f in os.listdir(tos_path):
+ tos_file = os.path.join(tos_path, f)
+ if os.path.isfile(tos_file) and f.endswith(".rst") and kind in f:
+ tos_msg += f"- {tos_file}\n"
+
+ tos_msg += "=> "
+
+ def check_file(path: str) -> str | None:
+ if not os.path.isfile(path):
+ print("Not a file") # Checksum check ?
+ return None
+ else:
+ return path
+
+ return ask(name, tos_msg, None, check_file)
+
+
+def ask_yes_no(name: str | None, msg: str, default: bool | None = None) -> bool:
+ "Prompt the user to configure a boolean"
+
+ def check_yes_no(raw: str) -> bool | None:
+ raw = raw.lower()
+ if raw == "y" or raw == "yes":
+ return True
+ elif raw == "n" or raw == "no":
+ return False
+ else:
+ print("Expected 'y' or 'n'")
+ return None
+
+ return ask(name, msg, default, check_yes_no, lambda it: "y" if it else "n")
+
+
+# ----- Crypto ----- #
+
+
+def ask_config_password() -> str:
+    "Prompt the user to configure a password stored hashed with argon2id"
+    ph = argon2.PasswordHasher()
+    # Only the argon2 hash is persisted; the cleartext never hits disk.
+    hash = conf.get("CONFIG_PASSWORD")
+    passwd = None
+    if hash is not None:
+        # A hash already exists: loop until the matching password is given.
+        while True:
+            passwd = ask_str(None, "Enter the config password : ")
+            try:
+                ph.verify(hash, passwd)
+                break
+            except argon2.exceptions.VerifyMismatchError:
+                print("invalid password")
+    else:
+        passwd = ask_str(None, "1.1 Choose a config password : ")
+
+    # Store the hash when missing, or re-hash when the argon2 parameters
+    # changed since the stored hash was created.
+    if hash is None or ph.check_needs_rehash(hash):
+        add_conf("CONFIG_PASSWORD", ph.hash(passwd))
+
+    return passwd
+
+
+def ask_secret(
+ name: str, msg: str, passwd: str | None, default: str | None = None
+) -> str:
+ "Prompt the user to configure a string stored encryped using pbkdf2_sha512 and chacha20_poly1305"
+ if passwd is None:
+ return ask_str(name, msg, default)
+ else:
+ raw = conf.get(name)
+ plaintext = None
+ if raw is not None:
+ method = "$pbkdf2_sha512_chacha20_poly1305$1000000$"
+ if raw.startswith(method):
+ salt, nonce, tag, ciphertext = [
+ b64decode(it) for it in raw.removeprefix(method).split("$", 3)
+ ]
+ key = PBKDF2(passwd, salt, 32, count=1000000, hmac_hash_module=SHA512)
+ cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce)
+ cipher.update(name.encode())
+ plaintext = cipher.decrypt_and_verify(ciphertext, tag).decode()
+ else:
+ salt = get_random_bytes(16)
+ key = PBKDF2(passwd, salt, 32, count=1000000, hmac_hash_module=SHA512)
+ cipher = ChaCha20_Poly1305.new(key=key)
+ cipher.update(name.encode())
+ ciphertext, tag = cipher.encrypt_and_digest(raw.encode())
+ add_conf(
+ name,
+ f"$pbkdf2_sha512_chacha20_poly1305$1000000${base64.b64encode(salt).decode()}${base64.b64encode(cipher.nonce).decode()}${base64.b64encode(tag).decode()}${base64.b64encode(ciphertext).decode()}",
+ )
+ else:
+ plaintext = ask_str(None, msg, default)
+ salt = get_random_bytes(16)
+ key = PBKDF2(passwd, salt, 32, count=1000000, hmac_hash_module=SHA512)
+ cipher = ChaCha20_Poly1305.new(key=key)
+ cipher.update(name.encode())
+ ciphertext, tag = cipher.encrypt_and_digest(plaintext.encode())
+ add_conf(
+ name,
+ f"$pbkdf2_sha512_chacha20_poly1305$1000000${base64.b64encode(salt).decode()}${base64.b64encode(cipher.nonce).decode()}${base64.b64encode(tag).decode()}${base64.b64encode(ciphertext).decode()}",
+ )
+ result_conf[name] = plaintext
+ return plaintext
+
+
+# ----- Prompt ----- #
+
+config_passwd = (
+ ask_config_password()
+ if ask_yes_no(
+ "DO_CONFIG_ENCRYPTION",
+ "1. Do you want to encrypt sensitive config values (Y/n): ",
+ True,
+ )
+ else None
+)
+ask_currency(
+ "CURRENCY",
+ "2. Enter the name of the regional currency (e.g. 'NETZBON'): ",
+ "NETZBON",
+)
+do_conversion = ask_yes_no(
+ "DO_CONVERSION",
+ "3. Do you want setup regional currency conversion to fiat currency (Y/n): ",
+ True,
+)
+if do_conversion:
+ ask_currency(
+ "FIAT_CURRENCY",
+ "3.1. Enter the name of the fiat currency (e.g. 'CHF'): ",
+ "CHF",
+ )
+ ask_str(
+ "FIAT_BANK_NAME",
+ "3.2. Enter the name of your fiat bank (e.g. POSTFINANCE AG): ",
+ )
+ iban = ask_iban(
+ "FIAT_ACCOUNT_IBAN",
+ "3.3. Enter the IBAN of your fiat bank account (e.g. 'CH7789144474425692816'): ",
+ )
+ bic = ask_bic(
+ "FIAT_ACCOUNT_BIC",
+ "3.4. Enter the BIC of your fiat bank account (e.g. 'POFICHBEXXX'): ",
+ )
+ name = ask_str(
+ "FIAT_ACCOUNT_NAME", "3.5. Enter the legal name of your fiat bank account: "
+ )
+ params = urllib.parse.urlencode({"receiver-name": name})
+ add_conf("CONVERSION_PAYTO", f"payto://iban/{bic}/{iban}?{params}")
+bank_name = ask_str(
+    "BANK_NAME",
+    "4. Enter the human-readable name of the bank (e.g. 'Taler Bank'): ",
+    "Taler Bank",
+)
+ask_host("DOMAIN_NAME", "5. Enter the domain name (e.g. 'example.com'): ")
+if ask_yes_no("ENABLE_TLS", "6. Setup TLS using Let's Encrypt? (Y/n): ", True):
+    ask_str("TLS_EMAIL", "6.1. Enter an email address for Let's Encrypt: ")
+
+    def ask_tos():
+        # Let's Encrypt registration requires explicit ToS agreement;
+        # returning None makes conf_value() re-ask until the user agrees.
+        print(
+            "6.2. Please read the Terms of Service at https://letsencrypt.org/documents/LE-SA-v1.3-September-21-2022.pdf."
+        )
+        if not ask_yes_no(
+            None,
+            "6.2. You must agree in order to register with the ACME server. Do you agree? (y/n): ",
+            False,
+        ):
+            print("You must agree in order to register with the ACME server")
+            return None
+        else:
+            return "y"
+
+    conf_value("TLS_TOS", ask_tos)
+    add_conf("PROTO", "https")
+else:
+    add_conf("PROTO", "http")
+
+add_conf(
+    "DO_OFFLINE", "y"
+)  # TODO support offline setup again when the documentation is ready
+
+if ask_yes_no(
+ "DO_TELESIGN",
+ "7. Setup SMS two-factor authentication using Telesign https://www.telesign.com? (Y/n): ",
+ True,
+):
+
+ def ask_telesign():
+ customer_id = ask_str(None, "7.1. Enter your Telesign Customer ID: ")
+ api_key = ask_str(None, "7.2. Enter your Telesign API Key: ")
+ phone_number = ask_str(
+ None,
+ "6.3. Enter a phone number to test your API key (e.g. '+447911123456'): ",
+ )
+ auth_token = base64.b64encode(f"{customer_id}:{api_key}".encode()).decode()
+ if not try_cmd(
+ ["libeufin-tan-sms.sh", phone_number],
+ f"T-12345 is your verification code for {bank_name} setup",
+ {**os.environ, "AUTH_TOKEN": auth_token},
+ ):
+ print(
+ "Failed to send an SMS using Telesign API, check your credentials and phone number"
+ )
+ return None
+ code = ask_str(None, f"7.4. Enter the code received by {phone_number} : ")
+ if code != "12345" and code != "T-12345":
+ print(
+ f"Wrong code got '{code}' expected '12345', check your credentials and phone number"
+ )
+ return None
+ return auth_token
+
+ conf_value("TELESIGN_AUTH_TOKEN", ask_telesign)
+ask_secret(
+ "BANK_ADMIN_PASSWORD",
+ "8. Enter the admin password for the bank (or press enter to autogenerate password): ",
+ config_passwd,
+ str(uuid.uuid4()),
+)
+
+if ask_yes_no(
+    "DO_EXCHANGE_TERMS",
+    "9. Do you wish to configure terms of service (ToS) for the exchange? (Y/n): ",
+    True,
+):
+    # "-tos-" selects terms-of-service templates from /usr/share/taler/terms
+    # (see ask_terms).
+    ask_terms(
+        "EXCHANGE_TERMS_FILE",
+        "9.1. Enter the filename of the ToS. Some available options are:\n",
+        "-tos-",
+    )
+
+if ask_yes_no(
+    "DO_EXCHANGE_PRIVACY",
+    "10. Do you wish to configure a privacy policy for the exchange? (Y/n): ",
+    True,
+):
+    # "-pp-" selects the privacy-policy templates.
+    ask_terms(
+        "EXCHANGE_PRIVACY_FILE",
+        "10.1. Enter the filename of the privacy policy. Some available options are:\n",
+        "-pp-",
+    )
+
+# ----- Return conf ----- #
+
+# Emit the collected configuration as shell 'export' lines on file
+# descriptor 3; the calling shell script sources this output. Values are
+# single-quoted with embedded single quotes escaped as '\'' so arbitrary
+# strings survive the round trip.
+content = ""
+for key, value in result_conf.items():
+    escaped = value.replace("'", "'\\''")
+    content += f"export {key}='{escaped}'\n"
+with os.fdopen(3, "w") as f:
+    f.write(content)
diff --git a/regional-currency/config_nginx.sh b/regional-currency/config_nginx.sh
new file mode 100755
index 0000000..84df1e8
--- /dev/null
+++ b/regional-currency/config_nginx.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+# Generate and enable the nginx vhosts for backend/bank/exchange and
+# optionally obtain TLS certificates via certbot.
+
+source functions.sh
+source config/user.conf
+source config/internal.conf
+
+# Exported so envsubst can substitute them in the templates below.
+export PROTO
+export DOMAIN_NAME
+export BANK_PORT
+
+envsubst <nginx-conf/backend.taler-nginx.conf >"/etc/nginx/sites-available/backend.${DOMAIN_NAME}"
+envsubst <nginx-conf/bank.taler-nginx.conf >"/etc/nginx/sites-available/bank.${DOMAIN_NAME}"
+envsubst <nginx-conf/exchange.taler-nginx.conf >"/etc/nginx/sites-available/exchange.${DOMAIN_NAME}"
+
+# Create nginx symlinks
+
+ln -sf /etc/nginx/sites-available/backend."${DOMAIN_NAME}" /etc/nginx/sites-enabled/backend."${DOMAIN_NAME}"
+ln -sf /etc/nginx/sites-available/bank."${DOMAIN_NAME}" /etc/nginx/sites-enabled/bank."${DOMAIN_NAME}"
+ln -sf /etc/nginx/sites-available/exchange."${DOMAIN_NAME}" /etc/nginx/sites-enabled/exchange."${DOMAIN_NAME}"
+
+if test "${ENABLE_TLS}" == "y"; then
+
+    # Replace http with https in the demobank-ui configuration
+
+    sed -i "s/http:\/\/bank./https:\/\/bank./g" /etc/libeufin/settings.json
+
+    # Certbot
+
+    say "Obtaining TLS certificates using Let's Encrypt"
+
+    # Quote the email: an unquoted value would word-split and break the
+    # certbot invocation.
+    certbot --nginx -n --agree-tos -m "${TLS_EMAIL}" \
+        -d backend."${DOMAIN_NAME}" \
+        -d bank."${DOMAIN_NAME}" \
+        -d exchange."${DOMAIN_NAME}" &>> setup.log
+else
+    sed -i "s/https:\/\/bank./http:\/\/bank./g" /etc/libeufin/settings.json
+fi
+
+say "Restarting Nginx with new configuration"
+systemctl reload nginx &>> setup.log
diff --git a/regional-currency/diagnose.sh b/regional-currency/diagnose.sh
new file mode 100755
index 0000000..a0c513b
--- /dev/null
+++ b/regional-currency/diagnose.sh
@@ -0,0 +1,125 @@
+#!/usr/bin/env bash
+
+# This file is in the public domain.
+
+# Script for basic diagnostics of a Taler regio deployment.
+# @author Florian Dold <dold@taler.net>
+
+if [ "$(id -u)" -ne 0 ]; then
+  echo "FATAL: Please run as root." >&2
+  exit 1
+fi
+
+libeufin_bank_db=$(libeufin-bank config get libeufin-bankdb-postgres config)
+libeufin_nexus_db=$(libeufin-nexus config get libeufin-nexusdb-postgres config)
+exchange_db=$(taler-config -s exchangedb-postgres -o config)
+
+# Quote the RHS so it is compared literally, not as a glob pattern.
+if [[ $libeufin_nexus_db != "$libeufin_bank_db" ]]; then
+  echo "FATAL: libeufin-bank and libeufin-nexus don't share the same database" >&2
+  exit 1
+fi
+
+libeufin_db=$libeufin_bank_db
+
+# runsql db RESNAME < query
+# Run the SQL read from stdin against database $1 (as user postgres) and
+# store the CSV result in the variable named by $2.
+function runsql() {
+  local sql
+  read -r -d '' sql
+  res=$(cd / && sudo -u postgres psql "$1" -t --csv -c "$sql")
+  printf -v "$2" '%s' "$res"
+}
+
+#
+# Check for conversion trigger
+#
+
+runsql "$libeufin_db" have_conversion_triggers <<EOF
+select count(*) from information_schema.triggers
+  where trigger_schema='libeufin_nexus'
+  and trigger_name='cashin_link';
+EOF
+
+echo "have_conversion_triggers" $have_conversion_triggers
+
+#
+# Check for transactions
+#
+runsql "$libeufin_db" num_nexus_incoming_transactions <<EOF
+select count(*) from libeufin_nexus.incoming_transactions;
+EOF
+echo num_nexus_incoming_transactions: $num_nexus_incoming_transactions
+
+runsql "$libeufin_db" num_nexus_talerable_transactions <<EOF
+select count(*) from libeufin_nexus.talerable_incoming_transactions;
+EOF
+echo "num_nexus_talerable_transactions:" $num_nexus_talerable_transactions
+
+runsql "$libeufin_db" num_nexus_bounced_transactions <<EOF
+select count(*) from libeufin_nexus.bounced_transactions;
+EOF
+echo "num_nexus_bounced_transactions:" $num_nexus_bounced_transactions
+
+runsql "$libeufin_db" num_bank_exchange_incoming <<EOF
+select count(*) from libeufin_bank.taler_exchange_incoming;
+EOF
+echo "num_bank_exchange_incoming:" $num_bank_exchange_incoming
+
+runsql "$exchange_db" num_exchange_reserves_in <<EOF
+select count(*) from exchange.reserves_in;
+EOF
+echo num_exchange_reserves_in: $num_exchange_reserves_in
+
+runsql "$exchange_db" num_exchange_reserves <<EOF
+select count(*) from exchange.reserves;
+EOF
+echo num_exchange_reserves: $num_exchange_reserves
+
+
+# Warn when a systemd unit that should be running is not active.
+function expect_unit_active() {
+  if ! systemctl --quiet is-active "$1"; then
+    echo "WARNING: expected unit $1 to be active, but it is not active"
+  fi
+}
+
+libeufin_units=(
+libeufin-bank.service
+libeufin-nexus-ebics-fetch.service
+libeufin-nexus-ebics-submit.service
+)
+
+exchange_units=(
+taler-exchange-aggregator.service
+taler-exchange-closer.service
+taler-exchange-expire.service
+taler-exchange-httpd.service
+taler-exchange-secmod-cs.service
+taler-exchange-secmod-eddsa.service
+taler-exchange-secmod-rsa.service
+taler-exchange-transfer.service
+taler-exchange-wirewatch.service
+)
+
+
+merchant_units=(
+taler-merchant-httpd.service
+)
+
+all_units=()
+all_units+=( "${libeufin_units[@]}" "${exchange_units[@]}" "${merchant_units[@]}" )
+
+# Quote the array expansion so each unit is one word.
+for unit in "${all_units[@]}"; do
+  expect_unit_active "$unit"
+done
+
+SINCE="7 days ago"
+echo "analysing logs since $SINCE"
+
+for unit in "${all_units[@]}"; do
+  num_warnings=$(journalctl -u "$unit" --since "$SINCE" | grep -c WARNING)
+  num_errors=$(journalctl -u "$unit" --since "$SINCE" | grep -c ERROR)
+  if [[ ( $num_errors -eq 0 ) && ( $num_warnings -eq 0 ) ]]; then
+    continue
+  fi
+  echo "Please check logs for $unit ($num_warnings warnings, $num_errors errors)"
+done
diff --git a/regional-currency/functions.sh b/regional-currency/functions.sh
new file mode 100755
index 0000000..0663fec
--- /dev/null
+++ b/regional-currency/functions.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+# Shared helpers sourced by the regional-currency setup scripts.
+
+# Report the line that triggered errexit and abort; installed as the
+# ERR trap of every script that sources this file.
+notify_err() {
+  say "errexit on line $(caller)"
+  say "Error messages can be found at the end of setup.log"
+  exit 1
+}
+
+trap notify_err ERR
+
+# Message: print a progress line to the terminal and append it to setup.log.
+function say() {
+  echo "TALER: " "$@" >> setup.log
+  echo "TALER: " "$@"
+}
+
+# Check if the user is root; abort otherwise.
+function check_user() {
+  if [ "$(whoami)" != "root" ]; then
+    say "Please run this script as root"
+    exit 1
+  fi
+}
+
+# Set DISTRO to the detected distro or return non-zero
+# status if distro not supported.
+function detect_distro() {
+  unset DISTRO
+  [[ -f /etc/os-release ]] && source /etc/os-release
+  # Quote and default NAME: /etc/os-release may be absent and callers
+  # run under 'set -u'.
+  # shellcheck disable=SC2034
+  echo "${NAME:-}" | grep -q Ubuntu && DISTRO=ubuntu && return 0
+  # shellcheck disable=SC2034
+  echo "${NAME:-}" | grep -q Debian && DISTRO=debian && return 0
+  echo "Unsupported distro, should be either ubuntu or debian" >&2
+  return 1
+}
diff --git a/regional-currency/install_packages.sh b/regional-currency/install_packages.sh
new file mode 100755
index 0000000..44e4377
--- /dev/null
+++ b/regional-currency/install_packages.sh
@@ -0,0 +1,83 @@
+#!/bin/bash
+# This file is in the public domain.
+
+set -eu
+
+source functions.sh
+
+detect_distro
+
+# Program versions
+PG_VERSION=15
+
+say "Installing necessary packages (this may take a while)..."
+
+## Update
+
+apt update &>> setup.log
+
+## General requirements
+
+apt install \
+ uuid-runtime \
+ make \
+ sudo \
+ curl \
+ jq \
+ wget \
+ nginx \
+ postgresql-${PG_VERSION} \
+ postgresql-client-${PG_VERSION} \
+ dbconfig-pgsql \
+ certbot \
+ python3-sphinx \
+ python3-pip \
+ python3-certbot-nginx -y &>> setup.log
+
+pip3 install --break-system-packages \
+ sphinx-markdown-builder \
+ htmlark \
+ argon2-cffi \
+ pycryptodome &>> setup.log
+
+## Add GNU Taler deb.taler.net to /etc/apt/sources.list
+
+say "Adding GNU Taler apt repository"
+say "Detected distro $DISTRO"
+
+case $DISTRO in
+debian)
+ if test ${APT_NIGHTLY:-n} == y; then
+ say "Setup nightly packages"
+ echo "deb [trusted=yes] https://deb.taler.net/apt-nightly bookworm main" >/etc/apt/sources.list.d/taler.list
+ else
+ echo "deb [signed-by=/etc/apt/keyrings/taler-systems.gpg] https://deb.taler.net/apt/debian bookworm main" >/etc/apt/sources.list.d/taler.list
+ fi
+ ;;
+ubuntu)
+ echo "deb [signed-by=/etc/apt/keyrings/taler-systems.gpg] https://deb.taler.net/apt/ubuntu mantic taler-mantic" >/etc/apt/sources.list.d/taler.list
+ ;;
+*)
+ say "Unsupported distro: $DISTRO"
+ exit 1
+ ;;
+esac
+
+wget -P /etc/apt/keyrings https://taler.net/taler-systems.gpg &>> setup.log
+
+## Specific GNU Taler packages
+
+say "Installing GNU Taler packages (this may take a while)..."
+
+apt update &>> setup.log
+apt install \
+ taler-exchange \
+ taler-terms-generator \
+ taler-merchant \
+ taler-harness \
+ taler-wallet-cli \
+ taler-exchange-offline \
+ libeufin-bank \
+ libeufin-nexus \
+ -y \
+ &>> setup.log
diff --git a/regional-currency/list-incoming.sh b/regional-currency/list-incoming.sh
new file mode 100755
index 0000000..bb3a67d
--- /dev/null
+++ b/regional-currency/list-incoming.sh
@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+
+# This file is in the public domain.
+
+# Script to list the reserve public keys of incoming exchange transfers
+# of a Taler regio deployment, printed in GNUnet base32 encoding.
+# @author Florian Dold <dold@taler.net>
+
+if [ "$(id -u)" -ne 0 ]; then
+  echo "FATAL: Please run as root." >&2
+  exit 1
+fi
+
+exchange_db=$(taler-config -s exchangedb-postgres -o config)
+
+# runsql db RESNAME < query
+# Run the SQL read from stdin against database $1 (as user postgres) and
+# store the CSV result in the variable named by $2.
+function runsql() {
+  local sql
+  read -r -d '' sql
+  res=$(cd / && sudo -u postgres psql "$1" -t --csv -c "$sql")
+  printf -v "$2" '%s' "$res"
+}
+
+runsql "$exchange_db" reserves_in <<EOF
+select reserve_pub from exchange.reserves_in;
+EOF
+
+# Quote the here-string so the value is passed verbatim.
+mapfile -t lines <<<"$reserves_in"
+
+for line in "${lines[@]}"; do
+  # psql prints bytea as \x-prefixed hex: strip the prefix, decode to
+  # binary, then re-encode with GNUnet's base32 alphabet.
+  python3 -c "import binascii; import sys; sys.stdout.buffer.write(binascii.a2b_hex(sys.argv[1][2:]))" "$line" | gnunet-base32
+  echo
+done
+
diff --git a/regional-currency/main.sh b/regional-currency/main.sh
new file mode 100755
index 0000000..1655c7d
--- /dev/null
+++ b/regional-currency/main.sh
@@ -0,0 +1,101 @@
+#!/bin/bash
+# This file is in the public domain.
+
+# main.sh is the main script that asks the questions and
+# puts the answers into environment variables located at "config/taler-internal.conf or config/taler.conf" files
+# Nginx configuration - Reads values directly from these "config files".
+
+set -eu
+
+# include functions source file
+
+source functions.sh
+
+# Clear logs
+
+> setup.log
+
+# include variables from configuration
+mkdir -p config/
+touch config/user.conf config/internal.conf
+# Values we generated
+source config/internal.conf
+
+# Ask questions to user
+# START USER INTERACTION
+say "Welcome to the GNU Taler regional currency setup!"
+say ""
+say "All configuration values asked during the setup script"
+say "can be changed in config/user.conf."
+say "Logs are written in setup.log."
+say ""
+
+# END USER INTERACTION
+
+# Check if the user is root, otherwise EXIT.
+check_user
+
+# Installation of deb packages required
+say ""
+say "Installing packages (step 1 of 6)"
+. install_packages.sh
+
+say ""
+say "Interactive configuration (step 2 of 6)"
+{ source <(./config.py 3>&1 >&4 4>&-); } 4>&1
+
+# Remove when libeufin currencies.conf is in sync with exchange
+cat >>/usr/share/libeufin/config.d/netzbon.conf <<EOF
+[CURRENCY-NETZBON]
+enabled=yes
+name=NetzBon
+code=NETZBON
+fractional_input_digits=2
+fractional_normal_digits=2
+fractional_trailing_zero_digits=2
+alt_unit_names={"0":"NETZBON"}
+EOF
+
+if test -z "${BANK_EXCHANGE_PASSWORD:-}"; then
+ BANK_EXCHANGE_PASSWORD=$(uuidgen)
+ echo "BANK_EXCHANGE_PASSWORD=\"${BANK_EXCHANGE_PASSWORD}\"" >>config/internal.conf
+fi
+
+if test -z "${BANK_PORT:-}"; then
+ echo "BANK_PORT=8080" >>config/user.conf
+ export BANK_PORT=8080
+fi
+
+say ""
+say "Configuring nginx (step 3 of 6)"
+./config_nginx.sh
+
+say ""
+say "Setting up libeufin (step 4 of 6)"
+./setup-libeufin.sh
+
+say ""
+say "Setting up exchange (step 5 of 6)"
+./setup-exchange.sh
+
+say ""
+say "Setting up merchant (step 6 of 6)"
+./setup-merchant.sh
+
+# Final message to the user
+source config/user.conf
+say ""
+say "Congratulations, you have successfully installed GNU Taler"
+say "Your bank is at ${PROTO}://bank.${DOMAIN_NAME}/"
+say "You can connect to the bank web UI as 'admin' using '${BANK_ADMIN_PASSWORD}'"
+say "A merchant is at ${PROTO}://backend.${DOMAIN_NAME}/"
+say "You should set credentials for the merchant soon."
+say "The exchange withdraw URI is taler://withdraw-exchange/exchange.${DOMAIN_NAME}/"
+
+if test ${DO_CONVERSION} == y; then
+ say "For currency conversion to work, you must manually complete"
+ say "the EBICS configuration."
+fi
+
+exit 0
+# END INSTALLATION
diff --git a/regional-currency/nginx-conf/backend.taler-nginx.conf b/regional-currency/nginx-conf/backend.taler-nginx.conf
new file mode 100644
index 0000000..ea267df
--- /dev/null
+++ b/regional-currency/nginx-conf/backend.taler-nginx.conf
@@ -0,0 +1,19 @@
+# Nginx vhost template for the Taler merchant backend; variables are
+# filled in by envsubst from config_nginx.sh.
+server {
+
+    listen 80;
+    listen [::]:80;
+
+    server_name backend.${DOMAIN_NAME};
+
+    # Bigger than default timeout to support long polling
+    proxy_read_timeout 6500s;
+    keepalive_requests 1000000;
+    keepalive_timeout 6500s;
+
+    location / {
+        proxy_pass http://unix:/var/run/taler/merchant-httpd/merchant-http.sock;
+        # Tell the backend the externally visible protocol and host.
+        proxy_set_header X-Forwarded-Proto "${PROTO}";
+        proxy_set_header X-Forwarded-Host "backend.${DOMAIN_NAME}";
+        proxy_set_header X-Forwarded-Prefix /;
+    }
+}
diff --git a/regional-currency/nginx-conf/bank.taler-nginx.conf b/regional-currency/nginx-conf/bank.taler-nginx.conf
new file mode 100644
index 0000000..1c6a6d3
--- /dev/null
+++ b/regional-currency/nginx-conf/bank.taler-nginx.conf
@@ -0,0 +1,23 @@
+# Nginx vhost template for libeufin-bank; variables are filled in by
+# envsubst from config_nginx.sh.
+server {
+    listen 80;
+    listen [::]:80;
+
+    server_name bank.${DOMAIN_NAME};
+
+    access_log /var/log/nginx/libeufin-sandbox.log;
+    error_log /var/log/nginx/libeufin-sandbox.err;
+
+    # Bigger than default timeout to support long polling
+    proxy_read_timeout 6500s;
+    keepalive_requests 1000000;
+    keepalive_timeout 6500s;
+
+    # TODO should we proxy SPA with nginx for perf and fallback to bank server on 404 ?
+    location / {
+        proxy_pass http://localhost:${BANK_PORT};
+        # Fixes withdrawal http request
+        proxy_set_header X-Forwarded-Proto "${PROTO}";
+        proxy_set_header X-Forwarded-Host "bank.${DOMAIN_NAME}";
+        proxy_set_header X-Forwarded-Prefix /;
+    }
+}
diff --git a/regional-currency/nginx-conf/exchange.taler-nginx.conf b/regional-currency/nginx-conf/exchange.taler-nginx.conf
new file mode 100644
index 0000000..b1e9d0a
--- /dev/null
+++ b/regional-currency/nginx-conf/exchange.taler-nginx.conf
@@ -0,0 +1,16 @@
+# Nginx vhost template for the Taler exchange; variables are filled in
+# by envsubst from config_nginx.sh.
+server {
+
+    listen 80;
+    listen [::]:80;
+
+    server_name exchange.${DOMAIN_NAME};
+
+    # Bigger than default timeout to support long polling
+    proxy_read_timeout 6500s;
+    keepalive_requests 1000000;
+    keepalive_timeout 6500s;
+
+    location / {
+        proxy_pass http://unix:/var/run/taler/exchange-httpd/exchange-http.sock;
+    }
+}
diff --git a/regional-currency/setup-exchange.sh b/regional-currency/setup-exchange.sh
new file mode 100755
index 0000000..91f916c
--- /dev/null
+++ b/regional-currency/setup-exchange.sh
@@ -0,0 +1,242 @@
+#!/bin/bash
+# This file is in the public domain.
+#
+# This script configures and launches the Taler exchange.
+#
+# The environment must provide the following variables:
+#
+# - BANK_EXCHANGE_PASSWORD (exchange password for libeufin-bank)
+# - EXCHANGE_WIRE_GATEWAY_URL (where is the exchange wire gateway / libeufin-nexus)
+# - EXCHANGE_PAYTO (exchange account PAYTO)
+# - ENABLE_TLS (http or https?)
+# - DOMAIN_NAME: DNS domain name to use for the setup
+#
+
+set -eu
+
+# Report the script's exit status whenever it is non-zero.
+notify_exit() {
+  [[ $1 == 0 ]] || echo Script "$0" failed, exit code "$1"
+}
+
+# Report the line that triggered errexit.
+notify_err() {
+  echo "errexit on line $(caller)" >&2
+}
+
+# Translate INT/TERM into conventional exit codes so the EXIT trap runs.
+trap '(exit 130)' INT
+trap '(exit 143)' TERM
+trap notify_err ERR
+# shellcheck disable=SC2154
+trap 'rc=$?; notify_exit $rc; exit $rc' EXIT
+
+# End of error handling setup
+
+source functions.sh
+source config/user.conf
+source config/internal.conf
+
+EXCHANGE_DB="taler-exchange"
+
+say "Beginning Exchange setup"
+
+if test -z "${BANK_EXCHANGE_PASSWORD:-}"; then
+  say "Failure: BANK_EXCHANGE_PASSWORD not set"
+  exit 1
+fi
+if test -z "${EXCHANGE_PAYTO:-}"; then
+  say "Failure: EXCHANGE_PAYTO not set"
+  exit 1
+fi
+
+# Log a fatal error and abort.
+function die() {
+  say "$1"
+  exit 1
+}
+
+# Just try if sudo works for diagnostics
+sudo -i -u taler-exchange-offline id >/dev/null || die "Error: Unable to switch to taler-exchange-offline user"
+
+# Create master key as taler-exchange-offline *unless* user already
+# set the MASTER_PUBLIC_KEY to some value we can use.
+export MASTER_PRIV_DIR=.local/share/taler/exchange/offline-keys
+export MASTER_PRIV_FILE=${MASTER_PRIV_DIR}/master.priv
+export SECMOD_TOFU_FILE=${MASTER_PRIV_DIR}/secm_tofus.pub
+if test -z "${MASTER_PUBLIC_KEY:-}"; then
+  if test "${DO_OFFLINE:-y}" == n; then
+    say "Error: No MASTER_PUBLIC_KEY but DO_OFFLINE set to NO"
+    exit 1
+  fi
+  say "Setting up offline key"
+  echo -e "[exchange-offline]\n"\
+    "MASTER_PRIV_FILE=\$HOME/${MASTER_PRIV_FILE}\n"\
+    "SECM_TOFU_FILE=\$HOME/${SECMOD_TOFU_FILE}\n"\
+    >/etc/taler/conf.d/offline-setup.conf
+
+  MASTER_PUBLIC_KEY=$(sudo -i -u taler-exchange-offline taler-exchange-offline -c /etc/taler/taler.conf -LDEBUG setup 2>> setup.log)
+  echo "MASTER_PUBLIC_KEY=\"${MASTER_PUBLIC_KEY}\"" >>config/user.conf
+  if test -z "${DO_OFFLINE:-}"; then
+    # Set 'DO_OFFLINE'
+    DO_OFFLINE=y
+    echo "DO_OFFLINE=y" >>config/user.conf
+  fi
+else
+  say "Master public key is $MASTER_PUBLIC_KEY"
+  # Quoted so 'test' always sees exactly one operand.
+  if test "${DO_OFFLINE:-y}" == y; then
+    # Sanity check: the locally stored private key must match the
+    # configured public key.
+    MASTER_PUBLIC_KEY2=$(sudo -i -u taler-exchange-offline taler-exchange-offline -c /etc/taler/taler.conf setup 2>> setup.log)
+    if test "${MASTER_PUBLIC_KEY2}" != "${MASTER_PUBLIC_KEY}"; then
+      say "Error: master public key mismatch ${MASTER_PUBLIC_KEY2} does not match ${MASTER_PUBLIC_KEY}"
+      exit 1
+    fi
+  fi
+fi
+
+say "Stopping running exchange before reconfiguration"
+systemctl stop taler-exchange.target &>> setup.log
+
+say "Configuring exchange"
+
+# Generate terms of service (ToS)
+TERMS_ETAG=
+# Quoted and defaulted: unset would abort under 'set -u'.
+if test "${DO_EXCHANGE_TERMS:-n}" == y; then
+  if test -z "${EXCHANGE_TERMS_FILE:-}"; then
+    say "Error: No EXCHANGE_TERMS_FILE set but DO_EXCHANGE_TERMS set to YES"
+    exit 1
+  fi
+
+  # The ETag is the template's basename; it versions the document.
+  TERMS_ETAG="$(basename "$EXCHANGE_TERMS_FILE" .rst)"
+
+  say "Setting up terms of service (ToS)"
+  taler-terms-generator -i "${EXCHANGE_TERMS_FILE}" &>> setup.log
+fi
+
+# Generate privacy policy
+PRIVACY_ETAG=
+if test "${DO_EXCHANGE_PRIVACY:-n}" == y; then
+  if test -z "${EXCHANGE_PRIVACY_FILE:-}"; then
+    say "Error: No EXCHANGE_PRIVACY_FILE set but DO_EXCHANGE_PRIVACY set to YES"
+    exit 1
+  fi
+
+  PRIVACY_ETAG="$(basename "$EXCHANGE_PRIVACY_FILE" .rst)"
+
+  say "Setting up the privacy policy"
+  taler-terms-generator -i "${EXCHANGE_PRIVACY_FILE}" &>> setup.log
+fi
+
+export EXCHANGE_BASE_URL="$PROTO://exchange.${DOMAIN_NAME}/"
+
+cat << EOF > /etc/taler/conf.d/setup.conf
+[taler]
+CURRENCY=${CURRENCY}
+CURRENCY_ROUND_UNIT=${CURRENCY}:0.01
+
+[exchange]
+AML_THRESHOLD=${CURRENCY}:1000000
+MASTER_PUBLIC_KEY=${MASTER_PUBLIC_KEY}
+BASE_URL=${EXCHANGE_BASE_URL}
+STEFAN_ABS=${CURRENCY}:0
+STEFAN_LOG=${CURRENCY}:0
+STEFAN_LIN=0
+
+TERMS_ETAG=${TERMS_ETAG}
+PRIVACY_ETAG=${PRIVACY_ETAG}
+
+[merchant-exchange-${DOMAIN_NAME}]
+MASTER_KEY=${MASTER_PUBLIC_KEY}
+CURRENCY=${CURRENCY}
+EXCHANGE_BASE_URL=${EXCHANGE_BASE_URL}
+
+[exchange-account-default]
+PAYTO_URI=${EXCHANGE_PAYTO}
+ENABLE_DEBIT=YES
+ENABLE_CREDIT=YES
+@inline-secret@ exchange-accountcredentials-default ../secrets/exchange-accountcredentials-default.secret.conf
+EOF
+
+cat << EOF > /etc/taler/secrets/exchange-db.secret.conf
+[exchangedb-postgres]
+CONFIG=postgres:///exchange
+EOF
+
+# Database credentials: readable only by root and the exchange DB group.
+chmod 440 /etc/taler/secrets/exchange-db.secret.conf
+chown root:taler-exchange-db /etc/taler/secrets/exchange-db.secret.conf
+
+cat << EOF > /etc/taler/secrets/exchange-accountcredentials-default.secret.conf
+
+[exchange-accountcredentials-default]
+WIRE_GATEWAY_URL=${PROTO}://bank.$DOMAIN_NAME/accounts/exchange/taler-wire-gateway/
+WIRE_GATEWAY_AUTH_METHOD=basic
+USERNAME=exchange
+PASSWORD=${BANK_EXCHANGE_PASSWORD}
+EOF
+
+chmod 400 /etc/taler/secrets/exchange-accountcredentials-default.secret.conf
+chown taler-exchange-wire:taler-exchange-db /etc/taler/secrets/exchange-accountcredentials-default.secret.conf
+
+# Generate the denomination (coin) configuration; the deposit fee is
+# zeroed out for the regional-currency setup.
+taler-harness deployment gen-coin-config \
+  --min-amount "${CURRENCY}":0.01 \
+  --max-amount "${CURRENCY}":100 |
+  sed -e "s/FEE_DEPOSIT = ${CURRENCY}:0.01/FEE_DEPOSIT = ${CURRENCY}:0/" \
+    >/etc/taler/conf.d/"${CURRENCY}"-coins.conf
+
+say "Initializing exchange database"
+taler-exchange-dbconfig -c /etc/taler/taler.conf &>> setup.log
+
+say "Launching exchange"
+systemctl enable taler-exchange.target &>> setup.log
+systemctl restart taler-exchange.target &>> setup.log
+
+say "Waiting for exchange HTTP service (/config)..."
+curl -sS --max-time 2 \
+  --retry-all-errors \
+  --retry-delay 2 \
+  --retry 10 \
+  "${EXCHANGE_BASE_URL}"config &>> setup.log
+
+say "Waiting for exchange management keys (this may take a while)..."
+curl -sS --max-time 30 \
+  --retry-delay 2 \
+  --retry 60 \
+  "${EXCHANGE_BASE_URL}"management/keys &>> setup.log
+
+# Quoted and defaulted: unset would abort under 'set -u'.
+if test "${DO_OFFLINE:-n}" == y; then
+  say "Offline interaction..."
+  # Download the freshly generated keys, sign them with the offline
+  # master key, and upload the signatures.
+  sudo -i -u taler-exchange-offline \
+    taler-exchange-offline \
+    -c /etc/taler/taler.conf \
+    download \
+    sign \
+    upload &>> setup.log
+
+  say "Exchange account setup..."
+  sudo -i -u taler-exchange-offline \
+    taler-exchange-offline \
+    enable-account "${EXCHANGE_PAYTO}" \
+    display-hint 0 "${CURRENCY} Exchange" \
+    wire-fee now x-taler-bank "${CURRENCY}":0 "${CURRENCY}":0 \
+    global-fee now "${CURRENCY}":0 "${CURRENCY}":0 "${CURRENCY}":0 1h 6a 0 \
+    upload &>> setup.log
+
+  say "Enabling timer to automate renewals..."
+  systemctl enable taler-exchange-offline.timer &>> setup.log
+  systemctl restart taler-exchange-offline.timer &>> setup.log
+
+  if test "${DO_CONVERSION:-n}" == y; then
+    say "Conversion account setup (restricted to CH-only)..."
+    sudo -i -u taler-exchange-offline taler-exchange-offline \
+      enable-account "${CONVERSION_PAYTO}" \
+      display-hint 10 "${FIAT_BANK_NAME}" \
+      conversion-url "${PROTO}://bank.$DOMAIN_NAME/conversion-info/" \
+      debit-restriction deny \
+      wire-fee now iban "${CURRENCY}":0 "${CURRENCY}":0 \
+      upload &>> setup.log
+  fi
+fi
+
+say "Waiting for exchange /keys..."
+curl -sS --max-time 2 \
+  --retry-connrefused \
+  --retry-delay 2 \
+  --retry 10 \
+  "${EXCHANGE_BASE_URL}"keys &>> setup.log
+
+say "Exchange setup finished"
diff --git a/regional-currency/setup-libeufin.sh b/regional-currency/setup-libeufin.sh
new file mode 100755
index 0000000..47d8725
--- /dev/null
+++ b/regional-currency/setup-libeufin.sh
@@ -0,0 +1,138 @@
+#!/bin/bash
+# This file is in the public domain.
+#
+# This script configures libeufin-bank and libeufin-nexus.
+
+
+set -eu
+
+source functions.sh
+# Re-run the interactive configurator: it emits 'export' lines on fd 3
+# which are sourced here, while the user dialog stays on the terminal
+# through the fd 4 juggling.
+{ source <(./config.py 3>&1 >&4 4>&-); } 4>&1
+source config/internal.conf
+
+say "Beginning LibEuFin setup"
+
+if test -z "${BANK_NAME:-}"; then
+  say "Error: config/user.conf does not specify BANK_NAME"
+  exit 1
+fi
+if test -z "${DOMAIN_NAME:-}"; then
+  say "Error: config/user.conf does not specify DOMAIN_NAME"
+  exit 1
+fi
+if test -z "${BANK_ADMIN_PASSWORD:-}"; then
+  say "Error: config/user.conf does not specify BANK_ADMIN_PASSWORD"
+  exit 1
+fi
+if test -z "${BANK_EXCHANGE_PASSWORD:-}"; then
+  say "Error: config/user.conf does not specify BANK_EXCHANGE_PASSWORD"
+  exit 1
+fi
+
+if test ${DO_CONVERSION} == y; then
+  say "Configuring libeufin-nexus with ${FIAT_CURRENCY}..."
+
+  # Point nexus' EBICS section at the fiat account collected by config.py.
+  taler-config -s nexus-ebics -o CURRENCY \
+    -V "$FIAT_CURRENCY" -c /etc/libeufin/libeufin-nexus.conf
+  taler-config -s nexus-ebics -o IBAN \
+    -V "$FIAT_ACCOUNT_IBAN" -c /etc/libeufin/libeufin-nexus.conf
+  taler-config -s nexus-ebics -o BIC \
+    -V "$FIAT_ACCOUNT_BIC" -c /etc/libeufin/libeufin-nexus.conf
+  taler-config -s nexus-ebics -o NAME \
+    -V "$FIAT_ACCOUNT_NAME" -c /etc/libeufin/libeufin-nexus.conf
+fi
+
+
+say "Configuring libeufin-bank with ${CURRENCY}..."
+
+cat >/etc/libeufin/libeufin-bank.conf <<EOF
+[libeufin-bank]
+CURRENCY=${CURRENCY}
+NAME="${BANK_NAME}"
+BASE_URL=bank.${DOMAIN_NAME}
+WIRE_TYPE=x-taler-bank
+X_TALER_BANK_PAYTO_HOSTNAME=bank.${DOMAIN_NAME}
+SUGGESTED_WITHDRAWAL_EXCHANGE=${PROTO}://exchange.${DOMAIN_NAME}/
+SERVE=tcp
+PORT=${BANK_PORT}
+EOF
+
+if test ${DO_CONVERSION} == y; then
+ cat >>/etc/libeufin/libeufin-bank.conf <<EOF
+ALLOW_CONVERSION=yes
+FIAT_CURRENCY=${FIAT_CURRENCY}
+ALLOW_EDIT_CASHOUT_PAYTO_URI=yes
+EOF
+fi
+
+if test -n "${TELESIGN_AUTH_TOKEN:-}"; then
+ cat >>/etc/libeufin/libeufin-bank.conf <<EOF
+TAN_SMS=libeufin-tan-sms.sh
+TAN_SMS_ENV={"AUTH_TOKEN":"$TELESIGN_AUTH_TOKEN"}
+EOF
+fi
+
+say "Setting up libeufin database..."
+
+libeufin-dbconfig &>> setup.log
+
+say "Setting up libeufin-bank..."
+
+
+say "Setting up libeufin-bank admin account..."
+sudo -u libeufin-bank \
+ libeufin-bank passwd \
+ -c /etc/libeufin/libeufin-bank.conf \
+ admin "${BANK_ADMIN_PASSWORD}" &>> setup.log
+
+say "Setting up admin's debt limit..."
+sudo -u libeufin-bank \
+ libeufin-bank edit-account \
+ -c /etc/libeufin/libeufin-bank.conf \
+ admin --debit_threshold=${CURRENCY}:200000000 &>> setup.log
+
+say "Setting up SPA configuration..."
+echo "settings = { bankName: \"${BANK_NAME}\" }" >/etc/libeufin/settings.js
+
+say "Create exchange account..."
+if test -z "${EXCHANGE_PAYTO:-}"; then
+ # FIXME create-account should have a way to update the password if the account already exists
+ EXCHANGE_PAYTO_NEW="$(sudo -u libeufin-bank libeufin-bank create-account -c /etc/libeufin/libeufin-bank.conf --username exchange --password "${BANK_EXCHANGE_PASSWORD}" --name Exchange --exchange 2>> setup.log)?receiver-name=Exchange"
+ echo "EXCHANGE_PAYTO=\"${EXCHANGE_PAYTO_NEW}\"" >> config/user.conf
+fi
+
+
+say "Start the bank..."
+systemctl enable libeufin-bank &>> setup.log
+systemctl restart libeufin-bank &>> setup.log
+
+say "Waiting for the bank (/config)..."
+curl -sS --max-time 2 \
+  --retry-all-errors \
+  --retry-delay 2 \
+  --retry 10 \
+  ${PROTO}://bank.${DOMAIN_NAME}/config &>> setup.log
+
+if test ${DO_CONVERSION} == y; then
+say "Setting conversion rates to 1:1 ..."
+# TODO only set the conversion rates if none have been configured yet
+curl -sS -u "admin:${BANK_ADMIN_PASSWORD}" \
+  -H 'Content-Type: application/json; charset=utf-8' \
+  ${PROTO}://bank.${DOMAIN_NAME}/conversion-info/conversion-rate \
+--data-binary @- &>> setup.log << EOF
+{
+  "cashin_ratio": "1",
+  "cashin_fee": "${CURRENCY}:0",
+  "cashin_tiny_amount": "${CURRENCY}:0.01",
+  "cashin_rounding_mode": "nearest",
+  "cashin_min_amount": "${FIAT_CURRENCY}:1",
+  "cashout_ratio": "1",
+  "cashout_fee": "${FIAT_CURRENCY}:0",
+  "cashout_tiny_amount": "${FIAT_CURRENCY}:0.01",
+  "cashout_rounding_mode": "nearest",
+  "cashout_min_amount": "${CURRENCY}:1"
+}
+EOF
+fi
+
+say "LibEuFin setup finished"
diff --git a/regional-currency/setup-merchant.sh b/regional-currency/setup-merchant.sh
new file mode 100755
index 0000000..a892b7a
--- /dev/null
+++ b/regional-currency/setup-merchant.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+set -eu
+
+source functions.sh
+source config/user.conf
+source config/internal.conf
+
+say "Setting up merchant database"
+taler-merchant-dbconfig &>> setup.log
+
+say "Launching taler-merchant-httpd"
+systemctl enable taler-merchant.target &>> setup.log
+systemctl restart taler-merchant.target &>> setup.log
diff --git a/regional-currency/upgrade.sh b/regional-currency/upgrade.sh
new file mode 100755
index 0000000..8924a77
--- /dev/null
+++ b/regional-currency/upgrade.sh
@@ -0,0 +1,30 @@
+#!/bin/bash
+# This file is in the public domain.
+set -eu
+
+echo "Fetching package list..."
+apt-get update
+
+echo -n "Stopping Taler services..."
+systemctl disable --now taler-exchange.target &>> upgrade.log
+systemctl disable --now taler-merchant.target &>> upgrade.log
+systemctl disable --now libeufin-bank &>> upgrade.log
+systemctl disable --now libeufin-nexus.target &>> upgrade.log
+echo " OK"
+
+echo "Upgrading packages..."
+apt-get upgrade
+
+echo "Upgrading databases..."
+libeufin-dbconfig &>> upgrade.log
+taler-exchange-dbconfig &>> upgrade.log
+taler-merchant-dbconfig &>> upgrade.log
+
+echo -n "Restarting Taler services..."
+systemctl enable --now taler-exchange.target &>> upgrade.log
+systemctl enable --now taler-merchant.target &>> upgrade.log
+systemctl enable --now libeufin-bank &>> upgrade.log
+systemctl enable --now libeufin-nexus.target &>> upgrade.log
+echo " OK"
+
+exit 0
diff --git a/regional-currency/vagrant/.gitignore b/regional-currency/vagrant/.gitignore
new file mode 100644
index 0000000..8000dd9
--- /dev/null
+++ b/regional-currency/vagrant/.gitignore
@@ -0,0 +1 @@
+.vagrant
diff --git a/regional-currency/vagrant/README b/regional-currency/vagrant/README
new file mode 100644
index 0000000..e9387d3
--- /dev/null
+++ b/regional-currency/vagrant/README
@@ -0,0 +1,2 @@
+This folder contains a vagrant configuration (https://developer.hashicorp.com/vagrant)
+that allows us to easily spin up a virtual machine to test the setup instructions.
diff --git a/regional-currency/vagrant/Vagrantfile b/regional-currency/vagrant/Vagrantfile
new file mode 100644
index 0000000..7cb3574
--- /dev/null
+++ b/regional-currency/vagrant/Vagrantfile
@@ -0,0 +1,77 @@
+# -*- mode: ruby -*-
+# vi: set ft=ruby :
+
+# All Vagrant configuration is done below. The "2" in Vagrant.configure
+# configures the configuration version (we support older styles for
+# backwards compatibility). Please don't change it unless you know what
+# you're doing.
+Vagrant.configure("2") do |config|
+ # The most common configuration options are documented and commented below.
+ # For a complete reference, please see the online documentation at
+ # https://docs.vagrantup.com.
+
+ # Every Vagrant development environment requires a box. You can search for
+ # boxes at https://vagrantcloud.com/search.
+ config.vm.box = "ubuntu/kinetic64"
+
+ config.ssh.forward_agent = true
+ config.ssh.forward_x11 = true
+
+ # Disable automatic box update checking. If you disable this, then
+ # boxes will only be checked for updates when the user runs
+ # `vagrant box outdated`. This is not recommended.
+ # config.vm.box_check_update = false
+
+ # Create a forwarded port mapping which allows access to a specific port
+ # within the machine from a port on the host machine. In the example below,
+ # accessing "localhost:8080" will access port 80 on the guest machine.
+ # NOTE: This will enable public access to the opened port
+ # config.vm.network "forwarded_port", guest: 80, host: 8080
+
+ # Create a forwarded port mapping which allows access to a specific port
+ # within the machine from a port on the host machine and only allow access
+ # via 127.0.0.1 to disable public access
+ # config.vm.network "forwarded_port", guest: 80, host: 8080, host_ip: "127.0.0.1"
+
+ # Create a private network, which allows host-only access to the machine
+ # using a specific IP.
+ # config.vm.network "private_network", ip: "192.168.33.10"
+
+ # Create a public network, which generally matched to bridged network.
+ # Bridged networks make the machine appear as another physical device on
+ # your network.
+ # config.vm.network "public_network"
+
+ # Share an additional folder to the guest VM. The first argument is
+ # the path on the host to the actual folder. The second argument is
+ # the path on the guest to mount the folder. And the optional third
+ # argument is a set of non-required options.
+ # config.vm.synced_folder "../data", "/vagrant_data"
+
+ # Provider-specific configuration so you can fine-tune various
+ # backing providers for Vagrant. These expose provider-specific options.
+ # Example for VirtualBox:
+ #
+ config.vm.provider "virtualbox" do |vb|
+ # Display the VirtualBox GUI when booting the machine
+ vb.gui = true
+
+ # Customize the amount of memory on the VM:
+ vb.memory = "4096"
+
+ # Required, or wayland doesn't seem to work
+ vb.customize ['modifyvm', :id, '--graphicscontroller', 'vmsvga']
+ end
+ #
+ # View the documentation for the provider you are using for more
+ # information on available options.
+
+ # Enable provisioning with a shell script. Additional provisioners such as
+ # Ansible, Chef, Docker, Puppet and Salt are also available. Please see the
+ # documentation for more information about their specific syntax and use.
+ config.vm.provision "shell", reboot: true, inline: <<-SHELL
+ apt-get update
+ apt-get upgrade -y
+ apt-get install -y ubuntu-desktop gnome-shell firefox virtualbox-guest-additions-iso git
+ SHELL
+end
diff --git a/regional-currency/withdraw.sh b/regional-currency/withdraw.sh
new file mode 100755
index 0000000..c0896e5
--- /dev/null
+++ b/regional-currency/withdraw.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+if test "$1" = "--help" || test "$1" = "-h"; then
+ echo "./withdraw [RESERVE_PUB]"
+ echo
+ echo "Injects one incoming CHF payment into nexus database"
+ echo "in order to trigger a Taler withdrawal. The reserve"
+ echo "pub can be passed either as the first parameter, or"
+ echo "it'll be generated by the CLI wallet. In both cases,"
+ echo "the exchange to withdraw from is \$PROTO://exchange.\$DOMAIN"
+
+ exit 0
+fi
+
+RESERVE_PUB="$1" # maybe passed
+set -eu
+
+. config/user.conf # DOMAIN_NAME, CURRENCY & FIAT_CURRENCY
+. config/internal.conf # PROTO
+
+NEXUS_CONFIG_FILE=/etc/libeufin/libeufin-nexus.conf
+if test -z "$RESERVE_PUB"; then
+ RESERVE_PUB=$(taler-wallet-cli \
+ api 'acceptManualWithdrawal' \
+ '{"exchangeBaseUrl":"'${PROTO}'://exchange.'$DOMAIN_NAME'",
+ "amount":"'$CURRENCY':5"
+ }' | jq -r .result.reservePub)
+fi
+DEBTOR_IBAN="CH8389144317421994586"
+sudo -i -u libeufin-nexus libeufin-nexus testing fake-incoming -L DEBUG --subject "$RESERVE_PUB" --amount "$FIAT_CURRENCY:5" "payto://iban/$DEBTOR_IBAN"
+
+taler-wallet-cli run-until-done
diff --git a/selenium/launch_selenium_test b/selenium/launch_selenium_test
deleted file mode 100755
index 12e35ca..0000000
--- a/selenium/launch_selenium_test
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/bash
-
-ulimit -v 6000000
-
-set -eu
-
-# clean /tmp
-rm -fr /tmp/.org.chromium*
-
-cd $HOME/wallet-webex
-git clean -fdx
-
-git fetch
-# reset to updated upstream branch, but only if we're tracking a branch
-branch=$(git rev-parse --abbrev-ref --symbolic-full-name @{u} 2>/dev/null || echo HEAD)
-git reset --hard "$branch"
-
-git submodule update --remote
-
-./configure && make
-
-# call python3 selenium script
-python3 $HOME/wallet-webex/selenium/withdraw_buy.py --ext-unpacked=$HOME/wallet-webex
diff --git a/splitops/README.md b/splitops/README.md
new file mode 100644
index 0000000..7a92b9b
--- /dev/null
+++ b/splitops/README.md
@@ -0,0 +1,128 @@
+# Splitops
+================
+
+Splitops is a script to allow execution of commands only after the approval of
+multiple users.
+
+It is intended to be used with OpenSSH by specifying it as the "command" option
+for authorized users in `~/.ssh/authorized_keys` (explained below).
+
+Server requirements:
+=====================
+
+GNU/linux server.
+The Splitops program is written in the Python programming language, so you will need to have installed the
+python3 package in your server, please check in your terminal by typing "python3 -V", to see if you have this package already installed.
+
+Please do before using the program
+=====================================
+
+1) From your local computer git clone the deployment.git repository, and within the "splitops" folder,
+copy the file "splitops" from your computer to the remote server (path:/usr/local/bin/). There is no need for you
+to clone the whole deployment.git repository from the remote server to grab this program.
+
+You can use the command "scp" for this.
+
+scp /home/user/deployment/splitops/splitops root@server:/usr/local/bin/
+
+2) In your remote server SSH configuration (/etc/ssh/sshd_config),
+please make sure the option "PubkeyAuthentication yes" is
+uncommented. If not, uncomment the line, and --reload your ssh service.
+
+3) In your /root/.ssh/authorized_keys, please add the next lines:
+
+command="/usr/local/bin/splitops alice" [... key of alice ...]
+command="/usr/local/bin/splitops bob" [... key of bob ...]
+
+(one for each user, that you want to have approval from for each command request)
+
+"Alice" and "Bob" are just usernames, they don't need to be real user system accounts in the remote server.
+
+The way the splitops program works, is by associating a public SSH key to a username, but this username doesn't need to match,
+with the real username you are using for a specific SSH public key.
+
+Having these 2 lines in the .ssh/authorized_key files, will *force* --anyone trying to login in as root through SSH,
+to execute the program: "splitops", so any other user not listed in this root/.ssh/authorized_key file, won't be able to do anything.
+
+4) Logout from the remote server, and from your client machine, in order to start using the program Splitops,
+try to "request" your very first command. This is done by using the splitops sub-command "propose".
+
+e.g: ssh root@server propose rm -rf /opt/something
+
+After executing this, you will have some sort of answer from the server such as next:
+
+- authenticated as: bob
+- requested command: ['rm', '-rf', '/opt/something']
+- assigned id: ccafbd
+
+That means, your new command request, is waiting for approval.
+
+Now, you (if you are either Alice or Bob), you can "approve" this command request, and afterwards wait for
+others to do the same thing (until the approval is complete for the number of users listed on the .ssh/authorized keys of the remote server).
+
+Let's say you are Bob for now,
+---------------------------------
+
+bob@computer:~$ ssh root@server get # To see if you have anything pending to approve
+
+And you will see a list of pending requests. Now get the ID of any request you want to approve,
+let's say "ccafbd", then type:
+
+bob@computer:~$ ssh root@server approve ccafbd
+
+Now you have to ask Alice, to approve the same request "ccafbd"
+
+Alice will do,
+
+alice@computer:~$ ssh root@server approve ccafbd
+
+And finally you will be able to --remotely execute "as root", that specific and --approved command:
+
+bob@computer:~$ ssh root@server run ccafbd
+
+Summary
+========
+
+Once the production remote server is up and running using the "splitops" command, a routine for you as "double-check systems administrator"
+would be to use the next splitops sub-commands (get, propose, approve, discard, run...)
+
+alice@computer:~$ ssh root@server get # To see if you have any pending commands to approve
+alice@computer:~$ ssh root@server approve "some ID" # To approve a specific command
+alice@computer:~$ ssh root@server run "some ID" # To run a specific approved command
+alice@computer:~$ ssh root@server propose cp /opt/something /usr/local/something # (and wait)
+
+e.g 1) IF you try to "run" a command, but you don't have enough approvals, you will receive some output as this:
+
+- authenticated as: --user=javier
+- running command with ID bcb6a5
+- not enough approvals, got 1 but need 2
+
+e.g 2) If you try to connect to the remote server as --root, and without using any Splitops sub-command, you will receive the
+next answer from the server:
+
+alice@computer:~$ ssh root@server
+- authenticated as: --user=alice
+- no command provided, try help
+- Connection to 78.141.227.64 closed.
+
+
+Full command list
+===================
+CMDID= The hash assigned to each command request.
+
+whoami: Check authentication
+propose CMD ...: Propose a new command
+get: Get the currently proposed command
+approve CMDID: Approve a command
+run CMDID: Run a sufficiently approved command
+discard: Discard the currently proposed command
+
+Please remember you have to use these sub-commands remotely from your computer, towards the remote server, by using the "root" user.
+Or with the user of your choice, who has on its .ssh/authorized_keys file, the users list, with the OpenSSH "command= ..." option.
+
+[*]- Also take into account that if any other user "proposes" a new command, the last "proposed" command will be discarded. The program doesn't use a stack
+to store all requests, it works with just 1 single request. So even if a specific CMDID has been approved by several users, you won't be able
+to run it if a new command --proposal has been requested.
+
+[*] We encourage all users interested in using the Splitops command to use it first with a --normal user, and not with the root user.
+So this way you can learn how the program behaves, without losing the connection with your server as the root user.
diff --git a/splitops/splitops b/splitops/splitops
new file mode 100755
index 0000000..5972887
--- /dev/null
+++ b/splitops/splitops
@@ -0,0 +1,143 @@
+#!/usr/bin/env python3
+
+"""
+This script is intended to be used as a SSH command wrapper.
+
+It allows users to propose a command that should be run.
+The command will only be executed after a threshold of
+other users has approved the command.
+"""
+
+import os
+import shlex
+import sys
+import json
+from pathlib import Path
+import uuid
+from dataclasses import dataclass
+import subprocess
+
+# Approval threshold, including the approval
+# of the proposer.
+APPROVAL_THRESHOLD = 2
+
+cmdpath = Path.home() / "cmd.json"
+
+def write_cmd(d):
+ with open(cmdpath, "w") as f:
+ f.write(json.dumps(d))
+
+def read_cmd():
+ try:
+ with open(cmdpath, "r") as f:
+ return json.load(f)
+ except FileNotFoundError:
+ return None
+
+def propose(cmd):
+ request_id = uuid.uuid4().hex.lower()[0:6]
+ for x in cmd:
+ if not x.isascii():
+ print("requested command not ascii")
+ sys.exit(4)
+ print(f"requested command: {cmd}")
+ write_cmd({"cmd": cmd, "request_id": request_id})
+ print(f"assigned id: {request_id}")
+
+def approve(my_user, request_id):
+ print(f"approving command {request_id} as {my_user}")
+ d = read_cmd()
+ if d is None:
+ print("no command proposed")
+ sys.exit(1)
+ if d["request_id"] != request_id:
+ print("request ID does not match")
+ sys.exit(1)
+ approved_by = d.get("approved_by", [])
+ if my_user not in approved_by:
+ approved_by.append(my_user)
+ d["approved_by"] = approved_by
+ write_cmd(d)
+
+def run(request_id):
+ print(f"running command with ID {request_id}")
+ d = read_cmd()
+ if d is None:
+ print("no command proposed")
+ sys.exit(1)
+ if d["request_id"] != request_id:
+ print("request ID does not match")
+ sys.exit(1)
+ approved_by = d.get("approved_by", [])
+ num_approvals = len(approved_by)
+ if num_approvals < APPROVAL_THRESHOLD:
+ print(f"not enough approvals, got {num_approvals} but need {APPROVAL_THRESHOLD}")
+ sys.exit(1)
+ if d.get("executed", False):
+ print("command has already been executed once, please request again")
+ sys.exit(1)
+ cmd = d["cmd"]
+ d["executed"] = True
+ # Mark as executed, can only execute once!
+ write_cmd(d)
+ print("running command", cmd)
+ res = subprocess.run(cmd, capture_output=True, encoding="utf-8")
+ print(f"==stdout==\n{res.stdout}====")
+ print(f"==stderr==\n{res.stderr}====")
+ print(f"exit code: {res.returncode}")
+ # FIXME: Write log to disk?
+
+
+def usage():
+ print("Commands:")
+ print(" whoami: Check authentication.")
+ print(" propose CMD...: Propose a new command.")
+ print(" get: Get the currently proposed command.")
+ print(" approve CMDID: Approve a command.")
+ print(" run CMDID: Run a sufficiently approved command.")
+ print(" discard: Discard the currently proposed command.")
+ sys.exit(1)
+
+def die(msg):
+ print(msg)
+ sys.exit(2)
+
+def main():
+ if len(sys.argv) != 2:
+ die("unexpected usage")
+ user = sys.argv[1]
+ os_user = os.environ["USER"]
+ print(f"authenticated as: {user}")
+ inner_cmd = os.environ.get("SSH_ORIGINAL_COMMAND")
+ if inner_cmd is None:
+ print("no command provided, try help")
+ sys.exit(3)
+ inner_args = shlex.split(inner_cmd)
+ if len(inner_args) < 1:
+ usage()
+ subcommand = inner_args[0]
+ if subcommand == "discard":
+ cmdpath.unlink()
+ elif subcommand == "whoami":
+ print(f"you are {user} on {os_user}")
+ elif subcommand == "propose":
+ propose(inner_args[1:])
+ elif subcommand == "get":
+ print(read_cmd())
+ elif subcommand == "help":
+ usage()
+ elif subcommand == "run":
+ if len(inner_args) != 2:
+ usage()
+ run(inner_args[1])
+ elif subcommand == "approve":
+ if len(inner_args) != 2:
+ usage()
+ approve(user, inner_args[1])
+ else:
+ print(f"unknown subcommand {subcommand}")
+ usage()
+
+if __name__ == '__main__':
+ main()
+
diff --git a/systemd-services/buildbot-worker-wallet.service b/systemd-services/buildbot-worker-codespell.service
index 8cd9647..bd3151b 100644
--- a/systemd-services/buildbot-worker-wallet.service
+++ b/systemd-services/buildbot-worker-codespell.service
@@ -1,9 +1,9 @@
[Unit]
-Description=Buildbot worker service for wallet
-AssertPathExists=/home/walletbuilder/worker
+Description=Buildbot worker service for codespell
+AssertPathExists=/home/codespell/worker
[Service]
-WorkingDirectory=/home/walletbuilder/
+WorkingDirectory=/home/codespell/
ExecStart=/usr/bin/buildbot-worker start --nodaemon worker
ExecReload=/usr/bin/buildbot-worker restart --nodaemon worker
ExecStop=/usr/bin/buildbot-worker stop worker
diff --git a/systemd-services/buildbot-worker-auditor.service b/systemd-services/buildbot-worker-compilecheck.service
index 44254b4..d0df3d7 100644
--- a/systemd-services/buildbot-worker-auditor.service
+++ b/systemd-services/buildbot-worker-compilecheck.service
@@ -1,5 +1,5 @@
[Unit]
-Description=Buildbot worker service for auditor
+Description=Buildbot worker service for compile and check
AssertPathExists=%h/worker
[Service]
diff --git a/systemd-services/buildbot-worker-container.service b/systemd-services/buildbot-worker-container.service
new file mode 100644
index 0000000..70d57c4
--- /dev/null
+++ b/systemd-services/buildbot-worker-container.service
@@ -0,0 +1,13 @@
+[Unit]
+Description=Buildbot worker service for container worker
+AssertPathExists=/home/container-worker/worker
+
+[Service]
+WorkingDirectory=/home/container-worker/
+ExecStart=/usr/bin/buildbot-worker start --nodaemon worker
+ExecReload=/usr/bin/buildbot-worker restart --nodaemon worker
+ExecStop=/usr/bin/buildbot-worker stop worker
+Restart=always
+
+[Install]
+WantedBy=default.target
diff --git a/systemd-services/buildbot-worker-linkchecker.service b/systemd-services/buildbot-worker-linkchecker.service
new file mode 100644
index 0000000..5e4a58f
--- /dev/null
+++ b/systemd-services/buildbot-worker-linkchecker.service
@@ -0,0 +1,13 @@
+[Unit]
+Description=Buildbot worker service for linkchecker
+AssertPathExists=/home/linkchecker/worker
+
+[Service]
+WorkingDirectory=/home/linkchecker/
+ExecStart=/usr/bin/buildbot-worker start --nodaemon worker
+ExecReload=/usr/bin/buildbot-worker restart --nodaemon worker
+ExecStop=/usr/bin/buildbot-worker stop worker
+Restart=always
+
+[Install]
+WantedBy=default.target
diff --git a/systemd-services/buildbot-worker-packaging.service b/systemd-services/buildbot-worker-packaging.service
new file mode 100644
index 0000000..4d907f2
--- /dev/null
+++ b/systemd-services/buildbot-worker-packaging.service
@@ -0,0 +1,13 @@
+[Unit]
+Description=Buildbot worker service for creating Debian and Ubuntu packages
+AssertPathExists=%h/packaging-worker
+
+[Service]
+WorkingDirectory=%h
+ExecStart=/usr/bin/buildbot-worker start --nodaemon packaging-worker
+ExecReload=/usr/bin/buildbot-worker restart --nodaemon packaging-worker
+ExecStop=/usr/bin/buildbot-worker stop packaging-worker
+Restart=always
+
+[Install]
+WantedBy=default.target
diff --git a/systemd-services/tips-checker.service b/systemd-services/tips-checker.service
deleted file mode 100644
index 22322be..0000000
--- a/systemd-services/tips-checker.service
+++ /dev/null
@@ -1,13 +0,0 @@
-[Unit]
-Description=Buildbot worker to check tip reserves
-AssertPathExists=%h/tips-checker-dir
-
-[Service]
-WorkingDirectory=%h
-ExecStart=/usr/bin/buildbot-worker start --nodaemon tips-checker-dir
-ExecReload=/usr/bin/buildbot-worker restart --nodaemon tips-checker-dir
-ExecStop=/usr/bin/buildbot-worker stop tips-checker-dir
-Restart=always
-
-[Install]
-WantedBy=default.target
diff --git a/taler-arm/anastasis.conf b/taler-arm/anastasis.conf
deleted file mode 100644
index 43921f6..0000000
--- a/taler-arm/anastasis.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[anastasis]
-TYPE = simple
-BINARY = anastasis-httpd
-OPTIONS = -l $HOME/logs/anastasis-%Y-%m-%d.log
diff --git a/taler-arm/arm.conf b/taler-arm/arm.conf
deleted file mode 100644
index 8c6363d..0000000
--- a/taler-arm/arm.conf
+++ /dev/null
@@ -1,19 +0,0 @@
-[arm]
-# PORT = 2087
-HOSTNAME = localhost
-BINARY = gnunet-service-arm
-ACCEPT_FROM = 127.0.0.1;
-ACCEPT_FROM6 = ::1;
-
-# Special case, uses user runtime dir even for per-system service.
-UNIXPATH = $GNUNET_USER_RUNTIME_DIR/gnunet-service-arm.sock
-UNIX_MATCH_UID = YES
-UNIX_MATCH_GID = YES
-
-# In the "-l" option, format characters from 'strftime' are allowed;
-# In the GLOBAL_POSTFIX, "{}" stands for the name of the respective
-# service. Thus the following option would introduce per-service
-# logging with a new log file each day. Note that only the last 3
-# log files are preserved.
-# GLOBAL_POSTFIX = -l $GNUNET_CACHE_HOME/{}-%Y-%m-%d.log
-GLOBAL_PREFIX =
diff --git a/taler-arm/defaults.conf b/taler-arm/defaults.conf
deleted file mode 100644
index f12f805..0000000
--- a/taler-arm/defaults.conf
+++ /dev/null
@@ -1,20 +0,0 @@
-[PATHS]
-GNUNET_HOME = $HOME
-
-# Persistant data storage
-GNUNET_DATA_HOME = ${XDG_DATA_HOME:-$GNUNET_HOME/.local/share}/gnunet/
-
-# Configuration files
-GNUNET_CONFIG_HOME = ${XDG_CONFIG_HOME:-$GNUNET_HOME/.config}/gnunet/
-
-# Cached data, no big deal if lost
-GNUNET_CACHE_HOME = ${XDG_CACHE_HOME:-$GNUNET_HOME/.cache}/gnunet/
-
-GNUNET_RUNTIME_DIR = ${TMPDIR:-${TMP:-/tmp}}/gnunet-system-runtime/
-
-# Runtime data for per-user services
-GNUNET_USER_RUNTIME_DIR = ${TMPDIR:-${TMP:-/tmp}}/gnunet-${USERHOME:-${USER:-user}}-runtime/
-
-[arm]
-# FIXME: does this work? (if not, need to fix arm_api.c...)
-OPTIONS = -l $HOME/logs/arm-%Y-%m-%d.log
diff --git a/taler-arm/libeufin-nexus.conf b/taler-arm/libeufin-nexus.conf
deleted file mode 100644
index d5dff80..0000000
--- a/taler-arm/libeufin-nexus.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[libeufin-nexus]
-TYPE = simple
-BINARY = taler-log-adapter
-OPTIONS = $HOME/logs/nexus-%Y-%m-%d.log libeufin-nexus serve --port=5222
diff --git a/taler-arm/libeufin-sandbox.conf b/taler-arm/libeufin-sandbox.conf
deleted file mode 100644
index 1acd036..0000000
--- a/taler-arm/libeufin-sandbox.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[libeufin-sandbox]
-TYPE = simple
-BINARY = taler-log-adapter
-OPTIONS = $HOME/logs/sandbox-%Y-%m-%d.log libeufin-sandbox serve --port=5111
diff --git a/taler-arm/taler-aggregator.conf b/taler-arm/taler-aggregator.conf
deleted file mode 100644
index 6ef7d57..0000000
--- a/taler-arm/taler-aggregator.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-aggregator]
-TYPE = simple
-BINARY = taler-exchange-aggregator
-OPTIONS = -l $HOME/logs/aggregator-%Y-%m-%d.log
diff --git a/taler-arm/taler-auditor.conf b/taler-arm/taler-auditor.conf
deleted file mode 100644
index cf8ba14..0000000
--- a/taler-arm/taler-auditor.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-auditor]
-TYPE = simple
-BINARY = taler-auditor-httpd
-OPTIONS = -l $HOME/logs/auditor-%Y-%m-%d.log
diff --git a/taler-arm/taler-blog.conf b/taler-arm/taler-blog.conf
deleted file mode 100644
index 9a5bcc5..0000000
--- a/taler-arm/taler-blog.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-blog]
-TYPE = simple
-BINARY = taler-log-adapter
-OPTIONS = $HOME/logs/blog-%Y-%m-%d.log taler-merchant-demos blog
diff --git a/taler-arm/taler-closer.conf b/taler-arm/taler-closer.conf
deleted file mode 100644
index 51efbff..0000000
--- a/taler-arm/taler-closer.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-closer]
-TYPE = simple
-BINARY = taler-exchange-closer
-OPTIONS = -l $HOME/logs/closer-%Y-%m-%d.log
diff --git a/taler-arm/taler-demobank.conf b/taler-arm/taler-demobank.conf
deleted file mode 100644
index 0346d30..0000000
--- a/taler-arm/taler-demobank.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-demobank]
-TYPE = simple
-BINARY = taler-log-adapter
-OPTIONS = $HOME/logs/bank-%Y-%m-%d.log taler-bank-manage serve
diff --git a/taler-arm/taler-donations.conf b/taler-arm/taler-donations.conf
deleted file mode 100644
index 222f26f..0000000
--- a/taler-arm/taler-donations.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-donations]
-TYPE = simple
-BINARY = taler-log-adapter
-OPTIONS = $HOME/logs/donations-%Y-%m-%d.log taler-merchant-demos donations
diff --git a/taler-arm/taler-exchange-secmod-eddsa.conf b/taler-arm/taler-exchange-secmod-eddsa.conf
deleted file mode 100644
index b83c6cf..0000000
--- a/taler-arm/taler-exchange-secmod-eddsa.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-exchange-secmod-eddsa]
-TYPE = simple
-BINARY = taler-exchange-secmod-eddsa
-OPTIONS = -l $HOME/logs/crypto-eddsa-%Y-%m-%d.log
diff --git a/taler-arm/taler-exchange-secmod-rsa.conf b/taler-arm/taler-exchange-secmod-rsa.conf
deleted file mode 100644
index 32fcc56..0000000
--- a/taler-arm/taler-exchange-secmod-rsa.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-exchange-secmod-rsa]
-TYPE = simple
-BINARY = taler-exchange-secmod-rsa
-OPTIONS = -l $HOME/logs/crypto-rsa-%Y-%m-%d.log
diff --git a/taler-arm/taler-exchange-wirewatch.conf b/taler-arm/taler-exchange-wirewatch.conf
deleted file mode 100644
index 9595f88..0000000
--- a/taler-arm/taler-exchange-wirewatch.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-exchange-wirewatch]
-TYPE = simple
-BINARY = taler-exchange-wirewatch
-OPTIONS = -L INFO -l $HOME/logs/wirewatch-%Y-%m-%d.log
diff --git a/taler-arm/taler-exchange.conf b/taler-arm/taler-exchange.conf
deleted file mode 100644
index 08d5a0d..0000000
--- a/taler-arm/taler-exchange.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-exchange]
-TYPE = simple
-BINARY = taler-exchange-httpd
-OPTIONS = -l $HOME/logs/exchange-%Y-%m-%d.log
diff --git a/taler-arm/taler-landing.conf b/taler-arm/taler-landing.conf
deleted file mode 100644
index 6517125..0000000
--- a/taler-arm/taler-landing.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-landing]
-TYPE = simple
-BINARY = taler-log-adapter
-OPTIONS = $HOME/logs/landing-%Y-%m-%d.log taler-merchant-demos landing
diff --git a/taler-arm/taler-merchant.conf b/taler-arm/taler-merchant.conf
deleted file mode 100644
index acfd354..0000000
--- a/taler-arm/taler-merchant.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-merchant]
-TYPE = simple
-BINARY = taler-merchant-httpd
-OPTIONS = -l $HOME/logs/merchant-%Y-%m-%d.log
diff --git a/taler-arm/taler-postgres-standalone.conf b/taler-arm/taler-postgres-standalone.conf
deleted file mode 100644
index 053d4df..0000000
--- a/taler-arm/taler-postgres-standalone.conf
+++ /dev/null
@@ -1,8 +0,0 @@
-[taler-postgres-standalone]
-TYPE = simple
-BINARY = /usr/lib/postgresql/9.6/bin/postgres
-
-# -D: database configuration files
-# -k: directory hosting the database's listening domain sockets
-# -h "": turns off the TCP/IP layer
-OPTIONS = -D $HOME/talerdb -k $HOME/sockets -h ""
diff --git a/taler-arm/taler-survey.conf b/taler-arm/taler-survey.conf
deleted file mode 100644
index a6de783..0000000
--- a/taler-arm/taler-survey.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-survey]
-TYPE = simple
-BINARY = taler-log-adapter
-OPTIONS = $HOME/logs/survey-%Y-%m-%d.log taler-merchant-demos survey
diff --git a/taler-arm/taler-sync.conf b/taler-arm/taler-sync.conf
deleted file mode 100644
index 0ea8d7e..0000000
--- a/taler-arm/taler-sync.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-sync]
-TYPE = simple
-BINARY = sync-httpd
-OPTIONS = -l $HOME/logs/sync-%Y-%m-%d.log
diff --git a/taler-arm/taler-transfer.conf b/taler-arm/taler-transfer.conf
deleted file mode 100644
index bb97c2d..0000000
--- a/taler-arm/taler-transfer.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-transfer]
-TYPE = simple
-BINARY = taler-exchange-transfer
-OPTIONS = -l $HOME/logs/transfer-%Y-%m-%d.log
diff --git a/taler-arm/taler-twister-bank.conf b/taler-arm/taler-twister-bank.conf
deleted file mode 100644
index f542064..0000000
--- a/taler-arm/taler-twister-bank.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-twister-bank]
-TYPE = simple
-BINARY = taler-twister-service
-OPTIONS = -l $HOME/logs/twister-bank-%Y-%m-%d.log -c $HOME/.config/twister-bank.conf
diff --git a/taler-arm/taler-twister-exchange.conf b/taler-arm/taler-twister-exchange.conf
deleted file mode 100644
index 8150f57..0000000
--- a/taler-arm/taler-twister-exchange.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-twister-exchange]
-TYPE = simple
-BINARY = taler-twister-service
-OPTIONS = -l $HOME/logs/twister-exchange-%Y-%m-%d.log -c $HOME/.config/twister-exchange.conf
diff --git a/taler-arm/taler-twister.conf b/taler-arm/taler-twister.conf
deleted file mode 100644
index fed6b1f..0000000
--- a/taler-arm/taler-twister.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-twister]
-TYPE = simple
-BINARY = taler-twister-service
-OPTIONS = -l $HOME/logs/twister-%Y-%m-%d.log -c $HOME/.config/taler.conf
diff --git a/taler-sitesbuild/invalidate.sh b/taler-sitesbuild/invalidate.sh
index fbd5321..bb8c22f 100755
--- a/taler-sitesbuild/invalidate.sh
+++ b/taler-sitesbuild/invalidate.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
set -eu
diff --git a/taler-sitesbuild/update_buywith_page.sh b/taler-sitesbuild/update_buywith_page.sh
index 14980a8..a865a6b 100755
--- a/taler-sitesbuild/update_buywith_page.sh
+++ b/taler-sitesbuild/update_buywith_page.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
set -eu
diff --git a/taler-sitesbuild/update_stage.sh b/taler-sitesbuild/update_stage.sh
index 4c9a2b2..19dce15 100755
--- a/taler-sitesbuild/update_stage.sh
+++ b/taler-sitesbuild/update_stage.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
set -eu
@@ -18,7 +18,7 @@ fetch
git submodule update --init --force
# Generate current version under 'work in progress' (wip) folder
./bootstrap
-./configure --variant=stage.taler.net.wip --baseurl="https://stage.taler.net/" --prefix=$HOME
+./configure --baseurl="https://stage.taler.net/" --prefix=$HOME/stage.taler.net.wip
make install
chmod -R g+rx $HOME/stage.taler.net.wip/
# Clean up 'ancient' version (before previous)
diff --git a/taler-sitesbuild/update_twister_page.sh b/taler-sitesbuild/update_twister_page.sh
index 0bbb4e4..6cc3c8c 100755
--- a/taler-sitesbuild/update_twister_page.sh
+++ b/taler-sitesbuild/update_twister_page.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
set -eu
diff --git a/taler-sitesbuild/update_www.sh b/taler-sitesbuild/update_www.sh
index 014c2a9..62acd95 100755
--- a/taler-sitesbuild/update_www.sh
+++ b/taler-sitesbuild/update_www.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
set -eu
@@ -17,7 +17,7 @@ git checkout stable -f
fetch
git submodule update --init --force
./bootstrap
-./configure --variant=www.taler.net.wip --baseurl="https://taler.net/" --prefix=$HOME
+./configure --baseurl="https://taler.net/" --prefix=$HOME/www.taler.net.wip
make install
chmod -R g+rx $HOME/www.taler.net.wip/