summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--README.md2
-rwxr-xr-xaptly/aptly-cleanup.sh6
-rwxr-xr-xaptly/aptly-publish.sh18
-rwxr-xr-xaptly/entr.sh6
-rwxr-xr-xbin/taler-gv1789
-rwxr-xr-xbin/taler-local1840
-rw-r--r--bootstrap-docker/README2
-rwxr-xr-xbootstrap-docker/bootstrap-docker.sh7
-rwxr-xr-xbootstrap-docker/install-plugin-buildx.sh8
-rwxr-xr-xbootstrap-docker/install-plugin-compose.sh8
-rwxr-xr-xbootstrap-docker/install-rootless-docker.sh252
-rwxr-xr-xbuildbot/bootstrap-scripts/bootstrap-walletbuilder38
-rwxr-xr-xbuildbot/build.sh20
-rwxr-xr-xbuildbot/checks.sh70
-rwxr-xr-xbuildbot/create_instances.sh7
-rwxr-xr-xbuildbot/create_tip_reserve.sh7
-rw-r--r--buildbot/linkchecker.Containerfile10
-rwxr-xr-xbuildbot/linkchecker.sh30
-rw-r--r--buildbot/linkcheckerrc306
-rw-r--r--buildbot/master.cfg1213
-rwxr-xr-xbuildbot/restart.sh14
-rwxr-xr-xbuildbot/update-sources.sh2
-rw-r--r--codespell/dictionary.txt60
-rw-r--r--docker/compile-and-check/README26
-rw-r--r--docker/compile-and-check/base/Dockerfile40
-rwxr-xr-xdocker/compile-and-check/base/compile_and_check.sh94
-rw-r--r--docker/compile-and-check/base/util.sh34
-rwxr-xr-xdocker/compile-and-check/build.sh8
-rw-r--r--docker/compile-and-check/config/tags.sh7
-rwxr-xr-xdocker/compile-and-check/interactive.sh8
-rwxr-xr-xdocker/compile-and-check/run.sh8
-rw-r--r--docker/docs-build/Dockerfile27
-rwxr-xr-xdocker/docs-build/build.sh5
-rwxr-xr-xdocker/docs-build/run.sh14
-rw-r--r--docker/dpkg-build/Dockerfile (renamed from dpkg-build/Dockerfile)0
-rwxr-xr-xdocker/dpkg-build/build-ubuntu.sh (renamed from dpkg-build/build-ubuntu.sh)0
-rw-r--r--docker/hybrid/docker-compose.yml20
-rw-r--r--docker/hybrid/images/base/Dockerfile36
-rw-r--r--docker/hybrid/images/exchange/Dockerfile6
-rw-r--r--docker/hybrid/images/exchange/startup.sh42
-rw-r--r--docker/hybrid/images/exchange/taler.conf59
-rw-r--r--docker/hybrid/images/merchant/Dockerfile6
-rw-r--r--docker/hybrid/images/merchant/startup.sh40
-rw-r--r--docker/hybrid/images/merchant/taler.conf24
-rw-r--r--docker/hybrid/images/postgres/Dockerfile4
-rw-r--r--docker/hybrid/images/postgres/init.sql2
-rw-r--r--docker/sites-build/Dockerfile48
-rwxr-xr-xdocker/sites-build/build.sh3
-rwxr-xr-xdocker/sites-build/run.sh6
-rw-r--r--docker/taler-docker/base.Dockerfile177
-rw-r--r--docker/taler-docker/docker-compose.yml255
-rw-r--r--docker/taler-docker/exchange.Dockerfile46
-rw-r--r--docker/taler-docker/merchant.Dockerfile29
-rw-r--r--docker/taler-docker/postgres/Dockerfile14
-rw-r--r--docker/taler-docker/template-auditor.conf65
-rw-r--r--docker/taler-docker/template-bank.conf18
-rw-r--r--docker/taler-docker/template-exchange.conf268
-rw-r--r--docker/taler-docker/template-merchant.conf76
-rw-r--r--docker/taler-docker/wallet.Dockerfile16
-rw-r--r--envcfg.py.template16
-rw-r--r--envcfg/envcfg-demo-2019-11-02-01.py15
-rw-r--r--envcfg/envcfg-demo-2019-12-03-01.py15
-rw-r--r--envcfg/envcfg-demo-2019-12-09-01.py15
-rw-r--r--envcfg/envcfg-demo-2020-11-14.py22
-rw-r--r--envcfg/envcfg-demo-2021-08-18.py14
-rw-r--r--envcfg/envcfg-demo-2021-08-24.py14
-rw-r--r--envcfg/envcfg-demo-2022-08-23.py14
-rw-r--r--envcfg/envcfg.py.template17
-rw-r--r--envcfg/talerconf/euro.taler.conf333
-rw-r--r--gnunet.conf1
-rwxr-xr-xhead.taler.net/entr.sh6
-rw-r--r--head.taler.net/rsyncd.conf13
-rw-r--r--head.taler.net/rsyncd.service12
-rwxr-xr-xhead.taler.net/update-head-deployment.sh12
-rw-r--r--mypy/mypy.ini3
-rwxr-xr-xnetjail/netjail-init.sh10
-rwxr-xr-xnetjail/netjail.sh3
-rw-r--r--netzbon/.gitignore5
-rwxr-xr-xnetzbon/generate-letter.sh40
-rwxr-xr-xnetzbon/generate-qr.sh40
-rw-r--r--netzbon/qr.tex.j213
-rwxr-xr-xnetzbon/render.py49
-rwxr-xr-xnetzbon/setup-merchants.sh39
-rw-r--r--netzbon/template.tex.j279
-rw-r--r--netzbon/template_de.tex.j2103
-rw-r--r--netzbon/test.json7
-rw-r--r--nlnet/task1/Dockerfile32
-rwxr-xr-xnlnet/task1/start.sh15
-rw-r--r--nlnet/task2/Dockerfile29
-rw-r--r--nlnet/task3/Dockerfile15
-rwxr-xr-xnlnet/task3/keys.sh93
-rw-r--r--nlnet/task3/salted-incoming-payment-template.csv2
-rwxr-xr-xnlnet/task3/start.sh79
-rw-r--r--nlnet/task4/Dockerfile42
-rwxr-xr-xnlnet/task4/launch.sh7
-rwxr-xr-xnlnet/task4/start.sh27
-rw-r--r--nlnet/task5/date-range/Dockerfile15
-rw-r--r--nlnet/task5/date-range/start-libeufin.sh35
-rwxr-xr-xnlnet/task5/date-range/start.sh155
-rw-r--r--nlnet/task5/long-poll/Dockerfile14
-rwxr-xr-xnlnet/task5/long-poll/start.sh134
-rw-r--r--nlnet/task5/performance/Dockerfile70
-rwxr-xr-xnlnet/task5/performance/start.sh107
-rw-r--r--packaging/.gitignore2
-rw-r--r--packaging/debian-bookworm/Dockerfile56
-rw-r--r--packaging/debian-bookworm/README16
-rw-r--r--packaging/debian-bookworm/anastasis-build.sh (renamed from packaging/ubuntu-docker/anastasis-build.sh)15
-rw-r--r--packaging/debian-bookworm/exchange-build.sh24
-rw-r--r--packaging/debian-bookworm/gnunet-build.sh (renamed from packaging/ubuntu-docker/gnunet-build.sh)19
-rw-r--r--packaging/debian-bookworm/gnunet-gtk-build.sh (renamed from packaging/debian-docker/taler-build.sh)17
-rw-r--r--packaging/debian-bookworm/libeufin-build.sh24
-rw-r--r--packaging/debian-bookworm/mdb-build.sh22
-rw-r--r--packaging/debian-bookworm/merchant-build.sh24
-rwxr-xr-xpackaging/debian-bookworm/run.sh25
-rw-r--r--packaging/debian-bookworm/sync-build.sh (renamed from packaging/ubuntu-docker/taler-build.sh)19
-rw-r--r--packaging/debian-bookworm/wallet-build.sh (renamed from packaging/ubuntu-docker/wallet-build.sh)21
-rwxr-xr-xpackaging/debian-docker/run.sh16
-rw-r--r--packaging/docker-alpine/.gitignore3
-rw-r--r--packaging/docker-alpine/Makefile48
-rw-r--r--packaging/docker-alpine/README.anastasis.md73
-rw-r--r--packaging/docker-alpine/anastasis-builder.Containerfile24
-rw-r--r--packaging/docker-alpine/anastasis-compose.yml38
-rw-r--r--packaging/docker-alpine/anastasis.Containerfile16
-rwxr-xr-xpackaging/docker-alpine/build-files/anastasis-entrypoint.sh34
-rw-r--r--packaging/docker-alpine/builder.Containerfile17
-rwxr-xr-xpackaging/docker-alpine/contrib/build-image.sh5
-rwxr-xr-xpackaging/docker-alpine/contrib/clean.sh3
-rwxr-xr-xpackaging/docker-alpine/contrib/publish.sh10
-rw-r--r--packaging/docker-alpine/libgnunet-builder.Containerfile34
-rw-r--r--packaging/docker-alpine/libgnunet.Containerfile10
-rw-r--r--packaging/docker-alpine/libmicrohttpd-builder.Containerfile23
-rw-r--r--packaging/docker-alpine/libmicrohttpd.Containerfile9
-rw-r--r--packaging/docker-alpine/libtalerexchange.Containerfile13
-rw-r--r--packaging/docker-alpine/libtalermerchant.Containerfile12
-rw-r--r--packaging/docker-alpine/taler-exchange-builder.Containerfile22
-rw-r--r--packaging/docker-alpine/taler-merchant-builder.Containerfile22
-rw-r--r--packaging/ng/.gitignore2
-rw-r--r--packaging/ng/README.md25
-rwxr-xr-xpackaging/ng/build.sh67
-rw-r--r--packaging/ng/buildconfig/README5
-rw-r--r--packaging/ng/buildconfig/anastasis-gtk.giturl1
-rw-r--r--packaging/ng/buildconfig/anastasis-gtk.tag1
-rw-r--r--packaging/ng/buildconfig/anastasis.giturl1
-rw-r--r--packaging/ng/buildconfig/anastasis.tag1
-rw-r--r--packaging/ng/buildconfig/gnunet-gtk.giturl1
-rw-r--r--packaging/ng/buildconfig/gnunet-gtk.tag1
-rw-r--r--packaging/ng/buildconfig/gnunet.giturl1
-rw-r--r--packaging/ng/buildconfig/gnunet.tag1
-rw-r--r--packaging/ng/buildconfig/libeufin.giturl1
-rw-r--r--packaging/ng/buildconfig/libeufin.tag1
-rw-r--r--packaging/ng/buildconfig/sync.giturl1
-rw-r--r--packaging/ng/buildconfig/sync.tag1
-rw-r--r--packaging/ng/buildconfig/taler-exchange.giturl1
-rw-r--r--packaging/ng/buildconfig/taler-exchange.tag1
-rw-r--r--packaging/ng/buildconfig/taler-harness.giturl1
-rw-r--r--packaging/ng/buildconfig/taler-harness.tag1
-rw-r--r--packaging/ng/buildconfig/taler-merchant-demos.giturl1
-rw-r--r--packaging/ng/buildconfig/taler-merchant-demos.tag1
-rw-r--r--packaging/ng/buildconfig/taler-merchant.giturl1
-rw-r--r--packaging/ng/buildconfig/taler-merchant.tag1
-rw-r--r--packaging/ng/buildconfig/taler-wallet-cli.giturl1
-rw-r--r--packaging/ng/buildconfig/taler-wallet-cli.tag1
-rw-r--r--packaging/ng/buildscripts/generic.sh54
-rw-r--r--packaging/ng/distros/Dockerfile.debian-bookworm37
-rw-r--r--packaging/ng/distros/Dockerfile.ubuntu-kinetic (renamed from packaging/ubuntu-docker/Dockerfile)29
-rwxr-xr-xpackaging/ng/print-latest-versions22
-rw-r--r--packaging/ubuntu-docker/README9
-rwxr-xr-xpackaging/ubuntu-docker/run.sh16
-rw-r--r--packaging/ubuntu-docker/taler-systems.gpg.key41
-rw-r--r--packaging/ubuntu-lunar/.gitignore1
-rw-r--r--packaging/ubuntu-lunar/Dockerfile57
-rw-r--r--packaging/ubuntu-lunar/README19
-rw-r--r--packaging/ubuntu-lunar/anastasis-build.sh (renamed from packaging/debian-docker/anastasis-build.sh)15
-rw-r--r--packaging/ubuntu-lunar/exchange-build.sh24
-rw-r--r--packaging/ubuntu-lunar/gnunet-build.sh (renamed from packaging/debian-docker/gnunet-build.sh)19
-rw-r--r--packaging/ubuntu-lunar/gnunet-gtk-build.sh27
-rw-r--r--packaging/ubuntu-lunar/libeufin-build.sh24
-rw-r--r--packaging/ubuntu-lunar/mdb-build.sh22
-rw-r--r--packaging/ubuntu-lunar/merchant-build.sh24
-rwxr-xr-xpackaging/ubuntu-lunar/run.sh21
-rw-r--r--packaging/ubuntu-lunar/sync-build.sh (renamed from packaging/ubuntu-docker/sync-build.sh)14
-rw-r--r--packaging/ubuntu-lunar/taler.list (renamed from packaging/ubuntu-docker/taler.list)0
-rw-r--r--packaging/ubuntu-lunar/wallet-build.sh (renamed from packaging/debian-docker/wallet-build.sh)21
-rw-r--r--packaging/ubuntu-mantic/Dockerfile54
-rw-r--r--packaging/ubuntu-mantic/README19
-rw-r--r--packaging/ubuntu-mantic/anastasis-build.sh27
-rw-r--r--packaging/ubuntu-mantic/exchange-build.sh25
-rw-r--r--packaging/ubuntu-mantic/gnunet-build.sh29
-rw-r--r--packaging/ubuntu-mantic/gnunet-gtk-build.sh27
-rw-r--r--packaging/ubuntu-mantic/libeufin-build.sh22
-rw-r--r--packaging/ubuntu-mantic/mdb-build.sh22
-rw-r--r--packaging/ubuntu-mantic/merchant-build.sh24
-rwxr-xr-xpackaging/ubuntu-mantic/run.sh21
-rw-r--r--packaging/ubuntu-mantic/sync-build.sh (renamed from packaging/debian-docker/sync-build.sh)14
-rw-r--r--packaging/ubuntu-mantic/taler.list1
-rw-r--r--packaging/ubuntu-mantic/wallet-build.sh28
-rw-r--r--packaging/ubuntu-numbat/Dockerfile53
-rw-r--r--packaging/ubuntu-numbat/README16
-rw-r--r--packaging/ubuntu-numbat/anastasis-build.sh27
-rw-r--r--packaging/ubuntu-numbat/exchange-build.sh25
-rw-r--r--packaging/ubuntu-numbat/gnunet-build.sh29
-rw-r--r--packaging/ubuntu-numbat/gnunet-gtk-build.sh27
-rw-r--r--packaging/ubuntu-numbat/libeufin-build.sh22
-rw-r--r--packaging/ubuntu-numbat/mdb-build.sh22
-rw-r--r--packaging/ubuntu-numbat/merchant-build.sh24
-rwxr-xr-xpackaging/ubuntu-numbat/run.sh21
-rw-r--r--packaging/ubuntu-numbat/sync-build.sh27
-rw-r--r--packaging/ubuntu-numbat/wallet-build.sh28
-rw-r--r--regional-currency/.gitignore2
-rw-r--r--regional-currency/.shellcheckrc1
-rw-r--r--regional-currency/ChangeLog7
-rw-r--r--regional-currency/README2
-rwxr-xr-xregional-currency/config.py491
-rwxr-xr-xregional-currency/config_nginx.sh40
-rwxr-xr-xregional-currency/diagnose.sh125
-rwxr-xr-xregional-currency/functions.sh36
-rwxr-xr-xregional-currency/install_packages.sh84
-rwxr-xr-xregional-currency/list-incoming.sh33
-rwxr-xr-xregional-currency/main.sh104
-rw-r--r--regional-currency/nginx-conf/backend.taler-nginx.conf19
-rw-r--r--regional-currency/nginx-conf/bank.taler-nginx.conf23
-rw-r--r--regional-currency/nginx-conf/exchange.taler-nginx.conf16
-rwxr-xr-xregional-currency/setup-exchange.sh242
-rwxr-xr-xregional-currency/setup-libeufin.sh138
-rwxr-xr-xregional-currency/setup-merchant.sh14
-rwxr-xr-xregional-currency/upgrade.sh30
-rw-r--r--regional-currency/vagrant/.gitignore1
-rw-r--r--regional-currency/vagrant/README2
-rw-r--r--regional-currency/vagrant/Vagrantfile77
-rwxr-xr-xregional-currency/withdraw.sh32
-rwxr-xr-xselenium/launch_selenium_test23
-rw-r--r--splitops/README.md128
-rwxr-xr-xsplitops/splitops143
-rw-r--r--systemd-services/buildbot-worker-codespell.service (renamed from systemd-services/buildbot-worker-wallet.service)6
-rw-r--r--systemd-services/buildbot-worker-compilecheck.service (renamed from systemd-services/buildbot-worker-auditor.service)2
-rw-r--r--systemd-services/buildbot-worker-container.service13
-rw-r--r--systemd-services/buildbot-worker-linkchecker.service13
-rw-r--r--systemd-services/buildbot-worker-packaging.service13
-rw-r--r--systemd-services/tips-checker.service13
-rw-r--r--taler-arm/arm.conf19
-rw-r--r--taler-arm/defaults.conf20
-rw-r--r--taler-arm/libeufin-nexus.conf4
-rw-r--r--taler-arm/libeufin-sandbox.conf4
-rw-r--r--taler-arm/taler-aggregator.conf4
-rw-r--r--taler-arm/taler-auditor.conf4
-rw-r--r--taler-arm/taler-blog.conf4
-rw-r--r--taler-arm/taler-closer.conf4
-rw-r--r--taler-arm/taler-donations.conf4
-rw-r--r--taler-arm/taler-exchange-secmod-cs.conf4
-rw-r--r--taler-arm/taler-exchange-secmod-eddsa.conf4
-rw-r--r--taler-arm/taler-exchange-secmod-rsa.conf4
-rw-r--r--taler-arm/taler-exchange-wirewatch.conf4
-rw-r--r--taler-arm/taler-exchange.conf4
-rw-r--r--taler-arm/taler-landing.conf4
-rw-r--r--taler-arm/taler-merchant.conf4
-rw-r--r--taler-arm/taler-postgres-standalone.conf8
-rw-r--r--taler-arm/taler-survey.conf4
-rw-r--r--taler-arm/taler-sync.conf4
-rw-r--r--taler-arm/taler-transfer.conf4
-rw-r--r--taler-arm/taler-twister-bank.conf4
-rw-r--r--taler-arm/taler-twister-exchange.conf4
-rw-r--r--taler-arm/taler-twister.conf4
-rwxr-xr-xtaler-sitesbuild/invalidate.sh2
-rwxr-xr-xtaler-sitesbuild/update_buywith_page.sh2
-rwxr-xr-xtaler-sitesbuild/update_stage.sh4
-rwxr-xr-xtaler-sitesbuild/update_twister_page.sh2
-rwxr-xr-xtaler-sitesbuild/update_www.sh4
-rw-r--r--typescript/README9
-rw-r--r--typescript/config.ts2
-rw-r--r--typescript/container/Dockerfile51
-rw-r--r--typescript/container/prepare.service10
-rwxr-xr-xtypescript/container/prepare.sh7
-rwxr-xr-xtypescript/container/taler-config.sh7
-rwxr-xr-xtypescript/container/taler-local1889
274 files changed, 6351 insertions, 8632 deletions
diff --git a/README.md b/README.md
index 2e66f4c..6fd724a 100644
--- a/README.md
+++ b/README.md
@@ -20,7 +20,6 @@ surviving processes for unit tests that require ports that might conflict.
Deployment Layout
=================
-bin/ -- main scripts to manage a taler deployment (test/demo/int)
buildbot/ -- scripts and configuration for the buildmaster and jobs launched by it
envcfg/ -- Git tags for various deployments, specify which versions should be used
history/ -- directory for NOT MAINTAINED scripts
@@ -31,7 +30,6 @@ postmortem/ -- postmortem reports about major incidents we have had with lessons
private-keys/ -- deployment private keys so that keys do not change if we migrate elsewhere
sign-gnu-artefact -- shell script for making releases and preparing upload to ftp.gnu.org
systemd-services/ -- service definitions for buildbot build-slaves
-taler-arm/ -- gnunet-arm configurations for a deployment
taler-sitesbuild/ -- scripts for updating our Web sites
diff --git a/aptly/aptly-cleanup.sh b/aptly/aptly-cleanup.sh
new file mode 100755
index 0000000..85bcd5a
--- /dev/null
+++ b/aptly/aptly-cleanup.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+set -ex
+
+for i in $(aptly snapshot list | grep taler | cut -d'[' -f 2 | cut -d']' -f1) ; do
+ aptly snapshot drop $i
+done
diff --git a/aptly/aptly-publish.sh b/aptly/aptly-publish.sh
new file mode 100755
index 0000000..07a986f
--- /dev/null
+++ b/aptly/aptly-publish.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+set -exou pipefail
+
+REPOROOT="${HOME}"/.aptly/public
+DATESTAMP=$(date -Iseconds)
+
+rm -rf "${HOME}"/garbagecollect
+mv "${REPOROOT}"/pool "${HOME}"/garbagecollect || true
+aptly repo drop --force taler-ci-snapshots || true
+aptly db cleanup
+aptly repo create -distribution=bookworm -component=main taler-ci-snapshots
+aptly repo add taler-ci-snapshots /home/aptbuilder/incoming/bookworm-taler-ci || true
+aptly snapshot create "taler-${DATESTAMP}" from repo taler-ci-snapshots
+aptly -gpg-provider="gpg2" publish -architectures="amd64,arm64" switch bookworm "taler-${DATESTAMP}"
+
+# Publish a list of all packages in the repo
+aptly repo show -with-packages taler-ci-snapshots > "${REPOROOT}"/packages.txt
diff --git a/aptly/entr.sh b/aptly/entr.sh
new file mode 100755
index 0000000..6837508
--- /dev/null
+++ b/aptly/entr.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -xo
+
+while true ; do
+ echo "${HOME}/incoming/bookworm-taler-ci" | entr -n -d "${HOME}"/deployment/aptly/aptly-publish.sh ; sleep 1 || true
+done
diff --git a/bin/taler-gv b/bin/taler-gv
deleted file mode 100755
index 6e4de28..0000000
--- a/bin/taler-gv
+++ /dev/null
@@ -1,1789 +0,0 @@
-#!/usr/bin/env python3
-
-# This file is part of GNU Taler.
-#
-# GNU Taler is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# GNU Taler is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with GNU Taler. If not, see <https://www.gnu.org/licenses/>.
-
-import socket
-import shutil
-import atexit
-import click
-import types
-import os
-import sys
-import os.path
-import subprocess
-import time
-import random
-import json
-from os import listdir
-from os.path import isdir, join, basename
-from pathlib import Path
-from typing import List
-from sys import exit
-from urllib.parse import urljoin
-from os import remove
-import requests
-from collections import OrderedDict
-import errno
-from pathlib import Path
-from subprocess import Popen, DEVNULL, PIPE
-from datetime import datetime
-
-
-TALER_ROOT_DIR = Path.home()
-TALER_PREFIX = Path.home() / "local"
-
-# Print No Newline.
-def print_nn(msg):
- print(msg, end="")
- sys.stdout.flush()
-
-class Repo:
- def __init__(self, name, url, deps, builder, version="master"):
- self.name = name
- self.url = url
- self.deps = deps
- self.builder = builder
- self.version = version
-
-@click.group()
-def cli():
- pass
-
-# Parses the command-line-given and comma-separated repos list
-# into a list of names.
-def split_repos_list(repos):
- return [repo for repo in repos.split(",") if repo != ""]
-
-# fetch the remote. No timestamp deletion here
-def update_checkout(r: Repo, p: Path):
- """Clean the repository's working directory and
- update it to the match the latest version of the upstream branch
- that we are tracking."""
- subprocess.run(["git", "-C", str(p), "clean", "-fdx"], check=True) # remove unversioned files.
-
- # Equivalent to "git pull". Does nothing if in detached HEAD
- # but pulls new code into the local copy otherwise.
- subprocess.run(["git", "-C", str(p), "fetch"], check=True)
- subprocess.run(["git", "-C", str(p), "reset"], check=True)
-
- # Makes the last step "--hard", namely removes files not
- # belonging to the current version.
- res = subprocess.run(
- [
- "git",
- "-C",
- str(p),
- "rev-parse",
- "--abbrev-ref",
- "--symbolic-full-name",
- "@{u}",
- ],
- stderr=subprocess.DEVNULL,
- stdout=subprocess.PIPE,
- encoding="utf-8",
- )
- if res.returncode != 0:
- ref = "HEAD"
- else:
- ref = res.stdout.strip("\n ")
- print(f"resetting {r.name} to ref {ref}")
- subprocess.run(["git", "-C", str(p), "reset", "--hard", ref], check=True)
-
-
-def default_configure(*extra):
- extra_list = list(extra)
- subprocess.run(["./configure", f"--prefix={TALER_PREFIX}"] + extra_list, check=True)
-
-def pyconfigure(*extra):
- """For python programs, --prefix doesn't work."""
- subprocess.run(["./configure"] + list(extra), check=True)
-
-def build_libeufin(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_libmicrohttpd(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure("--disable-doc")
- subprocess.run(["make"], check=True)
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_gnunet(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = TALER_PREFIX
- default_configure(
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- "--disable-documentation",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_exchange(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = TALER_PREFIX
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_wallet(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_twister(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = TALER_PREFIX
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-exchange={pfx}",
- f"--with-gnunet={pfx}",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_merchant(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = TALER_PREFIX
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-exchange={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_sync(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = TALER_PREFIX
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-exchange={pfx}",
- f"--with-merchant={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-
-def build_demos(r, p):
- update_checkout(r, p)
- pfx = TALER_PREFIX
- pyconfigure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_backoffice(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"])
- subprocess.run(["./configure"])
- subprocess.run(["make", "build-single"])
- (p / "taler-buildstamp").touch()
-
-repos = {
- "libmicrohttpd": Repo(
- "libmicrohttpd",
- "git://git.gnunet.org/libmicrohttpd.git",
- [],
- build_libmicrohttpd,
- ),
- "gnunet": Repo(
- "gnunet",
- "git://git.gnunet.org/gnunet.git",
- ["libmicrohttpd"],
- build_gnunet
- ),
- "exchange": Repo(
- "exchange",
- "git://git.taler.net/exchange",
- ["gnunet", "libmicrohttpd"],
- build_exchange,
- ),
- "merchant": Repo(
- "merchant",
- "git://git.taler.net/merchant",
- ["exchange","libmicrohttpd","gnunet"],
- build_merchant,
- ),
- "sync": Repo(
- "sync",
- "git://git.taler.net/sync",
- ["exchange",
- "merchant",
- "gnunet",
- "libmicrohttpd"],
- build_sync,
- ),
- "wallet-core": Repo(
- "wallet-core",
- "git://git.taler.net/wallet-core",
- [],
- build_wallet,
- ),
- "libeufin": Repo(
- "libeufin",
- "git://git.taler.net/libeufin.git",
- [],
- build_libeufin,
- ),
- "taler-merchant-demos": Repo(
- "taler-merchant-demos",
- "git://git.taler.net/taler-merchant-demos",
- [],
- build_demos,
- ),
- "twister": Repo(
- "twister",
- "git://git.taler.net/twister",
- ["gnunet", "libmicrohttpd"],
- build_twister,
- ),
-}
-
-def get_repos_names() -> List[str]:
- r_dir = TALER_ROOT_DIR / "sources"
- if not r_dir.is_dir():
- print(f"'{r_dir}' not found. Did bootstrap run?")
- return []
- return [el for el in listdir(r_dir) if isdir(join(r_dir, el)) and repos.get(el)]
-
-# Get 'Repo' objects (globally defined),
-# using their names as index.
-def load_repos(reposNames) -> List[Repo]:
- ret = []
- for repo in repos.keys():
- if repo in reposNames:
- ret.append(repos[repo])
- return ret
-
-# Return the list of repos (equipped with their version)
-# to install.
-def load_repos_with_envcfg(envcfg_path) -> List[Repo]:
- envcfg_path = Path(envcfg_path)
- if not os.path.isfile(envcfg_path):
- print(f"{envcfg_path} is not a file")
- sys.exit(1)
- cfgtext = envcfg_path.read_text()
- cfg = types.ModuleType("taler_deployment_cfg")
- try:
- exec(cfgtext, cfg.__dict__)
- except SyntaxError:
- print(f"{envcfg_path} is not Python.")
- exit(1)
- ret = []
- for repo in repos.keys():
- try:
- envcfg_entry = getattr(cfg, "tag_" + repo.replace("-", "_"))
- except AttributeError:
- # 'env' files doesn't have this repo, continue looping.
- continue
- repos[repo].version = envcfg_entry
- ret.append(repos[repo])
- return ret
-
-# Flag as stale the projects set on 'master' that
-# aren't in line with upstream. Detached head projects
-# aren't affected.
-def update_repos(repos: List[Repo], force) -> None:
- for r in repos:
- r_dir = TALER_ROOT_DIR / "sources" / r.name
- subprocess.run(["git", "-C", str(r_dir), "fetch"], check=True)
- res = subprocess.run(
- ["git", "-C", str(r_dir), "status", "-sb"],
- check=True,
- stdout=subprocess.PIPE,
- encoding="utf-8",
- )
- if "behind" in res.stdout or force:
- print(f"{r.name} will be compiled")
- s = r_dir / "taler-buildstamp"
- if s.exists():
- s.unlink()
-
-# projects without the build timestamp are considered stale,
-# even if one of their dependencies _got_ marked as stale.
-def get_stale_repos(repos: List[Repo]) -> List[Repo]:
- timestamps = {}
- stale = []
- for r in repos:
- r_dir = TALER_ROOT_DIR / "sources" / r.name
- s = r_dir / "taler-buildstamp"
- if not s.exists():
- timestamps[r.name] = time.time()
- stale.append(r)
- continue
- ts = timestamps[r.name] = s.stat().st_mtime
- for dep in r.deps:
- # When 'dep' in not found, it has been
- # excluded from the compilation.
- if timestamps.get("dep", 0) > ts:
- stale.append(r)
- break
- return stale
-
-@cli.command()
-@click.option(
- "--without-repos", metavar="WITHOUT REPOS",
- help="WITHOUT REPOS is a unspaced and comma-separated list \
-of the repositories to _exclude_ from compilation",
- default="")
-@click.option(
- "--only-repos", metavar="ONLY REPOS",
- help="ONLY REPOS is a unspaced and comma-separated exclusive list \
-of the repositories to include in the compilation",
- default="")
-@click.option(
- "--dry/--no-dry", default=False,
- help="Only getting changes, without actual build."
-)
-@click.option(
- "--with-envcfg", metavar="PATH",
- help="python file pinning each codebase version.",
-)
-# Normally, we don't rebuild dependent projects when one
-# of their dependency changed. This lets check whether non
-# breaking changes are really so; this option invalidates
-# this policy by letting all the codebases be compiled.
-@click.option(
- "--force/--no-force", default=False,
- help="build all the projects.",
-)
-def build(without_repos, only_repos, dry, with_envcfg, force) -> None:
- """Build the deployment from source."""
- if only_repos != "" and without_repos != "":
- print("Either use --only-repos or --without-repos")
- exit(1)
- repos_names = get_repos_names()
- if only_repos != "":
- repos_names = list(filter(
- lambda x: x in split_repos_list(only_repos),
- repos_names
- ))
- if without_repos != "":
- repos_names = list(filter(
- lambda x: x not in split_repos_list(without_repos),
- repos_names
- ))
- if with_envcfg:
- target_repos = load_repos_with_envcfg(with_envcfg)
- else:
- target_repos = load_repos(repos_names)
- # enforce version here.
- sources = TALER_ROOT_DIR / "sources"
- for r in target_repos:
- subprocess.run(
- ["git", "-C", str(sources / r.name),
- "checkout", "-q", "-f",
- r.version, "--"], check=True
- )
- update_repos(target_repos, force)
- stale = get_stale_repos(target_repos)
- print(f"found stale repos: {[r.name for r in stale]}")
- for r in stale:
- # Inform, if a dependency is not being built:
- diff = set(r.deps) - set(repos_names)
- if len(diff) > 0:
- print(f"Info: those dependencies are not being built: {diff}")
- p = TALER_ROOT_DIR / "sources" / r.name
- os.chdir(str(p))
- if dry:
- print("dry running")
- continue
- r.builder(r, p)
-
-# Only git-clone the codebases. The 'build' step
-# will run all the update logic. At this point, a
-# 'env' file - as well as the --repos option - will
-# only express which codebases are to clone.
-@cli.command()
-@click.option(
- "--repos", "-r",
- metavar="REPOS",
- help="REPOS is a unspaced and comma-separated list of the repositories to clone.",
- default="libmicrohttpd,gnunet,exchange,merchant,wallet-core,taler-merchant-demos,sync,libeufin",
- show_default=True,
-)
-@click.option(
- "--without-repos",
- metavar="REPOS",
- help="REPOS is a unspaced and comma-separated list of the repositories NOT to clone."
-)
-@click.option(
- "--list-repos/--no-list-repos", default=False,
- help="Lists the repositories that were bootstrapped.",
-)
-@click.option(
- "--with-envcfg", metavar="PATH",
- help="python file pinning each codebase version.",
-)
-@click.option(
- "--dry/--no-dry", default=False,
- help="Print steps, without downloading any repository.",
-)
-def bootstrap(list_repos, repos, with_envcfg, dry, without_repos) -> None:
- """Clone all the specified repositories."""
- # Only saying _which_ repo were installed. No further action
- if list_repos:
- for repo in get_repos_names():
- print(repo)
- return
-
- # Download the repositories.
- def clone_repos(repos: List[Repo]):
- if len(repos) == 0:
- print("No repositories can be checked out. Spelled correctly?")
- return
- sources = TALER_ROOT_DIR / "sources"
- for r in repos:
- print(f"Bootstrapping '{r.name}', at version '{r.version}'")
- if dry:
- print("dry running")
- continue
- r_dir = sources / r.name
- if not r_dir.exists():
- r_dir.mkdir(parents=True, exist_ok=True)
- subprocess.run(
- ["git", "-C", str(sources),
- "clone", r.url], check=True
- )
- subprocess.run(
- ["git", "-C", str(r_dir),
- "checkout", "-q", "-f",
- r.version, "--"], check=True
- )
-
- # Get list of to-be-cloned repos from the 'env' file.
- if with_envcfg:
- # 'with_envcfg' is a path to a "envcfg.py" file.
- preparedRepos = load_repos_with_envcfg(with_envcfg)
- # Get list of to-be-cloned repos from the command line
- # (or its default)
- else:
- # 'repos' is here "repo1,repo2,.."
- reposList = split_repos_list(repos)
- # 'reposList' is here ["repo1", "repo2", ...]
- preparedRepos = load_repos(reposList)
- if without_repos:
- for exclude_repo in split_repos_list(without_repos):
- preparedRepos = [el for el in preparedRepos if el.name != exclude_repo]
- clone_repos(preparedRepos)
-
-# Globals sharead accross multiple sub-commands:
-# needed to configure and launch the reverse proxy.
-UNIX_SOCKETS_DIR = TALER_ROOT_DIR / "sockets"
-LOG_DIR = TALER_ROOT_DIR / "logs"
-# needed to create the customer's bank account and
-# to let them subsequently withdraw via the Access API.
-CUSTOMER_BANK_ACCOUNT = "sandbox-account-customer"
-CUSTOMER_BANK_PASSWORD = "secret"
-# needed along preparation and later to withdraw via
-# the Access API.
-CURRENCY = "KUDOS"
-
-@cli.command()
-@click.option(
- "--postgres-db-name", metavar="DBNAME",
- help="Set postgres database name for all the services.",
- default="talerdemo"
-)
-def prepare(postgres_db_name):
- """Generate configuration, run-time blobs, instances, euFin accounts."""
-
- def is_serving(check_url, tries=10):
- for i in range(tries):
- try:
- print_nn(".")
- # Raises if the service is not reachable.
- response = requests.get(
- check_url,
- timeout=1
- )
- # The reverse proxy may return 500 if the
- # end service is not ready, therefore this
- # case should be tolerated.
- response.raise_for_status()
- except:
- time.sleep(0.5)
- if i == tries - 1:
- return False
- continue
- break
- return True
-
- def fail(reason=None):
- if reason:
- print("ERROR:", reason)
- exit(1)
-
- def kill(proc):
- proc.terminate()
- proc.wait()
-
- def get_nexus_cli_env(
- username,
- password,
- nexus_url
- ):
- env = os.environ.copy()
- env["LIBEUFIN_NEXUS_USERNAME"] = username
- env["LIBEUFIN_NEXUS_PASSWORD"] = password
- env["LIBEUFIN_NEXUS_URL"] = nexus_url
- return env
-
- def get_sandbox_cli_env(
- username, password
- ):
- env = os.environ.copy()
- env["LIBEUFIN_SANDBOX_USERNAME"] = username
- env["LIBEUFIN_SANDBOX_PASSWORD"] = password
- return env
-
- # Will be extended to include a SANDBOX_ADMIN_TOKEN
- # that will obsolete the 'superuser' flag of ordinary
- # user accounts. Likewise, the client side will be
- # modified to use such token.
- def get_sandbox_server_env(db_file, base_url, admin_password):
- env = os.environ.copy()
- env["LIBEUFIN_SANDBOX_DB_CONNECTION"] = f"jdbc:sqlite:{db_file}"
- env["LIBEUFIN_SANDBOX_BASE_URL"] = base_url
- env["LIBEUFIN_SANDBOX_ADMIN_PASSWORD"] = admin_password
- return env
-
- def get_nexus_server_env(db_file, base_url):
- env = os.environ.copy()
- env["LIBEUFIN_NEXUS_DB_CONNECTION"] = f"jdbc:sqlite:{db_file}"
- env["LIBEUFIN_NEXUS_BASE_URL"] = base_url
- return env
-
- def urljoin_nodrop(a, b):
- a = a + "/" # urljoin will drop extra trailing slashes.
- b = "/".join([x for x in b.split("/") if x != ""]) # remove leading slashes.
- return urljoin(a, b)
-
- def prepare_nexus_account(
- ebics_url,
- ebics_host_id,
- ebics_partner_id,
- ebics_user_id,
- bank_connection_name,
- bank_account_name_sandbox,
- bank_account_name_nexus,
- env
- ):
- # make connection
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "new-ebics-connection",
- "--ebics-url", ebics_url,
- "--host-id", ebics_host_id,
- "--partner-id", ebics_partner_id,
- "--ebics-user-id", ebics_user_id,
- bank_connection_name
- ],
- env
- ).run()
- # connect
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "connect", bank_connection_name
- ],
- env
- ).run()
- # Import bank account
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "download-bank-accounts",
- bank_connection_name
- ],
- env
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "import-bank-account",
- "--offered-account-id",
- bank_account_name_sandbox,
- "--nexus-bank-account-id",
- bank_account_name_nexus,
- bank_connection_name
- ],
- env
- ).run()
- # Set background tasks.
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "accounts",
- "task-schedule", bank_account_name_nexus,
- "--task-type", "submit",
- "--task-name", "submit-payments-each-second",
- "--task-cronspec", "* * *"
- ],
- env
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "accounts",
- "task-schedule", bank_account_name_nexus,
- "--task-type", "fetch",
- "--task-name", "fetch-reports-each-second",
- "--task-cronspec", "* * *",
- "--task-param-level", "report",
- "--task-param-range-type", "latest"
- ],
- env
- ).run()
-
- def get_sandbox_account_info(
- sandbox_url,
- bank_account_label,
- password,
- ):
- customer_env = os.environ.copy()
- customer_env["LIBEUFIN_SANDBOX_USERNAME"] = bank_account_label
- customer_env["LIBEUFIN_SANDBOX_PASSWORD"] = password
- demobank_url = urljoin_nodrop(sandbox_url, "/demobanks/default")
- r = Command([
- f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
- "--sandbox-url", demobank_url,
- "demobank", "info",
- "--bank-account", bank_account_label],
- env = customer_env,
- capture_stdout=True
- ).run()
- return json.loads(r)
-
- def prepare_sandbox_account(
- sandbox_url,
- ebics_host_id,
- ebics_partner_id,
- ebics_user_id,
- person_name,
- # This value is BOTH a username
- # and a bank account label.
- bank_account_name,
- password,
- is_public=False
- ):
- demobank_url = urljoin_nodrop(sandbox_url, "/demobanks/default")
- user_env = os.environ.copy()
- user_env["LIBEUFIN_SANDBOX_USERNAME"] = bank_account_name
- user_env["LIBEUFIN_SANDBOX_PASSWORD"] = password
- register_cmd = [
- f"{TALER_PREFIX}/bin/libeufin-cli",
- "sandbox", "--sandbox-url", demobank_url,
- "demobank", "register"
- ]
- if is_public:
- register_cmd.append("--public")
- Command(register_cmd, env = user_env).run()
- admin_env = os.environ.copy()
- admin_env["LIBEUFIN_SANDBOX_USERNAME"] = SANDBOX_ADMIN_USERNAME
- admin_env["LIBEUFIN_SANDBOX_PASSWORD"] = SANDBOX_ADMIN_PASSWORD
- Command([
- f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
- "--sandbox-url", demobank_url,
- "demobank", "new-ebicssubscriber",
- "--host-id", ebics_host_id,
- "--partner-id", ebics_partner_id,
- "--user-id", ebics_user_id,
- "--bank-account", bank_account_name
- ],
- env = admin_env
- ).run()
-
-
- WIRE_METHOD = "iban"
- # euFin URLs
- SANDBOX_URL = "https://bank.demo.taler.net/"
- NEXUS_URL = "https://nexus.demo.taler.net/"
-
- # Filesystem's paths
- CFG_OUTDIR = TALER_ROOT_DIR / "config"
- TALER_RUNTIME_DIR = TALER_ROOT_DIR / "runtime"
- TALER_DATA_DIR = TALER_ROOT_DIR / "data"
- TALER_UNIT_FILES_DIR = systemd_user_dir = Path.home() / ".config" / "systemd" / "user"
-
- def create_tip_reserve():
- payto = Command([
- f"{TALER_PREFIX}/bin/taler-merchant-setup-reserve",
- "--amount", f"{CURRENCY}:20",
- "--exchange-url", "https://exchange.demo.taler.net/",
- "--merchant-url", "https://backend.demo.taler.net/instances/survey/",
- "--apikey", f"Bearer {FRONTENDS_API_TOKEN}",
- "--wire-method", WIRE_METHOD],
- capture_stdout=True
- ).run()
-
- Command([
- f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox", "--sandbox-url",
- SANDBOX_URL + "/demobanks/default/", "demobank",
- "new-transaction", "--bank-account", "sandbox-account-survey",
- "--payto-with-subject", payto, "--amount", "20"],
- env = get_sandbox_cli_env(
- username = "sandbox-account-survey",
- password = ALL_INSTANCES_BANK_PASSWORD
- )).run()
-
- def get_random_iban():
- cc_no_check = 131400 # is "DE00"
- bban = "".join(random.choices("0123456789", k=4))
- check_digits = 98 - (int(f"{bban}{cc_no_check}") % 97)
- return "DE" + (f"0{check_digits}"[-2:]) + bban
-
- # IBANs
-
- IBAN_MERCHANT_DEFAULT = get_random_iban()
- IBAN_MERCHANT_DEMOSHOP = get_random_iban()
-
- # Instances
- INSTANCES = [
- dict(name="GNUnet", isPublic=True),
- dict(name="Taler", isPublic=True),
- dict(name="Tor", isPublic=True),
- dict(name="survey"),
- dict(name="blog"),
- ]
-
- # Credentials / API keys
- EXCHANGE_NEXUS_USERNAME = "exchange-nexus-user"
- EXCHANGE_NEXUS_PASSWORD = "exchange-nexus-password"
- FRONTENDS_API_TOKEN = "secret-token:secret"
- TALER_MERCHANT_TOKEN = "secret-token:secret"
- ALL_INSTANCES_BANK_PASSWORD = "secret"
- EXCHANGE_BANK_ACCOUNT_SANDBOX = "sandbox-account-exchange"
- EXCHANGE_BANK_ACCOUNT_PASSWORD = "secret"
-
- # EBICS
- EBICS_HOST_ID = "ebicsDeployedHost"
- EXCHANGE_EBICS_USER_ID = "exchangeEbicsUserId"
- EXCHANGE_EBICS_PARTNER_ID = "exchangeEbicsPartnerId"
- EBICS_URL = "https://bank.demo.taler.net/ebicsweb"
-
- # euFin
- EXCHANGE_BANK_ACCOUNT_NEXUS = "exchange-imported-account-nexus"
- EXCHANGE_BANK_CONNECTION = "exchange-ebics-connection"
- NEXUS_DB_FILE = TALER_ROOT_DIR / "nexus.sqlite3"
- SANDBOX_DB_FILE = TALER_ROOT_DIR / "sandbox.sqlite3"
- EXCHANGE_FACADE_NAME = "exchange-taler-facade"
- SANDBOX_ADMIN_USERNAME = "admin"
- SANDBOX_ADMIN_PASSWORD = "secret"
-
- class Command:
- def __init__(
- self, cmd, env=os.environ, log_dir=LOG_DIR,
- custom_name=None, capture_stdout=False
- ):
- if len(cmd) == 0:
- fail("Command to execute was given empty.")
- self.name = custom_name if custom_name else basename(cmd[0])
- self.cmd = cmd
- self.capture_stdout = capture_stdout
- self.log_dir = log_dir
- self.env = env
-
- def run(self):
- self.do()
- return_code = self.handle.wait()
- self.cleanup() # Mainly closes the log file.
- if return_code != 0:
- fail(f"Command {self.name} failed. Logs in {self.log_dir}")
- if self.capture_stdout:
- return self.handle.communicate()[0].decode("utf-8").rstrip()
-
- def get_log_filename(self):
- return self.log_file.name
-
- def cleanup(self):
- self.log_file.flush()
- self.log_file.close()
-
- def do(self):
- if not self.log_dir.is_dir():
- os.makedirs(self.log_dir)
- try:
- log_filename = self.log_dir / f"{self.name}.log"
- self.log_file = open(log_filename, "a+")
- except Exception as error:
- fail(f"Could not open log file: {log_filename}: {error}")
- try:
- self.handle = Popen(
- self.cmd, # list
- stdin=DEVNULL,
- stdout=self.log_file if not self.capture_stdout else PIPE,
- stderr=self.log_file,
- env=self.env
- )
- except Exception as error:
- fail(f"Could not execute: {' '.join(self.cmd)}: {error}")
-
- class ConfigFile:
- def __init__(self, filename):
- self.sections = OrderedDict()
- self.filename = filename
-
- def destroy(self):
- del self.sections
- self.sections = OrderedDict()
-
- def cfg_put(self, section_name, key, value):
- s = self.sections[section_name] = self.sections.get(section_name, OrderedDict())
- s[key] = value
-
- def cfg_write(self, outdir):
- if outdir:
- if not os.path.isdir(outdir):
- os.makedirs(outdir)
- fstream = open(os.path.join(outdir, self.filename), "w")
- else:
- fstream = open(sys.stdout)
-
- for section_name, section in self.sections.items():
- fstream.write("[" + section_name + "]" + "\n")
- for key, value in section.items():
- fstream.write(key + " = " + value + "\n")
- fstream.write("\n")
- fstream.close()
-
- def config_specify_master_pub(
- filename,
- currency,
- exchange_master_pub
- ):
- Command([
- f"{TALER_PREFIX}/bin/taler-config", "-F", "-c", filename,
- "-s", "exchange", "-o", "master_public_key",
- "-V", exchange_master_pub
- ]).run()
- Command([
- f"{TALER_PREFIX}/bin/taler-config", "-F", "-c", filename,
- "-s", f"merchant-exchange-{currency}",
- "-o", "master_key",
- "-V", exchange_master_pub
- ]).run()
-
- # When called, there is no exchange master pub yet.
- # taler-exchange-offline will prouce the key _after_
- # taler.conf is generated. Only after that, we'll
- # specify the master key where it is missing; namely
- # in the merchant backend and exchange HTTP daemon sections.
-
- def config_main(
- filename,
- outdir,
- unix_sockets_dir,
- currency,
- wire_method,
- exchange_wire_gateway_username,
- exchange_wire_gateway_password,
- frontend_api_key,
- taler_runtime_dir,
- postgres_db_name
- ):
- def coin(
- obj,
- currency,
- name,
- value,
- d_withdraw="3 years",
- d_spend="5 years",
- d_legal="10 years",
- f_withdraw="0.01",
- f_deposit="0.01",
- f_refresh="0.01",
- f_refund="0.01",
- rsa_keysize="2048",
- ):
- sec = "coin_" + currency + "_" + name + "_age_restricted"
- obj.cfg_put(sec, "cipher", "RSA")
- obj.cfg_put(sec, "value", currency + ":" + value)
- obj.cfg_put(sec, "duration_withdraw", d_withdraw)
- obj.cfg_put(sec, "duration_spend", d_spend)
- obj.cfg_put(sec, "duration_legal", d_legal)
- obj.cfg_put(sec, "fee_withdraw", currency + ":" + f_withdraw)
- obj.cfg_put(sec, "fee_refresh", currency + ":" + f_refresh)
- obj.cfg_put(sec, "fee_refund", currency + ":" + f_refund)
- obj.cfg_put(sec, "fee_deposit", currency + ":" + f_deposit)
- obj.cfg_put(sec, "rsa_keysize", rsa_keysize)
- obj.cfg_put(sec, "age_restricted", "YES")
-
- obj = ConfigFile("taler.conf")
- obj.cfg_put("paths", "TALER_DATA_HOME", str(TALER_DATA_DIR))
- if not taler_runtime_dir.is_dir():
- os.makedirs(taler_runtime_dir)
- obj.cfg_put("paths", "TALER_RUNTIME_DIR", str(taler_runtime_dir))
- obj.cfg_put("taler", "CURRENCY", currency)
- obj.cfg_put("taler", "CURRENCY_ROUND_UNIT", f"{currency}:0.01")
-
- obj.cfg_put("kyc-provider-example-persona", "COST", "42");
- obj.cfg_put("kyc-provider-example-persona", "LOGIC", "persona");
- obj.cfg_put("kyc-provider-example-persona", "USER_TYPE", "INDIVIDUAL");
- obj.cfg_put("kyc-provider-example-persona", "PROVIDED_CHECKS", "DUMMY");
- obj.cfg_put("kyc-provider-example-persona", "PERSONA_VALIDITY", "forever");
- obj.cfg_put("kyc-provider-example-persona", "PERSONA_SUBDOMAIN", "taler");
- obj.cfg_put("kyc-provider-example-persona", "PERSONA_AUTH_TOKEN", "persona_sandbox_b1c70e49-b333-4f3c-b356-f0ed05029241");
- obj.cfg_put("kyc-provider-example-persona", "PERSONA_TEMPLATE_ID", "itmpl_Uj6X5J3GPT9kbuAZTLg7AUMx");
- obj.cfg_put("kyc-provider-example-persona", "KYC_POST_URL", "https://demo.taler.net/");
-
- obj.cfg_put("kyc-legitimization-balance-high", "OPERATION_TYPE", "BALANCE");
- obj.cfg_put("kyc-legitimization-balance-high", "REQUIRED_CHECKS", "DUMMY");
- obj.cfg_put("kyc-legitimization-balance-high", "THRESHOLD", f"{currency}:30");
- obj.cfg_put("kyc-legitimization-deposit-any", "OPERATION_TYPE", "DEPOSIT");
- obj.cfg_put("kyc-legitimization-deposit-any", "REQUIRED_CHECKS", "DUMMY");
- obj.cfg_put("kyc-legitimization-deposit-any", "THRESHOLD", f"{currency}:15");
- obj.cfg_put("kyc-legitimization-deposit-any", "TIMEFRAME", "1d");
- obj.cfg_put("kyc-legitimization-withdraw", "OPERATION_TYPE", "WITHDRAW");
- obj.cfg_put("kyc-legitimization-withdraw", "REQUIRED_CHECKS", "DUMMY");
- obj.cfg_put("kyc-legitimization-withdraw", "THRESHOLD", f"{currency}:15");
- obj.cfg_put("kyc-legitimization-withdraw", "TIMEFRAME", "1d");
- obj.cfg_put("kyc-legitimization-merge", "OPERATION_TYPE", "MERGE");
- obj.cfg_put("kyc-legitimization-merge", "REQUIRED_CHECKS", "DUMMY");
- obj.cfg_put("kyc-legitimization-merge", "THRESHOLD", f"{currency}:15");
- obj.cfg_put("kyc-legitimization-merge", "TIMEFRAME", "1d");
- obj.cfg_put("exchange-extension-age_restriction", "ENABLED", "YES");
- obj.cfg_put("exchange-extension-age_restriction", "AGE_GROUPS", "8:10:12:14:16:18");
-
-
- obj.cfg_put("donations", "serve", "http")
- obj.cfg_put("donations", "http_serve", "unix")
- obj.cfg_put("donations", "http_unixpath", str(unix_sockets_dir / "donations.http"))
- obj.cfg_put("donations", "http_unixpath_mode", "660")
-
- obj.cfg_put("landing", "serve", "http")
- obj.cfg_put("landing", "http_serve", "unix")
- obj.cfg_put("landing", "http_unixpath", str(unix_sockets_dir / "landing.http"))
- obj.cfg_put("landing", "http_unixpath_mode", "660")
-
- obj.cfg_put("blog", "serve", "http")
- obj.cfg_put("blog", "http_serve", "unix")
- obj.cfg_put("blog", "http_unixpath", str(unix_sockets_dir / "blog.http"))
- obj.cfg_put("blog", "http_unixpath_mode", "660")
-
- obj.cfg_put("survey", "serve", "http")
- obj.cfg_put("survey", "http_serve", "unix")
- obj.cfg_put("survey", "http_unixpath", str(unix_sockets_dir / "survey.http"))
- obj.cfg_put("survey", "http_unixpath_mode", "660")
- obj.cfg_put("survey", "bank_password", "x")
-
- obj.cfg_put("merchant", "serve", "unix")
- obj.cfg_put("merchant", "unixpath", str(unix_sockets_dir / "merchant.http"))
- obj.cfg_put("merchant", "wire_transfer_delay", "0 s")
- obj.cfg_put("merchant", "default_max_wire_fee", currency + ":" + "0.01")
- obj.cfg_put("merchant", "default_max_deposit_fee", currency + ":" + "0.05")
- obj.cfg_put("merchantdb-postgres", "config", f"postgres:///{postgres_db_name}")
-
- obj.cfg_put("frontends", "backend", "https://backend.demo.taler.net/")
- obj.cfg_put(
- "merchant-exchange-{}".format(currency),
- "exchange_base_url", "https://exchange.demo.taler.net/",
- )
- obj.cfg_put(
- "merchant-exchange-{}".format(currency),
- "currency", currency
- )
- obj.cfg_put("auditor", "serve", "unix")
- # FIXME: both below used?
- obj.cfg_put("auditor", "base_url", "https://auditor.demo.taler.net/")
- obj.cfg_put("auditor", "auditor_url", "https://auditor.demo.taler.net/")
- obj.cfg_put("auditor", "unixpath", str(unix_sockets_dir / "auditor.http"))
- obj.cfg_put("auditor", "tiny_amount", currency + ":0.01")
-
- obj.cfg_put(
- "taler-exchange-secmod-eddsa",
- "unixpath",
- str(unix_sockets_dir / "exchange-secmod-eddsa.http")
- )
- obj.cfg_put(
- "taler-exchange-secmod-cs",
- "unixpath",
- str(unix_sockets_dir / "exchange-secmod-cs.http")
- )
- obj.cfg_put("taler-exchange-secmod-cs", "sm_priv_key",
- "${TALER_DATA_HOME}/taler-exchange-secmod-cs/secmod-private-key"
- )
- obj.cfg_put(
- "taler-exchange-secmod-rsa",
- "unixpath",
- str(unix_sockets_dir / "exchange-secmod-rsa.http")
- )
- obj.cfg_put("taler-exchange-secmod-rsa", "sm_priv_key",
- "${TALER_DATA_HOME}/taler-exchange-secmod-rsa/secmod-private-key"
- )
- obj.cfg_put("exchange", "base_url", "https://exchange.demo.taler.net/")
- obj.cfg_put("exchange", "serve", "unix")
- obj.cfg_put("exchange", "unixpath", str(unix_sockets_dir / "exchange.http"))
- obj.cfg_put("exchange", "terms_etag", "0")
- obj.cfg_put("exchange", "terms_dir", f"{TALER_PREFIX}/share/taler-exchange/tos")
- obj.cfg_put("exchange", "privacy_etag", "0")
- obj.cfg_put("exchange", "privacy_dir", f"{TALER_PREFIX}/share/taler-exchange/pp")
- obj.cfg_put("exchangedb-postgres", "config", f"postgres:///{postgres_db_name}")
- obj.cfg_put("auditordb-postgres", "db_conn_str", f"postgres:///{postgres_db_name}")
- obj.cfg_put("auditordb-postgres", "config", f"postgres:///{postgres_db_name}")
- obj.cfg_put("exchange-account-1", "enable_debit", "yes")
- obj.cfg_put("exchange-account-1", "enable_credit", "yes")
- obj.cfg_put("merchant-account-merchant",
- "wire_response",
- "${TALER_DATA_HOME}/merchant/wire/merchant.json",
- )
- obj.cfg_put("merchant-account-merchant", "wire_file_mode", "770")
- obj.cfg_put("frontends", "backend_apikey", frontend_api_key)
- coin(obj, currency, "ct_10", "0.10")
- coin(obj, currency, "1", "1")
- coin(obj, currency, "2", "2")
- coin(obj, currency, "5", "5")
- coin(obj, currency, "10", "10")
- coin(obj, currency, "1000", "1000")
- obj.cfg_write(outdir)
- return obj
-
- def config_sync(
- filename, outdir,
- unix_sockets_dir,
- currency, api_key,
- postgres_db_name
- ):
- obj = ConfigFile(filename)
- obj.cfg_put("taler", "currency", currency)
- obj.cfg_put("sync", "serve", "unix")
- obj.cfg_put("sync", "unixpath", str(unix_sockets_dir / "sync.http"))
- obj.cfg_put("sync", "api_key", f"Bearer {api_key}")
- obj.cfg_put("sync", "annual_fee", f"{currency}:0.1")
- obj.cfg_put("sync", "fulfillment_url", "taler://fulfillment-success/")
- obj.cfg_put(
- "sync",
- "payment_backend_url",
- "https://backend.demo.taler.net/instances/Taler/"
- )
- obj.cfg_put("syncdb-postgres", "config", f"postgres:///{postgres_db_name}")
- obj.cfg_write(outdir)
-
- def unit_file_content(description, cmd, env=None):
- executable_name = cmd.split(" ")[0].split("/")[-1]
- content = (
- "[Unit]\n"
- f"Description={description}\n"
- "[Service]\n"
- f"ExecStart={cmd}\n"
- f"StandardOutput=append:{LOG_DIR / executable_name}.log\n"
- f"StandardError=append:{LOG_DIR / executable_name}.log\n"
- )
- if env:
- content += f"\nEnvironmentFile={env}\n"
- return content
-
- if UNIX_SOCKETS_DIR.is_dir():
- for left_socket in os.listdir(UNIX_SOCKETS_DIR):
- s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- socket_file = str(UNIX_SOCKETS_DIR / left_socket)
- if s.connect_ex(socket_file.encode("utf-8")) == 0:
- fail(f"A service is unexpectedly running and bound to {socket_file}!")
-
- print_nn("Remove stale data and config...")
- if TALER_DATA_DIR.exists():
- shutil.rmtree(TALER_DATA_DIR)
- if TALER_RUNTIME_DIR.exists():
- shutil.rmtree(TALER_RUNTIME_DIR)
- if CFG_OUTDIR.exists():
- shutil.rmtree(CFG_OUTDIR)
- print(" OK")
-
- print_nn("Generate preliminary taler.conf...")
- mc = config_main(
- "taler.conf",
- outdir=CFG_OUTDIR,
- unix_sockets_dir=UNIX_SOCKETS_DIR,
- currency=CURRENCY,
- wire_method=WIRE_METHOD,
- exchange_wire_gateway_username=EXCHANGE_NEXUS_USERNAME,
- exchange_wire_gateway_password=EXCHANGE_NEXUS_PASSWORD,
- frontend_api_key=FRONTENDS_API_TOKEN,
- taler_runtime_dir=TALER_RUNTIME_DIR,
- postgres_db_name=postgres_db_name
- )
- print(" OK")
-
- print_nn("Installing SystemD unit files...")
- if not systemd_user_dir.exists():
- systemd_user_dir.mkdir(parents=True, exist_ok=True)
-
- if not TALER_UNIT_FILES_DIR.exists():
- TALER_UNIT_FILES_DIR.mkdir(parents=True, exist_ok=True)
-
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-httpd.service", "w") as exchange_unit:
- exchange_unit.write(unit_file_content(
- description = "Taler Exchange HTTP daemon",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-httpd -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-wirewatch.service", "w") as exchange_wirewatch_unit:
- exchange_wirewatch_unit.write(unit_file_content(
- description = "Taler Exchange Wirewatch",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-wirewatch -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-aggregator.service", "w") as exchange_aggregator_unit:
- exchange_aggregator_unit.write(unit_file_content(
- description = "Taler Exchange Aggregator",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-aggregator --kyc-off -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-transfer.service", "w") as exchange_transfer_unit:
- exchange_transfer_unit.write(unit_file_content(
- description = "Taler Exchange Transfer",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-transfer -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-cs.service", "w") as exchange_cs_unit:
- exchange_cs_unit.write(unit_file_content(
- description = "Taler Exchange CS security module",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-cs -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
- ))
-
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-rsa.service", "w") as exchange_rsa_unit:
- exchange_rsa_unit.write(unit_file_content(
- description = "Taler Exchange RSA security module",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-rsa -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-eddsa.service", "w") as exchange_eddsa_unit:
- exchange_eddsa_unit.write(unit_file_content(
- description = "Taler Exchange EDDSA security module",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-eddsa -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-merchant-backend.service", "w") as merchant_unit:
- merchant_unit.write(unit_file_content(
- description = "Taler Merchant backend",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-httpd -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-sync.service", "w") as sync_unit:
- sync_unit.write(unit_file_content(
- description = "Taler Sync",
- cmd = f"{TALER_PREFIX}/bin/sync-httpd -L DEBUG -c {CFG_OUTDIR / 'sync.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-merchant-backend-token.service", "w") as merchant_token_unit:
- merchant_token_unit.write(unit_file_content(
- description = "Taler Merchant backend with auth token to allow default instance creation.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-httpd -a {TALER_MERCHANT_TOKEN} -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- # Custom Postgres connection.
- if os.environ.get("PGPORT"):
- with open(TALER_UNIT_FILES_DIR / "taler-local-postgres.env", "w") as postgres_env:
- postgres_env.write(f"PGPORT={os.environ.get('PGPORT')}")
-
- # FIXME/REMINDER: libEufin needs to enable writes for the
- # domain socket's group, otherwise Nginx won't be able to
- # proxy_pass requests.
-
- # euFin unit files.
- with open(TALER_UNIT_FILES_DIR / "taler-local-sandbox.service", "w") as sandbox_unit:
- sandbox_unit.write(unit_file_content(
- description = "euFin Sandbox",
- cmd = f"{TALER_PREFIX}/bin/libeufin-sandbox serve", # takes port 5000
- # cmd = f"{TALER_PREFIX}/bin/libeufin-sandbox serve --with-unix-socket {UNIX_SOCKETS_DIR / 'bank.http'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-sandbox.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-nexus.service", "w") as nexus_unit:
- nexus_unit.write(unit_file_content(
- description = "euFin Nexus",
- cmd = f"{TALER_PREFIX}/bin/libeufin-nexus serve", # takes port 5001
- # cmd = f"{TALER_PREFIX}/bin/libeufin-nexus serve --with-unix-socket {UNIX_SOCKETS_DIR / 'nexus.http'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-nexus.env"
- ))
- # euFin env files.
- with open(TALER_UNIT_FILES_DIR / "taler-local-sandbox.env", "w") as sandbox_env:
- sandbox_env.write(f"LIBEUFIN_SANDBOX_DB_CONNECTION=jdbc:sqlite:{SANDBOX_DB_FILE}\n")
- sandbox_env.write(f"LIBEUFIN_SANDBOX_ADMIN_PASSWORD={SANDBOX_ADMIN_PASSWORD}\n")
- # The following populates the bank UI navigation bar.
- sandbox_env.write(f"TALER_ENV_URL_INTRO=https://demo.taler.net/\n")
- sandbox_env.write(f"TALER_ENV_URL_BANK=https://bank.demo.taler.net/\n")
- sandbox_env.write(f"TALER_ENV_URL_MERCHANT_BLOG=https://shop.demo.taler.net/\n")
- sandbox_env.write(f"TALER_ENV_URL_MERCHANT_DONATIONS=https://donations.demo.taler.net/\n")
- sandbox_env.write(f"TALER_ENV_URL_MERCHANT_SURVEY=https://survey.demo.taler.net/\n")
-
- with open(TALER_UNIT_FILES_DIR / "taler-local-nexus.env", "w") as nexus_env:
- nexus_env.write(f"LIBEUFIN_NEXUS_DB_CONNECTION=jdbc:sqlite:{NEXUS_DB_FILE}\n")
- # FIXME: demo sites ignore the '-c' flag now. They access ~/.config/taler.conf
- # which is a symlink to ~/config/taler.conf
- with open(TALER_UNIT_FILES_DIR / "taler-local-donations.service", "w") as donations_unit:
- donations_unit.write(unit_file_content(
- description = "Donation Website that accepts Taler payments.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos donations",
- # cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos donations -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-blog.service", "w") as blog_unit:
- blog_unit.write(unit_file_content(
- description = "Blog that accepts Taler payments.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos blog",
- # cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos blog -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-survey.service", "w") as survey_unit:
- survey_unit.write(unit_file_content(
- description = "Survey Website awarding tips via Taler.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos survey",
- # cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos survey -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-landing.service", "w") as landing_unit:
- landing_unit.write(unit_file_content(
- description = "Landing Website of Taler demo.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos landing",
- # cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos landing -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-frontends.env", "w") as frontends_env:
- # To populate the navigation bar:
- frontends_env.write((
- f"PYTHONUSERBASE={TALER_PREFIX}\n"
- f"PATH={os.environ.get('PATH')}\n"
- f"TALER_CONFIG_FILE={CFG_OUTDIR / 'taler.conf'}\n"
- f"TALER_ENV_URL_INTRO=https://demo.taler.net/\n"
- f"TALER_ENV_URL_BANK=https://bank.demo.taler.net/\n"
- f"TALER_ENV_URL_MERCHANT_BLOG=https://shop.demo.taler.net/\n"
- f"TALER_ENV_URL_MERCHANT_DONATIONS=https://donations.demo.taler.net/\n"
- f"TALER_ENV_URL_MERCHANT_SURVEY=https://survey.demo.taler.net/\n"
- ))
- print(" OK")
- print_nn("Reload SystemD...")
- Command(["systemctl", "--user", "daemon-reload"]).run()
- atexit.register(lambda: subprocess.run(
- ["systemctl", "--user", "stop", "taler-local-*.service"],
- check=True
- )
- )
- print(" OK")
- print_nn("Generate exchange's master key...")
- EXCHANGE_MASTER_PUB = Command(
- [
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "setup"
- ],
- capture_stdout=True
- ).run()
- print(" OK")
- print_nn("Specify exchange master pub in taler.conf...")
- config_specify_master_pub(
- CFG_OUTDIR / "taler.conf",
- CURRENCY,
- EXCHANGE_MASTER_PUB
- )
- print(" OK")
- print_nn("Generating sync.conf...")
- config_sync(
- "sync.conf",
- outdir=CFG_OUTDIR,
- unix_sockets_dir=UNIX_SOCKETS_DIR,
- currency=CURRENCY,
- api_key=FRONTENDS_API_TOKEN,
- postgres_db_name=postgres_db_name
- )
- print(" OK")
- print_nn("Reset and init exchange DB..")
- Command([
- f"{TALER_PREFIX}/bin/taler-exchange-dbinit",
- "-c", CFG_OUTDIR / "taler.conf",
- "--reset"]
- ).run()
- print(" OK")
-
- print_nn("Launching the exchange RSA helper...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-rsa.service"])
- print(" OK")
- print_nn("Launching the exchange EDDSA helper...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-eddsa.service"])
- print(" OK")
- print_nn("Launching the exchange CS helper...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-cs.service"])
- print(" OK")
- print_nn("Launching the exchange...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-httpd.service"])
- if not is_serving("https://exchange.demo.taler.net/"):
- fail(f"Exchange did not start correctly.")
- print(" OK")
- print_nn("exchange-offline: signing extensions...")
- Command([
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "extensions", "sign", "upload"
- ]).run()
- print(" OK")
- print_nn("exchange-offline: signing key material...")
- Command([
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "download", "sign", "upload"
- ]).run()
- print(" OK")
- # Set up wire fees for next 5 years
- NOW = datetime.now()
- YEAR = NOW.year
- print_nn("Setting wire fees for the next 5 years...")
- for year in range(YEAR, YEAR+5):
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "wire-fee",
- str(year),
- WIRE_METHOD,
- CURRENCY + ":0.01",
- CURRENCY + ":0.01",
- CURRENCY + ":0.01",
- "upload"
- ],
- custom_name="set-wire-fee"
- ).run()
- print(" OK")
- print_nn("Setting global fees for the next 5 years...")
- for year in range(YEAR, YEAR+5):
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "global-fee",
- str(year),
- CURRENCY + ":0.00",
- CURRENCY + ":0.00",
- CURRENCY + ":0.00",
- CURRENCY + ":0.00",
- "3000s",
- "30000s",
- "300000s",
- "0",
- "upload"
- ],
- custom_name="set-wire-fee"
- ).run()
- print(" OK")
- print_nn("Reset and init auditor DB..")
- Command([
- f"{TALER_PREFIX}/bin/taler-auditor-dbinit",
- "-c", CFG_OUTDIR / "taler.conf",
- "--reset"]
- ).run()
- print(" OK")
- print_nn("Add this exchange to the auditor...")
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-auditor-exchange",
- "-c", CFG_OUTDIR / "taler.conf",
- "-m", EXCHANGE_MASTER_PUB,
- "-u", "https://exchange.demo.taler.net/"
- ],
- ).run()
- print(" OK")
- ## Step 4: Set up euFin
- print_nn("Resetting euFin databases...")
- try:
- remove(SANDBOX_DB_FILE)
- remove(NEXUS_DB_FILE)
- except OSError as error:
- if error.errno != errno.ENOENT:
- raise error
- print(" OK")
- # Make the 'default' demobank at Sandbox. (No signup bonus)
- Command([
- f"{TALER_PREFIX}/bin/libeufin-sandbox",
- "config", "--currency", CURRENCY, "--with-signup-bonus", "default"],
- env={
- "PATH": os.environ["PATH"],
- "LIBEUFIN_SANDBOX_DB_CONNECTION": f"jdbc:sqlite:{SANDBOX_DB_FILE}"
- }).run()
- print_nn("Launching Sandbox...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-sandbox.service"])
- time.sleep(3)
- print(" OK")
- print_nn("Make Sandbox EBICS host...")
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
- "--sandbox-url", SANDBOX_URL,
- "ebicshost", "create",
- "--host-id", EBICS_HOST_ID,
- ],
- env=get_sandbox_cli_env(
- SANDBOX_ADMIN_USERNAME,
- SANDBOX_ADMIN_PASSWORD,
- ),
- custom_name="sandbox-create-ebicshost",
- ).run()
- print(" OK")
-
- print_nn("Create Exchange account at Sandbox...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id=EXCHANGE_EBICS_PARTNER_ID,
- ebics_user_id=EXCHANGE_EBICS_USER_ID,
- person_name="Exchange Owner",
- bank_account_name=EXCHANGE_BANK_ACCOUNT_SANDBOX,
- password=EXCHANGE_BANK_ACCOUNT_PASSWORD
- )
- print(" OK")
- print_nn("Getting exchange payto-URI from the bank...")
- exchange_bank_account_info = get_sandbox_account_info(
- SANDBOX_URL,
- EXCHANGE_BANK_ACCOUNT_SANDBOX,
- EXCHANGE_BANK_ACCOUNT_PASSWORD
- )
- EXCHANGE_PAYTO = exchange_bank_account_info["paytoUri"]
- print(" OK")
- print_nn("Specify own payto-URI to exchange's configuration..")
- Command([
- f"{TALER_PREFIX}/bin/taler-config", "-F", "-c", CFG_OUTDIR / 'taler.conf',
- "-s", "exchange-account-1", "-o", "payto_uri", "-V",
- EXCHANGE_PAYTO
- ]).run()
- print(" OK")
- print_nn(f"exchange-offline: enabling {EXCHANGE_PAYTO}...")
- Command([
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "enable-account", EXCHANGE_PAYTO, "upload"
- ]).run()
- print(" OK")
-
- # Give each instance a Sandbox account (note: 'default'
- # won't have one, as it should typically only manage other
- # instances).
- for instance in INSTANCES:
- instance_id = instance["name"]
- print_nn(f"Create account of {instance_id} at Sandbox...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id="unusedMerchantEbicsPartnerId",
- ebics_user_id=f"unused{instance_id}EbicsUserId",
- person_name=f"Shop Owner of {instance_id}",
- bank_account_name=f"sandbox-account-{instance_id.lower()}",
- password=ALL_INSTANCES_BANK_PASSWORD,
- is_public=instance.get("isPublic")
- )
- print(" OK")
- print_nn("Create Customer account at Sandbox...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id="unusedCustomerEbicsPartnerId",
- ebics_user_id="unusedCustomerEbicsUserId",
- person_name="Customer Person",
- bank_account_name=CUSTOMER_BANK_ACCOUNT,
- password=CUSTOMER_BANK_PASSWORD
- )
- print(" OK")
- print_nn("Make Nexus superuser ...")
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-nexus", "superuser",
- EXCHANGE_NEXUS_USERNAME,
- "--password", EXCHANGE_NEXUS_PASSWORD
- ],
- env=get_nexus_server_env(
- NEXUS_DB_FILE,
- NEXUS_URL
- ),
- custom_name="nexus-superuser",
- ).run()
- print(" OK")
-
- print_nn("Launching Nexus...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-nexus.service"])
- if not is_serving("https://nexus.demo.taler.net/"):
- fail(f"Nexus did not start correctly.")
- print(" OK")
- print_nn("Create Exchange account at Nexus...")
- prepare_nexus_account(
- ebics_url=EBICS_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id=EXCHANGE_EBICS_PARTNER_ID,
- ebics_user_id=EXCHANGE_EBICS_USER_ID,
- bank_connection_name=EXCHANGE_BANK_CONNECTION,
- bank_account_name_sandbox=EXCHANGE_BANK_ACCOUNT_SANDBOX,
- bank_account_name_nexus=EXCHANGE_BANK_ACCOUNT_NEXUS,
- env=get_nexus_cli_env(
- EXCHANGE_NEXUS_USERNAME,
- EXCHANGE_NEXUS_PASSWORD,
- NEXUS_URL
- )
- )
- print(" OK")
-
- print_nn("Create Taler facade ...")
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "facades",
- "new-taler-wire-gateway-facade",
- "--currency", CURRENCY,
- "--facade-name", EXCHANGE_FACADE_NAME,
- EXCHANGE_BANK_CONNECTION,
- EXCHANGE_BANK_ACCOUNT_NEXUS
- ],
- env=get_nexus_cli_env(
- EXCHANGE_NEXUS_USERNAME,
- EXCHANGE_NEXUS_PASSWORD,
- NEXUS_URL
- ),
- custom_name="create-taler-facade",
- ).run()
- print(" OK")
- try:
- response = requests.get(
- NEXUS_URL + "/facades",
- auth=requests.auth.HTTPBasicAuth(
- EXCHANGE_NEXUS_USERNAME,
- EXCHANGE_NEXUS_PASSWORD
- )
- )
- response.raise_for_status()
- except Exception as error:
- fail(error)
- FACADE_URL = response.json().get("facades")[0].get("baseUrl")
- print_nn("Set suggested exchange at Sandbox...")
- Command([
- f"{TALER_PREFIX}/bin/libeufin-sandbox",
- "default-exchange",
- "https://exchange.demo.taler.net/",
- EXCHANGE_PAYTO],
- env={
- "PATH": os.environ["PATH"],
- "LIBEUFIN_SANDBOX_DB_CONNECTION": f"jdbc:sqlite:{SANDBOX_DB_FILE}"
- }).run()
- print(" OK")
-
- # Point the exchange to the facade.
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-config",
- "-F",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
- "-o" "wire_gateway_auth_method",
- "-V", "basic"
- ],
- custom_name="specify-wire-gateway-auth-method",
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-config",
- "-F",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
- "-o" "wire_gateway_url",
- "-V", FACADE_URL
- ],
- custom_name="specify-facade-url",
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-config",
- "-F",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
- "-o" "username",
- "-V", EXCHANGE_NEXUS_USERNAME
- ],
- custom_name="specify-username-for-facade",
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-config",
- "-F",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
- "-o" "password",
- "-V", EXCHANGE_NEXUS_PASSWORD
- ],
- custom_name="specify-password-for-facade",
- ).run()
-
- ## Step 6: Set up merchant
-
- print_nn("Reset and init merchant database...")
- Command([
- f"{TALER_PREFIX}/bin/taler-merchant-dbinit",
- "-c", CFG_OUTDIR / "taler.conf",
- "--reset"
- ]).run()
- print(" OK")
-
- def ensure_instance(
- currency,
- instance_id,
- backend_url,
- wire_method,
- auth_token
- ):
- auth_header = {"Authorization": f"Bearer {auth_token}"}
- resp = requests.get(
- urljoin_nodrop(backend_url, f"management/instances/{instance_id}"),
- headers = auth_header
- )
- bankaccount_info = get_sandbox_account_info(
- SANDBOX_URL,
- f"sandbox-account-{instance_id.lower()}",
- ALL_INSTANCES_BANK_PASSWORD
- )
- req = dict(
- id=instance_id,
- name=f"Name of '{instance_id}'",
- payto_uris=[bankaccount_info["paytoUri"]],
- address=dict(),
- jurisdiction=dict(),
- default_max_wire_fee=f"{currency}:1",
- default_wire_fee_amortization=3,
- default_max_deposit_fee=f"{currency}:1",
- default_wire_transfer_delay=dict(d_us=0),
- default_pay_delay=dict(d_us=24*60*60*1000000),
- auth=dict(method="token", token=auth_token),
- )
- http_method = requests.post
- endpoint = "management/instances"
-
- # Instance exists, patching it.
- if resp.status_code == 200:
- print(f"Patching instance '{instance_id}'")
- http_method = requests.patch
- endpoint = f"management/instances/{instance_id}"
-
- resp = http_method(
- urljoin_nodrop(backend_url, endpoint),
- json=req,
- headers = auth_header
- )
- if resp.status_code < 200 or resp.status_code >= 300:
- print(f"Backend responds: {resp.status_code}/{resp.text}")
- fail(f"Could not create (or patch) instance '{instance_id}'")
-
- print_nn(f"Start merchant (with TALER_MERCHANT_TOKEN into the env)...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend-token.service"], check=True)
- if not is_serving("https://backend.demo.taler.net/"):
- fail(f"Merchant backend (with auth token) did not start correctly.")
- print(" OK")
- print_nn("Give default instance a bank account...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id="unusedMerchantEbicsPartnerId",
- ebics_user_id=f"unusedDefaultInstanceEbicsUserId",
- person_name=f"Shop Owner of default instance",
- bank_account_name="sandbox-account-default",
- password=ALL_INSTANCES_BANK_PASSWORD
- )
- print(" OK")
- ensure_instance(
- currency=CURRENCY,
- instance_id="default",
- backend_url = "https://backend.demo.taler.net/",
- wire_method = WIRE_METHOD,
- auth_token=FRONTENDS_API_TOKEN
- )
- print_nn("Stopping the merchant with TALER_MERCHANT_TOKEN into the env...")
- subprocess.run(["systemctl", "--user", "stop", "taler-local-merchant-backend-token.service"], check=True)
- print(" OK")
- print_nn("Restarting the merchant WITHOUT the auth-token in the env...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend.service"], check=True)
- if not is_serving("https://backend.demo.taler.net/"):
- fail(f"Merchant backend (without auth token) did not start correctly.")
- print(" OK")
- for instance in INSTANCES:
- instance_id = instance["name"]
- print_nn(f"Creating the {instance_id} instance...")
- ensure_instance(
- currency=CURRENCY,
- instance_id=instance_id,
- backend_url = "https://backend.demo.taler.net/",
- wire_method = WIRE_METHOD,
- auth_token=FRONTENDS_API_TOKEN
- )
- print(" OK")
- print_nn("Creating tip reserve...")
- create_tip_reserve()
- print(" OK")
- # 1 second to let Nexus read the payment from
- # Sandbox, 1 second to let the Exchange Wirewatch
- # to read the payment from Nexus.
- print_nn("Sleep 2 seconds to let the tip reserve settle...")
- time.sleep(2)
- print(" OK")
- # Configure Sync.
- print_nn("Reset and init Sync DB..")
- Command([
- f"{TALER_PREFIX}/bin/sync-dbinit",
- "-c", CFG_OUTDIR / "sync.conf",
- "--reset"]
- ).run()
- print(" OK")
- print_nn("Stopping any running service..")
- subprocess.run(["systemctl", "--user", "stop", "taler-local-*.service"], check=True)
- print(" OK")
-
-@cli.command()
-def launch():
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-rsa.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-eddsa.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-cs.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-httpd.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-wirewatch.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-aggregator.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-transfer.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-sync.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-sandbox.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-nexus.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-donations.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-blog.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-survey.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-landing.service"], check=True)
-
-@cli.command()
-def stop():
- subprocess.run(["systemctl", "--user", "stop", "taler-local-*.service"], check=True)
-
-if __name__ == "__main__":
- cli()
diff --git a/bin/taler-local b/bin/taler-local
deleted file mode 100755
index d33c06d..0000000
--- a/bin/taler-local
+++ /dev/null
@@ -1,1840 +0,0 @@
-#!/usr/bin/env python3
-
-# This file is part of GNU Taler.
-#
-# GNU Taler is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# GNU Taler is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with GNU Taler. If not, see <https://www.gnu.org/licenses/>.
-
-import socket
-import shutil
-import atexit
-import click
-import types
-import os
-import sys
-import os.path
-import subprocess
-import time
-import random
-import json
-from os import listdir
-from os.path import isdir, join, basename
-from pathlib import Path
-from typing import List
-from sys import exit
-from urllib.parse import urljoin
-from os import remove
-import requests
-from collections import OrderedDict
-import errno
-from pathlib import Path
-from subprocess import Popen, DEVNULL, PIPE
-from datetime import datetime
-
-
-TALER_ROOT_DIR = Path.home() / ".taler"
-TALER_PREFIX = Path.home() / ".local"
-
-# Print No Newline.
-def print_nn(msg):
- print(msg, end="")
- sys.stdout.flush()
-
-class Repo:
- def __init__(self, name, url, deps, builder, version="master"):
- self.name = name
- self.url = url
- self.deps = deps
- self.builder = builder
- self.version = version
-
-@click.group()
-def cli():
- pass
-
-# Parses the command-line-given and comma-separated repos list
-# into a list of names.
-def split_repos_list(repos):
- return [repo for repo in repos.split(",") if repo != ""]
-
-# fetch the remote. No timestamp deletion here
-def update_checkout(r: Repo, p: Path):
- """Clean the repository's working directory and
- update it to the match the latest version of the upstream branch
- that we are tracking."""
- subprocess.run(["git", "-C", str(p), "clean", "-fdx"], check=True) # remove unversioned files.
-
- # Equivalent to "git pull". Does nothing if in detached HEAD
- # but pulls new code into the local copy otherwise.
- subprocess.run(["git", "-C", str(p), "fetch"], check=True)
- subprocess.run(["git", "-C", str(p), "reset"], check=True)
-
- # Makes the last step "--hard", namely removes files not
- # belonging to the current version.
- res = subprocess.run(
- [
- "git",
- "-C",
- str(p),
- "rev-parse",
- "--abbrev-ref",
- "--symbolic-full-name",
- "@{u}",
- ],
- stderr=subprocess.DEVNULL,
- stdout=subprocess.PIPE,
- encoding="utf-8",
- )
- if res.returncode != 0:
- ref = "HEAD"
- else:
- ref = res.stdout.strip("\n ")
- print(f"resetting {r.name} to ref {ref}")
- subprocess.run(["git", "-C", str(p), "reset", "--hard", ref], check=True)
-
-
-def default_configure(*extra):
- extra_list = list(extra)
- subprocess.run(["./configure", f"--prefix={TALER_PREFIX}"] + extra_list, check=True)
-
-def pyconfigure(*extra):
- """For python programs, --prefix doesn't work."""
- subprocess.run(["./configure"] + list(extra), check=True)
-
-def build_libeufin(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_libmicrohttpd(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure("--disable-doc")
- subprocess.run(["make"], check=True)
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_gnunet(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- "--disable-documentation",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_exchange(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_wallet(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_twister(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-exchange={pfx}",
- f"--with-gnunet={pfx}",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_merchant(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-exchange={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_sync(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-exchange={pfx}",
- f"--with-merchant={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-
-def build_demos(r, p):
- update_checkout(r, p)
- pfx = Path.home() / ".local"
- pyconfigure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_backoffice(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"])
- subprocess.run(["./configure"])
- subprocess.run(["make", "build-single"])
- (p / "taler-buildstamp").touch()
-
-repos = {
- "libmicrohttpd": Repo(
- "libmicrohttpd",
- "git://git.gnunet.org/libmicrohttpd.git",
- [],
- build_libmicrohttpd,
- ),
- "gnunet": Repo(
- "gnunet",
- "git://git.gnunet.org/gnunet.git",
- ["libmicrohttpd"],
- build_gnunet
- ),
- "exchange": Repo(
- "exchange",
- "git://git.taler.net/exchange",
- ["gnunet", "libmicrohttpd"],
- build_exchange,
- ),
- "merchant": Repo(
- "merchant",
- "git://git.taler.net/merchant",
- ["exchange","libmicrohttpd","gnunet"],
- build_merchant,
- ),
- "sync": Repo(
- "sync",
- "git://git.taler.net/sync",
- ["exchange",
- "merchant",
- "gnunet",
- "libmicrohttpd"],
- build_sync,
- ),
- "wallet-core": Repo(
- "wallet-core",
- "git://git.taler.net/wallet-core",
- [],
- build_wallet,
- ),
- "libeufin": Repo(
- "libeufin",
- "git://git.taler.net/libeufin.git",
- [],
- build_libeufin,
- ),
- "taler-merchant-demos": Repo(
- "taler-merchant-demos",
- "git://git.taler.net/taler-merchant-demos",
- [],
- build_demos,
- ),
- "twister": Repo(
- "twister",
- "git://git.taler.net/twister",
- ["gnunet", "libmicrohttpd"],
- build_twister,
- ),
-}
-
-def get_repos_names() -> List[str]:
- r_dir = TALER_ROOT_DIR / "sources"
- if not r_dir.is_dir():
- print(f"'{r_dir}' not found. Did bootstrap run?")
- return []
- return [el for el in listdir(r_dir) if isdir(join(r_dir, el)) and repos.get(el)]
-
-# Get 'Repo' objects (globally defined),
-# using their names as index.
-def load_repos(reposNames) -> List[Repo]:
- ret = []
- for repo in repos.keys():
- if repo in reposNames:
- ret.append(repos[repo])
- return ret
-
-# Return the list of repos (equipped with their version)
-# to install.
-def load_repos_with_envcfg(envcfg_path) -> List[Repo]:
- envcfg_path = Path(envcfg_path)
- if not os.path.isfile(envcfg_path):
- print(f"{envcfg_path} is not a file")
- sys.exit(1)
- cfgtext = envcfg_path.read_text()
- cfg = types.ModuleType("taler_deployment_cfg")
- try:
- exec(cfgtext, cfg.__dict__)
- except SyntaxError:
- print(f"{envcfg_path} is not Python.")
- exit(1)
- ret = []
- for repo in repos.keys():
- try:
- envcfg_entry = getattr(cfg, "tag_" + repo.replace("-", "_"))
- except AttributeError:
- # 'env' files doesn't have this repo, continue looping.
- continue
- repos[repo].version = envcfg_entry
- ret.append(repos[repo])
- return ret
-
-# Flag as stale the projects set on 'master' that
-# aren't in line with upstream. Detached head projects
-# aren't affected.
-def update_repos(repos: List[Repo], force) -> None:
- for r in repos:
- r_dir = TALER_ROOT_DIR / "sources" / r.name
- subprocess.run(["git", "-C", str(r_dir), "fetch"], check=True)
- res = subprocess.run(
- ["git", "-C", str(r_dir), "status", "-sb"],
- check=True,
- stdout=subprocess.PIPE,
- encoding="utf-8",
- )
- if "behind" in res.stdout or force:
- print(f"{r.name} will be compiled")
- s = r_dir / "taler-buildstamp"
- if s.exists():
- s.unlink()
-
-# projects without the build timestamp are considered stale,
-# even if one of their dependencies _got_ marked as stale.
-def get_stale_repos(repos: List[Repo]) -> List[Repo]:
- timestamps = {}
- stale = []
- for r in repos:
- r_dir = TALER_ROOT_DIR / "sources" / r.name
- s = r_dir / "taler-buildstamp"
- if not s.exists():
- timestamps[r.name] = time.time()
- stale.append(r)
- continue
- ts = timestamps[r.name] = s.stat().st_mtime
- for dep in r.deps:
- # When 'dep' in not found, it has been
- # excluded from the compilation.
- if timestamps.get("dep", 0) > ts:
- stale.append(r)
- break
- return stale
-
-@cli.command()
-@click.option(
- "--without-repos", metavar="WITHOUT REPOS",
- help="WITHOUT REPOS is a unspaced and comma-separated list \
-of the repositories to _exclude_ from compilation",
- default="")
-@click.option(
- "--only-repos", metavar="ONLY REPOS",
- help="ONLY REPOS is a unspaced and comma-separated exclusive list \
-of the repositories to include in the compilation",
- default="")
-@click.option(
- "--dry/--no-dry", default=False,
- help="Only getting changes, without actual build."
-)
-@click.option(
- "--with-envcfg", metavar="PATH",
- help="python file pinning each codebase version.",
-)
-# Normally, we don't rebuild dependent projects when one
-# of their dependency changed. This lets check whether non
-# breaking changes are really so; this option invalidates
-# this policy by letting all the codebases be compiled.
-@click.option(
- "--force/--no-force", default=False,
- help="build all the projects.",
-)
-def build(without_repos, only_repos, dry, with_envcfg, force) -> None:
- """Build the deployment from source."""
- if only_repos != "" and without_repos != "":
- print("Either use --only-repos or --without-repos")
- exit(1)
- repos_names = get_repos_names()
- if only_repos != "":
- repos_names = list(filter(
- lambda x: x in split_repos_list(only_repos),
- repos_names
- ))
- if without_repos != "":
- repos_names = list(filter(
- lambda x: x not in split_repos_list(without_repos),
- repos_names
- ))
- if with_envcfg:
- target_repos = load_repos_with_envcfg(with_envcfg)
- else:
- target_repos = load_repos(repos_names)
- # enforce version here.
- sources = TALER_ROOT_DIR / "sources"
- for r in target_repos:
- subprocess.run(
- ["git", "-C", str(sources / r.name),
- "checkout", "-q", "-f",
- r.version, "--"], check=True
- )
- update_repos(target_repos, force)
- stale = get_stale_repos(target_repos)
- print(f"found stale repos: {[r.name for r in stale]}")
- for r in stale:
- # Inform, if a dependency is not being built:
- diff = set(r.deps) - set(repos_names)
- if len(diff) > 0:
- print(f"Info: those dependencies are not being built: {diff}")
- p = TALER_ROOT_DIR / "sources" / r.name
- os.chdir(str(p))
- if dry:
- print("dry running")
- continue
- r.builder(r, p)
-
-# Only git-clone the codebases. The 'build' step
-# will run all the update logic. At this point, a
-# 'env' file - as well as the --repos option - will
-# only express which codebases are to clone.
-@cli.command()
-@click.option(
- "--repos", "-r",
- metavar="REPOS",
- help="REPOS is a unspaced and comma-separated list of the repositories to clone.",
- default="libmicrohttpd,gnunet,exchange,merchant,wallet-core,taler-merchant-demos,sync,libeufin",
- show_default=True,
-)
-@click.option(
- "--without-repos",
- metavar="REPOS",
- help="REPOS is a unspaced and comma-separated list of the repositories NOT to clone."
-)
-@click.option(
- "--list-repos/--no-list-repos", default=False,
- help="Lists the repositories that were bootstrapped.",
-)
-@click.option(
- "--with-envcfg", metavar="PATH",
- help="python file pinning each codebase version.",
-)
-@click.option(
- "--dry/--no-dry", default=False,
- help="Print steps, without downloading any repository.",
-)
-def bootstrap(list_repos, repos, with_envcfg, dry, without_repos) -> None:
- """Clone all the specified repositories."""
- # Only saying _which_ repo were installed. No further action
- if list_repos:
- for repo in get_repos_names():
- print(repo)
- return
-
- # Download the repositories.
- def clone_repos(repos: List[Repo]):
- if len(repos) == 0:
- print("No repositories can be checked out. Spelled correctly?")
- return
- sources = TALER_ROOT_DIR / "sources"
- for r in repos:
- print(f"Bootstrapping '{r.name}', at version '{r.version}'")
- if dry:
- print("dry running")
- continue
- r_dir = sources / r.name
- if not r_dir.exists():
- r_dir.mkdir(parents=True, exist_ok=True)
- subprocess.run(
- ["git", "-C", str(sources),
- "clone", r.url], check=True
- )
- subprocess.run(
- ["git", "-C", str(r_dir),
- "checkout", "-q", "-f",
- r.version, "--"], check=True
- )
-
- # Get list of to-be-cloned repos from the 'env' file.
- if with_envcfg:
- # 'with_envcfg' is a path to a "envcfg.py" file.
- preparedRepos = load_repos_with_envcfg(with_envcfg)
- # Get list of to-be-cloned repos from the command line
- # (or its default)
- else:
- # 'repos' is here "repo1,repo2,.."
- reposList = split_repos_list(repos)
- # 'reposList' is here ["repo1", "repo2", ...]
- preparedRepos = load_repos(reposList)
- if without_repos:
- for exclude_repo in split_repos_list(without_repos):
- preparedRepos = [el for el in preparedRepos if el.name != exclude_repo]
- clone_repos(preparedRepos)
-
-# Globals sharead accross multiple sub-commands:
-# needed to configure and launch the reverse proxy.
-REV_PROXY_HOSTNAME = "localhost"
-REV_PROXY_PORT = "8080"
-REV_PROXY_NETLOC = REV_PROXY_HOSTNAME + ":" + REV_PROXY_PORT
-REV_PROXY_PROTO = "http"
-REV_PROXY_URL = f"{REV_PROXY_PROTO}://{REV_PROXY_NETLOC}"
-UNIX_SOCKETS_DIR = TALER_ROOT_DIR / "sockets"
-LOG_DIR = TALER_ROOT_DIR / "logs"
-# needed to create the customer's bank account and
-# to let them subsequently withdraw via the Access API.
-CUSTOMER_BANK_ACCOUNT = "sandbox-account-customer"
-CUSTOMER_BANK_PASSWORD = "secret"
-# needed along preparation and later to withdraw via
-# the Access API.
-CURRENCY = "CHF"
-
-@cli.command()
-@click.option(
- "--x-forwarded-host", metavar="HOST",
- help="Instruct Nginx to set HOST as the X-Forwarded-Host.",
- default=REV_PROXY_NETLOC
-)
-@click.option(
- "--x-forwarded-proto", metavar="PROTO",
- help="Instruct Nginx to set PROTO as the X-Forwarded-Proto.",
- default="http"
-)
-@click.option(
- "--postgres-db-name", metavar="DBNAME",
- help="Set postgres database name for all the services.",
- default="taler"
-)
-def prepare(x_forwarded_host, x_forwarded_proto, postgres_db_name):
- """Generate configuration, run-time blobs, instances, euFin accounts."""
- def is_serving(check_url, tries=10):
- for i in range(tries):
- try:
- print_nn(".")
- # Raises if the service is not reachable.
- response = requests.get(
- check_url,
- timeout=1
- )
- # The reverse proxy may return 500 if the
- # end service is not ready, therefore this
- # case should be tolerated.
- response.raise_for_status()
- except:
- time.sleep(0.5)
- if i == tries - 1:
- return False
- continue
- break
- return True
-
- def fail(reason=None):
- if reason:
- print("ERROR:", reason)
- exit(1)
-
- def kill(proc):
- proc.terminate()
- proc.wait()
-
- def get_nexus_cli_env(
- username,
- password,
- nexus_url
- ):
- env = os.environ.copy()
- env["LIBEUFIN_NEXUS_USERNAME"] = username
- env["LIBEUFIN_NEXUS_PASSWORD"] = password
- env["LIBEUFIN_NEXUS_URL"] = nexus_url
- return env
-
- def get_sandbox_cli_env(
- username, password
- ):
- env = os.environ.copy()
- env["LIBEUFIN_SANDBOX_USERNAME"] = username
- env["LIBEUFIN_SANDBOX_PASSWORD"] = password
- return env
-
- # Will be extended to include a SANDBOX_ADMIN_TOKEN
- # that will obsolete the 'superuser' flag of ordinary
- # user accounts. Likewise, the client side will be
- # modified to use such token.
- def get_sandbox_server_env(db_file, base_url, admin_password):
- env = os.environ.copy()
- env["LIBEUFIN_SANDBOX_DB_CONNECTION"] = f"jdbc:sqlite:{db_file}"
- env["LIBEUFIN_SANDBOX_BASE_URL"] = base_url
- env["LIBEUFIN_SANDBOX_ADMIN_PASSWORD"] = admin_password
- return env
-
- def get_nexus_server_env(db_file, base_url):
- env = os.environ.copy()
- env["LIBEUFIN_NEXUS_DB_CONNECTION"] = f"jdbc:sqlite:{db_file}"
- env["LIBEUFIN_NEXUS_BASE_URL"] = base_url
- return env
-
- def urljoin_nodrop(a, b):
- a = a + "/" # urljoin will drop extra trailing slashes.
- b = "/".join([x for x in b.split("/") if x != ""]) # remove leading slashes.
- return urljoin(a, b)
-
- def prepare_nexus_account(
- ebics_url,
- ebics_host_id,
- ebics_partner_id,
- ebics_user_id,
- bank_connection_name,
- bank_account_name_sandbox,
- bank_account_name_nexus,
- env
- ):
- # make connection
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "new-ebics-connection",
- "--ebics-url", ebics_url,
- "--host-id", ebics_host_id,
- "--partner-id", ebics_partner_id,
- "--ebics-user-id", ebics_user_id,
- bank_connection_name
- ],
- env
- ).run()
- # connect
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "connect", bank_connection_name
- ],
- env
- ).run()
- # Import bank account
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "download-bank-accounts",
- bank_connection_name
- ],
- env
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "import-bank-account",
- "--offered-account-id",
- bank_account_name_sandbox,
- "--nexus-bank-account-id",
- bank_account_name_nexus,
- bank_connection_name
- ],
- env
- ).run()
- # Set background tasks.
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "accounts",
- "task-schedule", bank_account_name_nexus,
- "--task-type", "submit",
- "--task-name", "submit-payments-each-second",
- "--task-cronspec", "* * *"
- ],
- env
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "accounts",
- "task-schedule", bank_account_name_nexus,
- "--task-type", "fetch",
- "--task-name", "fetch-reports-each-second",
- "--task-cronspec", "* * *",
- "--task-param-level", "report",
- "--task-param-range-type", "latest"
- ],
- env
- ).run()
-
- def get_sandbox_account_info(
- sandbox_url,
- bank_account_label,
- password,
- ):
- customer_env = os.environ.copy()
- customer_env["LIBEUFIN_SANDBOX_USERNAME"] = bank_account_label
- customer_env["LIBEUFIN_SANDBOX_PASSWORD"] = password
- demobank_url = urljoin_nodrop(sandbox_url, "/demobanks/default")
- r = Command([
- f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
- "--sandbox-url", demobank_url,
- "demobank", "info",
- "--bank-account", bank_account_label],
- env = customer_env,
- capture_stdout=True
- ).run()
- return json.loads(r)
-
- def prepare_sandbox_account(
- sandbox_url,
- ebics_host_id,
- ebics_partner_id,
- ebics_user_id,
- person_name,
- # This value is BOTH a username
- # and a bank account label.
- bank_account_name,
- password,
- is_public=False
- ):
- demobank_url = urljoin_nodrop(sandbox_url, "/demobanks/default")
- user_env = os.environ.copy()
- user_env["LIBEUFIN_SANDBOX_USERNAME"] = bank_account_name
- user_env["LIBEUFIN_SANDBOX_PASSWORD"] = password
- register_cmd = [
- f"{TALER_PREFIX}/bin/libeufin-cli",
- "sandbox", "--sandbox-url", demobank_url,
- "demobank", "register"
- ]
- if is_public:
- register_cmd.append("--public")
- Command(register_cmd, env = user_env).run()
- admin_env = os.environ.copy()
- admin_env["LIBEUFIN_SANDBOX_USERNAME"] = SANDBOX_ADMIN_USERNAME
- admin_env["LIBEUFIN_SANDBOX_PASSWORD"] = SANDBOX_ADMIN_PASSWORD
- Command([
- f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
- "--sandbox-url", demobank_url,
- "demobank", "new-ebicssubscriber",
- "--host-id", ebics_host_id,
- "--partner-id", ebics_partner_id,
- "--user-id", ebics_user_id,
- "--bank-account", bank_account_name
- ],
- env = admin_env
- ).run()
-
-
WIRE_METHOD = "iban"  # wire method used throughout this demo deployment
# euFin URLs (reached through the reverse proxy)
SANDBOX_URL = REV_PROXY_URL + "/sandbox"
NEXUS_URL = REV_PROXY_URL + "/nexus"

# Filesystem's paths
CFG_OUTDIR = TALER_ROOT_DIR / "config"          # generated config files land here
TALER_RUNTIME_DIR = TALER_ROOT_DIR / "runtime"  # runtime state
TALER_DATA_DIR = TALER_ROOT_DIR / "data"        # persistent Taler data
# NOTE(review): both names alias the same path; systemd_user_dir is reused below.
TALER_UNIT_FILES_DIR = systemd_user_dir = Path.home() / ".config" / "systemd" / "user"
-
def get_link(path = ""):
    """Build an absolute URL from the X-Forwarded-* base, plus *path*."""
    return f"{x_forwarded_proto}://{x_forwarded_host}{path}"
-
def create_tip_reserve():
    """Create a tip reserve for the survey instance and fund it.

    First asks the merchant backend to set up the reserve (which yields
    the reserve's payto-URI), then wires 20 currency units to it from
    the survey's Sandbox account.
    """
    reserve_payto = Command(
        [
            f"{TALER_PREFIX}/bin/taler-merchant-setup-reserve",
            "--amount", f"{CURRENCY}:20",
            "--exchange-url", get_link("/exchange/"),
            "--merchant-url", get_link("/merchant-backend/instances/survey/"),
            "--apikey", f"Bearer {FRONTENDS_API_TOKEN}",
            "--wire-method", WIRE_METHOD,
        ],
        capture_stdout=True,
    ).run()
    survey_env = get_sandbox_cli_env(
        username = "sandbox-account-survey",
        password = ALL_INSTANCES_BANK_PASSWORD,
    )
    Command(
        [
            f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox", "--sandbox-url",
            SANDBOX_URL + "/demobanks/default/", "demobank",
            "new-transaction", "--bank-account", "sandbox-account-survey",
            "--payto-with-subject", reserve_payto, "--amount", "20",
        ],
        env = survey_env,
    ).run()
-
def get_random_iban():
    """Return a random toy German IBAN: "DE" + 2 check digits + 4-digit BBAN.

    The check digits follow the ISO 13616 mod-97 scheme; 131400 is the
    numeric rendering of "DE00" (D=13, E=14).
    """
    country_numeric = 131400  # is "DE00"
    bban = "".join(random.choices("0123456789", k=4))
    remainder = int(f"{bban}{country_numeric}") % 97
    check = 98 - remainder
    # check is always <= 98, so two digits (zero-padded) suffice.
    return "DE" + f"{check:02d}" + bban
-
# IBANs
# NOTE(review): these two IBANs are generated but not referenced in the
# visible portion of the script — presumably consumed further below.
IBAN_MERCHANT_DEFAULT = get_random_iban()
IBAN_MERCHANT_DEMOSHOP = get_random_iban()

# Merchant backend instances to create; entries with isPublic=True get a
# publicly visible Sandbox bank account (see prepare_sandbox_account).
INSTANCES = [
    dict(name="GNUnet", isPublic=True),
    dict(name="Taler", isPublic=True),
    dict(name="Tor", isPublic=True),
    dict(name="survey"),
    dict(name="blog"),
]

# Credentials / API keys (demo-grade secrets; not for production use)
EXCHANGE_NEXUS_USERNAME = "exchange-nexus-user"
EXCHANGE_NEXUS_PASSWORD = "exchange-nexus-password"
FRONTENDS_API_TOKEN = "secret-token:secret"
TALER_MERCHANT_TOKEN = "secret-token:secret"
ALL_INSTANCES_BANK_PASSWORD = "secret"
EXCHANGE_BANK_ACCOUNT_SANDBOX = "sandbox-account-exchange"
EXCHANGE_BANK_ACCOUNT_PASSWORD = "secret"

# EBICS identifiers used to create the Sandbox EBICS host and subscribers.
EBICS_HOST_ID = "ebicsDeployedHost"
EXCHANGE_EBICS_USER_ID = "exchangeEbicsUserId"
EXCHANGE_EBICS_PARTNER_ID = "exchangeEbicsPartnerId"
EBICS_URL = REV_PROXY_URL + "/sandbox/ebicsweb"

# euFin
EXCHANGE_BANK_ACCOUNT_NEXUS = "exchange-imported-account-nexus"
EXCHANGE_BANK_CONNECTION = "exchange-ebics-connection"
NEXUS_DB_FILE = "/tmp/nexus.sqlite"
SANDBOX_DB_FILE = "/tmp/sandbox.sqlite"
EXCHANGE_FACADE_NAME = "exchange-taler-facade"
SANDBOX_ADMIN_USERNAME = "admin"
SANDBOX_ADMIN_PASSWORD = "secret"
-
class Command:
    """Run a subprocess with its output logged to <log_dir>/<name>.log.

    With capture_stdout=True, stdout is captured and returned by run()
    instead of being written to the log file; stderr always goes to the
    log file.
    """

    def __init__(
        self, cmd, env=os.environ, log_dir=LOG_DIR,
        custom_name=None, capture_stdout=False
    ):
        if len(cmd) == 0:
            fail("Command to execute was given empty.")
        # Log file name defaults to the executable's basename.
        self.name = custom_name if custom_name else basename(cmd[0])
        self.cmd = cmd
        self.capture_stdout = capture_stdout
        self.log_dir = log_dir
        self.env = env

    def run(self):
        """Execute the command and wait for it; fail() on non-zero exit.

        Returns the captured stdout (stripped) when capture_stdout is set,
        otherwise None.
        """
        self.do()
        # BUG FIX: the original called wait() before reading the stdout
        # pipe; with capture_stdout=True a child producing more output
        # than the pipe buffer holds would deadlock. communicate()
        # drains the pipe while waiting.
        stdout_data, _ = self.handle.communicate()
        self.cleanup()  # Mainly closes the log file.
        if self.handle.returncode != 0:
            fail(f"Command {self.name} failed. Logs in {self.log_dir}")
        if self.capture_stdout:
            return stdout_data.decode("utf-8").rstrip()

    def get_log_filename(self):
        """Return the path of the log file backing this command."""
        return self.log_file.name

    def cleanup(self):
        # Flush and close the log file so its content is durable.
        self.log_file.flush()
        self.log_file.close()

    def do(self):
        """Open the log file and spawn the child process (non-blocking)."""
        if not self.log_dir.is_dir():
            os.makedirs(self.log_dir)
        try:
            log_filename = self.log_dir / f"{self.name}.log"
            # Append mode: repeated runs of the same tool share one log.
            self.log_file = open(log_filename, "a+")
        except Exception as error:
            fail(f"Could not open log file: {log_filename}: {error}")
        try:
            self.handle = Popen(
                self.cmd,  # list
                stdin=DEVNULL,
                stdout=self.log_file if not self.capture_stdout else PIPE,
                stderr=self.log_file,
                env=self.env
            )
        except Exception as error:
            fail(f"Could not execute: {' '.join(self.cmd)}: {error}")
-
class ConfigFile:
    """In-memory INI-style configuration, preserving insertion order."""

    def __init__(self, filename):
        # Maps section name -> OrderedDict of key -> value.
        self.sections = OrderedDict()
        self.filename = filename

    def destroy(self):
        """Drop all accumulated sections."""
        del self.sections
        self.sections = OrderedDict()

    def cfg_put(self, section_name, key, value):
        """Set *key* = *value* in *section_name*, creating the section."""
        s = self.sections[section_name] = self.sections.get(section_name, OrderedDict())
        s[key] = value

    def cfg_write(self, outdir):
        """Serialize to <outdir>/<filename>, or to stdout if outdir is falsy."""
        if outdir:
            if not os.path.isdir(outdir):
                os.makedirs(outdir)
            fstream = open(os.path.join(outdir, self.filename), "w")
            owns_stream = True
        else:
            # BUG FIX: the original did open(sys.stdout), which raises
            # TypeError (open() takes a path, not a file object). Write
            # directly to stdout and do not close it.
            fstream = sys.stdout
            owns_stream = False

        for section_name, section in self.sections.items():
            fstream.write("[" + section_name + "]" + "\n")
            for key, value in section.items():
                fstream.write(key + " = " + value + "\n")
            fstream.write("\n")
        if owns_stream:
            fstream.close()
-
def config_specify_master_pub(
    filename,
    currency,
    exchange_master_pub
):
    """Write the exchange's offline master public key into *filename*.

    The key must appear in two sections: the exchange's own section and
    the merchant's per-exchange trust section.
    """
    taler_config = f"{TALER_PREFIX}/bin/taler-config"
    targets = (
        ("exchange", "master_public_key"),
        (f"merchant-exchange-{currency}", "master_key"),
    )
    for section, option in targets:
        Command([
            taler_config, "-c", filename,
            "-s", section, "-o", option,
            "-V", exchange_master_pub
        ]).run()
-
# When called, there is no exchange master pub yet.
# taler-exchange-offline will produce the key _after_
# taler.conf is generated. Only after that, we'll
# specify the master key where it is missing; namely
# in the merchant backend and exchange HTTP daemon sections.
-
def config_main(
    filename,
    outdir,
    unix_sockets_dir,
    currency,
    rev_proxy_url,
    wire_method,
    exchange_wire_gateway_username,
    exchange_wire_gateway_password,
    frontend_api_key,
    taler_runtime_dir,
    postgres_db_name
):
    """Generate the main taler.conf and write it into *outdir*.

    Covers bank, demo frontends, merchant, auditor, exchange (including
    its secmod helpers), database connections and the coin denominations.
    Returns the ConfigFile object.

    NOTE(review): wire_method, exchange_wire_gateway_username and
    exchange_wire_gateway_password are accepted but unused here; kept
    so the caller's invocation stays valid.
    """
    def coin(
        obj,
        currency,
        name,
        value,
        d_withdraw="3 years",
        d_spend="5 years",
        d_legal="10 years",
        f_withdraw="0.01",
        f_deposit="0.01",
        f_refresh="0.01",
        f_refund="0.01",
        rsa_keysize="2048",
    ):
        # Add one RSA denomination section with the given value and fees.
        sec = "coin_" + currency + "_" + name
        obj.cfg_put(sec, "cipher", "RSA")
        obj.cfg_put(sec, "value", currency + ":" + value)
        obj.cfg_put(sec, "duration_withdraw", d_withdraw)
        obj.cfg_put(sec, "duration_spend", d_spend)
        obj.cfg_put(sec, "duration_legal", d_legal)
        obj.cfg_put(sec, "fee_withdraw", currency + ":" + f_withdraw)
        obj.cfg_put(sec, "fee_refresh", currency + ":" + f_refresh)
        obj.cfg_put(sec, "fee_refund", currency + ":" + f_refund)
        obj.cfg_put(sec, "fee_deposit", currency + ":" + f_deposit)
        obj.cfg_put(sec, "rsa_keysize", rsa_keysize)

    # BUG FIX: the filename parameter used to be ignored ("taler.conf"
    # was hard-coded). Honor it; the only caller passes "taler.conf".
    obj = ConfigFile(filename)
    obj.cfg_put("paths", "TALER_DATA_HOME", str(TALER_DATA_DIR))
    if not taler_runtime_dir.is_dir():
        os.makedirs(taler_runtime_dir)
    obj.cfg_put("paths", "TALER_RUNTIME_DIR", str(taler_runtime_dir))
    obj.cfg_put("taler", "CURRENCY", currency)
    obj.cfg_put("taler", "CURRENCY_ROUND_UNIT", f"{currency}:0.01")

    obj.cfg_put("bank", "serve", "uwsgi")
    obj.cfg_put("bank", "uwsgi_serve", "unix")
    obj.cfg_put("bank", "uwsgi_unixpath", str(unix_sockets_dir / "bank.sock"))
    obj.cfg_put("bank", "uwsgi_unixpath_mode", "660")
    # BUG FIX: removed dead cfg_put("bank", "database", "taler") — it
    # was unconditionally overwritten by the postgres URI below.
    obj.cfg_put("bank", "max_debt", "%s:500.0" % currency)
    obj.cfg_put("bank", "max_debt_bank", "%s:1000000000.0" % currency)
    obj.cfg_put("bank", "allow_registrations", "YES")
    obj.cfg_put("bank", "base_url", rev_proxy_url + "/bank/")
    obj.cfg_put("bank", "database", f"postgres:///{postgres_db_name}")
    obj.cfg_put("bank", "suggested_exchange", rev_proxy_url + "/exchange/")

    obj.cfg_put("donations", "serve", "http")
    obj.cfg_put("donations", "http_serve", "unix")
    obj.cfg_put("donations", "http_unixpath", str(unix_sockets_dir / "donations.sock"))
    obj.cfg_put("donations", "http_unixpath_mode", "660")

    obj.cfg_put("landing", "serve", "http")
    obj.cfg_put("landing", "http_serve", "unix")
    obj.cfg_put("landing", "http_unixpath", str(unix_sockets_dir / "landing.sock"))
    obj.cfg_put("landing", "http_unixpath_mode", "660")

    obj.cfg_put("blog", "serve", "http")
    obj.cfg_put("blog", "http_serve", "unix")
    obj.cfg_put("blog", "http_unixpath", str(unix_sockets_dir / "blog.sock"))
    obj.cfg_put("blog", "http_unixpath_mode", "660")

    obj.cfg_put("survey", "serve", "http")
    obj.cfg_put("survey", "http_serve", "unix")
    obj.cfg_put("survey", "http_unixpath", str(unix_sockets_dir / "survey.sock"))
    obj.cfg_put("survey", "http_unixpath_mode", "660")
    obj.cfg_put("survey", "bank_password", "x")

    obj.cfg_put("merchant", "serve", "unix")
    obj.cfg_put("merchant", "unixpath", str(unix_sockets_dir / "merchant-backend.sock"))
    obj.cfg_put("merchant", "wire_transfer_delay", "0 s")
    obj.cfg_put("merchant", "default_max_wire_fee", currency + ":" + "0.01")
    obj.cfg_put("merchant", "default_max_deposit_fee", currency + ":" + "0.05")
    obj.cfg_put("merchantdb-postgres", "config", f"postgres:///{postgres_db_name}")

    obj.cfg_put("frontends", "backend", rev_proxy_url + "/merchant-backend/")
    obj.cfg_put(
        "merchant-exchange-{}".format(currency),
        "exchange_base_url", rev_proxy_url + "/exchange/",
    )
    obj.cfg_put(
        "merchant-exchange-{}".format(currency),
        "currency", currency
    )
    obj.cfg_put("auditor", "serve", "unix")
    # FIXME: both below used?
    obj.cfg_put("auditor", "base_url", rev_proxy_url + "/auditor")
    obj.cfg_put("auditor", "auditor_url", rev_proxy_url + "/auditor")
    obj.cfg_put("auditor", "unixpath", str(unix_sockets_dir / "auditor.sock"))
    obj.cfg_put("auditor", "tiny_amount", currency + ":0.01")

    # Security-module helper sockets and private keys.
    obj.cfg_put(
        "taler-exchange-secmod-eddsa",
        "unixpath",
        str(unix_sockets_dir / "exchange-secmod-eddsa.sock")
    )
    obj.cfg_put(
        "taler-exchange-secmod-cs",
        "unixpath",
        str(unix_sockets_dir / "exchange-secmod-cs.sock")
    )
    obj.cfg_put("taler-exchange-secmod-cs", "sm_priv_key",
        "${TALER_DATA_HOME}/taler-exchange-secmod-cs/secmod-private-key"
    )
    obj.cfg_put(
        "taler-exchange-secmod-rsa",
        "unixpath",
        str(unix_sockets_dir / "exchange-secmod-rsa.sock")
    )
    obj.cfg_put("taler-exchange-secmod-rsa", "sm_priv_key",
        "${TALER_DATA_HOME}/taler-exchange-secmod-rsa/secmod-private-key"
    )
    obj.cfg_put("exchange", "base_url", rev_proxy_url + "/exchange/")
    obj.cfg_put("exchange", "serve", "unix")
    obj.cfg_put("exchange", "unixpath", str(unix_sockets_dir / "exchange.sock"))
    obj.cfg_put("exchange", "terms_etag", "0")
    obj.cfg_put("exchange", "terms_dir", "$HOME/.local/share/taler-exchange/tos")
    obj.cfg_put("exchange", "privacy_etag", "0")
    obj.cfg_put("exchange", "privacy_dir", "$HOME/.local/share/taler-exchange/pp")
    obj.cfg_put("exchangedb-postgres", "config", f"postgres:///{postgres_db_name}")
    obj.cfg_put("auditordb-postgres", "db_conn_str", f"postgres:///{postgres_db_name}")
    obj.cfg_put("auditordb-postgres", "config", f"postgres:///{postgres_db_name}")
    obj.cfg_put("exchange-account-1", "enable_debit", "yes")
    obj.cfg_put("exchange-account-1", "enable_credit", "yes")
    obj.cfg_put("merchant-account-merchant",
        "wire_response",
        "${TALER_DATA_HOME}/merchant/wire/merchant.json",
    )
    obj.cfg_put("merchant-account-merchant", "wire_file_mode", "770")
    # The demo shops prepend the "Bearer " part. NOTE: should
    # this be changed to match 'sync', since it expects also the
    # "Bearer " part?
    obj.cfg_put("frontends", "backend_apikey", frontend_api_key)
    coin(obj, currency, "ct_10", "0.10")
    coin(obj, currency, "1", "1")
    coin(obj, currency, "2", "2")
    coin(obj, currency, "5", "5")
    coin(obj, currency, "10", "10")
    coin(obj, currency, "1000", "1000")
    obj.cfg_write(outdir)
    return obj
-
def config_sync(
    filename, outdir,
    unix_sockets_dir,
    currency, api_key,
    rev_proxy_url,
    postgres_db_name
):
    """Generate the configuration file for the sync (backup) service."""
    cfg = ConfigFile(filename)
    cfg.cfg_put("taler", "currency", currency)
    sync_options = (
        ("serve", "unix"),
        ("unixpath", str(unix_sockets_dir / "sync.sock")),
        ("api_key", f"Bearer {api_key}"),
        ("annual_fee", f"{currency}:0.1"),
        ("fulfillment_url", "taler://fulfillment-success/"),
        ("payment_backend_url", rev_proxy_url + "/merchant-backend/instances/Taler/"),
    )
    for option, value in sync_options:
        cfg.cfg_put("sync", option, value)
    cfg.cfg_put("syncdb-postgres", "config", f"postgres:///{postgres_db_name}")
    cfg.cfg_write(outdir)
-
def unit_file_content(description, cmd, env=None):
    """Render a systemd user unit whose output is appended to LOG_DIR.

    The log file is named after the executable's basename; an optional
    EnvironmentFile line is added when *env* is given.
    """
    executable_name = cmd.split(" ")[0].split("/")[-1]
    log_target = f"{LOG_DIR / executable_name}.log"
    lines = [
        "[Unit]",
        f"Description={description}",
        "[Service]",
        f"ExecStart={cmd}",
        f"StandardOutput=append:{log_target}",
        f"StandardError=append:{log_target}",
    ]
    if env:
        lines.append(f"EnvironmentFile={env}")
    return "\n".join(lines)
-
-
# --- Pre-flight: refuse to run while a previous deployment is alive. ---
print_nn("Ensure no service is running...")
if is_serving(REV_PROXY_URL + "/", tries=3):
    fail("Reverse proxy is unexpectedly running!")
if UNIX_SOCKETS_DIR.is_dir():
    for left_socket in os.listdir(UNIX_SOCKETS_DIR):
        # NOTE(review): the probe socket is never closed; harmless for a
        # short-lived script but a context manager would be cleaner.
        s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        socket_file = str(UNIX_SOCKETS_DIR / left_socket)
        if s.connect_ex(socket_file.encode("utf-8")) == 0:
            fail(f"A service is unexpectedly running and bound to {socket_file}!")
print(" OK")

# --- Wipe any state left over from a previous run. ---
print_nn("Remove stale data and config...")
if TALER_DATA_DIR.exists():
    shutil.rmtree(TALER_DATA_DIR)
if TALER_RUNTIME_DIR.exists():
    shutil.rmtree(TALER_RUNTIME_DIR)
if CFG_OUTDIR.exists():
    shutil.rmtree(CFG_OUTDIR)
print(" OK")

# --- Generate taler.conf (master pub gets patched in later). ---
print_nn("Generate preliminary taler.conf...")
mc = config_main(
    "taler.conf",
    outdir=CFG_OUTDIR,
    unix_sockets_dir=UNIX_SOCKETS_DIR,
    currency=CURRENCY,
    rev_proxy_url=get_link(), # Gets X-Forwarded-* compatible base URL.
    wire_method=WIRE_METHOD,
    exchange_wire_gateway_username=EXCHANGE_NEXUS_USERNAME,
    exchange_wire_gateway_password=EXCHANGE_NEXUS_PASSWORD,
    frontend_api_key=FRONTENDS_API_TOKEN,
    taler_runtime_dir=TALER_RUNTIME_DIR,
    postgres_db_name=postgres_db_name
)
print(" OK")
-
# --- Write systemd user units for all Taler services. ---
print_nn("Installing SystemD unit files...")
if not systemd_user_dir.exists():
    systemd_user_dir.mkdir(parents=True, exist_ok=True)

# NOTE(review): TALER_UNIT_FILES_DIR aliases systemd_user_dir, so this
# second check is redundant but harmless.
if not TALER_UNIT_FILES_DIR.exists():
    TALER_UNIT_FILES_DIR.mkdir(parents=True, exist_ok=True)

# Internal redirect of X-Forwarded-Host's port
# to the port Nginx binds to. Allows clients
# connecting from within a container to still
# reach services at X-Forwarded-Host.
try:
    x_forwarded_port = x_forwarded_host.split(":")[1]
except IndexError:
    x_forwarded_port = None

# NOTE(review): x_forwarded_port is a str; if REV_PROXY_PORT is an int
# this inequality is always True — confirm REV_PROXY_PORT's type.
need_redirect = (x_forwarded_port) and (x_forwarded_port != REV_PROXY_PORT)
with open(TALER_UNIT_FILES_DIR / "taler-local-port-redirect.service", "w") as port_redirect_unit:
    port_redirect_unit.write(unit_file_content(
        description = "Port redirect allowing configuration at X-Forwarded-Host",
        cmd = f"socat TCP4-LISTEN:{x_forwarded_port},fork TCP4:{REV_PROXY_NETLOC}" if need_redirect else "true",
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-httpd.service", "w") as exchange_unit:
    exchange_unit.write(unit_file_content(
        description = "Taler Exchange HTTP daemon",
        cmd = f"{TALER_PREFIX}/bin/taler-exchange-httpd -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-wirewatch.service", "w") as exchange_wirewatch_unit:
    exchange_wirewatch_unit.write(unit_file_content(
        description = "Taler Exchange Wirewatch",
        cmd = f"{TALER_PREFIX}/bin/taler-exchange-wirewatch -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-aggregator.service", "w") as exchange_aggregator_unit:
    exchange_aggregator_unit.write(unit_file_content(
        description = "Taler Exchange Aggregator",
        cmd = f"{TALER_PREFIX}/bin/taler-exchange-aggregator --kyc-off -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-transfer.service", "w") as exchange_transfer_unit:
    exchange_transfer_unit.write(unit_file_content(
        description = "Taler Exchange Transfer",
        cmd = f"{TALER_PREFIX}/bin/taler-exchange-transfer -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-cs.service", "w") as exchange_cs_unit:
    exchange_cs_unit.write(unit_file_content(
        description = "Taler Exchange CS security module",
        cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-cs -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
    ))

with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-rsa.service", "w") as exchange_rsa_unit:
    exchange_rsa_unit.write(unit_file_content(
        description = "Taler Exchange RSA security module",
        cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-rsa -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-eddsa.service", "w") as exchange_eddsa_unit:
    exchange_eddsa_unit.write(unit_file_content(
        description = "Taler Exchange EDDSA security module",
        cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-eddsa -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-merchant-backend.service", "w") as merchant_unit:
    merchant_unit.write(unit_file_content(
        description = "Taler Merchant backend",
        cmd = f"{TALER_PREFIX}/bin/taler-merchant-httpd -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-sync.service", "w") as sync_unit:
    sync_unit.write(unit_file_content(
        description = "Taler Sync",
        cmd = f"{TALER_PREFIX}/bin/sync-httpd -L DEBUG -c {CFG_OUTDIR / 'sync.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-merchant-backend-token.service", "w") as merchant_token_unit:
    merchant_token_unit.write(unit_file_content(
        description = "Taler Merchant backend with auth token to allow default instance creation.",
        cmd = f"{TALER_PREFIX}/bin/taler-merchant-httpd -a {TALER_MERCHANT_TOKEN} -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
    ))
# Custom Postgres connection: the env file referenced above is only
# written (and only referenced) when PGPORT is set.
if os.environ.get("PGPORT"):
    with open(TALER_UNIT_FILES_DIR / "taler-local-postgres.env", "w") as postgres_env:
        postgres_env.write(f"PGPORT={os.environ.get('PGPORT')}")
# euFin unit files.
with open(TALER_UNIT_FILES_DIR / "taler-local-sandbox.service", "w") as sandbox_unit:
    sandbox_unit.write(unit_file_content(
        description = "euFin Sandbox",
        cmd = f"{TALER_PREFIX}/bin/libeufin-sandbox serve --with-unix-socket {UNIX_SOCKETS_DIR / 'sandbox.sock'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-sandbox.env"
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-nexus.service", "w") as nexus_unit:
    nexus_unit.write(unit_file_content(
        description = "euFin Nexus",
        cmd = f"{TALER_PREFIX}/bin/libeufin-nexus serve --with-unix-socket {UNIX_SOCKETS_DIR / 'nexus.sock'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-nexus.env"
    ))
# euFin env files (DB connection plus TALER_ENV_* URLs for the UI).
with open(TALER_UNIT_FILES_DIR / "taler-local-sandbox.env", "w") as sandbox_env:
    sandbox_env.write(f"LIBEUFIN_SANDBOX_DB_CONNECTION=jdbc:sqlite:{SANDBOX_DB_FILE}\n")
    sandbox_env.write(f"LIBEUFIN_SANDBOX_ADMIN_PASSWORD={SANDBOX_ADMIN_PASSWORD}\n")
    sandbox_env.write(f"TALER_ENV_URL_INTRO={get_link('/landing/')}\n")
    sandbox_env.write(f"TALER_ENV_URL_BANK={get_link('/sandbox/demobanks/default')}\n")
    sandbox_env.write(f"TALER_ENV_URL_MERCHANT_BLOG={get_link('/blog/')}\n")
    sandbox_env.write(f"TALER_ENV_URL_MERCHANT_DONATIONS={get_link('/donations/')}\n")
    sandbox_env.write(f"TALER_ENV_URL_MERCHANT_SURVEY={get_link('/survey/')}\n")

with open(TALER_UNIT_FILES_DIR / "taler-local-nexus.env", "w") as nexus_env:
    nexus_env.write(f"LIBEUFIN_NEXUS_DB_CONNECTION=jdbc:sqlite:{NEXUS_DB_FILE}\n")
    nexus_env.write((
        f"TALER_ENV_URL_INTRO={get_link('/landing/')}\n"
        f"TALER_ENV_URL_BANK={get_link('/sandbox/demobanks/default')}\n"
        f"TALER_ENV_URL_MERCHANT_BLOG={get_link('/blog/')}\n"
        f"TALER_ENV_URL_MERCHANT_DONATIONS={get_link('/donations/')}\n"
        f"TALER_ENV_URL_MERCHANT_SURVEY={get_link('/survey/')}\n"
    ))
# Demo frontend units; all share the frontends env file written below.
with open(TALER_UNIT_FILES_DIR / "taler-local-donations.service", "w") as donations_unit:
    donations_unit.write(unit_file_content(
        description = "Donation Website that accepts Taler payments.",
        cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos donations -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-blog.service", "w") as blog_unit:
    blog_unit.write(unit_file_content(
        description = "Blog that accepts Taler payments.",
        cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos blog -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-survey.service", "w") as survey_unit:
    survey_unit.write(unit_file_content(
        description = "Survey Website awarding tips via Taler.",
        cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos survey -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-landing.service", "w") as landing_unit:
    landing_unit.write(unit_file_content(
        description = "Landing Website of Taler demo.",
        cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos landing -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-frontends.env", "w") as frontends_env:
    frontends_env.write((
        f"PATH={os.environ.get('PATH')}\n"
        f"TALER_CONFIG_FILE={CFG_OUTDIR / 'taler.conf'}\n"
        f"TALER_ENV_URL_INTRO={get_link('/landing/')}\n"
        f"TALER_ENV_URL_BANK={get_link('/sandbox/demobanks/default/')}\n"
        f"TALER_ENV_URL_MERCHANT_BLOG={get_link('/blog/')}\n"
        f"TALER_ENV_URL_MERCHANT_DONATIONS={get_link('/donations/')}\n"
        f"TALER_ENV_URL_MERCHANT_SURVEY={get_link('/survey/')}\n"
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-nginx.service", "w") as nginx_unit:
    nginx_unit.write(unit_file_content(
        description = "Nginx: reverse proxy for taler-local.",
        cmd = f"nginx -c {CFG_OUTDIR / 'nginx.conf'}",
    ))
print(" OK")
# --- Load the new units, generate keys, write derived configs. ---
print_nn("Reload SystemD...")
Command(["systemctl", "--user", "daemon-reload"]).run()
# Stop every taler-local-* unit when this script exits.
atexit.register(lambda: subprocess.run(
    ["systemctl", "--user", "stop", "taler-local-*.service"],
    check=True
    )
)
print(" OK")
print_nn("Generate exchange's master key...")
# "setup" prints the master public key on stdout.
EXCHANGE_MASTER_PUB = Command(
    [
        f"{TALER_PREFIX}/bin/taler-exchange-offline",
        "-c", CFG_OUTDIR / "taler.conf",
        "setup"
    ],
    capture_stdout=True
).run()
print(" OK")
print_nn("Specify exchange master pub in taler.conf...")
config_specify_master_pub(
    CFG_OUTDIR / "taler.conf",
    CURRENCY,
    EXCHANGE_MASTER_PUB
)
print(" OK")
print_nn("Generating sync.conf...")
config_sync(
    "sync.conf",
    outdir=CFG_OUTDIR,
    unix_sockets_dir=UNIX_SOCKETS_DIR,
    currency=CURRENCY,
    api_key=FRONTENDS_API_TOKEN,
    rev_proxy_url=get_link(),
    postgres_db_name=postgres_db_name
)
print(" OK")
print_nn("Reset and init exchange DB..")
Command([
    f"{TALER_PREFIX}/bin/taler-exchange-dbinit",
    "-c", CFG_OUTDIR / "taler.conf",
    "--reset"]
).run()
print(" OK")
# --- Start the port redirect and the Nginx reverse proxy. ---
print_nn("Launching X-Forwarded-Host port redirect...")
subprocess.run(["systemctl", "--user", "start", "taler-local-port-redirect.service"], check=True)
time.sleep(1)
print(" OK")
print_nn("Launching the reverse proxy...")
# Minimal standalone nginx config: every /<component>/<rest> request is
# proxied to the matching Unix domain socket in UNIX_SOCKETS_DIR.
with open(CFG_OUTDIR / "nginx.conf", "w") as nginx_conf:
    nginx_conf.write((
        f"error_log {LOG_DIR / 'nginx.log'};\n"
        f"pid {TALER_ROOT_DIR / 'nginx.pid'};\n"
        "daemon off;\n"
        "events {}\n"
        "http {\n"
        f"access_log {LOG_DIR / 'nginx.log'};\n"
        "server {\n"
        f"listen {REV_PROXY_PORT};\n"
        f"listen [::]:{REV_PROXY_PORT};\n"
        "location / {\n"
        "return 200 'Hello, I am Nginx - proxying taler-local\n';\n"
        "}\n"
        # NOTE(review): "\-" in this plain string literal is an invalid
        # escape sequence (DeprecationWarning on newer CPythons); a raw
        # string would be cleaner. Left unchanged here.
        "location ~* ^/(?<component>[a-z\-]+)(/(?<taler_uri>.*))? {\n"
        "proxy_redirect off;\n"
        "proxy_set_header X-Forwarded-Prefix /$component;\n"
        f"proxy_set_header X-Forwarded-Host {x_forwarded_host};\n"
        f"proxy_set_header X-Forwarded-Proto {x_forwarded_proto};\n"
        f"client_body_temp_path /tmp/taler-local-nginx;\n"
        f"proxy_pass http://unix:{UNIX_SOCKETS_DIR}/$component.sock:/$taler_uri?$args;\n"
        "}\n"
        "}\n"
        "}\n"
    ))
subprocess.run(["systemctl", "--user", "start", "taler-local-nginx.service"], check=True)
if not is_serving(REV_PROXY_URL + "/"):
    fail(f"Reverse proxy did not start correctly")
# Do check.
print(" OK")
# --- Start secmod helpers, then the exchange, then sign key material. ---
print_nn("Launching the exchange RSA helper...")
subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-rsa.service"])
print(" OK")
print_nn("Launching the exchange EDDSA helper...")
subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-eddsa.service"])
print(" OK")
print_nn("Launching the exchange CS helper...")
subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-cs.service"])
print(" OK")
print_nn("Launching the exchange...")
subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-httpd.service"])
if not is_serving(REV_PROXY_URL + "/exchange/"):
    fail(f"Exchange did not start correctly.")
print(" OK")
print_nn("exchange-offline: signing key material...")
Command([
    f"{TALER_PREFIX}/bin/taler-exchange-offline",
    "-c", CFG_OUTDIR / "taler.conf",
    "download", "sign", "upload"
]).run()
print(" OK")
# Set up wire fees for next 5 years
NOW = datetime.now()
YEAR = NOW.year
print_nn("Setting wire fees for the next 5 years...")
for year in range(YEAR, YEAR+5):
    # The three amounts are the wire/closing/wad fees accepted by the
    # "wire-fee" subcommand, all set to 0.01 here.
    Command(
        [
            f"{TALER_PREFIX}/bin/taler-exchange-offline",
            "-c", CFG_OUTDIR / "taler.conf",
            "wire-fee",
            str(year),
            WIRE_METHOD,
            CURRENCY + ":0.01",
            CURRENCY + ":0.01",
            CURRENCY + ":0.01",
            "upload"
        ],
        custom_name="set-wire-fee"
    ).run()
print(" OK")
print_nn("Reset and init auditor DB..")
Command([
    f"{TALER_PREFIX}/bin/taler-auditor-dbinit",
    "-c", CFG_OUTDIR / "taler.conf",
    "--reset"]
).run()
print(" OK")
print_nn("Add this exchange to the auditor...")
Command(
    [
        f"{TALER_PREFIX}/bin/taler-auditor-exchange",
        "-c", CFG_OUTDIR / "taler.conf",
        "-m", EXCHANGE_MASTER_PUB,
        "-u", REV_PROXY_URL + "/exchange/"
    ],
).run()
print(" OK")
## Step 4: Set up euFin
print_nn("Resetting euFin databases...")
# Missing files are fine (first run); any other OSError is fatal.
try:
    remove(SANDBOX_DB_FILE)
    remove(NEXUS_DB_FILE)
except OSError as error:
    if error.errno != errno.ENOENT:
        raise error
print(" OK")
# Make the 'default' demobank at Sandbox.
# NOTE(review): an earlier comment said "No signup bonus", but the
# command passes --with-signup-bonus — the flag is what actually runs.
Command([
    f"{TALER_PREFIX}/bin/libeufin-sandbox",
    "config", "--currency", CURRENCY, "--with-signup-bonus", "default"],
    env={
        "PATH": os.environ["PATH"],
        "LIBEUFIN_SANDBOX_DB_CONNECTION": f"jdbc:sqlite:{SANDBOX_DB_FILE}"
    }).run()
# This step transparently creates a default demobank.
print_nn("Launching Sandbox...")
subprocess.run(["systemctl", "--user", "start", "taler-local-sandbox.service"])
if not is_serving(SANDBOX_URL):
    fail(f"Sandbox did not start correctly.")
print(" OK")
print_nn("Make Sandbox EBICS host...")
Command(
    [
        f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
        "--sandbox-url", SANDBOX_URL,
        "ebicshost", "create",
        "--host-id", EBICS_HOST_ID,
    ],
    env=get_sandbox_cli_env(
        SANDBOX_ADMIN_USERNAME,
        SANDBOX_ADMIN_PASSWORD,
    ),
    custom_name="sandbox-create-ebicshost",
).run()
print(" OK")

print_nn("Create Exchange account at Sandbox...")
prepare_sandbox_account(
    sandbox_url=SANDBOX_URL,
    ebics_host_id=EBICS_HOST_ID,
    ebics_partner_id=EXCHANGE_EBICS_PARTNER_ID,
    ebics_user_id=EXCHANGE_EBICS_USER_ID,
    person_name="Exchange Owner",
    bank_account_name=EXCHANGE_BANK_ACCOUNT_SANDBOX,
    password=EXCHANGE_BANK_ACCOUNT_PASSWORD
)
print(" OK")
print_nn("Getting exchange payto-URI from the bank...")
exchange_bank_account_info = get_sandbox_account_info(
    SANDBOX_URL,
    EXCHANGE_BANK_ACCOUNT_SANDBOX,
    EXCHANGE_BANK_ACCOUNT_PASSWORD
)
EXCHANGE_PAYTO = exchange_bank_account_info["paytoUri"]
print(" OK")
print_nn("Specify own payto-URI to exchange's configuration..")
Command([
    f"{TALER_PREFIX}/bin/taler-config", "-c", CFG_OUTDIR / 'taler.conf',
    "-s", "exchange-account-1", "-o", "payto_uri", "-V",
    EXCHANGE_PAYTO
]).run()
print(" OK")
print_nn(f"exchange-offline: enabling {EXCHANGE_PAYTO}...")
Command([
    f"{TALER_PREFIX}/bin/taler-exchange-offline",
    "-c", CFG_OUTDIR / "taler.conf",
    "enable-account", EXCHANGE_PAYTO, "upload"
]).run()
print(" OK")
-
# Give each instance a Sandbox account (note: 'default'
# won't have one, as it should typically only manage other
# instances).
for instance in INSTANCES:
    instance_id = instance["name"]
    print_nn(f"Create account of {instance_id} at Sandbox...")
    prepare_sandbox_account(
        sandbox_url=SANDBOX_URL,
        ebics_host_id=EBICS_HOST_ID,
        # EBICS is unused for shop accounts; the IDs only need uniqueness.
        ebics_partner_id="unusedMerchantEbicsPartnerId",
        ebics_user_id=f"unused{instance_id}EbicsUserId",
        person_name=f"Shop Owner of {instance_id}",
        bank_account_name=f"sandbox-account-{instance_id.lower()}",
        password=ALL_INSTANCES_BANK_PASSWORD,
        is_public=instance.get("isPublic")
    )
    print(" OK")
print_nn("Create Customer account at Sandbox...")
prepare_sandbox_account(
    sandbox_url=SANDBOX_URL,
    ebics_host_id=EBICS_HOST_ID,
    ebics_partner_id="unusedCustomerEbicsPartnerId",
    ebics_user_id="unusedCustomerEbicsUserId",
    person_name="Customer Person",
    bank_account_name=CUSTOMER_BANK_ACCOUNT,
    password=CUSTOMER_BANK_PASSWORD
)
print(" OK")
print_nn("Make Nexus superuser ...")
Command(
    [
        f"{TALER_PREFIX}/bin/libeufin-nexus", "superuser",
        EXCHANGE_NEXUS_USERNAME,
        "--password", EXCHANGE_NEXUS_PASSWORD
    ],
    env=get_nexus_server_env(
        NEXUS_DB_FILE,
        NEXUS_URL
    ),
    custom_name="nexus-superuser",
).run()
print(" OK")

print_nn("Launching Nexus...")
subprocess.run(["systemctl", "--user", "start", "taler-local-nexus.service"])
if not is_serving(NEXUS_URL):
    fail(f"Nexus did not start correctly")
print(" OK")

print_nn("Create Exchange account at Nexus...")
prepare_nexus_account(
    ebics_url=EBICS_URL,
    ebics_host_id=EBICS_HOST_ID,
    ebics_partner_id=EXCHANGE_EBICS_PARTNER_ID,
    ebics_user_id=EXCHANGE_EBICS_USER_ID,
    bank_connection_name=EXCHANGE_BANK_CONNECTION,
    bank_account_name_sandbox=EXCHANGE_BANK_ACCOUNT_SANDBOX,
    bank_account_name_nexus=EXCHANGE_BANK_ACCOUNT_NEXUS,
    env=get_nexus_cli_env(
        EXCHANGE_NEXUS_USERNAME,
        EXCHANGE_NEXUS_PASSWORD,
        NEXUS_URL
    )
)
print(" OK")
-
# --- Create the Taler wire-gateway facade and discover its URL. ---
print_nn("Create Taler facade ...")
Command(
    [
        f"{TALER_PREFIX}/bin/libeufin-cli", "facades",
        "new-taler-wire-gateway-facade",
        "--currency", CURRENCY,
        "--facade-name", EXCHANGE_FACADE_NAME,
        EXCHANGE_BANK_CONNECTION,
        EXCHANGE_BANK_ACCOUNT_NEXUS
    ],
    env=get_nexus_cli_env(
        EXCHANGE_NEXUS_USERNAME,
        EXCHANGE_NEXUS_PASSWORD,
        NEXUS_URL
    ),
    custom_name="create-taler-facade",
).run()
print(" OK")
# Query Nexus for the facade's base URL (assumes the one just created
# is the first in the list).
try:
    response = requests.get(
        NEXUS_URL + "/facades",
        auth=requests.auth.HTTPBasicAuth(
            EXCHANGE_NEXUS_USERNAME,
            EXCHANGE_NEXUS_PASSWORD
        )
    )
    response.raise_for_status()
except Exception as error:
    fail(error)
FACADE_URL = response.json().get("facades")[0].get("baseUrl")
print_nn("Set suggested exchange at Sandbox...")
Command([
    f"{TALER_PREFIX}/bin/libeufin-sandbox",
    "default-exchange",
    get_link('/exchange/'),
    EXCHANGE_PAYTO],
    env={
        "PATH": os.environ["PATH"],
        "LIBEUFIN_SANDBOX_DB_CONNECTION": f"jdbc:sqlite:{SANDBOX_DB_FILE}"
    }).run()
print(" OK")
-
# Point the exchange to the facade: write the wire-gateway credentials
# into the exchange-accountcredentials-1 section.
# BUG FIX: the original wrote `"-o" "OPTION"` (missing comma), fusing
# flag and option name into a single argv entry like
# "-owire_gateway_url". That only worked by accident through getopt's
# attached-argument parsing; the comma was clearly intended.
for option, value, step_name in (
    ("wire_gateway_auth_method", "basic", "specify-wire-gateway-auth-method"),
    ("wire_gateway_url", FACADE_URL, "specify-facade-url"),
    ("username", EXCHANGE_NEXUS_USERNAME, "specify-username-for-facade"),
    ("password", EXCHANGE_NEXUS_PASSWORD, "specify-password-for-facade"),
):
    Command(
        [
            f"{TALER_PREFIX}/bin/taler-config",
            "-c", CFG_OUTDIR / "taler.conf",
            "-s", "exchange-accountcredentials-1",
            "-o", option,
            "-V", value
        ],
        custom_name=step_name,
    ).run()
-
- ## Step 6: Set up merchant
-
- print_nn("Reset and init merchant database...")
- Command([
- f"{TALER_PREFIX}/bin/taler-merchant-dbinit",
- "-c", CFG_OUTDIR / "taler.conf",
- "--reset"
- ]).run()
- print(" OK")
-
- def ensure_instance(
- currency,
- instance_id,
- backend_url,
- bank_hostname,
- wire_method,
- auth_token
- ):
- auth_header = {"Authorization": f"Bearer {auth_token}"}
- resp = requests.get(
- urljoin_nodrop(backend_url, f"management/instances/{instance_id}"),
- headers = auth_header
- )
- bankaccount_info = get_sandbox_account_info(
- SANDBOX_URL,
- f"sandbox-account-{instance_id.lower()}",
- ALL_INSTANCES_BANK_PASSWORD
- )
- req = dict(
- id=instance_id,
- name=f"Name of '{instance_id}'",
- payto_uris=[bankaccount_info["paytoUri"]],
- address=dict(),
- jurisdiction=dict(),
- default_max_wire_fee=f"{currency}:1",
- default_wire_fee_amortization=3,
- default_max_deposit_fee=f"{currency}:1",
- default_wire_transfer_delay=dict(d_us=0),
- default_pay_delay=dict(d_us=24*60*60*1000000),
- auth=dict(method="token", token=auth_token),
- )
- http_method = requests.post
- endpoint = "management/instances"
-
- # Instance exists, patching it.
- if resp.status_code == 200:
- print(f"Patching instance '{instance_id}'")
- http_method = requests.patch
- endpoint = f"management/instances/{instance_id}"
-
- resp = http_method(
- urljoin_nodrop(backend_url, endpoint),
- json=req,
- headers = auth_header
- )
- if resp.status_code < 200 or resp.status_code >= 300:
- print(f"Backend responds: {resp.status_code}/{resp.text}")
- fail(f"Could not create (or patch) instance '{instance_id}'")
-
- print_nn(f"Start merchant (with TALER_MERCHANT_TOKEN into the env)...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend-token.service"], check=True)
- if not is_serving(REV_PROXY_URL + "/merchant-backend/config"):
- fail(
- f"Merchant backend did not start correctly.",
- )
- print(" OK")
- print_nn("Give default instance a bank account...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id="unusedMerchantEbicsPartnerId",
- ebics_user_id=f"unusedDefaultInstanceEbicsUserId",
- person_name=f"Shop Owner of default instance",
- bank_account_name="sandbox-account-default",
- password=ALL_INSTANCES_BANK_PASSWORD
- )
- print(" OK")
- ensure_instance(
- currency=CURRENCY,
- instance_id="default",
- backend_url = REV_PROXY_URL + "/merchant-backend",
- bank_hostname = REV_PROXY_NETLOC + "/sandbox",
- wire_method = WIRE_METHOD,
- auth_token=FRONTENDS_API_TOKEN
- )
- print_nn("Stopping the merchant with TALER_MERCHANT_TOKEN into the env...")
- subprocess.run(["systemctl", "--user", "stop", "taler-local-merchant-backend-token.service"], check=True)
- print(" OK")
- print_nn("Restarting the merchant WITHOUT the auth-token in the env...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend.service"], check=True)
- if not is_serving(REV_PROXY_URL + "/merchant-backend/config"):
- # check_running logs errors already.
- fail(f"Merchant backend did not re start correctly.")
- print(" OK")
-
- for instance in INSTANCES:
- instance_id = instance["name"]
- print_nn(f"Creating the {instance_id} instance...")
- ensure_instance(
- currency=CURRENCY,
- instance_id=instance_id,
- backend_url = REV_PROXY_URL + "/merchant-backend",
- bank_hostname = REV_PROXY_NETLOC + "/sandbox",
- wire_method = WIRE_METHOD,
- auth_token=FRONTENDS_API_TOKEN
- )
- print(" OK")
- print_nn("Creating tip reserve...")
- create_tip_reserve()
- print(" OK")
- # 1 second to let Nexus read the payment from
- # Sandbox, 1 second to let the Exchange Wirewatch
- # to read the payment from Nexus.
- print_nn("Sleep 2 seconds to let the tip reserve settle...")
- time.sleep(2)
- print(" OK")
-
- # Configure Sync.
- print_nn("Reset and init Sync DB..")
- Command([
- f"{TALER_PREFIX}/bin/sync-dbinit",
- "-c", CFG_OUTDIR / "sync.conf",
- "--reset"]
- ).run()
- print(" OK")
- subprocess.run(
- ["systemctl", "--user", "stop", "taler-local-*.service"],
- check=True
- )
-
-@cli.command()
-def launch():
- subprocess.run(["systemctl", "--user", "start", "taler-local-port-redirect.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-nginx.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-rsa.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-eddsa.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-cs.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-httpd.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-wirewatch.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-aggregator.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-transfer.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-sync.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-sandbox.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-nexus.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-donations.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-blog.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-survey.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-landing.service"], check=True)
-
- print((
- "\n"
- "Taler launched!\n\n"
- f"Serving {REV_PROXY_URL + '/$service'}\n\n"
- "Services:\n"
- " - landing\n"
- " - exchange\n"
- " - merchant-backend\n"
- " - sandbox\n"
- " - nexus\n"
- " - blog\n"
- " - survey\n"
- " - donations\n"
- ))
-
-@cli.command()
-def stop():
- subprocess.run(["systemctl", "--user", "stop", "taler-local-*.service"], check=True)
-
-if __name__ == "__main__":
- cli()
diff --git a/bootstrap-docker/README b/bootstrap-docker/README
new file mode 100644
index 0000000..85a3e98
--- /dev/null
+++ b/bootstrap-docker/README
@@ -0,0 +1,2 @@
+Scripts to install rootless docker together with docker-compose and buildx
+plugins.
diff --git a/bootstrap-docker/bootstrap-docker.sh b/bootstrap-docker/bootstrap-docker.sh
new file mode 100755
index 0000000..601e121
--- /dev/null
+++ b/bootstrap-docker/bootstrap-docker.sh
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+
+set -eu
+
+./install-rootless-docker.sh
+./install-plugin-compose.sh
+./install-plugin-buildx.sh
diff --git a/bootstrap-docker/install-plugin-buildx.sh b/bootstrap-docker/install-plugin-buildx.sh
new file mode 100755
index 0000000..aee66ad
--- /dev/null
+++ b/bootstrap-docker/install-plugin-buildx.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+
+set -eu
+
+DOCKER_CONFIG=${DOCKER_CONFIG:-$HOME/.docker}
+mkdir -p $DOCKER_CONFIG/cli-plugins
+curl -SL https://github.com/docker/buildx/releases/download/v0.10.5/buildx-v0.10.5.linux-amd64 -o $DOCKER_CONFIG/cli-plugins/docker-buildx
+chmod +x $DOCKER_CONFIG/cli-plugins/docker-buildx
diff --git a/bootstrap-docker/install-plugin-compose.sh b/bootstrap-docker/install-plugin-compose.sh
new file mode 100755
index 0000000..bb8c8a0
--- /dev/null
+++ b/bootstrap-docker/install-plugin-compose.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+
+set -eu
+
+DOCKER_CONFIG=${DOCKER_CONFIG:-$HOME/.docker}
+mkdir -p $DOCKER_CONFIG/cli-plugins
+curl -SL https://github.com/docker/compose/releases/download/v2.18.1/docker-compose-linux-x86_64 -o $DOCKER_CONFIG/cli-plugins/docker-compose
+chmod +x $DOCKER_CONFIG/cli-plugins/docker-compose
diff --git a/bootstrap-docker/install-rootless-docker.sh b/bootstrap-docker/install-rootless-docker.sh
new file mode 100755
index 0000000..c597186
--- /dev/null
+++ b/bootstrap-docker/install-rootless-docker.sh
@@ -0,0 +1,252 @@
+#!/bin/sh
+set -e
+# Docker CE for Linux installation script (Rootless mode)
+#
+# See https://docs.docker.com/go/rootless/ for the
+# installation steps.
+#
+# This script is meant for quick & easy install via:
+# $ curl -fsSL https://get.docker.com/rootless -o get-docker.sh
+# $ sh get-docker.sh
+#
+# NOTE: Make sure to verify the contents of the script
+# you downloaded matches the contents of install.sh
+# located at https://github.com/docker/docker-install
+# before executing.
+#
+# Git commit from https://github.com/docker/docker-install when
+# the script was uploaded (Should only be modified by upload job):
+SCRIPT_COMMIT_SHA=c2de081
+
+# This script should be run with an unprivileged user and install/setup Docker under $HOME/bin/.
+
+# The channel to install from:
+# * nightly
+# * test
+# * stable
+DEFAULT_CHANNEL_VALUE="stable"
+if [ -z "$CHANNEL" ]; then
+ CHANNEL=$DEFAULT_CHANNEL_VALUE
+fi
+# The latest release is currently hard-coded.
+STABLE_LATEST="24.0.1"
+TEST_LATEST="24.0.1"
+STATIC_RELEASE_URL=
+STATIC_RELEASE_ROOTLESS_URL=
+case "$CHANNEL" in
+ "stable")
+ echo "# Installing stable version ${STABLE_LATEST}"
+ STATIC_RELEASE_URL="https://download.docker.com/linux/static/$CHANNEL/$(uname -m)/docker-${STABLE_LATEST}.tgz"
+ STATIC_RELEASE_ROOTLESS_URL="https://download.docker.com/linux/static/$CHANNEL/$(uname -m)/docker-rootless-extras-${STABLE_LATEST}.tgz"
+ ;;
+ "test")
+ echo "# Installing test version ${TEST_LATEST}"
+ STATIC_RELEASE_URL="https://download.docker.com/linux/static/$CHANNEL/$(uname -m)/docker-${TEST_LATEST}.tgz"
+ STATIC_RELEASE_ROOTLESS_URL="https://download.docker.com/linux/static/$CHANNEL/$(uname -m)/docker-rootless-extras-${TEST_LATEST}.tgz"
+ ;;
+ "nightly")
+ echo "# Installing nightly"
+ STATIC_RELEASE_URL="https://master.dockerproject.org/linux/$(uname -m)/docker.tgz"
+ STATIC_RELEASE_ROOTLESS_URL="https://master.dockerproject.org/linux/$(uname -m)/docker-rootless-extras.tgz"
+ ;;
+ *)
+ >&2 echo "Aborting because of unknown CHANNEL \"$CHANNEL\". Set \$CHANNEL to either \"stable\", \"test\", or \"nightly\"."; exit 1
+ ;;
+esac
+
+init_vars() {
+ BIN="${DOCKER_BIN:-$HOME/bin}"
+
+ DAEMON=dockerd
+ SYSTEMD=
+ if systemctl --user daemon-reload >/dev/null 2>&1; then
+ SYSTEMD=1
+ fi
+}
+
+checks() {
+ # OS verification: Linux only, point osx/win to helpful locations
+ case "$(uname)" in
+ Linux)
+ ;;
+ *)
+ >&2 echo "Rootless Docker cannot be installed on $(uname)"; exit 1
+ ;;
+ esac
+
+ # User verification: deny running as root (unless forced?)
+ if [ "$(id -u)" = "0" ] && [ -z "$FORCE_ROOTLESS_INSTALL" ]; then
+ >&2 echo "Refusing to install rootless Docker as the root user"; exit 1
+ fi
+
+ # HOME verification
+ if [ ! -d "$HOME" ]; then
+ >&2 echo "Aborting because HOME directory $HOME does not exist"; exit 1
+ fi
+
+ if [ -d "$BIN" ]; then
+ if [ ! -w "$BIN" ]; then
+ >&2 echo "Aborting because $BIN is not writable"; exit 1
+ fi
+ else
+ if [ ! -w "$HOME" ]; then
+ >&2 echo "Aborting because HOME (\"$HOME\") is not writable"; exit 1
+ fi
+ fi
+
+ # Existing rootful docker verification
+ if [ -w /var/run/docker.sock ] && [ -z "$FORCE_ROOTLESS_INSTALL" ]; then
+ >&2 echo "Aborting because rootful Docker is running and accessible. Set FORCE_ROOTLESS_INSTALL=1 to ignore."; exit 1
+ fi
+
+ # Validate XDG_RUNTIME_DIR
+ if [ ! -w "$XDG_RUNTIME_DIR" ]; then
+ if [ -n "$SYSTEMD" ]; then
+ >&2 echo "Aborting because systemd was detected but XDG_RUNTIME_DIR (\"$XDG_RUNTIME_DIR\") does not exist or is not writable"
+ >&2 echo "Hint: this could happen if you changed users with 'su' or 'sudo'. To work around this:"
+ >&2 echo "- try again by first running with root privileges 'loginctl enable-linger <user>' where <user> is the unprivileged user and export XDG_RUNTIME_DIR to the value of RuntimePath as shown by 'loginctl show-user <user>'"
+ >&2 echo "- or simply log back in as the desired unprivileged user (ssh works for remote machines)"
+ exit 1
+ fi
+ fi
+
+ # Already installed verification (unless force?). Only having docker cli binary previously shouldn't fail the build.
+ if [ -x "$BIN/$DAEMON" ]; then
+ # If rootless installation is detected print out the modified PATH and DOCKER_HOST that needs to be set.
+ echo "# Existing rootless Docker detected at $BIN/$DAEMON"
+ echo
+ echo "# To reinstall or upgrade rootless Docker, run the following commands and then rerun the installation script:"
+ echo "systemctl --user stop docker"
+ echo "rm -f $BIN/$DAEMON"
+ echo
+ echo "# Alternatively, install the docker-ce-rootless-extras RPM/deb package for ease of package management (requires root)."
+ echo "# See https://docs.docker.com/go/rootless/ for details."
+ exit 0
+ fi
+
+ INSTRUCTIONS=
+
+ # uidmap dependency check
+ if ! command -v newuidmap >/dev/null 2>&1; then
+ if command -v apt-get >/dev/null 2>&1; then
+ INSTRUCTIONS="apt-get install -y uidmap"
+ elif command -v dnf >/dev/null 2>&1; then
+ INSTRUCTIONS="dnf install -y shadow-utils"
+ elif command -v yum >/dev/null 2>&1; then
+ INSTRUCTIONS="curl -o /etc/yum.repos.d/vbatts-shadow-utils-newxidmap-epel-7.repo https://copr.fedorainfracloud.org/coprs/vbatts/shadow-utils-newxidmap/repo/epel-7/vbatts-shadow-utils-newxidmap-epel-7.repo
+yum install -y shadow-utils46-newxidmap"
+ else
+ echo "newuidmap binary not found. Please install with a package manager."
+ exit 1
+ fi
+ fi
+
+ # iptables dependency check
+ if [ -z "$SKIP_IPTABLES" ] && ! command -v iptables >/dev/null 2>&1 && [ ! -f /sbin/iptables ] && [ ! -f /usr/sbin/iptables ]; then
+ if command -v apt-get >/dev/null 2>&1; then
+ INSTRUCTIONS="${INSTRUCTIONS}
+apt-get install -y iptables"
+ elif command -v dnf >/dev/null 2>&1; then
+ INSTRUCTIONS="${INSTRUCTIONS}
+dnf install -y iptables"
+ else
+ echo "iptables binary not found. Please install with a package manager."
+ exit 1
+ fi
+ fi
+
+ # ip_tables module dependency check
+ if [ -z "$SKIP_IPTABLES" ] && ! lsmod | grep ip_tables >/dev/null 2>&1 && ! grep -q ip_tables "/lib/modules/$(uname -r)/modules.builtin"; then
+ INSTRUCTIONS="${INSTRUCTIONS}
+modprobe ip_tables"
+ fi
+
+ # debian requires setting unprivileged_userns_clone
+ if [ -f /proc/sys/kernel/unprivileged_userns_clone ]; then
+ if [ "1" != "$(cat /proc/sys/kernel/unprivileged_userns_clone)" ]; then
+ INSTRUCTIONS="${INSTRUCTIONS}
+cat <<EOT > /etc/sysctl.d/50-rootless.conf
+kernel.unprivileged_userns_clone = 1
+EOT
+sysctl --system"
+ fi
+ fi
+
+ # centos requires setting max_user_namespaces
+ if [ -f /proc/sys/user/max_user_namespaces ]; then
+ if [ "0" = "$(cat /proc/sys/user/max_user_namespaces)" ]; then
+ INSTRUCTIONS="${INSTRUCTIONS}
+cat <<EOT > /etc/sysctl.d/51-rootless.conf
+user.max_user_namespaces = 28633
+EOT
+sysctl --system"
+ fi
+ fi
+
+ if [ -n "$INSTRUCTIONS" ]; then
+ echo "# Missing system requirements. Please run following commands to
+# install the requirements and run this installer again.
+# Alternatively iptables checks can be disabled with SKIP_IPTABLES=1"
+
+ echo
+ echo "cat <<EOF | sudo sh -x"
+ echo "$INSTRUCTIONS"
+ echo "EOF"
+ echo
+ exit 1
+ fi
+
+ # validate subuid/subgid files for current user
+ if ! grep "^$(id -un):\|^$(id -u):" /etc/subuid >/dev/null 2>&1; then
+ >&2 echo "Could not find records for the current user $(id -un) from /etc/subuid . Please make sure valid subuid range is set there.
+For example:
+echo \"$(id -un):100000:65536\" >> /etc/subuid"
+ exit 1
+ fi
+ if ! grep "^$(id -un):\|^$(id -u):" /etc/subgid >/dev/null 2>&1; then
+ >&2 echo "Could not find records for the current user $(id -un) from /etc/subgid . Please make sure valid subgid range is set there.
+For example:
+echo \"$(id -un):100000:65536\" >> /etc/subgid"
+ exit 1
+ fi
+}
+
+exec_setuptool() {
+ if [ -n "$FORCE_ROOTLESS_INSTALL" ]; then
+ set -- "$@" --force
+ fi
+ if [ -n "$SKIP_IPTABLES" ]; then
+ set -- "$@" --skip-iptables
+ fi
+ (
+ set -x
+ PATH="$BIN:$PATH" "$BIN/dockerd-rootless-setuptool.sh" install "$@"
+ )
+}
+
+do_install() {
+ echo "# Executing docker rootless install script, commit: $SCRIPT_COMMIT_SHA"
+
+ init_vars
+ checks
+
+ tmp=$(mktemp -d)
+ trap 'rm -rf "$tmp"' EXIT INT TERM
+ # Download tarballs docker-* and docker-rootless-extras-*
+ (
+ cd "$tmp"
+ curl -L -o docker.tgz "$STATIC_RELEASE_URL"
+ curl -L -o rootless.tgz "$STATIC_RELEASE_ROOTLESS_URL"
+ )
+ # Extract under $HOME/bin/
+ (
+ mkdir -p "$BIN"
+ cd "$BIN"
+ tar zxf "$tmp/docker.tgz" --strip-components=1
+ tar zxf "$tmp/rootless.tgz" --strip-components=1
+ )
+
+ exec_setuptool "$@"
+}
+
+do_install "$@"
diff --git a/buildbot/bootstrap-scripts/bootstrap-walletbuilder b/buildbot/bootstrap-scripts/bootstrap-walletbuilder
deleted file mode 100755
index 8a5304c..0000000
--- a/buildbot/bootstrap-scripts/bootstrap-walletbuilder
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/bin/bash
-
-# Bootstrap the Taler setup for the user account that
-# is currently logged in.
-
-# Generates a setup for a single user,
-# including a postgresql DB.
-
-set -eu
-
-BRANCH=master
-REPOS="wallet-core"
-
-cd $HOME
-
-for component in $REPOS; do
- if ! test -d $HOME/$component; then
- git clone git://localhost/$component.git
- fi
-done
-
-for component in $REPOS; do
- echo "Checking out $component to $BRANCH"
- git -C $HOME/$component checkout $BRANCH
-done
-
-if test ! -d worker ; then
- buildbot-worker create-worker --umask=0o22 ~/worker localhost:9989 wallet-worker wallet-pass
-fi
-
-
-mkdir -p ~/.config/systemd/user/
-cp systemd-services/buildbot-worker-wallet.service ~/.config/systemd/user/
-
-systemctl --user daemon-reload || echo "Please use 'machinectl shell walletbuilder@.host' to log in to use this script"
-
-systemctl --user enable buildbot-worker-wallet.service
-systemctl --user start buildbot-worker-wallet.service
diff --git a/buildbot/build.sh b/buildbot/build.sh
index 6d99cac..79f0e60 100755
--- a/buildbot/build.sh
+++ b/buildbot/build.sh
@@ -2,14 +2,16 @@
set -eu
-echo "Running taler-deployment bootstrap"
+echo "Building the Docker base image (taler_local/taler_base)."
+# INI file with global config entries; typically
+# URLs and secrets. Not automatically generated.
+export TALER_DEPLOYMENT_CONFIG=${HOME}/deployment.conf
+export DOCKER_HOST=unix://${XDG_RUNTIME_DIR}/docker.sock
+echo "Remove data from previous builds. Volumes will be removed before restarting."
+docker system prune -a -f
-# Cannot have this here, as the activate script
-# will be made by the 'bootstrap' command.
-# source "${HOME}/activate". Currently under test.
+${HOME}/deployment/docker/demo/build_base.sh
-${HOME}/deployment/bin/taler-deployment bootstrap
-
-echo "Running taler-deployment build"
-source "${HOME}/activate"
-taler-deployment build
+echo "Building each service's image."
+cd ${HOME}/deployment/docker/demo
+docker-compose build
diff --git a/buildbot/checks.sh b/buildbot/checks.sh
index 6d2a74f..7eb331e 100755
--- a/buildbot/checks.sh
+++ b/buildbot/checks.sh
@@ -25,6 +25,7 @@ error_stringify ()
error_fmt="%s (http status code: %s)/(curl condition: %s - %s)\n"
+echo -n "Check exchange..."
URL="https://exchange.${DEPLOYMENT}.${DOMAIN}/"
http_status_code=$(curl \
-s "$URL" -o /dev/null \
@@ -36,21 +37,24 @@ if ! test 200 = $http_status_code; then
$http_status_code $? "$(error_stringify $?)"
exit 1
fi
+echo OK
-URL="http://backend.${DEPLOYMENT}.${DOMAIN}/"
+echo -n "Check merchant backend..."
+URL="https://backend.${DEPLOYMENT}.${DOMAIN}/"
http_status_code=$(curl \
-s $URL \
-o /dev/null \
-w "%{http_code}")
-if ! test 301 = $http_status_code; then
+if ! test 200 = $http_status_code; then
printf "'%s' failed\n" $URL
printf "$error_fmt" \
"Merchant backend did not restart correctly" \
$http_status_code $? "$(error_stringify $?)"
exit 1
fi
+echo OK
-
+echo -n "Check blog..."
URL="https://shop.${DEPLOYMENT}.${DOMAIN}/"
http_status_code=$(curl \
-s $URL -o /dev/null \
@@ -62,7 +66,9 @@ if ! test 302 = $http_status_code; then
$http_status_code $? "$(error_stringify $?)"
exit 1
fi
+echo OK
+echo -n "Check survey..."
URL="https://survey.${DEPLOYMENT}.${DOMAIN}/"
http_status_code=$(curl \
-s $URL -o /dev/null \
@@ -74,7 +80,9 @@ if ! test 302 = $http_status_code; then
$http_status_code $? "$(error_stringify $?)"
exit 1
fi
+echo OK
+echo -n "Check donations..."
URL="https://donations.${DEPLOYMENT}.${DOMAIN}/"
http_status_code=$(curl \
-s $URL -o /dev/null \
@@ -86,19 +94,37 @@ if ! test 302 = $http_status_code; then
$http_status_code $? "$(error_stringify $?)"
exit 1
fi
+echo OK
-URL="https://bank.${DEPLOYMENT}.${DOMAIN}/"
+echo -n "Check bank Web UI..."
+URL="https://bank.${DEPLOYMENT}.${DOMAIN}/webui/"
http_status_code=$(curl \
-s $URL -o /dev/null \
-w "%{http_code}")
-if ! test 302 = $http_status_code; then
+if ! test 200 = $http_status_code; then
+ printf "%s failed\n" $URL
+ printf "$error_fmt" \
+ "Bank did not restart correctly" \
+ $http_status_code $? "$(error_stringify $?)"
+ exit 1
+fi
+echo OK
+
+echo -n "Check libEufin (Sandbox)..."
+URL="https://bank.${DEPLOYMENT}.${DOMAIN}/demobanks/default/integration-api/config"
+http_status_code=$(curl \
+ -s $URL -o /dev/null \
+ -w "%{http_code}")
+if ! test 200 = $http_status_code; then
printf "%s failed\n" $URL
printf "$error_fmt" \
"Bank did not restart correctly" \
$http_status_code $? "$(error_stringify $?)"
exit 1
fi
+echo OK
+echo -n "Check landing page..."
URL="https://${DEPLOYMENT}.${DOMAIN}/en/index.html"
http_status_code=$(curl \
-s $URL -o /dev/null \
@@ -110,36 +136,4 @@ if ! test 200 = $http_status_code; then
$http_status_code $? "$(error_stringify $?)"
exit 1
fi
-
-
-
-if $(taler-config -s twister -o twister_deploy >& /dev/null); then
-
- for twister_url in "https://twister-backend.wild.gv.taler.net" \
- "https://twister-bank.wild.gv.taler.net" \
- "https://twister-exchange.wild.gv.taler.net"; do
- http_status_code=$(curl \
- -H "Authorization: ApiKey sandbox" \
- -s $twister_url -o /dev/null \
- -w "%{http_code}")
- if ! test 200 = $http_status_code; then
-
- if test 503 = $http_status_code; then
- printf "%s %s\n" \
- "Hit a '503 Service Unavailable' from Twister." \
- "Assuming all is correct."
- exit 0
- fi
-
- # Real failure here.
- printf "%s failed\n" $twister_url
- printf "$error_fmt" \
- "Twister did not restart correctly" \
- $http_status_code $? "$(error_stringify $?)"
- exit 1
- fi
- done
-fi
-
-
-printf "All services correctly restarted!\n"
+echo OK
diff --git a/buildbot/create_instances.sh b/buildbot/create_instances.sh
deleted file mode 100755
index c67cff6..0000000
--- a/buildbot/create_instances.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env bash
-
-# Step for the BUILD_FACTORY running the 'test.taler.net' site.
-set -eu
-
-source "${HOME}/activate"
-taler-deployment-config-instances
diff --git a/buildbot/create_tip_reserve.sh b/buildbot/create_tip_reserve.sh
deleted file mode 100755
index df756d3..0000000
--- a/buildbot/create_tip_reserve.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env bash
-
-# Step for the BUILD_FACTORY running the 'test.taler.net' site.
-set -eu
-
-source "${HOME}/activate"
-taler-deployment-config-tips
diff --git a/buildbot/linkchecker.Containerfile b/buildbot/linkchecker.Containerfile
new file mode 100644
index 0000000..d80693c
--- /dev/null
+++ b/buildbot/linkchecker.Containerfile
@@ -0,0 +1,10 @@
+FROM docker.io/library/debian:bookworm-slim
+
+ENV DEBIAN_FRONTEND=noninteractive
+
+RUN apt-get update && \
+ apt-get install -yqq \
+ linkchecker \
+&& rm -rf /var/lib/apt/lists/*
+
+COPY linkcheckerrc /root/.config/linkchecker/linkcheckerrc
diff --git a/buildbot/linkchecker.sh b/buildbot/linkchecker.sh
index 2b24f7c..24c90a0 100755
--- a/buildbot/linkchecker.sh
+++ b/buildbot/linkchecker.sh
@@ -1,11 +1,14 @@
#!/bin/bash
-
+#set -v
# Removed because wget errors with error 8 (Server issued an error response.)
#set -e
## This script will scan www.taler.net for broken links and e-mail a copy of the log if any are found.
-logfile="linkchecker.log"
+logfile="$HOME/linkchecker.log"
+wait_time="1"
+recurse_level="1"
+ignore_list="(.*)demo.taler.net(.*)\/orders\/(.*)" # appears to do *nothing*
# Remove old log
@@ -17,20 +20,21 @@ if [ -f "$logfile" ]
echo "Info: existing log file '$logfile' not found."
fi
-# Use wget to scan www.taler.net and save output
-
-echo
-echo "Running this command:"
-echo " wget --spider -r -nd -nv -H -l 1 -w 2 -o $logfile https://www.taler.net/"
-wget --spider -r -nd -nv -H -l 1 -w 2 -o $logfile https://www.taler.net/
+podman build -t linkchecker:latest -f "$HOME/deployment/buildbot/linkchecker.Containerfile" "$HOME/deployment/buildbot"
-# display logfile
-echo
-echo "Displaying contents of logfile"
-cat $logfile
+# Use wget to scan hosts and save output
+for url in "https://www.taler.net/" "https://docs.taler.net/" "https://taler-systems.com/" "https://demo.taler.net/" "https://bank.demo.taler.net/" "https://shop.demo.taler.net/" "https://donations.demo.taler.net/" ; do
+ echo -e "\n\n#############################\n## Starting check on ${url}\n#############################\n"
+ podman run --rm localhost/linkchecker:latest \
+ linkchecker \
+ --no-robots \
+ --check-extern \
+ --recursion-level="$recurse_level" \
+ "$url" | tee --append "$logfile"
+done
# Search the log for the phrase "broken link" as this is what wget will report
-if grep -iRl 'broken link!!' $logfile
+if grep -Rl 'Error' $logfile
then
echo "Found broken links. Build should fail (exit 1), triggering e-mail notification."
exit 1
diff --git a/buildbot/linkcheckerrc b/buildbot/linkcheckerrc
new file mode 100644
index 0000000..27337c1
--- /dev/null
+++ b/buildbot/linkcheckerrc
@@ -0,0 +1,306 @@
+# Sample configuration file; see the linkcheckerrc(5) man page or
+# execute linkchecker -h for help on these options.
+# Commandline options override these settings.
+
+##################### output configuration ##########################
+[output]
+# enable debug messages; see 'linkchecker -h' for valid debug names, example:
+#debug=all
+# print status output
+#status=1
+# change the logging type
+#log=text
+# turn on/off --verbose
+#verbose=0
+# turn on/off --warnings
+#warnings=1
+# turn on/off --quiet
+#quiet=0
+# additional file output, example:
+#fileoutput = text, html, gml, sql
+# errors to ignore (URL regular expression, message regular expression)
+ignoreerrors=
+ ^mailto
+ .*orders.*
+ ^https://donations.demo.taler.net/en/checkout
+ ^https://web.archive.org/web/20120118201902/http://www.gnu.org/
+ ^https://www.researchgate.net/publication/4980956_The_Case_Against_Intellectual_Property
+ ^https://shop.fsf.org/.*
+ ^https://twitter.com.*
+# ignore all errors for broken.example.com:
+# ^https?://broken.example.com/
+# ignore SSL errors for dev.example.com:
+# ^https://dev.example.com/ ^SSLError .*
+
+
+##################### logger configuration ##########################
+# logger output part names:
+# all For all parts
+# realurl The full url link
+# result Valid or invalid, with messages
+# extern 1 or 0, only in some logger types reported
+# base <base href=...>
+# name <a href=...>name</a> and <img alt="name">
+# parenturl The referrer URL if there is any
+# info Some additional info, e.g. FTP welcome messages
+# warning Warnings
+# dltime Download time
+# checktime Check time
+# url The original url name, can be relative
+# intro The blurb at the beginning, "starting at ..."
+# outro The blurb at the end, "found x errors ..."
+# stats Statistics including URL lengths and contents.
+
+# each Logger can have separate configuration parameters
+
+# standard text logger
+[text]
+#filename=linkchecker-out.txt
+#parts=all
+# colors for the various parts, syntax is <color> or <type>;<color>
+# type can be bold, light, blink, invert
+# color can be default, black, red, green, yellow, blue, purple, cyan, white,
+# Black, Red, Green, Yellow, Blue, Purple, Cyan, White
+#colorparent=default
+#colorurl=default
+#colorname=default
+#colorreal=cyan
+#colorbase=purple
+#colorvalid=bold;green
+#colorinvalid=bold;red
+#colorinfo=default
+#colorwarning=bold;yellow
+#colordltime=default
+#colorreset=default
+
+# GML logger
+[gml]
+#filename=linkchecker-out.gml
+#parts=all
+# valid encodings are listed in http://docs.python.org/library/codecs.html#standard-encodings
+# example:
+#encoding=utf_16
+
+# DOT logger
+[dot]
+#filename=linkchecker-out.dot
+#parts=all
+# default encoding is ascii since the original DOT format does not
+# support other charsets, example:
+#encoding=iso-8859-15
+
+# CSV logger
+[csv]
+#filename=linkchecker-out.csv
+#separator=;
+#quotechar="
+#dialect=excel
+#parts=all
+
+# SQL logger
+[sql]
+#filename=linkchecker-out.sql
+#dbname=linksdb
+#separator=;
+#parts=all
+
+# HTML logger
+[html]
+#filename=linkchecker-out.html
+# colors for the various parts
+#colorbackground=#fff7e5
+#colorurl=#dcd5cf
+#colorborder=#000000
+#colorlink=#191c83
+#colorwarning=#e0954e
+#colorerror=#db4930
+#colorok=#3ba557
+#parts=all
+
+# failures logger
+[failures]
+#filename=$XDG_DATA_HOME/linkchecker/failures
+
+# custom xml logger
+[xml]
+#filename=linkchecker-out.xml
+# system encoding is used by default. Example:
+#encoding=iso-8859-1
+
+# GraphXML logger
+[gxml]
+#filename=linkchecker-out.gxml
+# system encoding is used by default. Example:
+#encoding=iso-8859-1
+
+# Sitemap logger
+[sitemap]
+#filename=linkchecker-out.sitemap.xml
+#encoding=utf-8
+#priority=0.5
+#frequency=daily
+
+
+##################### checking options ##########################
+[checking]
+# number of threads
+#threads=10
+# connection timeout in seconds
+#timeout=60
+# Time to wait for checks to finish after the user aborts the first time
+# (with Ctrl-C or the abort button).
+#aborttimeout=300
+# The recursion level determines how many times links inside pages are followed.
+#recursionlevel=-1
+# Basic NNTP server. Overrides NNTP_SERVER environment variable.
+#nntpserver=
+# parse a cookiefile for initial cookie data, example:
+#cookiefile=/path/to/cookies.txt
+# User-Agent header string to send to HTTP web servers
+# Note that robots.txt are always checked with the original User-Agent. Example:
+#useragent=Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)
+# When checking finishes, write a memory dump to a temporary file.
+# The memory dump is written both when checking finishes normally
+# and when checking gets canceled.
+# The memory dump only works if the python-meliae package is installed.
+# Otherwise a warning is printed to install it.
+#debugmemory=0
+# When checking absolute URLs inside local files, the given root directory
+# is used as base URL.
+# Note that the given directory must have URL syntax, so it must use a slash
+# to join directories instead of a backslash.
+# And the given directory must end with a slash.
+# Unix example:
+#localwebroot=/var/www/
+# Windows example:
+#localwebroot=/C|/public_html/
+# Check SSL certificates. Set to an absolute pathname for a custom
+# CA cert bundle to use. Set to zero to disable SSL certificate verification.
+#sslverify=1
+# Stop checking new URLs after the given number of seconds. Same as if the
+# user hits Ctrl-C after X seconds. Example:
+#maxrunseconds=600
+# Don't download files larger than the given number of bytes
+#maxfilesizedownload=5242880
+# Don't parse files larger than the given number of bytes
+#maxfilesizeparse=1048576
+# Maximum number of URLs to check. New URLs will not be queued after the
+# given number of URLs is checked. Example:
+#maxnumurls=153
+# Maximum number of requests per second to one host.
+#maxrequestspersecond=10
+# Respect the instructions in any robots.txt files
+#robotstxt=1
+# Allowed URL schemes as a comma-separated list. Example:
+#allowedschemes=http,https
+# Size of the result cache. Checking more urls might increase memory usage during runtime
+#resultcachesize=100000
+
+##################### filtering options ##########################
+[filtering]
+#ignore=
+# ignore everything with 'lconline' in the URL name
+# lconline
+# and ignore everything with 'bookmark' in the URL name
+# bookmark
+# and ignore all mailto: URLs
+# ^mailto:
+# do not recurse into the following URLs
+
+#nofollow=
+# just an example
+# http://www\.example\.com/bla
+
+# Ignore specified warnings (see linkchecker -h for the list of
+# recognized warnings). Add a comma-separated list of warnings here
+# that prevent a valid URL from being logged. Note that the warning
+# will be logged for invalid URLs. Example:
+#ignorewarnings=url-unicode-domain
+# Regular expression to add more URLs recognized as internal links.
+# Default is that URLs given on the command line are internal.
+#internlinks=^http://www\.example\.net/
+# Check external links
+#checkextern=0
+
+
+##################### password authentication ##########################
+[authentication]
+# WARNING: if you store passwords in this configuration entry, make sure the
+# configuration file is not readable by other users.
+# Different user/password pairs for different URLs can be provided.
+# Entries are a triple (URL regular expression, username, password),
+# separated by whitespace.
+# If the regular expression matches, the given user/password pair is used
+# for authentication. The commandline options -u,-p match every link
+# and therefore override the entries given here. The first match wins.
+# At the moment, authentication is used for http[s] and ftp links.
+#entry=
+# Note that passwords are optional. If any passwords are stored here,
+# this file should not be readable by other users.
+# ^https?://www\.example\.com/~calvin/ calvin mypass
+# ^ftp://www\.example\.com/secret/ calvin
+
+# if the website requires a login via a page with an HTML form the URL of the
+# page and optionally the username and password input element name attributes
+# can be provided.
+#loginurl=http://www.example.com/
+
+# The name attributes of the username and password HTML input elements
+#loginuserfield=login
+#loginpasswordfield=password
+# Optionally the name attributes of any additional input elements and the values
+# to populate them with. Note that these are submitted without checking
+# whether matching input elements exist in the HTML form. Example:
+#loginextrafields=
+# name1:value1
+# name 2:value 2
+
+############################ Plugins ###################################
+#
+# uncomment sections to enable plugins
+
+# Check HTML anchors
+#[AnchorCheck]
+
+# Print HTTP header info
+#[HttpHeaderInfo]
+# Comma separated list of header prefixes to print.
+# The names are case insensitive.
+# The default list is empty, so it should be non-empty when activating
+# this plugin. Example:
+#prefixes=Server,X-
+
+# Add country info to URLs
+#[LocationInfo]
+
+# Run W3C syntax checks
+#[CssSyntaxCheck]
+#[HtmlSyntaxCheck]
+
+# Search for regular expression in page contents
+#[RegexCheck]
+# Example:
+#warningregex=Oracle Error
+
+# Search for viruses in page contents
+#[VirusCheck]
+#clamavconf=/etc/clamav/clamd.conf
+
+# Check that SSL certificates have at least the given number of days validity.
+#[SslCertificateCheck]
+#sslcertwarndays=30
+
+# Parse and check links in PDF files
+#[PdfParser]
+
+# Parse and check links in Word files
+#[WordParser]
+
+# Parse and check links in Markdown files.
+# Supported links are:
+# <http://autolink.com>
+# [name](http://link.com "Optional title")
+# [id]: http://link.com "Optional title"
+#[MarkdownCheck]
+# Regexp of filename
+#filename_re=.*\.(markdown|md(own)?|mkdn?)$
diff --git a/buildbot/master.cfg b/buildbot/master.cfg
index 24f5483..e6f2e78 100644
--- a/buildbot/master.cfg
+++ b/buildbot/master.cfg
@@ -1,6 +1,9 @@
+# -*- python -*-
+# ex: set syntax=python:
+
##
# This file is part of TALER
-# (C) 2016-2022 Taler Systems SA
+# (C) 2016-2023 Taler Systems SA
#
# TALER is free software; you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public
@@ -20,12 +23,28 @@
# @author Marcello Stanisci
# @author ng0
# @author Christian Grothoff
+# @author Devan Carpenter
+import ast
+import configparser
+import glob
+import os
+import pathlib
+import pwd
import re
-from getpass import getuser
+import subprocess
+
+from buildbot.changes.pb import PBChangeSource
from buildbot.steps.source.git import Git
from buildbot.steps.shell import ShellCommand
-from buildbot.plugins import *
+from buildbot.plugins import changes
+from buildbot.plugins import reporters
+from buildbot.plugins import schedulers
+from buildbot.plugins import steps
+from buildbot.plugins import util
+from buildbot.process import buildstep, logobserver
from buildbot.reporters.generators.build import BuildStatusGenerator
+from buildbot.worker import Worker
+from twisted.internet import defer
# This is a sample buildmaster config file. It must be
# installed as 'master.cfg' in your buildmaster's base
@@ -68,19 +87,20 @@ class MessageFormatterWithStdout(reporters.MessageFormatter):
stdout.append(line[1:])
ctx.update(dict(stdout="\n".join(stdout)))
-tipReserveEmails = reporters.MailNotifier(
- fromaddr="buildbot@taler.net", # to be sent to a dedicate alias
- sendToInterestedUsers=False,
- mode=("all"),
- builders=["check-tips-builder"],
- extraRecipients=["tips@taler.net"],
- dumpMailsToLog=True, # debug, to remove
- messageFormatter=MessageFormatterWithStdout(
- wantSteps=True,
- wantLogs=True,
- template="{{ stdout }}",
- subject="tips availability on demo")
-)
+# tipReserveEmails = reporters.MailNotifier(
+# fromaddr="buildbot@taler.net", # to be sent to a dedicated alias
+# sendToInterestedUsers=False,
+# mode=("all"),
+# builders=["check-tips-builder"], # This builder has been removed - Javisep.
+# extraRecipients=["tips@taler.net"],
+# dumpMailsToLog=True, # debug, to remove
+# messageFormatter=MessageFormatterWithStdout(
+# wantSteps=True,
+# wantLogs=True,
+# template="{{ stdout }}",
+# subject="tips availability on demo")
+# )
+
SERVICES = []
@@ -98,27 +118,32 @@ SCHEDULERS = []
NIGHTLY_TRIGGERS=[]
# Array of builders to be scheduled whenever any of the code Git repos change
-CODECHANGE_TRIGGERS=[]
+CODECHANGE_TRIGGERS = []
-# Array of builders to be scheduled whenever the wallet-core or deployment change
-WALLETCHANGE_TRIGGERS=[]
+# Array of builders to be scheduled whenever the wallet-core or
+# deployment change
+WALLETCHANGE_TRIGGERS = []
-# Array of builder names for which build status reports should be sent via e-mail
-EMAIL_ALERTS=[]
+# Array of builder names for which build status reports should be sent
+# via e-mail
+EMAIL_ALERTS = []
+# Array of email addresses to which build status reports should be sent
+BUILDER_EMAIL_ADDRESSES = []
############ Convenience functions #################
# Create a FACTORY with a deployment.git checkout as the first step.
def create_factory_with_deployment():
f = util.BuildFactory()
- update_deployment (f)
+ update_deployment(f)
return f
+
# Convenience function that checks out a Git repository.
# First argument is the URL of the Git to clone, second
# the desired branch. Default is 'master'.
-def git_step(repo,target_branch="master"):
+def git_step(repo, target_branch="master"):
return Git(
repourl=repo,
mode="full",
@@ -129,9 +154,10 @@ def git_step(repo,target_branch="master"):
branch=target_branch
)
+
# Convenience function that runs 'make check' in a
# directory of the code inside of a netjail.
-def jailed_check(package,srcdirs):
+def jailed_check(package, srcdirs):
return steps.ShellSequence(
name="Tests of " + package,
description="Testing " + package,
@@ -141,6 +167,7 @@ def jailed_check(package,srcdirs):
workdir="../../sources/" + package
)
+
# Convenience function that checks out the deployment.
def update_deployment(factory):
factory.addStep(steps.ShellSequence(
@@ -156,6 +183,69 @@ def update_deployment(factory):
))
+# Convenience function that builds and runs a container.
+def container_add_step(HALT_ON_FAILURE,
+ WARN_ON_FAILURE,
+ CONTAINER_BUILD,
+ CONTAINER_NAME,
+ factory,
+ WORK_DIR,
+ stepName,
+ CONTAINER_ARCH="amd64",
+ jobCmd="/workdir/contrib/ci/ci.sh",
+ containerFile="contrib/ci/Containerfile"):
+ print(f"HALT_ON_FAILURE: {HALT_ON_FAILURE}, WARN_ON_FAILURE: {WARN_ON_FAILURE}, CONTAINER_BUILD: {CONTAINER_BUILD}, CONTAINER_NAME: {CONTAINER_NAME}")
+ if not CONTAINER_BUILD:
+ return steps.ShellSequence(
+ name=stepName,
+ commands=[
+ util.ShellArg(command=["podman", "run", "--rm",
+ "--arch", CONTAINER_ARCH,
+ "--add-host", "taler.host.internal:10.0.2.2",
+ "--network", "slirp4netns:allow_host_loopback=true",
+ "--env", util.Interpolate("CI_COMMIT_REF=%(prop:got_revision:-%(src::revision:-unknown)s)s"),
+ "--env", util.Interpolate("CI_GIT_BRANCH=%(src::branch)s"),
+ "--volume", f"{WORK_DIR}:/workdir",
+ "--volume", "/home/container-worker/container_artifacts:/artifacts",
+ "--volume", "/home/container-worker/mounted_files/ci_container_id_ed25519:/root/.ssh/id_ed25519:ro",
+ "--volume", "/home/container-worker/mounted_files/container_known_hosts:/root/.ssh/known_hosts:ro",
+ "--workdir", "/workdir",
+ CONTAINER_NAME, jobCmd],
+ logname='run inside container',
+ haltOnFailure=HALT_ON_FAILURE),
+ ],
+ haltOnFailure=HALT_ON_FAILURE,
+ workdir=WORK_DIR
+ )
+ else:
+ return steps.ShellSequence(
+ name=stepName,
+ commands=[
+ util.ShellArg(command=["podman", "build", "-t", CONTAINER_NAME,
+ "--arch", CONTAINER_ARCH,
+ "-f", containerFile, "."],
+ logname='build container', haltOnFailure=True),
+ util.ShellArg(command=["podman", "run", "--rm",
+ "--arch", CONTAINER_ARCH,
+ "--add-host", "taler.host.internal:10.0.2.2",
+ "--network", "slirp4netns:allow_host_loopback=true",
+ "--env", util.Interpolate("CI_COMMIT_REF=%(prop:got_revision:-%(src::revision:-unknown)s)s"),
+ "--env", util.Interpolate("CI_GIT_BRANCH=%(src::branch)s"),
+ "--volume", f"{WORK_DIR}:/workdir",
+ "--volume", "/home/container-worker/container_artifacts:/artifacts",
+ "--volume", f"/run/user/{pwd.getpwnam('container-worker').pw_uid}/podman/podman.sock:/run/podman/podman.sock",
+ "--volume", "/home/container-worker/mounted_files/ci_container_id_ed25519:/root/.ssh/id_ed25519:ro",
+ "--volume", "/home/container-worker/mounted_files/container_known_hosts:/root/.ssh/known_hosts:ro",
+ "--security-opt", "label=disable",
+ "--workdir", "/workdir",
+ CONTAINER_NAME, jobCmd],
+ logname='run inside container',
+ haltOnFailure=HALT_ON_FAILURE),
+ ],
+ haltOnFailure=HALT_ON_FAILURE,
+ workdir=WORK_DIR
+ )
+
##################################################################
######################## JOBS ####################################
##################################################################
@@ -171,11 +261,12 @@ def update_deployment(factory):
################ 1: BUILDMASTER JOB ###################################
+
##
# This worker restarts the buildmaster itself on
# changes to this file.
# Location: /home/buildbot-master @ taler.net
-WORKERS.append(worker.Worker("buildmaster-worker", "buildmaster-pass"))
+WORKERS.append(Worker("buildmaster-worker", "buildmaster-pass"))
BUILDMASTER_FACTORY = create_factory_with_deployment()
BUILDMASTER_FACTORY.addStep(
@@ -208,115 +299,6 @@ SCHEDULERS.append(schedulers.SingleBranchScheduler(
))
-################ 2: DOCUMENTATION JOB ###################################
-
-##
-# This worker builds manuals / API docs / tutorials.
-# Location: /home/docbuilder @ taler.net
-WORKERS.append(worker.Worker("doc-worker", "doc-pass"))
-
-DOC_FACTORY = create_factory_with_deployment()
-DOC_FACTORY.addStep(
- ShellCommand(
- name="build docs",
- description="Building documentation",
- descriptionDone="Documentation built.",
- command=["./build-docs.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True
- )
-)
-DOC_FACTORY.addStep(
- steps.ShellSequence(
- name="prepare exchange",
- description="Running bootstrap and configure for exchange",
- descriptionDone="exchange ready for doxygen",
- commands=[
- util.ShellArg(command=["./bootstrap"], logname='bootstrap'),
- util.ShellArg(command=["./configure", "--enable-only-doc"], logname='configure'),
- ],
- workdir="../../sources/exchange/",
- haltOnFailure=True,
- )
-)
-DOC_FACTORY.addStep(
- ShellCommand(
- name="doxygen::exchange",
- description="building exchange doxygen documentation",
- descriptionDone="doxygen on exchange finished",
- command=["make", "full" ],
- want_stderr=False,
- workdir="../../sources/exchange/doc/doxygen",
- haltOnFailure=True,
- )
-)
-DOC_FACTORY.addStep(
- steps.ShellSequence(
- name="prepare merchant",
- description="prepare merchant",
- descriptionDone="merchant prepared",
- commands=[
- util.ShellArg(command=["./bootstrap"], logname='bootstrap'),
- util.ShellArg(command=["./configure", "--enable-only-doc"], logname='configure'),
- util.ShellArg(command=["cp", "../exchange/doc/doxygen/taler-exchange.tag", "doc/doxygen/taler-exchange.tag"]),
- ],
- workdir="../../sources/merchant/"
- )
-)
-DOC_FACTORY.addStep(
- ShellCommand(
- name="doxygen::merchant",
- description="building merchant doxygen documentation",
- descriptionDone="doxygen on merchant finished",
- command=["make", "full" ],
- want_stderr=False,
- workdir="../../sources/merchant/doc/doxygen"
- )
-)
-DOC_FACTORY.addStep(
- steps.ShellSequence(
- name="doxygen::wallet",
- description="building wallet typescript documentation",
- descriptionDone="typedoc on taler-wallet-core finished",
- commands=[
- util.ShellArg(command=["./bootstrap"], logname="bootstrap"),
- util.ShellArg(command=["./configure"], logname="configure"),
- util.ShellArg(command=["make"], logname="make"),
- util.ShellArg(command=["pnpm", "install", "-W", "typedoc"], logname="pnpm"),
- util.ShellArg(command=["./node_modules/typedoc/bin/typedoc", "--out", "dist/typedoc", "--tsconfig", "tsconfig.build.json", "packages/taler-util/src/index.ts", "packages/taler-wallet-cli/src/index.ts", "packages/taler-wallet-android/src/index.ts", "packages/taler-wallet-core/src/index.ts" ], logname="typedoc"),
- ],
- workdir="../../sources/wallet-core/"
- )
-)
-
-
-BUILDERS.append(util.BuilderConfig(
- name="doc-builder", workernames=["doc-worker"], factory=DOC_FACTORY
-))
-
-EMAIL_ALERTS.append("doc-builder")
-
-#sphinxErrorNotifier = reporters.MailNotifier(
-# fromaddr="bb@taler.net",
-# sendToInterestedUsers=False,
-# addLogs=['build docs.stdio',],
-# useTls=True,
-# # notify if sphinx exits with error (command line option in Makefile turns warnings into exit 1)
-# mode=('failing'),
-# builders=('doc-builder',),
-# extraRecipients=['sphinxerrors@taler.net']
-#)
-
-
-# Docs run if master or stable branch of 'docs' (or deployment) changed.
-SCHEDULERS.append(schedulers.SingleBranchScheduler(
- name="periodic-doc-scheduler",
- builderNames=["doc-builder"],
- change_filter=util.ChangeFilter(
- branch_re="(master|stable)", project_re="(docs|deployment)"
- ),
- treeStableTimer=None,
-))
################ 3: WEBSITE JOB ###################################
@@ -324,7 +306,7 @@ SCHEDULERS.append(schedulers.SingleBranchScheduler(
##
# This worker builds Websites: www and stage.
#
-WORKERS.append(worker.Worker("sites-worker", "sites-pass"))
+WORKERS.append(Worker("sites-worker", "sites-pass"))
SITES_FACTORY = create_factory_with_deployment()
SITES_FACTORY.addStep(
@@ -342,7 +324,8 @@ BUILDERS.append(util.BuilderConfig(
name="sites-builder", workernames=["sites-worker"], factory=SITES_FACTORY
))
-EMAIL_ALERTS.append("sites-builder")
+#EMAIL_ALERTS.append("sites-builder")
+
# The web page changed if 'www' changed OR if 'web' in the 'twister' repo changed:
def twister_web_page(change):
@@ -358,6 +341,7 @@ def twister_web_page(change):
return True
return False
+
# Sites are re-build whenever deployment, www buywith, or twister changes.
SCHEDULERS.append(schedulers.SingleBranchScheduler(
name="sites-scheduler",
@@ -370,345 +354,12 @@ SCHEDULERS.append(schedulers.SingleBranchScheduler(
))
-################ 4: LCOV JOB ###################################
-
-##
-# This worker makes the code coverage and publishes it
-# under the "lcov" Website.
-WORKERS.append(worker.Worker("lcov-worker", "lcov-pass"))
-
-LCOV_FACTORY = create_factory_with_deployment()
-LCOV_FACTORY.addStep(git_step("git://git.taler.net/wallet-core.git"))
-LCOV_FACTORY.addStep(
- ShellCommand(
- name="fetch",
- description="Running yarn install of wallet",
- descriptionDone="Correctly installed",
- command=["npm", "install", "-g", "--prefix", "$HOME", "@gnu-taler/taler-wallet-cli"],
- workdir="build/",
- haltOnFailure=True,
- )
-)
-LCOV_FACTORY.addStep(
- ShellCommand(
- name="build",
- description="Building other Taler components",
- descriptionDone="Taler built",
- command=["./build.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- env={'TALER_COVERAGE': "1"},
- )
-)
-LCOV_FACTORY.addStep(
- ShellCommand(
- name="coverage generation",
- description="running tests",
- descriptionDone="generating HTML report",
- command=["/usr/bin/sudo", "/usr/local/bin/netjail.sh", "/home/lcovworker/deployment/buildbot/coverage.sh"],
- workdir="../../deployment/buildbot",
- env={'PATH': "${HOME}/local/bin:${PATH}"}
- )
-)
-
-BUILDERS.append(util.BuilderConfig(
- name="lcov-builder",
- workernames=["lcov-worker"],
- factory=LCOV_FACTORY
-))
-
-EMAIL_ALERTS.append("lcov-builder")
-NIGHTLY_TRIGGERS.append("lcov-builder")
-NIGHTLY_TRIGGERS.append("check-tips-builder")
-NIGHTLY_TRIGGERS.append("taler-demo-healthcheck-builder")
-
-################ 5: UNIT TEST JOB ###################################
-
-##
-# This worker builds everything and runs our 'make check'
-# test suite against 'everything'.
-WORKERS.append(worker.Worker("checker-worker", "checker-pass"))
-
-INTEGRATIONTEST_FACTORY = create_factory_with_deployment()
-INTEGRATIONTEST_FACTORY.addStep(
- ShellCommand(
- name="fetch gnunet.org sources",
- description="fetching latest deployment repositories from git.gnunet.org",
- descriptionDone="GNUnet code base updated",
- command=["./update-sources.sh", "git://git.gnunet.org/", "libmicrohttpd", "gnunet"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- )
-)
-INTEGRATIONTEST_FACTORY.addStep(
- ShellCommand(
- name="fetch taler.net sources",
- description="fetching latest deployment repositories from git.taler.net",
- descriptionDone="Taler code base updated",
- command=["./update-sources.sh", "git://git.taler.net/", "exchange", "merchant", "wallet-core", "sync", "bank", "twister"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- )
-)
-INTEGRATIONTEST_FACTORY.addStep(git_step("git://git.taler.net/wallet-core.git"))
-INTEGRATIONTEST_FACTORY.addStep(
- ShellCommand(
- name="fetch",
- description="Running yarn install of wallet",
- descriptionDone="Correctly installed",
- command=["npm", "install", "-g", "--prefix", "$HOME", "@gnu-taler/taler-wallet-cli"],
- workdir="../../sources/wallet-core",
- haltOnFailure=True,
- )
-)
-INTEGRATIONTEST_FACTORY.addStep(
- steps.ShellSequence(
- name="fetch",
- description="Running local install of wallet",
- descriptionDone="local wallet install done",
- commands=[
- util.ShellArg(command=["./bootstrap"]),
- util.ShellArg(command=["./configure", "--prefix=$HOME/local/"]),
- util.ShellArg(command=["make install"]),
- ],
- workdir="../../sources/wallet-core",
- haltOnFailure=True,
- )
-)
-INTEGRATIONTEST_FACTORY.addStep(
- ShellCommand(
- name="build",
- description="Building other Taler components",
- descriptionDone="Taler built",
- command=["./build.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=False
- )
-)
-INTEGRATIONTEST_FACTORY.addStep(jailed_check ("libmicrohttpd", [
- "microhttpd",
- "testcurl",
- "testzzuf",
-]))
-INTEGRATIONTEST_FACTORY.addStep(jailed_check ("gnunet", [
- "util",
- "pq",
- "curl",
- "json",
-]))
-INTEGRATIONTEST_FACTORY.addStep(jailed_check ("twister", [
- "test",
-]))
-INTEGRATIONTEST_FACTORY.addStep(jailed_check ("exchange", [
- "util",
- "curl",
- "mhd",
- "pq",
- "json",
- "bank-lib",
- "exchangedb",
- "auditordb",
- "exchange",
- "auditor",
- "lib",
- "exchange-tools",
- "testing",
- "benchmark",
-]))
-INTEGRATIONTEST_FACTORY.addStep(jailed_check ("merchant", [
- "mustach",
- "backenddb",
- "backend",
- "lib",
- "testing",
- "merchant-tools",
-]))
-INTEGRATIONTEST_FACTORY.addStep(jailed_check ("sync", [
- "util",
- "syncdb",
- "sync",
- "lib"
-]))
-INTEGRATIONTEST_FACTORY.addStep(
- ShellCommand(
- name="bank check",
- description="Testing Taler Python bank",
- descriptionDone="Done",
- command=["sudo", "/usr/local/bin/netjail.sh", "/home/integrationtest/deployment/buildbot/with-postgres.sh", "make", "check"],
- workdir="../../sources/bank",
- haltOnFailure=False,
- env={'PYTHONUSERBASE': "$HOME/local" }
- )
-)
-INTEGRATIONTEST_FACTORY.addStep(
- ShellCommand(
- name="wallet check",
- description="Testing wallet-core",
- descriptionDone="Done",
- command=["make", "check"],
- workdir="../../sources/wallet-core",
- haltOnFailure=False
- )
-)
-
-BUILDERS.append(util.BuilderConfig(
- name="checker-builder",
- workernames=["checker-worker"],
- factory=INTEGRATIONTEST_FACTORY
-))
-
-EMAIL_ALERTS.append("checker-builder")
-CODECHANGE_TRIGGERS.append("checker-builder")
-
-
-################ 6: 'test.taler.net' deployment JOB ###################################
-
-##
-# This worker builds Taler for the 'test' deployment.
-WORKERS.append(worker.Worker("test-worker", "test-pass"))
-
-# buildslavetest FACTORY
-BUILDSLAVETEST_FACTORY = create_factory_with_deployment()
-BUILDSLAVETEST_FACTORY.addStep(
- ShellCommand(
- name="buildslavetest script (for testing purposes)",
- description="Build Slave Test",
- descriptionDone="buildslavetest: Done",
- command=["./buildslavetest.sh"],
- workdir="/home/buildslavetest/"
- )
-)
-
-# buildslavetest BUILDER
-BUILDERS.append(util.BuilderConfig(
- name="buildslavetest-builder",
- workernames=["buildslavetest-worker"],
- factory=BUILDSLAVETEST_FACTORY
-))
-
-EMAIL_ALERTS.append("buildslavetest-builder")
-
-# buildslavetest SCHEDULER
-SCHEDULERS.append(schedulers.SingleBranchScheduler(
- name="buildslavetest-scheduler",
- builderNames=["buildslavetest-builder"],
- change_filter=util.ChangeFilter(
- branch_re="(master|stable)", project_re="(help|deployment)"
- ),
- treeStableTimer=None,
-))
-
-################ 7: 'test-auditor' deployment JOB ###################################
-
-
-##
-# This worker compiles the auditor reports for the "test"
-# demo deployment.
-WORKERS.append(worker.Worker("test-auditor-worker", "test-auditor-pass"))
-
-AUDITOR_FACTORY_TEST = create_factory_with_deployment()
-AUDITOR_FACTORY_TEST.addStep(
- ShellCommand(
- name="Auditor reports generator",
- description="Generating auditor reports.",
- descriptionDone="Auditor reports correctly generated.",
- command=["./make_auditor_reports.sh"],
- workdir="../../deployment/buildbot",
- env={'TALER_HOME': "/home/taler-test/"}
- )
-)
-
-BUILDERS.append(util.BuilderConfig(
- name="auditor-builder-test",
- workernames=["test-auditor-worker"],
- factory=AUDITOR_FACTORY_TEST
-))
-NIGHTLY_TRIGGERS.append("auditor-builder-test")
-
-
-################ 8: 'demo-auditor' deployment JOB ###################################
-##
-# This worker compiles the auditor reports for the "green"
-# demo deployment.
-WORKERS.append(worker.Worker("demo-auditor-worker", "demo-auditor-pass"))
-
-AUDITOR_FACTORY_DEMO = create_factory_with_deployment()
-AUDITOR_FACTORY_DEMO.addStep(
- ShellCommand(
- name="Auditor reports generator",
- description="Generating auditor reports.",
- descriptionDone="Auditor reports correctly generated.",
- command=["./make_auditor_reports.sh"],
- workdir="../../deployment/buildbot",
- env={'TALER_HOME': "/home/demo/active-home/"}
- )
-)
-
-BUILDERS.append(util.BuilderConfig(
- name="auditor-builder-demo",
- workernames=["demo-auditor-worker"],
- factory=AUDITOR_FACTORY_DEMO
-))
-
-NIGHTLY_TRIGGERS.append("auditor-builder-demo")
-
-
-################ 8: 'build wallet-core' JOB ###################################
-
-##
-# This worker builds wallet-core.
-WORKERS.append(worker.Worker("wallet-worker", "wallet-pass"))
-
-WALLET_FACTORY = create_factory_with_deployment()
-WALLET_FACTORY.addStep(
- ShellCommand(
- name="build",
- description="Building all Taler codebase.",
- descriptionDone="Taler built.",
- command=["./build.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True
- )
-)
-WALLET_FACTORY.addStep(
- ShellCommand(
- name="test",
- description="Running wallet tests",
- descriptionDone="Test correctly run",
- command=["timeout", "--preserve-status", "5m",
- "taler-wallet-cli", "testing", "run-integrationtests",
- "--suites", "wallet,merchant,libeufin,wallet-backup,wallet-tipping"],
- workdir="../../",
- env={'PATH': "${HOME}/local/bin:${PATH}"}
- )
-)
-
-BUILDERS.append(util.BuilderConfig(
- name="wallet-builder",
- workernames=["wallet-worker"],
- factory=WALLET_FACTORY
-))
-
-EMAIL_ALERTS.append("wallet-builder")
-
-# Wallet is re-build whenever wallet-core or deployment changes
-SCHEDULERS.append(schedulers.SingleBranchScheduler(
- name="wallet-scheduler",
- change_filter=util.ChangeFilter(
- branch="master",
- project_re="(wallet-core|deployment|libeufin)"
- ),
- treeStableTimer=None,
- builderNames=["wallet-builder"]
-))
-
-
################ 9: 'check links' JOB ###################################
##
# linkchecker worker checks for dead links in the Website
# Location: /home/linkchecker @ taler.net
-WORKERS.append(worker.Worker("linkchecker-worker", "linkchecker-pass"))
+WORKERS.append(Worker("linkchecker-worker", "linkchecker-pass"))
# linkchecker FACTORY
LINKCHECKER_FACTORY = create_factory_with_deployment()
@@ -720,6 +371,7 @@ LINKCHECKER_FACTORY.addStep(
command=["/home/linkchecker/deployment/buildbot/linkchecker.sh"],
workdir="/home/linkchecker",
haltOnFailure=True,
+ timeout=7200 # 2 hours
)
)
@@ -732,7 +384,7 @@ BUILDERS.append(util.BuilderConfig(
))
docs_generator = BuildStatusGenerator(
- mode=('change','problem','failing','exception',),
+ mode=('change', 'problem', 'failing', 'exception',),
builders=[
'linkchecker-builder',
],
@@ -741,388 +393,261 @@ docs_generator = BuildStatusGenerator(
wantSteps=True,
wantLogs=True
),
- add_logs=True
+ add_logs=True
)
+
SERVICES.append(reporters.MailNotifier(
fromaddr="bb@taler.net",
generators=[docs_generator],
sendToInterestedUsers=False,
- useTls=True,
+ useTls=False,
+ relayhost="localhost",
+ smtpPort=25,
+ dumpMailsToLog=True,
extraRecipients=['linkcheck@taler.net']
))
-SERVICES.append(tipReserveEmails)
-
-NIGHTLY_TRIGGERS.append("linkchecker-builder")
+# SERVICES.append(tipReserveEmails)
-################ 10: 'check spelling' JOB ###################################
-
-##
-# codespell worker checks for spelling mistakes in code
-# Location: /home/codespell @ taler.net
-WORKERS.append(worker.Worker("codespell-worker", "codespell-pass"))
-
-CODESPELL_FACTORY = create_factory_with_deployment()
-CODESPELL_FACTORY.addStep(
- ShellCommand(
- name="clean old deployment",
- description="cleaning previous doxygen runs",
- descriptionDone="Doxygen cleaned",
- command=["rm", "-rf", "exchange/doc/doxygen/html/", "merchant/doc/doxygen/html/" ],
- workdir="/home/codespell/sources/"
- )
-)
-CODESPELL_FACTORY.addStep(
- ShellCommand(
- name="fetch gnunet.org sources",
- description="fetching latest deployment repositories from git.gnunet.org",
- descriptionDone="GNUnet code base updated",
- command=["./update-sources.sh", "git://git.gnunet.org/", "libmicrohttpd", "gnunet"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- )
-)
-CODESPELL_FACTORY.addStep(
- ShellCommand(
- name="fetch taler.net sources",
- description="fetching latest deployment repositories from git.taler.net",
- descriptionDone="Taler code base updated",
- command=["./update-sources.sh", "git://git.taler.net/", "exchange", "merchant", "wallet-core", "sync", "bank", "twister"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- )
-)
-CODESPELL_FACTORY.addStep(git_step("git://git.taler.net/wallet-core.git"))
-CODESPELL_FACTORY.addStep(
- ShellCommand(
- name="codespell",
- description="spell checking repositories",
- descriptionDone="Spell check complete",
- command=["/home/codespell/.local/bin/codespell", "-I", "/home/codespell/deployment/codespell/dictionary.txt", "-S", "*.bib,*.bst,*.cls,*.json,*.png,*.svg,*.wav,*.gz,*/templating/test?/**,**/auditor/*.sql,**/templating/mustach**,*.fees,*key,*.tag,*.info,*.latexmkrc,*.ecc,*.jpg,*.zkey,*.sqlite,*/contrib/hellos/**,*/vpn/tests/**,*.priv,*.file,*.tgz,*.woff,*.gif,*.odt,*.fee,*.deflate,*.dat,*.jpeg,*.eps,*.odg,*/m4/ax_lib_postgresql.m4,*/m4/libgcrypt.m4,*.rpath,config.status,ABOUT-NLS,*/doc/texinfo.tex,*.PNG,*.??.json,*.docx,*.ods,*.doc,*.docx,*.xcf,*.xlsx,*.ecc,*.ttf,*.woff2,*.eot,*.ttf,*.eot,*.mp4,*.pptx,*.epgz,*.min.js,**/*.map,**/fonts/**,*.pack.js,*.po,*.bbl,*/afl-tests/*,*/.git/**,*.pdf,*.epub,**/signing-key.asc,**/pnpm-lock.yaml,**/*.svg,**/*.cls,**/rfc.bib,**/*.bst,*/cbdc-es.tex,*/cbdc-it.tex,**/ExchangeSelection/example.ts,*/testcurl/test_tricky.c,*/i18n/strings.ts,*/src/anastasis-data.ts", "bank", "exchange", "libmicrohttpd", "merchant", "sync", "twister", "wallet-core"],
- workdir="/home/codespell/sources/"
- )
-)
-CODESPELL_FACTORY.addStep(
- ShellCommand(
- name="gana",
- description="fetch GANA",
- descriptionDone="GANA obtained",
- command=["./bootstrap" ],
- workdir="/home/codespell/sources/exchange/"
- )
-)
-CODESPELL_FACTORY.addStep(
- ShellCommand(
- name="doxygen::exchange",
- description="checking for doxygen mistakes in exchange",
- descriptionDone="doxygen on exchange finished",
- command=["/home/codespell/deployment/buildbot/doxygen.sh", "taler.doxy" ],
- workdir="/home/codespell/sources/exchange/doc/doxygen"
- )
-)
-CODESPELL_FACTORY.addStep(
- steps.ShellSequence(
- name="tag",
- description="prepare merchant",
- descriptionDone="directory created",
- commands=[
- util.ShellArg(command=["mkdir", "-p", "merchant/doc/doxygen/"]),
- util.ShellArg(command=["cp", "exchange/doc/doxygen/taler-exchange.tag", "merchant/doc/doxygen/taler-exchange.tag"]),
- ],
- workdir="/home/codespell/sources/"
- )
-)
-CODESPELL_FACTORY.addStep(
- ShellCommand(
- name="doxygen::merchant",
- description="checking for doxygen mistakes in merchant",
- descriptionDone="doxygen on merchant finished",
- command=["/home/codespell/deployment/buildbot/doxygen.sh", "taler.doxy" ],
- workdir="/home/codespell/sources/merchant/doc/doxygen"
- )
-)
-
-BUILDERS.append(util.BuilderConfig(
- name="codespell-builder",
- workernames=["codespell-worker"],
- factory=CODESPELL_FACTORY
-))
-
-EMAIL_ALERTS.append("codespell-builder")
-
-CODECHANGE_TRIGGERS.append("codespell-builder")
-
+NIGHTLY_TRIGGERS.append("linkchecker-builder")
-################ 11: 'demo checks' JOB ###################################
+#############################################
+# 19: CONTAINER FACTORY #####################
+#############################################
##
-# This worker checks that all the services run under the
-# 'demo' deployment are up&running.
-
-WORKERS.append(worker.Worker("tips-checker-worker", "tips-checker-pass"))
-DEMO_CHECK_TIPS_FACTORY = create_factory_with_deployment()
-DEMO_CHECK_TIPS_FACTORY.addStep(
- ShellCommand(
- name="demo tip reserves checker",
- description="Checking that demo allocated tip money",
- descriptionDone="Demo can tip visitors!.",
- command=["./check_tip_reserve.sh"],
- workdir="../../deployment/buildbot",
+# These factories use the standard container worker.
+WORKERS.append(Worker("container-worker", "container-pass"))
+
+
+# Container Job Generator Functions
+# Parse config file and save values in a dict
+def ingest_job_config(configPath, jobName):
+ configDict = {jobName: {}}
+ print(configDict)
+ ini.read_string(configPath)
+ for key in ini["build"]:
+ value = ini['build'][key]
+ configDict[jobName][key] = value
+ print(configDict)
+ configDict.update(configDict)
+ print(configDict)
+ return configDict
+
+
+# Search for configs, and ingest
+def handle_job_config(jobDirPath, jobName, repoName, configPath, configExist):
+ print(configPath)
+ if configExist == 0:
+ print(f"Ingesting Job Config: {configPath}")
+ configDict = ingest_job_config(configPath, jobName)
+ print(configDict)
+ return configDict
+ else:
+ print("No job config; Using default params")
+ # Set default job config parameters
+ configDict = {jobName: {"HALT_ON_FAILURE": True,
+ "WARN_ON_FAILURE": False,
+ "CONTAINER_BUILD": True,
+ "CONTAINER_NAME": repoName,
+ "CONTAINER_ARCH": "amd64"}}
+ return configDict
+
+
+class GenerateStagesCommand(buildstep.ShellMixin, steps.BuildStep):
+
+ def __init__(self, REPO_NAME, **kwargs):
+ self.REPO_NAME = REPO_NAME
+ kwargs = self.setupShellMixin(kwargs)
+ super().__init__(**kwargs)
+ self.observer = logobserver.BufferLogObserver()
+ self.addLogObserver('stdio', self.observer)
+
+ def extract_stages(self, stdout):
+ stages = []
+ for line in stdout.split('\n'):
+ stage = str(line.strip())
+ if stage:
+ stages.append(stage)
+ return stages
+
+ @defer.inlineCallbacks
+ def run(self):
+ CONTAINER_WORKDIR = f"/home/container-worker/workspace/{self.REPO_NAME}"
+ CI_JOBS_PATH = f"{CONTAINER_WORKDIR}/contrib/ci/jobs"
+ # run 'ls <project_root>/contrib/ci/jobs/' to get the list of stages
+ cmd = yield self.makeRemoteShellCommand()
+ yield self.runCommand(cmd)
+ jobDirs = []
+
+ # if the command passes extract the list of stages
+ result = cmd.results()
+ if result == util.SUCCESS:
+ jobDirs = self.extract_stages(self.observer.getStdout())
+ print(f"this is jobDirs list: {jobDirs}")
+ self.configDict = {}
+ print(f"Remote cmd stdout: {self.observer.getStdout()}")
+ print(f"cmd.results: {cmd.results()}")
+ for stage in jobDirs:
+ jobDirPath = f"{CI_JOBS_PATH}/{stage}"
+ observer = logobserver.BufferLogObserver()
+ self.addLogObserver('stdio', observer)
+ cmd1 = yield self.makeRemoteShellCommand(
+ command=["cat", f"{jobDirPath}/config.ini"])
+ yield self.runCommand(cmd1)
+ print(f"cmd1.results: {cmd1.results()}")
+ print(f"Second command stdout: {observer.getStdout()}")
+ print(f"Current stage: {stage}")
+ print(jobDirPath)
+ self.configDict.update(
+ handle_job_config(
+ jobDirPath, stage, self.REPO_NAME,
+ observer.getStdout(), cmd1.results()))
+ print(self.configDict)
+ # create a container step for each stage and
+ # add them to the build
+ convstr2bool = ast.literal_eval
+ self.build.addStepsAfterCurrentStep([
+ container_add_step(
+ convstr2bool(
+ str(self.configDict[stage]["HALT_ON_FAILURE"])),
+ convstr2bool(
+ str(self.configDict[stage]["WARN_ON_FAILURE"])),
+ convstr2bool(
+ str(self.configDict[stage]["CONTAINER_BUILD"])),
+ self.configDict[stage]["CONTAINER_NAME"],
+ container_factory,
+ CONTAINER_WORKDIR,
+ stage,
+ self.configDict[stage]["CONTAINER_ARCH"],
+ f"contrib/ci/jobs/{stage}/job.sh")
+ for stage in jobDirs
+ ])
+
+ return result
+
+
+# List of repos to add to container factory.
+container_repos = ["git.taler.net/wallet-core",
+ "git.taler.net/libeufin",
+ "git.taler.net/merchant",
+ "git.taler.net/exchange",
+ "git.taler.net/docs",
+ "git.taler.net/taler-ops-www",
+ "git.taler.net/taler-systems-www",
+ "git.taler.net/anastasis-www",
+ "git.taler.net/tutorials",
+ "git.taler.net/sync",
+ "git.taler.net/challenger",
+ "git.taler.net/sandcastle-ng",
+ "git.gnunet.org/gnunet"]
+
+for repo in container_repos:
+
+ # Prepare to read job configs
+ ini = configparser.ConfigParser()
+ ini.optionxform = str
+
+ # Factory-wide variables
+ REPO_NAME = repo.rsplit('/', 1)[1]
+ REPO_URL = "https://" + repo + ".git"
+ CONTAINER_WORKDIR = f"/home/container-worker/workspace/{REPO_NAME}"
+ CI_JOBS_PATH = f"{CONTAINER_WORKDIR}/contrib/ci/jobs"
+
+ # Create a factory
+ container_factory = util.BuildFactory()
+ container_factory.workdir = CONTAINER_WORKDIR
+
+ # Setup workspace
+ container_factory.addStep(ShellCommand(
+ name="workspace",
+ descriptionDone="Workspace directory check",
+ command=f"test -d {CONTAINER_WORKDIR} && podman run --rm --volume {CONTAINER_WORKDIR}:/workdir docker.io/library/debian:bookworm-slim chmod -R 777 /workdir || mkdir -p {CONTAINER_WORKDIR}",
haltOnFailure=True,
- # Needed to test the 'demo' deployment.
- env={"DEPLOYMENT": "demo"}
- )
-)
+ ))
-WORKERS.append(worker.Worker("demo-worker", "demo-pass"))
-DEMO_SERVICES_INTEGRATIONTEST_FACTORY = create_factory_with_deployment()
-DEMO_SERVICES_INTEGRATIONTEST_FACTORY.addStep(
- ShellCommand(
- name="demo services checker",
- description="Checking demo services are online",
- descriptionDone="Demo services are online!.",
- command=["./checks.sh"],
- workdir="../../deployment/buildbot",
+ # Ensure repo is cloned or clean.
+ # Git() will clone repo if it doesn't exist.
+ # Method clobber removes directory and makes a fresh clone.
+ # Shallow set to "True" defaults to a depth of 1.
+ # Will checkout value of "branch" property from job properties.
+ # https://docs.buildbot.net/latest/manual/configuration/steps/source_git.html
+ container_factory.addStep(Git(
+ name="git",
+ repourl=REPO_URL,
+ branch=util.Interpolate('%(src::branch)s'),
+ mode='full',
+ method='clobber',
+ shallow=True,
+ submodules=True,
haltOnFailure=True,
- # Needed to test the 'demo' deployment.
- env={"DEPLOYMENT": "demo"}
- )
-)
-BUILDERS.append(util.BuilderConfig(
- name="demo-services-checker-builder",
- workernames="demo-worker",
- factory=DEMO_SERVICES_INTEGRATIONTEST_FACTORY
-))
-BUILDERS.append(util.BuilderConfig(
- name="check-tips-builder",
- workernames="tips-checker-worker",
- factory=DEMO_CHECK_TIPS_FACTORY
-))
-EMAIL_ALERTS.append("demo-services-checker-builder")
-
-# We check demo once per hour.
-SCHEDULERS.append(schedulers.Periodic(
- name="demo-services-checker-scheduler",
- periodicBuildTimer=60 * 60, # 1 hour
- builderNames=["demo-services-checker-builder"]
-))
-
-
-################ 12: 'demo health wallet-cli check' JOB ###################################
-
-
-##
-# health checks performed by wallet-cli for demo
-WORKERS.append(worker.Worker("taler-demo-healthcheck", "taler-demo-healthcheck-pass"))
-
-TALER_DEMO_HEALTHCHECK_FACTORY = create_factory_with_deployment()
-# For the moment, the health-check uses whatever wallet
-# was (possibly manually) installed beforehand. This is
-# not necessarily a problem, since demo-deployments do never
-# update code automatically.
-TALER_DEMO_HEALTHCHECK_FACTORY.addStep(
- ShellCommand(
- name="test-withdraw-and-spend",
- description="Running wallet spend tests",
- descriptionDone="Test correctly run",
- workdir="../../deployment/buildbot",
- command=["./demo-healthchecks.sh"],
- )
-)
-BUILDERS.append(util.BuilderConfig(
- name="taler-demo-healthcheck-builder",
- workernames=["taler-demo-healthcheck"],
- factory=TALER_DEMO_HEALTHCHECK_FACTORY
-))
-
-################ 13: 'test health wallet-cli check' JOB ###################################
-
-##
-# health checks performed by wallet-cli for test
-WORKERS.append(worker.Worker("taler-test-healthcheck", "taler-test-healthcheck-pass"))
+ ))
-TALER_TEST_HEALTHCHECK_FACTORY = create_factory_with_deployment()
-TALER_TEST_HEALTHCHECK_FACTORY.addStep(git_step("git://git.taler.net/wallet-core.git"))
-TALER_TEST_HEALTHCHECK_FACTORY.addStep(
- ShellCommand(
- name="fetch",
- description="Running yarn install",
- descriptionDone="Correctly installed",
- command=["npm", "install", "-g", "--prefix", "$HOME", "@gnu-taler/taler-wallet-cli"],
- workdir="build/"
- )
-)
-TALER_TEST_HEALTHCHECK_FACTORY.addStep(
- ShellCommand(
- name="test-withdraw",
- description="Running wallet withdraw tests",
- descriptionDone="Test correctly run",
- command=["timeout", "--preserve-status", "5m", "./$HOME/bin/taler-wallet-cli", "integrationtest", "--verbose", "-b", "https://bank.test.taler.net", "-w", "TESTKUDOS:10"],
- workdir="build/",
- )
-)
-TALER_TEST_HEALTHCHECK_FACTORY.addStep(
- ShellCommand(
- name="test-spend",
- description="Running wallet spend tests",
- descriptionDone="Test correctly run",
- command=["timeout", "--preserve-status", "5m", "./$HOME/bin/taler-wallet-cli", "integrationtest", "--verbose", "-b", "https://bank.test.taler.net", "-s", "TESTKUDOS:4"],
- workdir="build/",
- )
-)
-BUILDERS.append(util.BuilderConfig(
- name="taler-test-healthcheck-builder",
- workernames=["taler-test-healthcheck"],
- factory=TALER_TEST_HEALTHCHECK_FACTORY
-))
+ container_factory.addStep(GenerateStagesCommand(
+ REPO_NAME,
+ name="Generate build stages",
+ command=["ls", CI_JOBS_PATH],
+ haltOnFailure=True))
+ BUILDERS.append(util.BuilderConfig(
+ name=f"{REPO_NAME}-builder",
+ workernames=["container-worker"],
+ factory=container_factory
+ ))
-WALLETCHANGE_TRIGGERS.append("taler-test-healthcheck-builder")
+ # Only enable this scheduler for debugging!
+ # Will run builders with 1 minute of waiting in between builds
+ # SCHEDULERS.append(schedulers.Periodic(
+ # name=f"{REPO_NAME}-minutely",
+ # builderNames=[f"{REPO_NAME}-builder"],
+ # periodicBuildTimer=60
+ # ))
+
+ SCHEDULERS.append(schedulers.SingleBranchScheduler(
+ name=f"{REPO_NAME}-container-scheduler",
+ change_filter=util.ChangeFilter(
+ branch="master",
+ project_re=f"({REPO_NAME})"
+ ),
+ treeStableTimer=30,
+ builderNames=[f"{REPO_NAME}-builder"]
+ ))
+ SERVICES.append(reporters.MailNotifier(
+ fromaddr="buildbot@taler.net",
+ # notify from pass to fail, and vice versa.
+ generators=[BuildStatusGenerator(
+ mode=('change','problem','failing','exception',),
+ builders=[f"{REPO_NAME}-builder",],
+ message_formatter=reporters.MessageFormatter(
+ template_type='plain',
+ wantSteps=True,
+ wantLogs=True
+ ),
+ add_logs=True,
+ )],
+ sendToInterestedUsers=False,
+ useTls=False,
+ relayhost="localhost",
+ smtpPort=25,
+ dumpMailsToLog=True,
+ extraRecipients=[f"ci-{REPO_NAME}@taler.net"]
+ ))
-################ 14: upgrade test deployment JOB ###################################
-##
-# testing buildbot using the "buildslavetest" user (for no specific reason except it exists)
-# Location: /home/buidlslavetest @ taler.net
-WORKERS.append(worker.Worker("buildslavetest-worker", "Gei8naiyox4uuhoo"))
+############## sandcastle-ng Scheduler #################################
-BUILD_FACTORY = create_factory_with_deployment()
-BUILD_FACTORY.addStep(
- ShellCommand(
- name="build",
- description="Building all Taler codebase.",
- descriptionDone="Taler built.",
- command=["./build.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True
- )
-)
-BUILD_FACTORY.addStep(
- ShellCommand(
- name="keys generation and sign",
- description="Generating exchange keys, and auditor-sign them.",
- descriptionDone="Exchange keys generated, and auditor-signed.",
- command=["timeout", "--preserve-status", "1m",
- "./keys.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- env={'BRANCH': util.Property("branch")}
- )
-)
-BUILD_FACTORY.addStep(
- ShellCommand(
- name="restart services",
- description="Restarting inactive blue-green party.",
- descriptionDone="Restarting Taler.",
- command=["./restart.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- env={'BRANCH': util.Property("branch")}
- )
-)
-BUILD_FACTORY.addStep(
- ShellCommand(
- name="check services correctly restarted",
- description="Checking services are correctly restarted.",
- descriptionDone="All services are correctly restarted.",
- command=["./checks.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- env={'DEPLOYMENT': "test"}
- )
-)
-BUILD_FACTORY.addStep(
- ShellCommand(
- name="create instances",
- description="Create merchant instances.",
- descriptionDone="All the instances got created.",
- command=["./create_instances.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- env={'DEPLOYMENT': "test"}
- )
-)
-
-BUILD_FACTORY.addStep(
- ShellCommand(
- name="activate tip reserve",
- description="Instruct the merchant to pay and authorize the tip reserve.",
- descriptionDone="The tip reserve got payed and authorized.",
- command=["./create_tip_reserve.sh"],
- workdir="../../deployment/buildbot",
- haltOnFailure=True,
- env={'DEPLOYMENT': "test"}
- )
-)
-
-BUILDERS.append(util.BuilderConfig(
- name="test-builder", workernames=["test-worker"], factory=BUILD_FACTORY
+# Periodic scheduler for sandcastle-ng.
+# Runs every 2 hours (60 seconds * 60 * 2)
+SCHEDULERS.append(schedulers.Periodic(
+ name="sandcastle-ng-periodic-scheduler",
+ builderNames=["sandcastle-ng-builder"],
+ change_filter=util.ChangeFilter(branch="master"),
+ periodicBuildTimer=60*60*2
))
-EMAIL_ALERTS.append("test-builder")
-
-# Scheduler that triggers if anything changes
-CODECHANGE_TRIGGERS.append("test-builder")
-CODECHANGE_TRIGGERS.append("wallet-builder")
-
-################ 15: Python linting JOB ###################################
-
-# This job is noat active / complete yet!
-def lint_dispatcher(project):
- return "./lint_%s.sh" % project
-
-LINT_FACTORY = util.BuildFactory()
-LINT_FACTORY.addStep(
- ShellCommand(
- name="Python linter",
- description="linting Python",
- descriptionDone="linting done",
- command=util.Transform(lint_dispatcher, util.Property("project")),
- workdir="../../deployment/taler-build"
- )
-)
-# This builder is NOT ACTIVE!
-#BUILDERS.append(util.BuilderConfig(
-# name="lint-builder",
-# workernames=["lint-worker"],
-# factory=LINT_FACTORY
-#))
-
-# Consider adding other Python parts, like the various frontends.
-# NOTE: scheduler is NOT active! (commented out below)
-#SCHEDULERS.append(schedulers.SingleBranchScheduler(
-# name="lint-scheduler",
-# change_filter=util.ChangeFilter(
-# branch="master", project_re="(bank|donations|survey|blog)"
-# ),
-# treeStableTimer=None,
-# builderNames=["lint-builder"]
-#))
-
-
-################ 16: Selenium JOB ###################################
-
-# This job is noat active!
-SELENIUM_FACTORY = create_factory_with_deployment()
-SELENIUM_FACTORY.addStep(
- ShellCommand(
- name="selenium",
- description="Headless browser test",
- descriptionDone="Test finished",
- command=["launch_selenium_test"],
- env={'PATH': "${HOME}/local/bin:/usr/lib/chromium:${PATH}"}
- )
-)
-
-#BUILDERS.append(util.BuilderConfig(
-# name="selenium-builder",
-# workernames=["selenium-worker"],
-# factory=SELENIUM_FACTORY
-#))
-
################ 99: debug stuff JOB ###################################
@@ -1147,21 +672,6 @@ BUILDER_LIST = map(lambda builder: builder.name, BUILDERS)
####### GENERAL PURPOSE BUILDBOT SERVICES #######################
-SERVICES.append(reporters.IRC(
- "irc.eu.libera.net",
- "taler-bb",
- useColors=False,
- channels=[{
- "channel": "#taler"
- }],
- password="taler-bb-pass19",
- notify_events={
- 'exception': 1,
- 'successToFailure': 1,
- 'failureToSuccess': 1
- }
-))
-
SERVICES.append(reporters.MailNotifier(
fromaddr="testbuild@taler.net",
# notify from pass to fail, and viceversa.
@@ -1176,7 +686,11 @@ SERVICES.append(reporters.MailNotifier(
add_logs=True,
)],
sendToInterestedUsers=False,
- extraRecipients=["buildfailures@taler.net"]
+ useTls=False,
+ relayhost="localhost",
+ smtpPort=25,
+ dumpMailsToLog=True,
+ extraRecipients=BUILDER_EMAIL_ADDRESSES
))
@@ -1248,7 +762,16 @@ c["db"] = {
# the 'change_source' setting tells the buildmaster how it should
# find out about source code changes.
-c["change_source"] = [changes.PBChangeSource(user="allcs", passwd="allcs")]
+pbSource = PBChangeSource(user="allcs", passwd="allcs")
+
+
+pollGnunetSource = changes.GitPoller(repourl='https://git.gnunet.org/gnunet.git',
+ branches=True,
+ pollInterval=300,
+ pollAtLaunch=True,
+ project="gnunet")
+
+c["change_source"] = [pollGnunetSource, pbSource]
# 'protocols' contains information about protocols which master
# will use for communicating with workers. You must define at
@@ -1265,7 +788,7 @@ c["buildbotURL"] = "https://buildbot.taler.net/"
# -- formerly commented out as not packaged properly in Debian and others, see
# https://bugzilla.redhat.com/show_bug.cgi?id=1557687
c["www"] = {
- "port": 8010,
+ "port": "tcp:8010:interface=127.0.0.1",
"default_page": 'waterfall',
"plugins": {
"waterfall_view": True,
diff --git a/buildbot/restart.sh b/buildbot/restart.sh
index 03d55a3..c6c4b25 100755
--- a/buildbot/restart.sh
+++ b/buildbot/restart.sh
@@ -2,10 +2,12 @@
set -eu
-DATE=`date`
-echo "Restarting Taler deployment at $DATE"
-source "${HOME}/activate"
-taler-deployment-restart
+cd ${HOME}/deployment/docker/demo
-echo "Deployment ready"
-exit 0
+export DOCKER_HOST=unix://${XDG_RUNTIME_DIR}/docker.sock
+export TALER_DEPLOYMENT_CONFIG=${HOME}/deployment.conf
+docker-compose stop
+docker-compose down -v
+docker-compose up --remove-orphans -d
+# Make early errors shown on the Web console:
+timeout 5m docker-compose logs --follow || true
diff --git a/buildbot/update-sources.sh b/buildbot/update-sources.sh
index 8c95340..9fc7b80 100755
--- a/buildbot/update-sources.sh
+++ b/buildbot/update-sources.sh
@@ -22,10 +22,12 @@ do
git -C $P clean -fdx
git -C $P fetch
git -C $P reset --hard origin/master
+ git submodule sync --recursive
git submodule update --init
else
cd $HOME/sources
git clone ${BASE_URL}$n
+ git submodule sync --recursive
git submodule update --init
cd -
fi
diff --git a/codespell/dictionary.txt b/codespell/dictionary.txt
index a073a52..702ac5c 100644
--- a/codespell/dictionary.txt
+++ b/codespell/dictionary.txt
@@ -1,35 +1,47 @@
# List of "words" that codespell should ignore in our sources.
-ect
-cant
-ehr
-ba
-wih
-ifset
-openin
-fo
-complet
-ist
+#
+# Note: The word sensitivity depends on how the to-be-ignored word is
+# spelled in codespell_lib/data/dictionary.txt. E.g. if there is a word
+# 'foo' and you add 'Foo' _here_, codespell will continue to complain
+# about 'Foo'.
+#
+Nam
+BRE
+ND
Nd
+TE
TEH
-onl
UPDATEing
-BRE
-TE
-te
-ND
+WAN
aci
-doas
acn
-tha
-ths
-nd
-WAN
-wan
+ba
+bre
+cant
+clen
+complet
+doas
+ect
+ehr
+fo
hel
-te
+ifset
+ist
keypair
-sie
+nd
+onl
+openin
ot
-bre
+ser
+sie
+som
+sover
+te
+te
teh
+tha
+ths
updateing
+vie
+wan
+wih
diff --git a/docker/compile-and-check/README b/docker/compile-and-check/README
new file mode 100644
index 0000000..e70c571
--- /dev/null
+++ b/docker/compile-and-check/README
@@ -0,0 +1,26 @@
+This Docker image compiles and checks all the Taler code
+via its entrypoint. In case of build failures, it optionally
+offers a debug shell to inspect the running container.
+
+Build Toolchain Image
+---------------------
+
+To build image with app dependencies to build Taler, run ./build.sh
+
+Run
+---
+
+To start the build process, run
+
+$ ./build.sh
+
+Interactive Debugging
+---------------------
+
+To interactively debug the build process, run
+
+$ ./interactive.sh
+
+Then inside the container, run the entry point manually
+
+[container]$ /home/talercheck/compile_and_check.sh
diff --git a/docker/compile-and-check/base/Dockerfile b/docker/compile-and-check/base/Dockerfile
new file mode 100644
index 0000000..8ce899b
--- /dev/null
+++ b/docker/compile-and-check/base/Dockerfile
@@ -0,0 +1,40 @@
+FROM debian:bookworm
+
+
+# This image provides base dependencies needed to compile and run
+# GNU Taler components
+
+RUN apt-get update
+
+# Explanations for weirder dependencies:
+# - texlive-* is required by the exchange test cases
+RUN apt-get install -y autoconf autopoint libtool texinfo \
+ libgcrypt-dev libidn11-dev zlib1g-dev libunistring-dev \
+ libjansson-dev python3-pip git recutils libsqlite3-dev \
+ libpq-dev postgresql libcurl4-openssl-dev libsodium-dev git \
+ libqrencode-dev zip jq npm openjdk-17-jre nginx procps \
+ curl python3-jinja2 wget curl python3-sphinx socat apache2-utils \
+ python3-sphinx-rtd-theme sqlite3 vim emacs faketime \
+ texlive-latex-base texlive-latex-extra
+
+RUN useradd -m talercheck
+
+USER talercheck
+WORKDIR /home/talercheck
+
+# pnpm likes to have the tmp directory
+RUN mkdir -p tmp
+
+# Make pip3 happy by running as a non-root user
+# and setting PATH correctly
+ENV PATH="/home/talercheck/.local/bin:$PATH"
+
+RUN pip3 install --break-system-packages requests click poetry uwsgi htmlark
+
+RUN npm config set prefix $HOME/.npm-global
+RUN npm install -g pnpm
+
+COPY ./base/util.sh ./base/compile_and_check.sh /home/talercheck/
+COPY ./config/tags.sh /home/talercheck/tags.sh
+
+ENTRYPOINT /home/talercheck/compile_and_check.sh
diff --git a/docker/compile-and-check/base/compile_and_check.sh b/docker/compile-and-check/base/compile_and_check.sh
new file mode 100755
index 0000000..59e16cb
--- /dev/null
+++ b/docker/compile-and-check/base/compile_and_check.sh
@@ -0,0 +1,94 @@
+#!/bin/bash
+
+set -eu
+set -x
+
+start_debug_shell () {
+ if test "${SANDCASTLE_DEBUG_SHELL:-no}" = yes; then
+ bash --init-file <(echo \
+ "echo 'Taler build failed, press enter to get the debug shell..'; read || exit $?"
+ ) -i
+ fi
+}
+trap start_debug_shell ERR
+
+source ~/util.sh
+
+echo -n Exporting the tags environment..
+set -a
+. tags.sh
+set +a
+echo DONE
+echo Exported tags:
+{ env | grep TAG_; } || echo NONE
+
+export LD_LIBRARY_PATH=$HOME/local
+
+num_processors=$(getconf _NPROCESSORS_ONLN)
+JFLAG="-j$num_processors"
+PREFIX=$HOME/local
+
+git clone --depth=1 git://git.gnunet.org/libmicrohttpd --branch ${TAG_LIBMHD:-master}
+git clone --depth=1 git://git.gnunet.org/gnunet --branch ${TAG_GNUNET:-master}
+git clone --depth=1 git://git.taler.net/exchange --branch ${TAG_EXCHANGE:-master}
+git clone --depth=1 git://git.taler.net/merchant --branch ${TAG_MERCHANT:-master}
+git clone --depth=1 git://git.taler.net/libeufin --branch ${TAG_LIBEUFIN:-master}
+git clone --depth=1 git://git.taler.net/taler-merchant-demos --branch ${TAG_MERCHANT_DEMOS:-master}
+git clone --depth=1 git://git.taler.net/wallet-core --branch ${TAG_WALLET:-master}
+git clone --depth=1 git://git.taler.net/sync --branch ${TAG_SYNC:-master}
+
+cd ~/libmicrohttpd
+./bootstrap
+./configure --disable-doc --prefix=$PREFIX
+make $JFLAG install
+
+cd ~/gnunet
+./bootstrap
+./configure --enable-logging=verbose --disable-documentation --prefix=$PREFIX --with-microhttpd=$PREFIX
+make $JFLAG install
+
+cd ~/exchange
+./bootstrap
+./configure CFLAGS="-ggdb -O0" --enable-logging=verbose --disable-doc --prefix=$PREFIX --with-gnunet=$PREFIX
+make $JFLAG install
+
+cd ~/merchant
+./bootstrap
+./configure CFLAGS="-ggdb -O0" --enable-logging=verbose --disable-doc --prefix=$PREFIX --with-gnunet=$PREFIX --with-exchange=$PREFIX
+make $JFLAG install
+
+cd ~/libeufin
+./bootstrap
+./configure --prefix=$PREFIX
+make install
+
+cd ~/taler-merchant-demos
+./bootstrap
+./configure --destination=local
+make install
+
+cd ~/sync
+./bootstrap
+./configure CFLAGS="-ggdb -O0" --enable-logging=verbose --disable-doc --prefix=$PREFIX --with-gnunet=$PREFIX --with-exchange=$PREFIX
+make $JFLAG install
+
+cd ~/wallet-core
+./bootstrap
+cd ~/wallet-core/packages/taler-wallet-cli
+./configure --prefix=$HOME/local
+make install
+cd ~/wallet-core/packages/taler-harness
+./configure --prefix=$HOME/local
+make install
+
+db_start
+createdb talercheck
+
+cd ~/exchange
+make check
+
+cd ~/merchant
+make check
+
+cd ~
+taler-harness run-integrationtests
diff --git a/docker/compile-and-check/base/util.sh b/docker/compile-and-check/base/util.sh
new file mode 100644
index 0000000..9ff8984
--- /dev/null
+++ b/docker/compile-and-check/base/util.sh
@@ -0,0 +1,34 @@
+export PATH=$HOME/.npm-global/bin:$PATH
+export PATH=$HOME/local/bin:$PATH
+export PATH=/lib/postgresql/15/bin/:$PATH
+
+export DBDIR=$HOME/talerdb
+export LOGDIR=$HOME/logs
+export TMPDIR=$HOME/tmp
+export SOCKDIR=$HOME/sockets
+export PGHOST=$SOCKDIR
+
+function db_start() {
+ mkdir -p $SOCKDIR
+ mkdir -p $LOGDIR
+ mkdir -p $TMPDIR
+ initdb --no-sync --auth=trust -D $DBDIR # > /logs/postgres-dbinit.log 2> /logs/postgres-dbinit.er
+ echo "Launching Postgres"
+ cat - > $DBDIR/postgresql.conf <<EOF
+unix_socket_directories='$SOCKDIR'
+fsync=off
+max_wal_senders=0
+synchronous_commit=off
+wal_level=minimal
+listen_addresses=''
+EOF
+ cat $DBDIR/pg_hba.conf | grep -v host > $DBDIR/pg_hba.conf.new
+ mv $DBDIR/pg_hba.conf.new $DBDIR/pg_hba.conf
+ pg_ctl -D $DBDIR -l /dev/null start > $LOGDIR/postgres-start.log 2> $LOGDIR/postgres-start.err
+ echo " DONE"
+}
+
+db_destroy() {
+ pg_ctl -D $DBDIR -l /dev/null stop
+ rm -rf $DBDIR
+}
diff --git a/docker/compile-and-check/build.sh b/docker/compile-and-check/build.sh
new file mode 100755
index 0000000..c5164ed
--- /dev/null
+++ b/docker/compile-and-check/build.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+# Build the image and tag it
+
+mydir="$(dirname -- "$0")"
+cd $mydir
+
+exec docker build -f base/Dockerfile -t sandcastle-checker .
diff --git a/docker/compile-and-check/config/tags.sh b/docker/compile-and-check/config/tags.sh
new file mode 100644
index 0000000..ce2ed9c
--- /dev/null
+++ b/docker/compile-and-check/config/tags.sh
@@ -0,0 +1,7 @@
+TAG_LIBMHD=v0.9.75
+TAG_GNUNET=v0.19.3
+TAG_EXCHANGE=master
+TAG_MERCHANT=master
+TAG_LIBEUFIN=master
+TAG_SYNC=master
+TAG_WALLET=master
diff --git a/docker/compile-and-check/interactive.sh b/docker/compile-and-check/interactive.sh
new file mode 100755
index 0000000..94fb404
--- /dev/null
+++ b/docker/compile-and-check/interactive.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+# Run the image in interactive mode
+
+mydir="$(dirname -- "$0")"
+cd $mydir
+
+exec docker run -it --entrypoint /bin/bash sandcastle-checker
diff --git a/docker/compile-and-check/run.sh b/docker/compile-and-check/run.sh
new file mode 100755
index 0000000..e5dc0fe
--- /dev/null
+++ b/docker/compile-and-check/run.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+# Run compilation and tests in the image
+
+mydir="$(dirname -- "$0")"
+cd $mydir
+
+exec docker run sandcastle-checker
diff --git a/docker/docs-build/Dockerfile b/docker/docs-build/Dockerfile
new file mode 100644
index 0000000..5bcddce
--- /dev/null
+++ b/docker/docs-build/Dockerfile
@@ -0,0 +1,27 @@
+# Stage 1
+
+FROM debian:bookworm-slim AS repo
+
+RUN apt update \
+ && apt install --no-install-recommends -y ca-certificates git
+
+RUN git clone --branch=master --depth=1 https://git.taler.net/docs.git
+
+# Final image
+
+FROM sphinxdoc/sphinx-latexpdf
+
+# Copy content from one container to the other
+
+WORKDIR /docs
+
+COPY --from=repo /docs .
+
+# Install dependencies
+
+RUN python3 -m pip install --no-cache-dir recommonmark
+
+# Compile /docs/*
+
+ENTRYPOINT ["make", "BUILDDIR=/output", "-C", "/docs", "html", "latexpdf"]
+
diff --git a/docker/docs-build/build.sh b/docker/docs-build/build.sh
new file mode 100755
index 0000000..0289932
--- /dev/null
+++ b/docker/docs-build/build.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+set -eu
+
+docker build . -t taler-docs-image
diff --git a/docker/docs-build/run.sh b/docker/docs-build/run.sh
new file mode 100755
index 0000000..9f2eba6
--- /dev/null
+++ b/docker/docs-build/run.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+set -eu
+
+# Execute the dockerfile, and leave output in local system
+
+docker run -v /home/docbuilder/docs:/output taler-docs-image
+
+# COPY data from volume to local
+
+cp -r /home/docbuilder/docs/html/* /home/docbuilder/build/docs/html
+cp -r /home/docbuilder/docs/latex/*.pdf /home/docbuilder/build/docs/pdf
+
+
diff --git a/dpkg-build/Dockerfile b/docker/dpkg-build/Dockerfile
index f745c0c..f745c0c 100644
--- a/dpkg-build/Dockerfile
+++ b/docker/dpkg-build/Dockerfile
diff --git a/dpkg-build/build-ubuntu.sh b/docker/dpkg-build/build-ubuntu.sh
index 2ce4855..2ce4855 100755
--- a/dpkg-build/build-ubuntu.sh
+++ b/docker/dpkg-build/build-ubuntu.sh
diff --git a/docker/hybrid/docker-compose.yml b/docker/hybrid/docker-compose.yml
deleted file mode 100644
index ede1dc5..0000000
--- a/docker/hybrid/docker-compose.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-version: '3' # '0' is not allowed
-
-services:
-
- talerdb:
- build: ./images/postgres
- ports:
- - 8888:5432
- exchange:
- build: ./images/exchange
- depends_on:
- - talerdb
- ports:
- - 5555:80
- merchant:
- build: ./images/merchant
- depends_on:
- - talerdb
- ports:
- - 5556:80
diff --git a/docker/hybrid/images/base/Dockerfile b/docker/hybrid/images/base/Dockerfile
deleted file mode 100644
index 9e4d487..0000000
--- a/docker/hybrid/images/base/Dockerfile
+++ /dev/null
@@ -1,36 +0,0 @@
-FROM debian:testing
-RUN apt-get update
-
-RUN apt-get install -y autoconf autopoint libtool texinfo \
- libgcrypt-dev libidn11-dev zlib1g-dev libunistring-dev \
- libjansson-dev python3-pip git recutils libsqlite3-dev \
- libpq-dev postgresql libcurl4-openssl-dev libsodium-dev git \
- libqrencode-dev zip jq nodejs npm openjdk-17-jre nginx procps \
- curl python3-jinja2 wget curl
-
-RUN git clone git://git.gnunet.org/libmicrohttpd
-RUN git clone git://git.gnunet.org/gnunet
-RUN git clone git://git.taler.net/exchange /exchange
-RUN git clone git://git.taler.net/merchant /merchant
-
-WORKDIR /libmicrohttpd
-RUN ./bootstrap
-RUN ./configure --disable-doc
-RUN make install
-
-WORKDIR /gnunet
-RUN ./bootstrap
-RUN ./configure --enable-logging=verbose --disable-documentation
-RUN make install
-
-WORKDIR /exchange
-RUN ./bootstrap
-RUN ./configure CFLAGS="-ggdb -O0" --enable-logging=verbose --disable-doc
-RUN make install
-
-WORKDIR /merchant
-RUN ./bootstrap
-RUN ./configure CFLAGS="-ggdb -O0" \
- --enable-logging=verbose \
- --disable-doc
-RUN make install
diff --git a/docker/hybrid/images/exchange/Dockerfile b/docker/hybrid/images/exchange/Dockerfile
deleted file mode 100644
index 4f744a5..0000000
--- a/docker/hybrid/images/exchange/Dockerfile
+++ /dev/null
@@ -1,6 +0,0 @@
-FROM taler_local/taler_base
-
-COPY taler.conf /config/taler.conf
-COPY startup.sh /
-RUN chmod +x /startup.sh
-ENTRYPOINT /startup.sh
diff --git a/docker/hybrid/images/exchange/startup.sh b/docker/hybrid/images/exchange/startup.sh
deleted file mode 100644
index 9fe4052..0000000
--- a/docker/hybrid/images/exchange/startup.sh
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/bin/bash
-
-set -eu
-export LD_LIBRARY_PATH=/usr/local/lib
-
-sed -i 's/\${EXCHANGE_URL}/http:\/\/exchange:80\//' /config/taler.conf
-sed -i 's/\${CURRENCY}/EUR/' /config/taler.conf
-
-while ! pg_isready -h talerdb -d taler; do
- echo DB not ready yet.
- sleep 2
-done
-echo Now DB is ready.
-
-# Make sure we have the right to connect
-echo "" | psql -h talerdb -d taler
-
-taler-exchange-dbinit -c /config/taler.conf --reset # &> /dev/null
-
-taler-exchange-secmod-eddsa -c /config/taler.conf &
-taler-exchange-secmod-rsa -c /config/taler.conf &
-taler-exchange-secmod-cs -c /config/taler.conf &
-echo "Crypto helpers started.."
-EXCHANGE_MASTER_PUB=$(taler-exchange-offline -c /config/taler.conf setup)
-sed -i "s/\${EXCHANGE_MASTER_PUB}/$EXCHANGE_MASTER_PUB/" /config/taler.conf
-taler-exchange-httpd -c /config/taler.conf &
-for n in `seq 1 80`
- do
- echo "."
- sleep 0.1
- OK=1
- wget http://exchange/ -o /dev/null -O /dev/null >/dev/null && break
- OK=0
- done
- if [ 1 != $OK ]
- then
- echo "Failed to launch Exchange"
- fi
-echo Echange launched.
-
-taler-exchange-offline -c /config/taler.conf download sign upload
-wait
diff --git a/docker/hybrid/images/exchange/taler.conf b/docker/hybrid/images/exchange/taler.conf
deleted file mode 100644
index dd93b3d..0000000
--- a/docker/hybrid/images/exchange/taler.conf
+++ /dev/null
@@ -1,59 +0,0 @@
-# TO REPLACE (wrapped with ${}):
-
-# NEXUS_URL
-# EXCHANGE_URL
-# CURRENCY
-# EXCHANGE_MASTER_PUB
-
-[taler]
-currency = ${CURRENCY}
-
-[paths]
-taler_data_home = /data
-
-[taler-exchange-secmod-eddsa]
-unixpath = /eddsa.http
-
-[taler-exchange-secmod-rsa]
-sm_priv_key = /data/taler-exchange-secmod-rsa/secmod-private-key
-unixpath = /sockets/exchange-secmod-rsa.sock
-
-[taler-exchange-secmod-cs]
-sm_priv_key = /data/taler-exchange-secmod-cs/secmod-private-key
-unixpath = /sockets/exchange-secmod-cs.sock
-
-[exchange-accountcredentials-1]
-username = exchange-nexus-user
-wire_gateway_auth_method = basic
-wire_gateway_url = ${NEXUS_URL}/facades/exchange-taler-facade/taler-wire-gateway/
-password = exchange-nexus-password
-
-[exchange-account-1]
-enable_credit = yes
-enable_debit = yes
-payto_uri = payto://iban/SANDBOXX/DE159593?receiver-name=Name+unknown
-
-[exchange]
-master_public_key = ${EXCHANGE_MASTER_PUB}
-privacy_dir = /usr/local/share/taler/exchange/pp
-terms_dir = /usr/local/share/taler/exchange/tos
-base_url = ${EXCHANGE_URL}
-unixpath = /sockets/exchange.sock
-serve = tcp
-port = 80
-# serve = unix
-
-[exchangedb-postgres]
-config = postgres://talerdb/taler
-
-[coin_${CURRENCY}_1]
-rsa_keysize = 2048
-fee_deposit = ${CURRENCY}:0.01
-fee_refund = ${CURRENCY}:0.01
-fee_refresh = ${CURRENCY}:0.01
-fee_withdraw = ${CURRENCY}:0.01
-duration_legal = 10 years
-duration_spend = 5 years
-duration_withdraw = 3 years
-value = ${CURRENCY}:1
-cipher = RSA
diff --git a/docker/hybrid/images/merchant/Dockerfile b/docker/hybrid/images/merchant/Dockerfile
deleted file mode 100644
index 4f744a5..0000000
--- a/docker/hybrid/images/merchant/Dockerfile
+++ /dev/null
@@ -1,6 +0,0 @@
-FROM taler_local/taler_base
-
-COPY taler.conf /config/taler.conf
-COPY startup.sh /
-RUN chmod +x /startup.sh
-ENTRYPOINT /startup.sh
diff --git a/docker/hybrid/images/merchant/startup.sh b/docker/hybrid/images/merchant/startup.sh
deleted file mode 100644
index 4303e78..0000000
--- a/docker/hybrid/images/merchant/startup.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-
-set -eu
-export LD_LIBRARY_PATH=/usr/local/lib
-
-while ! pg_isready -h talerdb -d taler; do
- echo DB not ready yet.
- sleep 2
-done
-echo Now DB is ready.
-
-for n in `seq 1 80`
- do
- echo "."
- sleep 0.1
- OK=1
- wget http://exchange/ -o /dev/null -O /dev/null >/dev/null && break
- OK=0
- done
- if [ 1 != $OK ]
- then
- echo "Exchange unreachable."
- fi
-echo Echange reachable.
-
-EXCHANGE_MASTER_PUB=$(curl -s http://exchange/keys | jq -r .master_public_key)
-echo Found Exchange Pub: $EXCHANGE_MASTER_PUB
-sed -i 's/\${EXCHANGE_URL}/http:\/\/exchange\//' /config/taler.conf
-sed -i "s/\${EXCHANGE_PUB}/${EXCHANGE_MASTER_PUB}/" /config/taler.conf
-sed -i 's/\${CURRENCY}/EUR/' /config/taler.conf
-# FRONTENDS_APIKEY=secret-token:secret, later..
-
-taler-merchant-dbinit -c /config/taler.conf --reset
-taler-merchant-httpd -c /config/taler.conf &
-sleep 1
-
-curl -s -H "Content-Type: application/json" -X POST -d '{"auth":{"method":"external"},"payto_uris":["payto://iban/SANDBOXX/DE474361?receiver-name=Merchant43"],"id":"default","name":"default","address":{},"jurisdiction":{},"default_max_wire_fee":"EUR:1", "default_max_deposit_fee":"EUR:1","default_wire_fee_amortization":1,"default_wire_transfer_delay":{"d_us" : 3600000000},"default_pay_delay":{"d_us": 3600000000}}' http://merchant/management/instances
-
-echo Default instance created.
-wait
diff --git a/docker/hybrid/images/merchant/taler.conf b/docker/hybrid/images/merchant/taler.conf
deleted file mode 100644
index cd50bba..0000000
--- a/docker/hybrid/images/merchant/taler.conf
+++ /dev/null
@@ -1,24 +0,0 @@
-[taler]
-currency = ${CURRENCY}
-
-[paths]
-TALER_DATA_HOME = /data
-
-[merchant-exchange-${CURRENCY}]
-currency = ${CURRENCY}
-exchange_base_url = http://exchange/
-master_key = ${EXCHANGE_PUB}
-
-[merchantdb-postgres]
-config = postgres://talerdb/taler
-
-[merchant]
-default_max_deposit_fee = ${CURRENCY}:0.05
-default_max_wire_fee = ${CURRENCY}:0.01
-wire_transfer_delay = 0 s
-port = 80
-serve = tcp
-
-[merchant-account-merchant]
-wire_file_mode = 770
-wire_response = ${TALER_DATA_HOME}/merchant/wire/merchant.json
diff --git a/docker/hybrid/images/postgres/Dockerfile b/docker/hybrid/images/postgres/Dockerfile
deleted file mode 100644
index e6e514d..0000000
--- a/docker/hybrid/images/postgres/Dockerfile
+++ /dev/null
@@ -1,4 +0,0 @@
-FROM postgres
-ENV POSTGRES_HOST_AUTH_METHOD=trust
-
-COPY init.sql /docker-entrypoint-initdb.d/init.sql
diff --git a/docker/hybrid/images/postgres/init.sql b/docker/hybrid/images/postgres/init.sql
deleted file mode 100644
index dec35df..0000000
--- a/docker/hybrid/images/postgres/init.sql
+++ /dev/null
@@ -1,2 +0,0 @@
-CREATE ROLE root SUPERUSER LOGIN;
-CREATE DATABASE taler WITH OWNER root;
diff --git a/docker/sites-build/Dockerfile b/docker/sites-build/Dockerfile
new file mode 100644
index 0000000..8c541b5
--- /dev/null
+++ b/docker/sites-build/Dockerfile
@@ -0,0 +1,48 @@
+FROM debian:bookworm-slim
+
+# Install dependencies
+
+RUN apt update
+
+RUN apt install --no-install-recommends -y ca-certificates git make python3-pip gettext
+
+RUN pip install Jinja2 ruamel.yaml Babel beautifulsoup4 lxml
+
+# User and folder
+
+RUN useradd -m taler-websites
+
+USER taler-websites
+
+WORKDIR /home/taler-websites
+
+# Get the bash files which do the sites' compilation - step 1
+
+RUN git clone https://git.taler.net/deployment.git
+
+# Get the compilation Make files - step 2
+
+RUN git clone https://git.taler.net/www.git
+
+# Get twister code
+
+RUN git clone https://git.taler.net/twister.git
+
+# Get buywith code
+
+RUN git clone https://git.taler.net/buywith.git
+
+# Create needed directories requested by .sh files of step 1
+
+RUN mkdir buywith.taler.net stage.taler.net twister.taler.net stamps
+
+# Copy needed files to stamps directory
+
+RUN cp ~/deployment/taler-sitesbuild/invalidate.sh ~/stamps \
+ && cp ~/deployment/taler-sitesbuild/Makefile ~/stamps
+
+WORKDIR deployment/buildbot
+
+# Compile Taler websites
+
+ENTRYPOINT ["./build-sites.sh"]
diff --git a/docker/sites-build/build.sh b/docker/sites-build/build.sh
new file mode 100755
index 0000000..41e8dfd
--- /dev/null
+++ b/docker/sites-build/build.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+docker build . -t docker_image_taler_websites \ No newline at end of file
diff --git a/docker/sites-build/run.sh b/docker/sites-build/run.sh
new file mode 100755
index 0000000..37d1a79
--- /dev/null
+++ b/docker/sites-build/run.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+# Execute "Docker: sites-builder-image"
+
+
+docker run -v $HOME/taler-websites:/home/taler-websites docker_image_taler_websites
diff --git a/docker/taler-docker/base.Dockerfile b/docker/taler-docker/base.Dockerfile
deleted file mode 100644
index 803e0b5..0000000
--- a/docker/taler-docker/base.Dockerfile
+++ /dev/null
@@ -1,177 +0,0 @@
-# FROM alpine:3.12.2 as network
-FROM debian:bullseye-slim as network
-# FROM debian:buster-slim as network
-
-ENV HOME /root
-WORKDIR $HOME
-
-# RUN apk add --no-cache git curl alpine-sdk gnupg
-RUN apt update && apt install -y git
-#alpine-sdk
-
-# RUN addgroup -g 1000 -S talergroup && adduser -u 1000 -S taleruser -G talergroup
-# USER taleruser
-# ENV HOME /home/taleruser
-# WORKDIR $HOME
-
-FROM network as network-microhttpd
-COPY libmicrohttpd.git $HOME/src
-RUN git -C src checkout v0.9.75
-
-FROM network as network-gnunet
-COPY gnunet.git $HOME/src
-RUN git -C src checkout 69844eacf3e43ad882c38f4d954fb5f5dd5a848b
-
-FROM network as network-jansson
-COPY jansson.git $HOME/src
-RUN git -C src checkout 2.13
-
-# FROM network as network-recutils
-# RUN curl -O http://gnu.c3sl.ufpr.br/ftp/recutils/recutils-1.8.tar.gz && tar xzf recutils-1.8.tar.gz && mv recutils-1.8 src
-# COPY jemarch-key.gpg $HOME
-# COPY recutils-1.8.tar.gz.sig $HOME
-# RUN gpg --import jemarch-key.gpg
-# RUN echo BDFA5717FC1DD35C2C3832A23EF90523B304AF08:6: | gpg --import-ownertrust
-# RUN gpg --verify recutils-1.8.tar.gz.sig recutils-1.8.tar.gz || exit 1
-
-FROM network as basic
-
-# RUN echo https://dl-cdn.alpinelinux.org/alpine/edge/testing >> /etc/apk/repositories
-# RUN apk add --no-cache \
-# zip
-# dpkg \
-# automake uncrustify \
-# autoconf \
-# libtool \
-# libidn-dev \
-# libunistring-dev \
-# jansson-dev \
-# libpq \
-# # libmicrohttpd-dev \
-# py3-pip \
-# libsodium \
-# postgresql-client vim \
-# texinfo \
-# npm curl-dev \
-# libsodium-dev bind-tools \
-# argon2-dev \
-# libqrencode-dev \
-# libxml2-dev libxslt-dev \
-# python3-dev libffi-dev libressl-dev musl-dev \
-# gettext-dev libgcrypt-dev sqlite-dev \
-# postgresql-dev zbar-dev bluez-dev iptables-dev \
-# jq httpie
-# && pip3 install click requests poetry jinja2 flask_babel uwsgi lxml pdflatex
-
-RUN apt update && apt install -y \
- dpkg git curl gnupg \
- automake uncrustify \
- autoconf \
- libtool \
- libidn2-dev \
- libunistring-dev \
- # libjansson-dev \
- libpq-dev \
- # libmicrohttpd-dev \
- python3-pip \
- libsodium23 \
- postgresql-client vim \
- texinfo \
- #anasatasis needs wget and postgres
- postgresql wget \
- npm openssl libcurl4-openssl-dev \
- libsodium-dev \
- # bind-tools \
- libargon2-0-dev \
- libqrencode-dev \
- libxml2-dev libxslt-dev \
- python3-dev libffi-dev \
- # libressl-dev \
- # musl-dev \
- libgettextpo-dev libgcrypt-dev libsqlite3-dev \
- libpq-dev libzbar-dev libbluetooth-dev libiptc-dev \
- python3-dev cargo \
- # not sure which one of this is needed
- librec-dev recutils librec1 \
- gettext autopoint \
- zip patchelf \
- # jansson uses cmake
- cmake \
- jq httpie
-
-RUN apt install -y sphinx
-RUN pip install sphinx_rtd_theme
-# FROM basic as recutils
-
-# COPY --from=network-recutils $HOME/src $HOME/src
-
-# WORKDIR $HOME/src
-
-# RUN ./configure \
-# --build=x86_64-alpine-linux-musl \
-# && make \
-# && make install
-
-WORKDIR $HOME
-
-FROM basic as microhttpd
-
-COPY --from=network-microhttpd $HOME/src $HOME/src
-
-WORKDIR $HOME/src
-
-RUN ./bootstrap \
- && ./configure \
- # --build=x86_64-alpine-linux-musl \
- --enable-logging=verbose \
- --disable-doc \
- && make \
- && make install
-
-WORKDIR $HOME
-
-FROM microhttpd as jansson
-
-COPY --from=network-jansson /root/src $HOME/src
-
-WORKDIR $HOME/src
-RUN cmake -DJANSSON_BUILD_SHARED_LIBS=1 . && make && make install
-
-FROM jansson as gnunet
-
-COPY --from=network-gnunet /root/src $HOME/src
-
-WORKDIR $HOME/src
-
-# #RUN addgroup gnunet
-# #RUN adduser --system --home "/var/lib/gnunet" -G gnunet --shell /bin/sh gnunet
-# #RUN addgroup root gnunet
-
-# #ENV GNUNET_PREFIX=/usr/local/lib
-# #RUN echo === $GNUNET_PREFIX > test.file
-
-RUN ./bootstrap \
- && ./configure \
- CFLAGS='-ggdb -O0' \
- --enable-static \
- # --build=x86_64-alpine-linux-musl \
- --enable-logging=verbose \
- && make \
- && make install
-
-WORKDIR $HOME
-
-FROM basic as final
-
-# COPY --from=recutils /usr/local/bin /usr/local/bin
-# COPY --from=recutils /usr/local/lib /usr/local/lib
-COPY --from=microhttpd /usr/local/lib /usr/local/lib
-COPY --from=gnunet /usr/local/lib /usr/local/lib
-COPY --from=gnunet /usr/local/bin /usr/local/bin
-COPY --from=gnunet /usr/local/include /usr/local/include
-COPY --from=jansson /usr/local/lib/libjansson.* /usr/local/lib/
-COPY --from=jansson /usr/local/include/jansson*.h /usr/local/lib/
-COPY --from=jansson /usr/local/lib/pkgconfig/jansson.pc /usr/local/lib/pkgconfig/
-
-RUN ldconfig
-
diff --git a/docker/taler-docker/docker-compose.yml b/docker/taler-docker/docker-compose.yml
deleted file mode 100644
index 6d67d25..0000000
--- a/docker/taler-docker/docker-compose.yml
+++ /dev/null
@@ -1,255 +0,0 @@
-version: '3'
-services:
-
- database:
- build: ./images/postgres
- volumes:
- - /tmp/taler/database-data:/var/lib/postgresql/data
- environment:
- POSTGRES_USER: root
- POSTGRES_PASSWORD:
- POSTGRES_HOST_AUTH_METHOD: trust
- ports:
- - "5432:5432"
-
- exchange-closer:
- image: taler_exchange
- entrypoint: taler-exchange-closer
- volumes:
- - /tmp/taler/config/exchange_feb/taler.conf:/root/.config/taler.conf
- depends_on:
- - exchange
- - name
- extra_hosts:
- - "bank.taler:172.17.0.1"
- - "auditor.taler:172.17.0.1"
- - "exchange.taler:172.17.0.1"
-
- # exchange-transfer:
- # image: taler_exchange
- # entrypoint: taler-exchange-transfer
- # volumes:
- # - /tmp/taler/config/exchange_feb/taler.conf:/root/.config/taler.conf
- # depends_on:
- # - exchange
- # extra_hosts:
- # - "bank.taler:172.17.0.1"
- # - "auditor.taler:172.17.0.1"
- # - "exchange.taler:172.17.0.1"
-
- exchange-wirewatch:
- image: taler_exchange
- entrypoint: taler-exchange-wirewatch
- volumes:
- - /tmp/taler/config/exchange_feb/taler.conf:/root/.config/taler.conf
- depends_on:
- - name
- - exchange
- extra_hosts:
- - "bank.taler:172.17.0.1"
- - "auditor.taler:172.17.0.1"
- - "exchange.taler:172.17.0.1"
-
- # exchange1-aggregator:
- # image: taler_exchange
- # entrypoint: taler-exchange-aggregator
- # volumes:
- # - /tmp/taler/config/exchange_jun/taler.conf:/root/.config/taler.conf
- # - /tmp/taler/config/exchange_jun/wirefees:/root/exchange/wirefees
- # depends_on:
- # - exchange
- # extra_hosts:
- # - "bank.taler:172.17.0.1"
- # - "auditor.taler:172.17.0.1"
- # - "exchange1.taler:172.17.0.1"
-
- # exchange1-closer:
- # image: taler_exchange
- # entrypoint: taler-exchange-closer
- # volumes:
- # - /tmp/taler/config/exchange_jun/taler.conf:/root/.config/taler.conf
- # depends_on:
- # - exchange
- # extra_hosts:
- # - "bank.taler:172.17.0.1"
- # - "auditor.taler:172.17.0.1"
- # - "exchange1.taler:172.17.0.1"
-
- # exchange1-transfer:
- # image: taler_exchange
- # entrypoint: taler-exchange-transfer
- # volumes:
- # - /tmp/taler/config/exchange_jun/taler.conf:/root/.config/taler.conf
- # depends_on:
- # - exchange
- # extra_hosts:
- # - "bank.taler:172.17.0.1"
- # - "auditor.taler:172.17.0.1"
- # - "exchange1.taler:172.17.0.1"
-
- # exchange1-wirewatch:
- # image: taler_exchange
- # entrypoint: taler-exchange-wirewatch
- # volumes:
- # - /tmp/taler/config/exchange_jun/taler.conf:/root/.config/taler.conf
- # depends_on:
- # - exchange
- # extra_hosts:
- # - "bank.taler:172.17.0.1"
- # - "auditor.taler:172.17.0.1"
- # - "exchange1.taler:172.17.0.1"
-
- # exchange2-aggregator:
- # image: taler_exchange
- # entrypoint: taler-exchange-aggregator
- # volumes:
- # - /tmp/taler/config/exchange_jul/taler.conf:/root/.config/taler.conf
- # - /tmp/taler/config/exchange_jul/wirefees:/root/exchange/wirefees
- # depends_on:
- # - exchange
- # extra_hosts:
- # - "bank.taler:172.17.0.1"
- # - "auditor.taler:172.17.0.1"
- # - "exchange2.taler:172.17.0.1"
-
- # exchange2-closer:
- # image: taler_exchange
- # entrypoint: taler-exchange-closer
- # volumes:
- # - /tmp/taler/config/exchange_jul/taler.conf:/root/.config/taler.conf
- # depends_on:
- # - exchange
- # extra_hosts:
- # - "bank.taler:172.17.0.1"
- # - "auditor.taler:172.17.0.1"
- # - "exchange2.taler:172.17.0.1"
-
- # exchange2-transfer:
- # image: taler_exchange
- # entrypoint: taler-exchange-transfer
- # volumes:
- # - /tmp/taler/config/exchange_jul/taler.conf:/root/.config/taler.conf
- # depends_on:
- # - exchange
- # extra_hosts:
- # - "bank.taler:172.17.0.1"
- # - "auditor.taler:172.17.0.1"
- # - "exchange2.taler:172.17.0.1"
-
- # exchange2-wirewatch:
- # image: taler_exchange
- # entrypoint: taler-exchange-wirewatch
- # volumes:
- # - /tmp/taler/config/exchange_jul/taler.conf:/root/.config/taler.conf
- # depends_on:
- # - exchange
- # extra_hosts:
- # - "bank.taler:172.17.0.1"
- # - "auditor.taler:172.17.0.1"
- # - "exchange2.taler:172.17.0.1"
-
- auditor:
- image: taler_exchange
- entrypoint: taler-auditor-httpd
- #entrypoint: sleep infinity
- volumes:
- - /tmp/taler/config/auditor_jan/taler.conf:/root/.config/taler.conf
- ports:
- - "8083:8083"
- depends_on:
- - bank-ars
- - name
-
- exchange:
- image: taler_exchange
- # entrypoint: sleep infinity
- entrypoint: /exchange.sh
- volumes:
- - /tmp/taler/config/exchange_feb/taler.conf:/root/.config/taler.conf
- - /tmp/taler/config/exchange_feb/live-keys:/root/exchange/live-keys
- - /tmp/taler/config/exchange_feb/wirefees:/root/exchange/wirefees
- - /tmp/taler/config/exchange_feb/audited:/root/exchange/audited
- - ./exchange.sh:/exchange.sh
- ports:
- - "8081:8081"
- depends_on:
- - bank-ars
- - name
- - auditor
- extra_hosts:
- - "bank.taler:172.17.0.1"
- - "auditor.taler:172.17.0.1"
-
- # exchange1:
- # image: taler_exchange
- # # entrypoint: sleep infinity
- # entrypoint: /exchange.sh
- # volumes:
- # - /tmp/taler/config/exchange_jun/taler.conf:/root/.config/taler.conf
- # - /tmp/taler/config/exchange_jun/live-keys:/root/exchange/live-keys
- # - /tmp/taler/config/exchange_jun/wirefees:/root/exchange/wirefees
- # - /tmp/taler/config/exchange_jun/audited:/root/exchange/audited
- # - ./exchange.sh:/exchange.sh
- # ports:
- # - "8071:8071"
- # depends_on:
- # - bank-ars
- # - auditor
- # extra_hosts:
- # - "bank.taler:172.17.0.1"
- # - "auditor.taler:172.17.0.1"
-
- # exchange2:
- # image: taler_exchange
- # # entrypoint: sleep infinity
- # entrypoint: /exchange.sh
- # volumes:
- # - /tmp/taler/config/exchange_jul/taler.conf:/root/.config/taler.conf
- # - /tmp/taler/config/exchange_jul/live-keys:/root/exchange/live-keys
- # - /tmp/taler/config/exchange_jul/wirefees:/root/exchange/wirefees
- # - /tmp/taler/config/exchange_jul/audited:/root/exchange/audited
- # - ./exchange.sh:/exchange.sh
- # ports:
- # - "8072:8072"
- # depends_on:
- # - bank-uru
- # - auditor
- # extra_hosts:
- # - "bank.taler:172.17.0.1"
- # - "auditor.taler:172.17.0.1"
-
- merchant-backend:
- build: ./images/merchant
- environment:
- TALER_MERCHANT_TOKEN: secret-token:super_secret
- # entrypoint: sleep infinity
- # to used GDB
- # security_opt:
- # - "seccomp=unconfined"
- # cap_add:
- # - "SYS_PTRACE"
- volumes:
- - /tmp/taler/config/merchant_mar/taler.conf:/root/.config/taler.conf
- depends_on:
- - exchange
- - name
- - auditor
- ports:
- - "9966:9966"
- extra_hosts:
- - "exchange.taler:172.17.0.1"
- - "auditor.taler:172.17.0.1"
-
- bank-ars:
- image: taler_bank
- entrypoint:
- - taler-bank-manage
- - serve
- volumes:
- - /tmp/taler/config/bank_apr/taler.conf:/root/.config/taler.conf
- environment:
- TALER_BANK_SECRET_KEY: somesecret1
- PATH: /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/root/.local/bin/
- ports:
- - "5882:5882"
-
diff --git a/docker/taler-docker/exchange.Dockerfile b/docker/taler-docker/exchange.Dockerfile
deleted file mode 100644
index 220a4b1..0000000
--- a/docker/taler-docker/exchange.Dockerfile
+++ /dev/null
@@ -1,46 +0,0 @@
-FROM taler_base
-
-# RUN addgroup -g 1000 -S talergroup && adduser -u 1000 -S taleruser -G talergroup
-# USER taleruser
-# ENV HOME /home/taleruser
-
-COPY exchange.git $HOME/exchange
-RUN git -C exchange checkout a199ba7fe61d1ea0aa7f9dcd4083ae2e52546c40
-#cb27943f1443c38a4dcfde118207688eb049f13c
-
-WORKDIR $HOME/exchange
-
-# COPY *.patch ./
-# RUN patch -p1 -i wire_response.patch && patch -p1 -i keys_currency.patch
-
-#building with statics libs
-# RUN apk add libltdl-static libgcrypt-static
-RUN pip3 install jinja2
-# patch for
-# ./configure: line 17303: ac_fn_c_check_decl: not found
-# RUN cat configure | tail -n+16861 | head -n 45 > configure.patch && sed -i '17302r configure.patch' configure
-# RUN sed -i 's-include <taler/taler_json_lib.h>-include "taler_json_lib.h"-' src/mustach/mustach-jansson.c
-
-RUN ./bootstrap
-RUN echo | git submodule update --init
-RUN ./contrib/gana.sh
-
- # --build=x86_64-alpine-linux-musl \
- # --with-gnunet=/usr/local \
- # --with-libgnurl --with-microhttpd --with-gnunet \
- # //--with-libgnurl=/usr/local --with-microhttpd=/usr/local
-RUN ./configure \
- CFLAGS='-ggdb -O0' \
- --enable-logging=verbose \
- && make \
- && make install
- # RUN make 'LDFLAGS=-all-static'
-
-WORKDIR $HOME
-
-RUN mkdir -p privacy terms/en
-COPY terms.xml terms/en/v1.xml
-
-RUN ldconfig
-
-ENTRYPOINT ["taler-exchange-httpd"]
diff --git a/docker/taler-docker/merchant.Dockerfile b/docker/taler-docker/merchant.Dockerfile
deleted file mode 100644
index 03c6ccc..0000000
--- a/docker/taler-docker/merchant.Dockerfile
+++ /dev/null
@@ -1,29 +0,0 @@
-FROM taler_base
-
-COPY . $HOME/merchant
-RUN git -C merchant checkout 1a62744325edc68e106b806ede0ac418d880ca4f
-
-RUN npm install -g pnpm@5.17.2
-
-WORKDIR $HOME/merchant
-
-#RUN ./bootstrap
-
-COPY --from=taler_exchange /usr/local/include /usr/local/include
-COPY --from=taler_exchange /usr/local/lib /usr/local/lib
-
-RUN ./bootstrap \
- && ./configure \
- # --build=x86_64-alpine-linux-musl \
- CFLAGS='-ggdb -O0' \
- --enable-logging=verbose \
- # --with-gnunet=/usr/local \
- # --with-exchange=/usr/local \
- # --with-microhttpd=/usr/local \
- && make \
- && make install
-
-WORKDIR $HOME
-
-RUN ldconfig
-ENTRYPOINT ["taler-merchant-httpd"]
diff --git a/docker/taler-docker/postgres/Dockerfile b/docker/taler-docker/postgres/Dockerfile
deleted file mode 100644
index 24cac5b..0000000
--- a/docker/taler-docker/postgres/Dockerfile
+++ /dev/null
@@ -1,14 +0,0 @@
-FROM postgres
-
-RUN echo "\
-CREATE USER taler1;\n\
-CREATE DATABASE bank1;\n\
-CREATE DATABASE sync1;\n\
-CREATE DATABASE nexus;\n\
-CREATE DATABASE newbank;\n\
-GRANT ALL PRIVILEGES ON DATABASE bank1 TO taler1;\n\
-GRANT ALL PRIVILEGES ON DATABASE sync1 TO taler1;\n\
-GRANT ALL PRIVILEGES ON DATABASE nexus TO taler1;\n\
-GRANT ALL PRIVILEGES ON DATABASE newbank TO taler1;\n\
-LOAD 'auto_explain';\n\
-" > /docker-entrypoint-initdb.d/init.sql
diff --git a/docker/taler-docker/template-auditor.conf b/docker/taler-docker/template-auditor.conf
deleted file mode 100644
index f2f62e8..0000000
--- a/docker/taler-docker/template-auditor.conf
+++ /dev/null
@@ -1,65 +0,0 @@
-[exchangedb-postgres]
-CONFIG = TBD
-SQL_DIR = TBD
-
-[exchange]
-KEYDIR = /root/exchange/live-keys/
-REVOCATION_DIR = /root/exchange/revocations/
-MAX_KEYS_CACHING = forever
-DB = postgres
-SERVE = tcp
-UNIXPATH = ${TALER_RUNTIME_DIR}/exchange.http
-UNIXPATH_MODE = 660
-PORT = 8081
-BASE_URL = http://localhost:8081/
-AGGREGATOR_IDLE_SLEEP_INTERVAL = 60 s
-WIREWATCH_IDLE_SLEEP_INTERVAL = 1 s
-SIGNKEY_DURATION = 4 weeks
-SIGNKEY_LEGAL_DURATION = 2 years
-LOOKAHEAD_SIGN = 32 weeks 1 day
-LOOKAHEAD_PROVIDE = 4 weeks 1 day
-
-[exchangedb]
-AUDITOR_BASE_DIR = /root/auditor/
-WIREFEE_BASE_DIR = /root/exchange/wirefees/
-IDLE_RESERVE_EXPIRATION_TIME = 4 weeks
-LEGAL_RESERVE_EXPIRATION_TIME = 7 years
-DURATION_OVERLAP = 5 minutes
-
-[auditordb-postgres]
-CONFIG = TBD
-SQL_DIR = TBD
-
-[taler]
-CURRENCY = TBD
-CURRENCY_ROUND_UNIT = CURRENCY:0.1
-
-[auditor]
-DB = postgres
-SERVE = tcp
-UNIXPATH = ${TALER_RUNTIME_DIR}/exchange.http
-UNIXPATH_MODE = 660
-PORT = 8083
-master_public_key = TBD
-TINY_AMOUNT = CURRENCY:0.1
-base_url = http://auditor.taler:8083/
-
-[arm]
-CONFIG = /root/auditor.conf
-
-[PATHS]
-TALER_HOME = ${TALER_TEST_HOME:-${HOME:-${USERPROFILE}}}
-TALER_DATA_HOME = ${XDG_DATA_HOME:-$TALER_HOME/.local/share}/taler/
-TALER_CONFIG_HOME = ${XDG_CONFIG_HOME:-$TALER_HOME/.config}/taler/
-TALER_CACHE_HOME = ${XDG_CACHE_HOME:-$TALER_HOME/.cache}/taler/
-TALER_RUNTIME_DIR = ${TMPDIR:-${TMP:-/tmp}}/taler-system-runtime/
-DEFAULTCONFIG = /root/auditor.conf
-LIBEXECDIR = /usr/local/taler/libexec/
-DOCDIR = /usr/local/share/doc/taler/
-ICONDIR = /usr/local/share/icons/
-LOCALEDIR = /usr/local/share/locale/
-PREFIX = /usr/local/
-BINDIR = /usr/local/bin/
-LIBDIR = /usr/local/lib/taler/
-DATADIR = /usr/local/share/taler/
-
diff --git a/docker/taler-docker/template-bank.conf b/docker/taler-docker/template-bank.conf
deleted file mode 100644
index 0cca529..0000000
--- a/docker/taler-docker/template-bank.conf
+++ /dev/null
@@ -1,18 +0,0 @@
-[taler]
-currency = CURRENCY
-
-[bank]
-serve = http
-http_port = TBD #5882
-database = TBD #postgres:///bank1?host=database&user=root
-max_debt = CURRENCY:500
-max_debt_bank = CURRENCY:99990
-allow_registrations = YES
-base_url = TBD #http://bank.taler:5882/
-suggested_exchange = TBD #http://exchange.taler:8081/
-suggested_exchange_payto = TBD#payto://x-taler-bank/bank.taler:5882/9
-SHOW_FREEFORM_WITHDRAWAL = yes
-
-[bank-admin]
-serve = http
-http_port = 5883
diff --git a/docker/taler-docker/template-exchange.conf b/docker/taler-docker/template-exchange.conf
deleted file mode 100644
index 1ed3705..0000000
--- a/docker/taler-docker/template-exchange.conf
+++ /dev/null
@@ -1,268 +0,0 @@
-[exchangedb-postgres]
-CONFIG = TBD
-SQL_DIR = TBD
-
-[exchange]
-KEYDIR = /root/exchange/live-keys/
-REVOCATION_DIR = /root/exchange/revocations/
-MAX_KEYS_CACHING = forever
-DB = postgres
-SERVE = tcp
-UNIXPATH = ${TALER_RUNTIME_DIR}/exchange.http
-UNIXPATH_MODE = 660
-PORT = TBD
-BASE_URL = TBD
-AGGREGATOR_IDLE_SLEEP_INTERVAL = 10 s
-WIREWATCH_IDLE_SLEEP_INTERVAL = 10 s
-SIGNKEY_DURATION = 20 weeks
-SIGNKEY_LEGAL_DURATION = 2 years
-LOOKAHEAD_SIGN = 32 weeks 1 day
-LOOKAHEAD_PROVIDE = 20 weeks 1 day
-master_public_key = TBD
-TERMS_ETAG = v1
-TERMS_DIR = /root/terms
-PRIVACY_ETAG = v1
-PRIVACY_DIR = /root/privacy
-
-#TBD: taler-exchange-offline should have a master_priv argument
-[exchange-offline]
-MASTER_PRIV_FILE = TBD
-SECM_TOFU_FILE = TBD
-
-[exchangedb]
-AUDITOR_BASE_DIR = /root/exchange/audited/
-WIREFEE_BASE_DIR = /root/exchange/wirefees/
-IDLE_RESERVE_EXPIRATION_TIME = 1 years
-LEGAL_RESERVE_EXPIRATION_TIME = 7 years
-DURATION_OVERLAP = 5 minutes
-AGGREGATOR_SHIFT = 1 seconds
-
-[taler]
-CURRENCY = TBD
-CURRENCY_ROUND_UNIT = CURRENCY:0.1
-
-[exchange-admin]
-port = 18080
-serve = tcp
-
-[taler-exchange-secmod-cs]
-OVERLAP_DURATION = 3 days
-LOOKAHEAD_SIGN = 2 years
-KEY_DIR = /root/cs/keydir/
-SM_PRIV_KEY = /root/cs/priv.key
-UNIXPATH = /root/cs/unix
-CLIENT_DIR = /root/cs/clients
-
-[taler-exchange-secmod-rsa]
-OVERLAP_DURATION = 3 days
-LOOKAHEAD_SIGN = 2 years
-KEY_DIR = /root/rsa/keydir/
-SM_PRIV_KEY = /root/rsa/priv.key
-UNIXPATH = /root/rsa/unix
-CLIENT_DIR = /root/rsa/clients
-
-[taler-exchange-secmod-eddsa]
-# DURATION = 3 days
-# OVERLAP_DURATION = 3 days
-# LOOKAHEAD_SIGN = 2 years
-DURATION = 12 weeks
-OVERLAP_DURATION = 5m
-LOOKAHEAD_SIGN = 1 year
-KEY_DIR = /root/eddsa/keydir/
-SM_PRIV_KEY = /root/eddsa/priv.key
-UNIXPATH = /root/eddsa/unix
-CLIENT_DIR = /root/eddsa/clients
-
-[exchange-account-1]
-#for aggregator
-PAYTO_URI = TBD #payto://x-taler-bank/bank.taler:5882/exchangeminator
-#WIRE_RESPONSE = /account-1.json
-ENABLE_DEBIT = YES
-ENABLE_CREDIT = YES
-
-[exchange-extension-age_restriction]
-enabled = YES
-age_groups = 6:12:18
-
-[exchange-accountcredentials-1]
-wire_gateway_url = TBD #http://bank.taler:5882/taler-wire-gateway/exchangeminator/
-WIRE_GATEWAY_AUTH_METHOD = basic
-USERNAME = TBD #exchagemintaor
-PASSWORD = asd
-
-# [fees-x-taler-bank]
-# WIRE-FEE-2018 = CURRENCY:0.1
-# WIRE-FEE-2019 = CURRENCY:0.1
-# WIRE-FEE-2020 = CURRENCY:0.1
-# WIRE-FEE-2021 = CURRENCY:0.1
-# WIRE-FEE-2022 = CURRENCY:0.1
-# WIRE-FEE-2023 = CURRENCY:0.1
-# WIRE-FEE-2024 = CURRENCY:0.1
-# WIRE-FEE-2025 = CURRENCY:0.1
-# WIRE-FEE-2026 = CURRENCY:0.1
-# WIRE-FEE-2027 = CURRENCY:0.1
-# WIRE-FEE-2028 = CURRENCY:0.1
-# WIRE-FEE-2029 = CURRENCY:0.1
-# CLOSING-FEE-2018 = CURRENCY:0.1
-# CLOSING-FEE-2019 = CURRENCY:0.1
-# CLOSING-FEE-2020 = CURRENCY:0.1
-# CLOSING-FEE-2021 = CURRENCY:0.1
-# CLOSING-FEE-2022 = CURRENCY:0.1
-# CLOSING-FEE-2023 = CURRENCY:0.1
-# CLOSING-FEE-2024 = CURRENCY:0.1
-# CLOSING-FEE-2025 = CURRENCY:0.1
-# CLOSING-FEE-2026 = CURRENCY:0.1
-# CLOSING-FEE-2027 = CURRENCY:0.1
-# CLOSING-FEE-2028 = CURRENCY:0.1
-# CLOSING-FEE-2029 = CURRENCY:0.1
-
-[coin_currency_01_0]
-cipher = RSA
-value = CURRENCY:0.1
-duration_withdraw = 7 days
-duration_spend = 2 years
-duration_legal = 3 years
-fee_withdraw = CURRENCY:0.1
-fee_deposit = CURRENCY:0.1
-fee_refresh = CURRENCY:0.1
-fee_refund = CURRENCY:0.1
-rsa_keysize = 1024
-# age_restricted = YES
-
-[coin_currency_1_0]
-cipher = RSA
-value = CURRENCY:1
-duration_withdraw = 7 days
-duration_spend = 2 years
-duration_legal = 3 years
-fee_withdraw = CURRENCY:0.1
-fee_deposit = CURRENCY:0.1
-fee_refresh = CURRENCY:0.1
-fee_refund = CURRENCY:0.1
-rsa_keysize = 1024
-# age_restricted = YES
-
-[coin_currency_5_0]
-cipher = RSA
-value = CURRENCY:5
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = CURRENCY:0.1
-fee_deposit = CURRENCY:0.1
-fee_refresh = CURRENCY:0.1
-fee_refund = CURRENCY:0.1
-rsa_keysize = 1024
-# age_restricted = YES
-
-[coin_currency_10_0]
-cipher = RSA
-value = CURRENCY:10
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = CURRENCY:0.1
-fee_deposit = CURRENCY:0.1
-fee_refresh = CURRENCY:0.1
-fee_refund = CURRENCY:0.1
-rsa_keysize = 1024
-# age_restricted = YES
-
-[coin_currency_50_0]
-cipher = RSA
-value = CURRENCY:50
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = CURRENCY:0.1
-fee_deposit = CURRENCY:0.1
-fee_refresh = CURRENCY:0.1
-fee_refund = CURRENCY:0.1
-rsa_keysize = 1024
-# age_restricted = YES
-
-[coin_currency_100_0]
-cipher = RSA
-value = CURRENCY:100
-duration_withdraw = 3 years
-duration_spend = 5 years
-#missing in docs
-duration_legal = 10 years
-fee_withdraw = CURRENCY:0.1
-fee_deposit = CURRENCY:0.1
-fee_refresh = CURRENCY:0.1
-fee_refund = CURRENCY:0.1
-rsa_keysize = 1024
-# age_restricted = YES
-
-[coin_currency_500_0]
-cipher = RSA
-value = CURRENCY:500
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = CURRENCY:0.1
-fee_deposit = CURRENCY:0.1
-fee_refresh = CURRENCY:0.1
-fee_refund = CURRENCY:0.1
-rsa_keysize = 1024
-# age_restricted = YES
-
-[coin_currency_1000_0]
-cipher = RSA
-value = CURRENCY:1000
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = CURRENCY:0.1
-fee_deposit = CURRENCY:0.1
-fee_refresh = CURRENCY:0.1
-fee_refund = CURRENCY:0.1
-rsa_keysize = 2048
-# age_restricted = YES
-
-# [fees-iban]
-# WIRE-FEE-2018 = CURRENCY:0.1
-# WIRE-FEE-2019 = CURRENCY:0.1
-# WIRE-FEE-2020 = CURRENCY:0.1
-# WIRE-FEE-2021 = CURRENCY:0.1
-# WIRE-FEE-2022 = CURRENCY:0.1
-# WIRE-FEE-2023 = CURRENCY:0.1
-# WIRE-FEE-2024 = CURRENCY:0.1
-# WIRE-FEE-2025 = CURRENCY:0.1
-# WIRE-FEE-2026 = CURRENCY:0.1
-# WIRE-FEE-2027 = CURRENCY:0.1
-# WIRE-FEE-2028 = CURRENCY:0.1
-# WIRE-FEE-2029 = CURRENCY:0.1
-# CLOSING-FEE-2018 = CURRENCY:0.1
-# CLOSING-FEE-2019 = CURRENCY:0.1
-# CLOSING-FEE-2020 = CURRENCY:0.1
-# CLOSING-FEE-2021 = CURRENCY:0.1
-# CLOSING-FEE-2022 = CURRENCY:0.1
-# CLOSING-FEE-2023 = CURRENCY:0.1
-# CLOSING-FEE-2024 = CURRENCY:0.1
-# CLOSING-FEE-2025 = CURRENCY:0.1
-# CLOSING-FEE-2026 = CURRENCY:0.1
-# CLOSING-FEE-2027 = CURRENCY:0.1
-# CLOSING-FEE-2028 = CURRENCY:0.1
-# CLOSING-FEE-2029 = CURRENCY:0.1
-
-[arm]
-CONFIG = /root/exchange.conf
-
-[PATHS]
-TALER_HOME = /root
-TALER_DATA_HOME = /root/.local/share/taler/
-TALER_CONFIG_HOME = ${XDG_CONFIG_HOME:-$TALER_HOME/.config}/taler/
-TALER_CACHE_HOME = ${XDG_CACHE_HOME:-$TALER_HOME/.cache}/taler/
-TALER_RUNTIME_DIR = ${TMPDIR:-${TMP:-/tmp}}/taler-system-runtime/
-DEFAULTCONFIG = ~/.config/taler.conf
-LIBEXECDIR = /usr/local/taler/libexec/
-DOCDIR = /usr/local/share/doc/taler/
-ICONDIR = /usr/local/share/icons/
-LOCALEDIR = /usr/local/share/locale/
-PREFIX = /usr/local/
-BINDIR = /usr/local/bin/
-LIBDIR = /usr/local/lib/taler/
-DATADIR = /usr/local/share/taler/
-
diff --git a/docker/taler-docker/template-merchant.conf b/docker/taler-docker/template-merchant.conf
deleted file mode 100644
index 2c22914..0000000
--- a/docker/taler-docker/template-merchant.conf
+++ /dev/null
@@ -1,76 +0,0 @@
-[merchant]
-SERVE = tcp
-PORT = 9966
-LEGAL_PRESERVATION = 11 years
-UNIXPATH = ${TALER_RUNTIME_DIR}/merchant.http
-UNIXPATH_MODE = 660
-FORCE_AUDIT = NO
-DEFAULT_WIRE_FEE_AMORTIZATION = 1
-DB = postgres
-WIRE_TRANSFER_DELAY = 30 s
-DEFAULT_PAY_DEADLINE = 15 m
-DATABASE = postgres
-wireformat = test
-default_max_wire_fee = CURRENCY:0.2
-default_max_deposit_fee = CURRENCY:0.1
-
-[merchantdb-postgres]
-CONFIG = TBD
-SQL_DIR = $DATADIR/sql/merchant/
-
-[taler]
-CURRENCY = TBD
-
-[merchant-exchange-demo]
-EXCHANGE_BASE_URL = TBD
-MASTER_KEY = TBD
-CURRENCY = TBD
-
-[merchant-auditor-demo]
-AUDITOR_BASE_URL = TBD
-AUDITOR_KEY = TDB
-CURRENCY = TDB
-
-[arm]
-CONFIG = ~/.config/taler.conf
-
-[merchant-location-FSF-address]
-street = 51 Franklin Street, Fifth Floor.
-city = Boston
-country = USA
-
-[merchant-location-default-address]
-country = Kudosland
-
-[backoffice-app]
-serve = http
-http_port = 5959
-backend = http://merchant-backend.taler:9966/
-instances = blog
-
-[instance-default]
-keyfile = ${TALER_DEPLOYMENT_DATA}/merchant/default.priv
-name = Kudos Inc.
-tip_reserve_priv_filename = ${TALER_DEPLOYMENT_DATA}/merchant/default-tip.priv
-tip_exchange = http://exchange.taler:8081/
-
-[instance-blog]
-keyfile = ${TALER_DEPLOYMENT_DATA}/merchant/blog.priv
-name = Tutorial
-
-[PATHS]
-TALER_HOME = ${TALER_TEST_HOME:-${HOME:-${USERPROFILE}}}
-TALER_DATA_HOME = ${XDG_DATA_HOME:-$TALER_HOME/.local/share}/taler/
-TALER_CONFIG_HOME = ${XDG_CONFIG_HOME:-$TALER_HOME/.config}/taler/
-TALER_CACHE_HOME = ${XDG_CACHE_HOME:-$TALER_HOME/.cache}/taler/
-TALER_RUNTIME_DIR = ${TMPDIR:-${TMP:-/tmp}}/taler-system-runtime/
-DEFAULTCONFIG = ~/.config/taler.conf
-LIBEXECDIR = /usr/local/taler/libexec/
-DOCDIR = /usr/local/share/doc/taler/
-ICONDIR = /usr/local/share/icons/
-LOCALEDIR = /usr/local/share/locale/
-PREFIX = /usr/local/
-BINDIR = /usr/local/bin/
-LIBDIR = /usr/local/lib/taler/
-DATADIR = /usr/local/share/taler/
-
diff --git a/docker/taler-docker/wallet.Dockerfile b/docker/taler-docker/wallet.Dockerfile
deleted file mode 100644
index d326145..0000000
--- a/docker/taler-docker/wallet.Dockerfile
+++ /dev/null
@@ -1,16 +0,0 @@
-FROM taler_base
-
-COPY wallet.git $HOME/wallet
-RUN git -C wallet checkout 7dc66c2441c4b77cfed0c4add592d4b7d5912ec3
-
-# COPY --from=taler/base:network-wallet /root/src $HOME/src
-
-WORKDIR $HOME/wallet
-
-RUN npm install -g pnpm@5.17.2
-
-RUN ./bootstrap && ./configure && pnpm install && \
- pnpm run -r prepare && \
- make && make install
-
-
diff --git a/envcfg.py.template b/envcfg.py.template
deleted file mode 100644
index 3e12ea4..0000000
--- a/envcfg.py.template
+++ /dev/null
@@ -1,16 +0,0 @@
-# Name of the environment (test, demo, int, coverage, demo-checker, auditor-reporter, ...)
-env = "..."
-
-tag = "master"
-
-tag_gnunet = tag
-tag_libmicrohttpd = tag
-tag_exchange = tag
-tag_merchant = tag
-tag_bank = tag
-tag_twister = tag
-tag_landing = tag
-tag_backoffice = tag
-tag_taler_merchant_demos = tag
-tag_sync = tag
-tag_wallet_core = tag
diff --git a/envcfg/envcfg-demo-2019-11-02-01.py b/envcfg/envcfg-demo-2019-11-02-01.py
deleted file mode 100644
index e02becf..0000000
--- a/envcfg/envcfg-demo-2019-11-02-01.py
+++ /dev/null
@@ -1,15 +0,0 @@
-env = "demo"
-
-tag = "demo-2019-11-02-00"
-
-tag_gnunet = "v0.11.8"
-tag_libmicrohttpd = "v0.9.68"
-tag_exchange = tag
-tag_merchant = tag
-tag_bank = tag
-tag_twister = tag
-tag_landing = tag
-tag_donations = tag
-tag_blog = tag
-tag_survey = tag
-tag_backoffice = tag
diff --git a/envcfg/envcfg-demo-2019-12-03-01.py b/envcfg/envcfg-demo-2019-12-03-01.py
deleted file mode 100644
index 4c77213..0000000
--- a/envcfg/envcfg-demo-2019-12-03-01.py
+++ /dev/null
@@ -1,15 +0,0 @@
-env = "demo"
-
-tag = "demo-2019-12-03-01"
-
-tag_gnunet = "taler-new-crypto"
-tag_libmicrohttpd = "v0.9.68"
-tag_exchange = tag
-tag_merchant = tag
-tag_bank = tag
-tag_twister = tag
-tag_landing = "demo-2019-08-31-00"
-tag_donations = tag
-tag_blog = tag
-tag_survey = tag
-tag_backoffice = tag
diff --git a/envcfg/envcfg-demo-2019-12-09-01.py b/envcfg/envcfg-demo-2019-12-09-01.py
deleted file mode 100644
index faa85a3..0000000
--- a/envcfg/envcfg-demo-2019-12-09-01.py
+++ /dev/null
@@ -1,15 +0,0 @@
-env = "demo"
-
-tag = "demo-2019-12-03-01"
-
-tag_gnunet = "taler-new-crypto"
-tag_libmicrohttpd = "v0.9.68"
-tag_exchange = "demo-2019-12-09-01"
-tag_merchant = tag
-tag_bank = tag
-tag_twister = tag
-tag_landing = "demo-2019-08-31-00"
-tag_donations = tag
-tag_blog = tag
-tag_survey = tag
-tag_backoffice = tag
diff --git a/envcfg/envcfg-demo-2020-11-14.py b/envcfg/envcfg-demo-2020-11-14.py
deleted file mode 100644
index 189d508..0000000
--- a/envcfg/envcfg-demo-2020-11-14.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Name of the environment
-# (test, demo, int, coverage, demo-checker, auditor-reporter, ...)
-env = "demo"
-
-# Such tag is only used to make the builder happy.
-unused_codebase_tag = "master"
-
-tag_gnunet = "v0.14.0"
-tag_libmicrohttpd = "ad8a3e3fde50de45d075dbb6971ed52003200ee2"
-tag_exchange = "v0.8.1"
-tag_merchant = "v0.8.0"
-tag_bank = "v0.8.1"
-tag_sync = "v0.8.1"
-tag_taler_merchant_demos = "71193537361e0f230214137f7f5211117d35277e"
-tag_wallet_core = "cdf5cc583cd7fc938f38137da25aaee2aeaf28a9"
-
-# The following repositories do not really take part
-# in any demo, for now. Some of them are old, some are
-# not really needed (like Twister, for example.)
-tag_backoffice = unused_codebase_tag
-tag_twister = unused_codebase_tag
-
diff --git a/envcfg/envcfg-demo-2021-08-18.py b/envcfg/envcfg-demo-2021-08-18.py
deleted file mode 100644
index d372c20..0000000
--- a/envcfg/envcfg-demo-2021-08-18.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Name of the environment
-# (test, demo, int, coverage, demo-checker, auditor-reporter, ...)
-env = "demo"
-
-tag_gnunet = "v0.15.0"
-tag_libmicrohttpd = "3db35a4ca6192cd26770ee69f1c48e353535b70d"
-# previous at exchange: "35b232642bc831e8c9759f7ae6180bb2deabed7e"
-tag_exchange = "v0.8.3"
-tag_merchant = "v0.8.2"
-tag_bank = "v0.8.2"
-tag_sync = "v0.8.2"
-tag_taler_merchant_demos = "1d66634cd8f4b5c089be58d62615fc48d3f7163b"
-tag_wallet_core = "daf9dc507ec16c34cecb7f423be8935b6816eede"
-tag_twister = "v0.8.1"
diff --git a/envcfg/envcfg-demo-2021-08-24.py b/envcfg/envcfg-demo-2021-08-24.py
deleted file mode 100644
index 4623cac..0000000
--- a/envcfg/envcfg-demo-2021-08-24.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Name of the environment
-# (test, demo, int, coverage, demo-checker, auditor-reporter, ...)
-env = "demo"
-
-tag_gnunet = "v0.15.1"
-tag_libmicrohttpd = "3db35a4ca6192cd26770ee69f1c48e353535b70d"
-# previous at exchange: "35b232642bc831e8c9759f7ae6180bb2deabed7e"
-tag_exchange = "v0.8.4"
-tag_merchant = "v0.8.3"
-tag_bank = "v0.8.2"
-tag_sync = "v0.8.2"
-tag_taler_merchant_demos = "1d66634cd8f4b5c089be58d62615fc48d3f7163b"
-tag_wallet_core = "daf9dc507ec16c34cecb7f423be8935b6816eede"
-tag_twister = "v0.8.2"
diff --git a/envcfg/envcfg-demo-2022-08-23.py b/envcfg/envcfg-demo-2022-08-23.py
deleted file mode 100644
index b49486f..0000000
--- a/envcfg/envcfg-demo-2022-08-23.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Name of the environment
-# (test, demo, int, coverage, demo-checker, auditor-reporter, ...)
-env = "demo"
-
-tag_gnunet = "a53dce1f8aaf497c1f7a23cf4fa3e982f9baabfb"
-tag_libmicrohttpd = "83f50ffcea81a01ee4b06391a884df277d31c76d"
-tag_exchange = "783d06cad64aece5af97e9f6c25e0384afd070ea"
-tag_merchant = "b14e18eb31c957276c3f74834495d1bfa5c9329a"
-tag_sync = "57eab1b4810ae1ddb3e6a0b96120148e00a4a75b"
-tag_taler_merchant_demos = "b701e627779710116b8c16af943601314df5a5db"
-tag_wallet_core = "f3ff5a72257dda27cab555f8b8d921d45bfc3e4b"
-tag_libeufin = "91eb52ccb2f68333ff715573d18a14ea1cb3d615"
-tag_twister = "SKIP"
-tag_anastasis = "SKIP"
diff --git a/envcfg/envcfg.py.template b/envcfg/envcfg.py.template
deleted file mode 100644
index 7153874..0000000
--- a/envcfg/envcfg.py.template
+++ /dev/null
@@ -1,17 +0,0 @@
-# Name of the environment (test, demo, int, ...)
-env = "..."
-
-tag = "master"
-
-tag_gnunet = tag
-tag_libmicrohttpd = tag
-tag_exchange = tag
-tag_merchant = tag
-tag_bank = tag
-tag_twister = tag
-tag_landing = tag
-tag_donations = tag
-tag_blog = tag
-tag_survey = tag
-tag_backoffice = tag
-tag_sync = tag
diff --git a/envcfg/talerconf/euro.taler.conf b/envcfg/talerconf/euro.taler.conf
deleted file mode 100644
index ad3ee78..0000000
--- a/envcfg/talerconf/euro.taler.conf
+++ /dev/null
@@ -1,333 +0,0 @@
-[paths]
-TALER_DEPLOYMENT_DATA = ${HOME}/taler-data
-
-[taler]
-CURRENCY = EUR
-
-[bank]
-serve = uwsgi
-uwsgi_serve = unix
-uwsgi_unixpath = $HOME/sockets/bank.uwsgi
-uwsgi_unixpath_mode = 660
-database = postgres:///talereuro
-max_debt = EUR:20.0
-max_debt_bank = EUR:0.0
-suggested_exchange = https://exchange.euro.taler.net/
-suggested_exchange_payto = payto://x-taler-bank/bank.euro.taler.net/2
-
-[bank-admin]
-uwsgi_serve = unix
-uwsgi_unixpath = $HOME/sockets/bank-admin.uwsgi
-uwsgi_unixpath_mode = 660
-
-[donations]
-uwsgi_serve = unix
-uwsgi_unixpath = $HOME/sockets/donations.uwsgi
-uwsgi_unixpath_mode = 660
-
-[survey]
-uwsgi_serve = unix
-uwsgi_unixpath = $HOME/sockets/survey.uwsgi
-uwsgi_unixpath_mode = 660
-
-[blog]
-uwsgi_serve = unix
-uwsgi_unixpath = $HOME/sockets/shop.uwsgi
-uwsgi_unixpath_mode = 660
-instance = FSF
-
-[backoffice-all]
-backend = https://backend.euro.taler.net/
-uwsgi_serve = unix
-uwsgi_unixpath_mode = 660
-uwsgi_unixpath = $HOME/sockets/backoffice.uwsgi
-instances = FSF default Tor
-
-[merchant]
-wireformat = test
-serve = unix
-unixpath = $HOME/sockets/merchant.http
-wire_transfer_delay = 0 s
-default_max_wire_fee = EUR:0.01
-default_max_deposit_fee = EUR:0.05
-
-[merchantdb-postgres]
-config = postgres:///talereuro
-
-[merchant-exchange-test]
-url = https://exchange.euro.taler.net/
-master_key = J3QPEAEDKWZ22VQQTXE5EW1MAC6RFRWC7DHFEC4M74V8NR2109TG
-
-[frontends]
-backend_apikey = sandbox
-backend = https://backend.euro.taler.net/
-
-[exchange-EUR]
-master_key = J3QPEAEDKWZ22VQQTXE5EW1MAC6RFRWC7DHFEC4M74V8NR2109TG
-currency = EUR
-base_url = https://exchange.euro.taler.net/
-
-[auditor]
-auditor_priv_file = ${TALER_DEPLOYMENT_DATA}/auditor/offline-keys/auditor.priv
-serve = unix
-auditor_url = https://auditor.euro.taler.net/service/
-unixpath = $HOME/sockets/auditor.http
-reports = ${TALER_DEPLOYMENT_DATA}/auditor/reports
-
-[exchange]
-base_url = https://exchange.euro.taler.net/
-serve = unix
-unixpath = $HOME/sockets/exchange.http
-master_public_key = J3QPEAEDKWZ22VQQTXE5EW1MAC6RFRWC7DHFEC4M74V8NR2109TG
-master_priv_file = ${TALER_DEPLOYMENT_DATA}/exchange/offline-keys/master.priv
-keydir = ${TALER_DEPLOYMENT_DATA}/exchange/live-keys/
-
-[exchangedb]
-auditor_base_dir = ${TALER_DEPLOYMENT_DATA}/exchange/auditors/
-wirefee_base_dir = ${TALER_DEPLOYMENT_DATA}/exchange/wirefees/
-auditor_inputs = ${TALER_DEPLOYMENT_DATA}/exchange/auditor-inputs/
-
-[exchangedb-postgres]
-db_conn_str = postgres:///talereuro
-config = postgres:///talereuro
-
-[auditordb-postgres]
-db_conn_str = postgres:///talereuro
-config = postgres:///talereuro
-
-[account-1]
-url = payto://x-taler-bank/bank.euro.taler.net/2
-wire_response = ${TALER_DEPLOYMENT_DATA}/exchange/wire/test.json
-plugin = taler_bank
-taler_bank_auth_method = basic
-username = Exchange
-password = x
-enable_debit = yes
-enable_credit = yes
-
-[fees-x-taler-bank]
-wire-fee-2018 = EUR:0.02
-wire-fee-2019 = EUR:0.03
-wire-fee-2020 = EUR:0.04
-wire-fee-2021 = EUR:0.04
-wire-fee-2022 = EUR:0.05
-wire-fee-2023 = EUR:0.06
-wire-fee-2024 = EUR:0.07
-wire-fee-2025 = EUR:0.08
-closing-fee-2018 = EUR:0.01
-closing-fee-2019 = EUR:0.01
-closing-fee-2020 = EUR:0.01
-closing-fee-2021 = EUR:0.01
-closing-fee-2022 = EUR:0.01
-closing-fee-2023 = EUR:0.01
-closing-fee-2024 = EUR:0.01
-closing-fee-2025 = EUR:0.01
-
-[exchange_keys]
-signkey_duration = 18 weeks
-legal_duration = 2 years
-lookahead_sign = 32 weeks 1 day
-lookahead_provide = 4 weeks 1 day
-
-[instance-FSF]
-keyfile = ${TALER_DEPLOYMENT_DATA}/merchant/fsf.priv
-name = Free Software Foundation
-
-[merchant-location-FSF-address]
-street = 51 Franklin Street, Fifth Floor.
-city = Boston
-country = USA
-
-[instance-Tor]
-keyfile = ${TALER_DEPLOYMENT_DATA}/merchant/tor.priv
-name = The Tor Project
-
-[instance-GNUnet]
-keyfile = ${TALER_DEPLOYMENT_DATA}/merchant/gnunet.priv
-name = GNUnet Project
-
-[instance-Taler]
-keyfile = ${TALER_DEPLOYMENT_DATA}/merchant/taler.priv
-name = Taler
-
-[instance-default]
-keyfile = ${TALER_DEPLOYMENT_DATA}/merchant/default.priv
-name = Kudos Inc.
-tip_reserve_priv_filename = ${TALER_DEPLOYMENT_DATA}/merchant/default-tip.priv
-tip_exchange = https://exchange.euro.taler.net/
-
-[merchant-location-default-address]
-country = Kudosland
-
-[instance-Tutorial]
-keyfile = ${TALER_DEPLOYMENT_DATA}/merchant/tutorial.priv
-name = Tutorial
-
-[account-merchant]
-url = payto://x-taler-bank/bank.euro.taler.net/3
-plugin = taler_bank
-taler_bank_auth_method = basic
-username = user
-password = pass
-wire_response = ${TALER_DEPLOYMENT_DATA}/merchant/wire/merchant.json
-wire_file_mode = 770
-HONOR_default = YES
-HONOR_Tor = YES
-HONOR_GNUnet = YES
-HONOR_Taler = YES
-HONOR_FSF = YES
-HONOR_Tutorial = YES
-
-[coin_EUR_decimilli_25]
-value = EUR:0.0025
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.0001
-fee_refresh = EUR:0.0001
-fee_refund = EUR:0.0001
-fee_deposit = EUR:0.0001
-rsa_keysize = 2048
-
-[coin_EUR_milli_5]
-value = EUR:0.005
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.0001
-fee_refresh = EUR:0.0001
-fee_refund = EUR:0.0001
-fee_deposit = EUR:0.0001
-rsa_keysize = 2048
-
-[coin_EUR_cent_1]
-value = EUR:0.01
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.001
-fee_refresh = EUR:0.001
-fee_refund = EUR:0.001
-fee_deposit = EUR:0.001
-rsa_keysize = 2048
-
-[coin_EUR_cent_2]
-value = EUR:0.02
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.001
-fee_refresh = EUR:0.001
-fee_refund = EUR:0.001
-fee_deposit = EUR:0.001
-rsa_keysize = 2048
-
-[coin_EUR_cent_4]
-value = EUR:0.04
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.001
-fee_refresh = EUR:0.001
-fee_refund = EUR:0.001
-fee_deposit = EUR:0.001
-rsa_keysize = 2048
-
-[coin_EUR_cent_8]
-value = EUR:0.08
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.001
-fee_refresh = EUR:0.001
-fee_refund = EUR:0.001
-fee_deposit = EUR:0.001
-rsa_keysize = 2048
-
-[coin_EUR_cent_16]
-value = EUR:0.16
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.001
-fee_refresh = EUR:0.001
-fee_refund = EUR:0.001
-fee_deposit = EUR:0.001
-rsa_keysize = 2048
-
-[coin_EUR_cent_32]
-value = EUR:0.32
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.01
-fee_refresh = EUR:0.01
-fee_refund = EUR:0.01
-fee_deposit = EUR:0.01
-rsa_keysize = 2048
-
-[coin_EUR_cent_64]
-value = EUR:0.64
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.03
-fee_refresh = EUR:0.03
-fee_refund = EUR:0.03
-fee_deposit = EUR:0.03
-rsa_keysize = 2048
-
-[coin_EUR_1_cent_28]
-value = EUR:1.28
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.03
-fee_refresh = EUR:0.03
-fee_refund = EUR:0.03
-fee_deposit = EUR:0.03
-rsa_keysize = 2048
-
-[coin_EUR_2_cent_56]
-value = EUR:2.56
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.03
-fee_refresh = EUR:0.03
-fee_refund = EUR:0.03
-fee_deposit = EUR:0.03
-rsa_keysize = 2048
-
-[coin_EUR_5_cent_12]
-value = EUR:5.12
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.05
-fee_refresh = EUR:0.05
-fee_refund = EUR:0.05
-fee_deposit = EUR:0.05
-rsa_keysize = 2048
-
-[coin_EUR_10_cent_24]
-value = EUR:10.24
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.05
-fee_refresh = EUR:0.05
-fee_refund = EUR:0.05
-fee_deposit = EUR:0.05
-rsa_keysize = 2048
-
-[coin_EUR_20_cent_48]
-value = EUR:20.48
-duration_withdraw = 3 years
-duration_spend = 5 years
-duration_legal = 10 years
-fee_withdraw = EUR:0.10
-fee_refresh = EUR:0.10
-fee_refund = EUR:0.10
-fee_deposit = EUR:0.10
-rsa_keysize = 2048
diff --git a/gnunet.conf b/gnunet.conf
deleted file mode 100644
index 00cd536..0000000
--- a/gnunet.conf
+++ /dev/null
@@ -1 +0,0 @@
-# Empty configuration file used for gnunet-arm / taler-deployment-arm.
diff --git a/head.taler.net/entr.sh b/head.taler.net/entr.sh
new file mode 100755
index 0000000..b44d826
--- /dev/null
+++ b/head.taler.net/entr.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -x
+
+while true ; do
+ echo "${HOME}/incoming" | entr -n -d "${HOME}"/deployment/head.taler.net/update-head-deployment.sh ; sleep 1 || true
+done
diff --git a/head.taler.net/rsyncd.conf b/head.taler.net/rsyncd.conf
new file mode 100644
index 0000000..613dea6
--- /dev/null
+++ b/head.taler.net/rsyncd.conf
@@ -0,0 +1,13 @@
+max connections = 4
+log file = /home/head/.local/var/log/rsync.log
+lock file = /home/head/.local/var/run/rsyncd.lock
+timeout = 300
+use chroot = no
+
+[incoming]
+ comment = Inbox for head.taler.net images
+ path = /home/head/incoming
+ read only = no
+ write only = no
+ #uid = head
+ #gid = head
diff --git a/head.taler.net/rsyncd.service b/head.taler.net/rsyncd.service
new file mode 100644
index 0000000..cf3791b
--- /dev/null
+++ b/head.taler.net/rsyncd.service
@@ -0,0 +1,12 @@
+[Unit]
+Description=fast remote file copy program daemon
+ConditionPathExists=/home/head/.config/rsyncd.conf
+Documentation=man:rsync(1) man:rsyncd.conf(5)
+
+[Service]
+ExecStart=/usr/bin/rsync --daemon --no-detach --address 127.0.0.1 --port 42424 --config=/home/head/.config/rsyncd.conf
+RestartSec=1
+Restart=on-failure
+
+[Install]
+WantedBy=default.target
diff --git a/head.taler.net/update-head-deployment.sh b/head.taler.net/update-head-deployment.sh
new file mode 100755
index 0000000..09f7fd2
--- /dev/null
+++ b/head.taler.net/update-head-deployment.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -ex
+
+pushd "${HOME}/sandcastle-ng"
+git pull
+popd
+
+podman load -i "${HOME}/incoming/taler-base-all-head.tar"
+podman tag taler-base-all-head:latest taler-base-all:latest
+rm -f "${HOME}/incoming/taler-base-all-head.tar"
+
+exec systemctl --user restart container-taler-sandcastle-head.service
diff --git a/mypy/mypy.ini b/mypy/mypy.ini
deleted file mode 100644
index 924c128..0000000
--- a/mypy/mypy.ini
+++ /dev/null
@@ -1,3 +0,0 @@
-[mypy]
-ignore_missing_imports = True
-python_version = 3.5
diff --git a/netjail/netjail-init.sh b/netjail/netjail-init.sh
index 7fd0dd5..9b28a37 100755
--- a/netjail/netjail-init.sh
+++ b/netjail/netjail-init.sh
@@ -17,13 +17,17 @@ export PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
BRIDGE=builderbridge
-brctl addbr $BRIDGE
-brctl stp $BRIDGE off
+# This is deprecated
+# brctl addbr $BRIDGE
+# brctl stp $BRIDGE off
+ip link add dev $BRIDGE type bridge stp_state 0
+
ip link set dev $BRIDGE up
# Connect bridge to host network
ip link add tap0 type veth peer name br-tap0
-brctl addif $BRIDGE br-tap0
+#brctl addif $BRIDGE br-tap0
+ip link set dev br-tap0 master $BRIDGE
ip link set dev tap0 up
ip link set dev br-tap0 up
diff --git a/netjail/netjail.sh b/netjail/netjail.sh
index 136390b..e445245 100755
--- a/netjail/netjail.sh
+++ b/netjail/netjail.sh
@@ -31,7 +31,8 @@ BRTAP=br-tap-$NSUID
# Setup link to our bridge
ip link add "$TAP" type veth peer name "$BRTAP"
-brctl addif "$BRIDGE" "$BRTAP"
+#brctl addif "$BRIDGE" "$BRTAP"
+ip link set dev $BRTAP master $BRIDGE
ip link set "$TAP" netns "$NSNAME"
ip link set dev "$BRTAP" up
diff --git a/netzbon/.gitignore b/netzbon/.gitignore
new file mode 100644
index 0000000..8a9cd7b
--- /dev/null
+++ b/netzbon/.gitignore
@@ -0,0 +1,5 @@
+export/
+tmp/
+result.pdf
+qre/
+qr.pdf
diff --git a/netzbon/generate-letter.sh b/netzbon/generate-letter.sh
new file mode 100755
index 0000000..272463c
--- /dev/null
+++ b/netzbon/generate-letter.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+# This script is in the public domain.
+#
+# Call with the JSON file (like test.json) with
+# an array of merchants to generate letters for!
+#
+# You must export
+#
+# export BASE_URL=https://e.netzbon-basel.ch/
+#
+# before running this script!
+#
+
+set -eu
+LENGTH=$(jq length < $1)
+echo "Generating $LENGTH letters for ${BASE_URL}"
+DOMAIN=$( echo "${BASE_URL}" | sed -e "s/https:\/\///" | sed -e "s/\/$//")
+mkdir -p export
+mkdir -p tmp
+
+for n in $(seq 1 $LENGTH)
+do
+ echo "Processing merchant $n"
+ INDEX=$(expr $n - 1 || true)
+ ID=$(jq -r .[$INDEX].id < $1)
+
+ jq ".[$INDEX]" < $1 | jq '.domain="'"${DOMAIN}"'"' > "tmp/${ID}.json"
+ cd tmp
+ ../render.py "${ID}.json" < ../template_de.tex.j2 > "${ID}.tex"
+ pdflatex "${ID}.tex" < /dev/null &> /dev/null || true
+ pdflatex "${ID}.tex" < /dev/null &> /dev/null || true
+ pdflatex "${ID}.tex" < /dev/null
+ mv "${ID}.pdf" ../export/
+ cd ..
+
+ echo "Done with ${ID}"
+done
+
+pdftk export/*.pdf cat output result.pdf
+echo "Combined letters are in 'result.pdf'"
diff --git a/netzbon/generate-qr.sh b/netzbon/generate-qr.sh
new file mode 100755
index 0000000..e5c01c6
--- /dev/null
+++ b/netzbon/generate-qr.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+# This script is in the public domain.
+#
+# Call with the JSON file (like test.json) with
+# an array of merchants to generate letters for!
+#
+# You must export
+#
+# export BASE_URL=https://e.netzbon-basel.ch/
+#
+# before running this script!
+#
+
+set -eu
+LENGTH=$(jq length < $1)
+echo "Generating $LENGTH QR codes for ${BASE_URL}"
+DOMAIN=$( echo "${BASE_URL}" | sed -e "s/https:\/\///" | sed -e "s/\/$//")
+mkdir -p qre
+mkdir -p tmp
+
+for n in $(seq 1 $LENGTH)
+do
+ echo "Processing merchant $n"
+ INDEX=$(expr $n - 1 || true)
+ ID=$(jq -r .[$INDEX].id < $1)
+
+ jq ".[$INDEX]" < $1 | jq '.domain="'"${DOMAIN}"'"' > "tmp/${ID}.json"
+ cd tmp
+ ../render.py "${ID}.json" < ../qr.tex.j2 > "${ID}.tex"
+ pdflatex "${ID}.tex" < /dev/null &> /dev/null || true
+ pdflatex "${ID}.tex" < /dev/null &> /dev/null || true
+ pdflatex "${ID}.tex" < /dev/null
+ mv "${ID}.pdf" ../qre/
+ cd ..
+
+ echo "Done with ${ID}"
+done
+
+pdftk qre/*.pdf cat output qr.pdf
+echo "Combined letters are in 'qr.pdf'"
diff --git a/netzbon/qr.tex.j2 b/netzbon/qr.tex.j2
new file mode 100644
index 0000000..ff7b52d
--- /dev/null
+++ b/netzbon/qr.tex.j2
@@ -0,0 +1,13 @@
+\documentclass[a4paper]{minimal}
+\usepackage[
+ paperwidth=53mm,
+ paperheight=53mm,
+ total={53mm,53mm}]{geometry}
+\usepackage[nolinks,final,forget]{qrcode}
+
+\begin{document}
+\vspace*{0.6mm}
+\begin{center}
+\qrcode[hyperlink,level=M,height=45mm]{taler://pay-template/backend.{{data.domain}}/instances/{{data.id}}/default}
+\end{center}
+\end{document}
diff --git a/netzbon/render.py b/netzbon/render.py
new file mode 100755
index 0000000..8bce600
--- /dev/null
+++ b/netzbon/render.py
@@ -0,0 +1,49 @@
+#!/usr/bin/python3
+# This file is in the public domain.
+
+"""Expand Jinja2 templates based on JSON input.
+
+The tool then reads the template from stdin and writes the expanded
+output to stdout.
+
+TODO: proper installation, man page, error handling, --help option.
+
+@author Christian Grothoff
+
+"""
+
+import sys
+import json
+import jinja2
+from jinja2 import BaseLoader
+
+
+class StdinLoader(BaseLoader):
+ def __init__ (self):
+ self.path = '-'
+ def get_source(self, environment, template):
+ source = sys.stdin.read()
+ return source, self.path, lambda: False
+
+
+jsonFile1 = open (sys.argv[1], 'r')
+jsonData1 = json.load(jsonFile1)
+
+jinjaEnv = jinja2.Environment(loader=StdinLoader(),
+ lstrip_blocks=True,
+ trim_blocks=True,
+ undefined=jinja2.StrictUndefined,
+ autoescape=False)
+tmpl = jinjaEnv.get_template('stdin');
+
+try:
+ print(tmpl.render(data = jsonData1))
+except jinja2.TemplateSyntaxError as error:
+ print("Template syntax error: {error.message} on line {error.lineno}.".format(error=error))
+ exit(1)
+except jinja2.UndefinedError as error:
+ print("Template undefined error: {error.message}.".format(error=error))
+ exit(1)
+except TypeError as error:
+ print("Template type error: {0}.".format(error.args[0]))
+ exit(1)
diff --git a/netzbon/setup-merchants.sh b/netzbon/setup-merchants.sh
new file mode 100755
index 0000000..7fa1d3b
--- /dev/null
+++ b/netzbon/setup-merchants.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+# This script is in the public domain.
+#
+# You must export
+#
+# export BASE_URL=e.netzbon-basel.ch
+# export MERCHANT_TOKEN=password
+# export BANK_ADMIN_TOKEN=password
+#
+# before running this script!
+#
+# Call with the JSON file (like test.json) with
+# an array of merchants to set up as the first argument!
+#
+# FIXME: nice error handling is non-existent...
+#
+set -eu
+LENGTH=$(jq length < $1)
+echo "Setting up $LENGTH merchants at ${BASE_URL}"
+
+for n in $(seq 1 $LENGTH)
+do
+ echo "Processing merchant $n"
+ INDEX=$(expr $n - 1 || true)
+ NAME=$(jq ".[$INDEX].name" < $1)
+ ID=$(jq .[$INDEX].id < $1)
+ PW=$(jq .[$INDEX].pass < $1)
+
+ taler-harness deployment provision-bank-and-merchant \
+ "merchant.${BASE_URL}" \
+ "bank.${BASE_URL}" \
+ "--merchant-management-token=${MERCHANT_TOKEN}" \
+ "--bank-admin-token=${BANK_ADMIN_TOKEN}" \
+ "--id=${ID}" \
+ "--legal-name=${NAME}" \
+ "--password=${PW}"
+
+ echo "Done with ${ID}"
+done
diff --git a/netzbon/template.tex.j2 b/netzbon/template.tex.j2
new file mode 100644
index 0000000..81ec978
--- /dev/null
+++ b/netzbon/template.tex.j2
@@ -0,0 +1,79 @@
+\documentclass[12pt,a4paper]{letter}
+\usepackage[utf8]{inputenc}
+\usepackage[english]{babel}
+\usepackage[nolinks,final,forget]{qrcode}
+\usepackage[top=2cm,
+bottom=2cm,
+includefoot,
+left=2.5cm,
+right=2cm,
+footskip=1cm]{geometry}
+\usepackage{url}
+\usepackage[colorlinks=true, allcolors=black]{hyperref}
+\IfFileExists{lmodern.sty}{\usepackage{lmodern}}{}
+\date{\today}
+%
+\selectlanguage{english}
+
+\signature{Isidor}
+\begin{document}
+%
+\begin{letter}{To \\ {{data.name}}}
+
+\opening{Dear {{data.name}},}
+
+We are excited to introduce you to the new digital Netzbon {\bf eNetzBon} using GNU Taler.
+In the enclosed brochure, you will find some introduction on how you can
+set up your business to accept payments in eNetzBon.
+
+This letter provides you with your personalized credentials to access your
+{\bf eNetzBon bank account} and {\bf merchant backend}. Please keep the password
+confidential as otherwise others might gain control over your eNetzBon! You
+are encouraged to set up second-factor authentication (via SMS or email)
+before using the system.
+
+Your initial password is {\bf {{data.pass}}}.
+
+Using this password and the username {\tt {{data.id}}} you can log into
+your eNetzBon bank account at {\url{https://bank.{{data.domain}}/}}.
+
+Furthermore, we are happy to provide you with a GNU Taler merchant
+backend at {\url{https://backend.{{data.domain}}/instances/{{data.id}}/}}.
+The backend is already configured to use your eNetzBon bank account
+and uses the same password.
+
+You are encouraged to change the password (separately) in both systems.
+
+If you want to use a GNU Taler wallet (from {\url{https://wallet.taler.net/}})
+you need to add eNetzBon as a payment service provider before you can use it to
+invoice your customers. To do so, please scan the following QR code with your Taler wallet:
+\begin{center}
+{\qrcode[hyperlink,level=M,height=3cm]{\tt taler://exchange/exchange.{{data.domain}}/}}
+
+{\tt taler://exchange/exchange.{{data.domain}}/}
+\end{center}
+
+This welcome package includes five identical stickers with a QR code which is
+pre-configured to link all your customers' payments into your eNetzBon bank account. Your
+specific QR code looks like this:
+\begin{center}
+{\qrcode[hyperlink,level=M,height=3cm]{\tt taler://pay-template/backend.{{data.domain}}/instances/{{data.id}}/default}}
+
+{\tt taler://pay-template/backend.{{data.domain}}/instances/{{data.id}}/default}
+\end{center}
+You are free to create additional QR codes or change the contents for this QR code
+in the merchant backend.
+
+Please sign the included Terms of service on the attached paper slip and return it to us.
+If you want us to set up the Taler Point-of-sale app, please complete the form in the
+introduction brochure and return that form to us as well.
+
+We hope your customers enjoy paying you with eNetzBon!
+
+
+\closing{Best regards}
+\encl{Five similar QR code stickers, \\ Introduction to GNU Taler for merchants, \\
+eNetzBon Terms of service (to sign), \\ Return envelope}
+
+\end{letter}
+\end{document}
diff --git a/netzbon/template_de.tex.j2 b/netzbon/template_de.tex.j2
new file mode 100644
index 0000000..400942c
--- /dev/null
+++ b/netzbon/template_de.tex.j2
@@ -0,0 +1,103 @@
+\documentclass[12pt,a4paper]{letter}
+\usepackage[utf8]{inputenc}
+\usepackage[ngerman]{babel}
+\usepackage[nolinks,final,forget]{qrcode}
+\usepackage[top=2cm,
+bottom=2cm,
+includefoot,
+left=2.5cm,
+right=2cm,
+footskip=1cm]{geometry}
+\usepackage{url}
+\usepackage[colorlinks=true, allcolors=black]{hyperref}
+\IfFileExists{lmodern.sty}{\usepackage{lmodern}}{}
+\date{\today}
+%
+\selectlanguage{german}
+\address{Verein Soziale \"Okonomie \\Klybeckstrasse 95 \\4057 Basel}
+\signature{Isidor}
+\begin{document}
+%
+\begin{letter}{An \\ {{data.name}}}
+\opening{Liebe(r) {{data.name}},}
+
+Wir freuen uns, dir heute die digitale NetzBon-Variante {\bf eNetzBon} vorstellen zu
+d\"urfen. Der Verein Soziale \"Okonomie betreibt dieses Bezahlsystem basierend auf der
+Technik von {\bf GNU Taler} und l\"adt dich ein, es gleich einmal praktisch kennenzulernen.
+
+Die beiliegende Brosch\"ure erkl\"art, wie du die Software ausprobierst und so einstellst,
+dass dein Betrieb Zahlungen von Kunden und anderen Betrieben in eNetzBon auf deinem
+internen Konto beim Verein empfangen kann. Die {\bf pers\"onlichen Zugangsdaten} gelten
+sowohl f\"ur das {\bf eNetzBon-Konto} als auch f\"ur das {\bf Verwaltungsprogramm GNU Taler
+Merchant}, mit dem du deine Artikelstammdaten anlegen und Buchungen in eNetzBon verwalten
+kannst.
+
+Um Zugang zu deinem {\bf eNetzBon-Konto} zu erhalten, rufst du in deinem Browser die Seite
+\begin{center}
+{\url{https://bank.{{data.domain}}/}}
+\end{center}
+auf und gibst den Benutzernamen {\tt {{data.id}}} und das Passwort {\bf {{data.pass}}} ein.
+
+Dein Passwort musst du nach dem ersten Besuch \"andern und es dauerhaft vor dem Zugriff
+Unbefugter sch\"utzen, weil diese sonst Kontrolle \"uber die eNetzBon erlangen k\"onnten!
+Wir empfehlen daf\"ur eine Zwei-Faktor-Authentifizierung (mittels SMS oder E-Mail), bevor
+das System in Betrieb genommen wird.
+
+Das {\bf Verwaltungsprogramm GNU Taler Merchant} ist zug\"anglich unter
+\begin{center}
+{\url{https://backend.{{data.domain}}/instances/{{data.id}}/}}.
+\end{center}
+Es ist bereits mit deinem eNetzBon-Konto verbunden und verwendet {\bf dasselbe
+Passwort}.
+
+Wir empfehlen zugunsten h\"oherer Sicherheit die beiden Passw\"orter unabh\"angig
+voneinander in beiden Systemen zu \"andern.
+
+Wenn du die {\bf elektronische Geldb\"orse GNU Taler Wallet} verwenden willst, um von
+deinen Kunden eNetzBon an dieses gezahlt zu bekommen bzw. um selbst mit eNetzBon zu
+bezahlen, besuchst du
+\begin{center}
+{\url{https://wallet.taler.net/}}
+\end{center}
+und installierst das passende Wallet f\"ur dein Smartphone (Android oder iOS).
+\newpage
+
+Bevor du {\bf Rechnungen an deine Kunden stellen} kannst, musst du im Wallet zuerst
+eNetzBon als Zahlungsdienst hinzuf\"ugen. Um dies zu tun, aktiviere bitte dein GNU Taler
+Wallet und scanne folgenden QR-Code:
+\begin{center}
+{\qrcode[hyperlink,level=M,height=3cm]{taler://exchange/exchange.{{data.domain}}/}}
+
+{\tt taler://exchange/exchange.{{data.domain}}/}
+\end{center}
+
+Anbei erh\"altst du {\bf f\"unf gleichartige Aufkleber mit einem QR-Code}, der den Verweis
+auf dein eNetzBon-Konto enth\"alt bzw. deinen Betrieb eindeutig bezeichnet. Die Kunden
+m\"ussen diesen QR-Code beim Bezahlen mit ihren GNU Taler Wallets scannen, damit ihre
+Zahlungen auf dein eNetzBon-Konto gelenkt werden. So sieht dein QR-Code aus:
+\begin{center}
+{\qrcode[hyperlink,level=M,height=3cm]{taler://pay-template/backend.{{data.domain}}/instances/{{data.id}}/default}}
+
+{\tt taler://pay-template/backend.{{data.domain}}/instances/{{data.id}}/default}
+\end{center}
+Im Verwaltungsprogramm GNU Taler Merchant besteht die M\"oglichkeit, weitere QR-Codes zu
+erzeugen und zus\"atzliche Daten des QR-Codes festzulegen, z.B. QR-Codes mit festen
+oder variablen Preisen f\"ur deine angebotenen Waren oder Dienstleistungen.
+
+Eine Bitte haben wir noch, bevor es losgehen kann:
+
+Wir ben\"otigen die {\bf Allgemeinen Geschäftsbedingungen (AGB)} zur eNetzBon-Nutzung
+unterschrieben an den Verein Soziale \"Okonomie zur\"uckgesendet.
+
+F\"ur den Fall deines Interesses, dass wir dir die Anwendung {\bf Taler
+Point-of-sale App} aufsetzen und in Betrieb nehmen sollen, f\"ulle bitte den Antrag in der
+Anleitungsbrosch\"ure aus und sende auch diesen an uns zur\"uck.
+
+Und nun w\"unschen wir dir gutes Gelingen und viel Freude beim Entdecken des eNetzBon!
+\closing{Herzliche Gr\"usse}
+\encl{F\"unf identische Aufkleber mit dem QR-Code deines eNetzBon-Kontos, \\
+Anleitungsbrosch\"ure GNU Taler f\"ur NetzBon-Betriebe, \\
+eNetzBon-AGB (bitte mit Unterschrift zur\"ucksenden), \\ Antwortcouvert}
+
+\end{letter}
+\end{document}
diff --git a/netzbon/test.json b/netzbon/test.json
new file mode 100644
index 0000000..9a47fe6
--- /dev/null
+++ b/netzbon/test.json
@@ -0,0 +1,7 @@
+[
+ {
+ "name": "Test shop",
+ "id": "test",
+ "pass": "password"
+ }
+]
diff --git a/nlnet/task1/Dockerfile b/nlnet/task1/Dockerfile
new file mode 100644
index 0000000..498d54a
--- /dev/null
+++ b/nlnet/task1/Dockerfile
@@ -0,0 +1,32 @@
+FROM debian:stable
+
+RUN apt-get update
+RUN apt-get install -y openjdk-17-jre git python3-pip curl jq sqlite3
+RUN pip3 install click requests
+
+# Installation
+RUN git clone git://git.taler.net/libeufin
+WORKDIR /libeufin
+RUN git fetch && git checkout 9c7079e5323eed4d16e24c1c4245d6586cecac53 # amounts zero-check fixed.
+RUN ./bootstrap
+RUN ./configure --prefix=/usr/local
+RUN make install
+
+# Reverse proxy
+RUN apt-get install -y nginx
+
+# Importing the UI.
+RUN git clone -b prebuilt git://git.taler.net/wallet-core
+RUN git -C wallet-core checkout 75af013b348b08b8fb9e65cc9270f2fde964979b # checkout rates fixed.
+RUN cp /libeufin/debian/etc/nginx/sites-available/libeufin-sandbox /etc/nginx/sites-enabled/
+RUN mkdir -p /usr/share/libeufin/demobank-ui/
+RUN mkdir -p /etc/libeufin/
+RUN cp /libeufin/debian/usr/share/libeufin/demobank-ui/demobank-ui-settings.js /etc/libeufin/
+RUN cp wallet-core/demobank/* /usr/share/libeufin/demobank-ui/
+
+# Default place for the database.
+RUN mkdir /libeufin-data
+
+COPY start.sh /
+# ENTRYPOINT /start.sh
+CMD /start.sh
diff --git a/nlnet/task1/start.sh b/nlnet/task1/start.sh
new file mode 100755
index 0000000..18bf9b8
--- /dev/null
+++ b/nlnet/task1/start.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+set -eu
+
+export LIBEUFIN_SANDBOX_ADMIN_PASSWORD=${LIBEUFIN_SANDBOX_ADMIN_PASSWORD:-admin}
+export LIBEUFIN_SANDBOX_DB_CONNECTION="jdbc:sqlite:/libeufin-data/libeufin.sqlite"
+libeufin-sandbox config --without-registrations --currency ${CURRENCY:-EUR} default
+if test -z "${LIBEUFIN_EXPOSED_PORT:-}"; then
+ echo ERROR: LIBEUFIN_EXPOSED_PORT is an empty string.
+ exit 1
+fi
+
+sed -i "s/localhost/localhost:$LIBEUFIN_EXPOSED_PORT/" /etc/libeufin/demobank-ui-settings.js
+service nginx start
+libeufin-sandbox serve --port 5016 --no-localhost-only
diff --git a/nlnet/task2/Dockerfile b/nlnet/task2/Dockerfile
new file mode 100644
index 0000000..e7cc048
--- /dev/null
+++ b/nlnet/task2/Dockerfile
@@ -0,0 +1,29 @@
+FROM debian:stable
+
+RUN apt-get update
+RUN apt-get install -y git
+
+ # python3-pip
+# Libeufin Dependencies
+RUN apt-get install -y openjdk-17-jre
+# Installation
+RUN git clone git://git.taler.net/libeufin
+WORKDIR /libeufin
+RUN git fetch && git checkout a52cf289234683c4ff492cd8b508cfb6c85ca1e8
+RUN ./bootstrap
+RUN apt-get install -y python3-venv
+RUN apt-get install -y make
+RUN ./configure --prefix=/usr/local
+RUN make install
+# FIXME: move to the deps block.
+RUN apt-get install -y postgresql sudo
+RUN grep -v ^host.*all /etc/postgresql/13/main/pg_hba.conf > /tmp/pg_hba_buf.txt
+RUN echo "host libeufincheck all 127.0.0.1/32 trust" >> /tmp/pg_hba_buf.txt
+RUN echo "host libeufincheck all ::1/128 trust" >> /tmp/pg_hba_buf.txt
+RUN cp /tmp/pg_hba_buf.txt /etc/postgresql/13/main/pg_hba.conf
+# CMD bash
+RUN apt-get install -y jq curl
+CMD service postgresql start && \
+ sudo -u postgres createuser -s root && \
+ createdb -h /var/run/postgresql libeufincheck && \
+ make check
diff --git a/nlnet/task3/Dockerfile b/nlnet/task3/Dockerfile
new file mode 100644
index 0000000..52e7978
--- /dev/null
+++ b/nlnet/task3/Dockerfile
@@ -0,0 +1,15 @@
+FROM debian:stable
+
+RUN apt-get update
+RUN apt-get install -y openjdk-17-jre git python3-pip curl jq sqlite3 postgresql python3-requests python3-click sudo
+
+# Installation
+RUN git clone git://git.taler.net/libeufin
+WORKDIR /libeufin
+RUN git fetch && git checkout 4bc5f38f571a45d427f73813ec3846bf59413afa
+RUN ./bootstrap
+RUN ./configure --prefix=/usr/local
+RUN make install
+COPY keys.sh /
+COPY start.sh /
+ENTRYPOINT ["/start.sh"]
diff --git a/nlnet/task3/keys.sh b/nlnet/task3/keys.sh
new file mode 100755
index 0000000..d1fff07
--- /dev/null
+++ b/nlnet/task3/keys.sh
@@ -0,0 +1,93 @@
+#!/bin/bash
+
+set -eu
+
+# This script prepares the EBICS keys for one subscriber
+# at the PostFinance test platform.
+
+export LIBEUFIN_NEXUS_DB_CONNECTION="jdbc:sqlite:/tmp/postfinance-nexusdb.sqlite3"
+
+NEXUS_USERNAME="netzbon-bridge"
+NEXUS_PASSWORD="secret"
+NEXUS_PORT="5001"
+
+function exit_cleanup()
+{
+ for n in `jobs -p`
+ do
+ kill $n 2> /dev/null || true
+ done
+ wait || true
+}
+
+trap "exit_cleanup" EXIT
+
+echo "Creating the $NEXUS_USERNAME Nexus user..."
+libeufin-nexus superuser $NEXUS_USERNAME --password $NEXUS_PASSWORD
+echo "Launching Nexus on port $NEXUS_PORT..."
+libeufin-nexus \
+ serve --ipv4-only \
+ --log-level debug \
+ --no-localhost-only \
+ --port $NEXUS_PORT > nexus-postfinance.log 2>&1 &
+
+echo -n "Checking Nexus is serving..."
+for i in `seq 1 10`; do
+ echo -n .
+ if test $i = 10; then
+ echo Nexus is unreachable
+ exit 1
+ fi
+ if `curl "http://localhost:$NEXUS_PORT/" &> /dev/null`; then
+ break
+ fi
+ sleep 1
+done
+echo OK
+
+export LIBEUFIN_NEXUS_URL="http://localhost:5001/"
+export LIBEUFIN_NEXUS_USERNAME=$NEXUS_USERNAME
+export LIBEUFIN_NEXUS_PASSWORD=$NEXUS_PASSWORD
+
+# FIXME: make connection creation idempotent.
+echo "Creating a EBICS connection at Nexus..."
+libeufin-cli connections new-ebics-connection \
+ --ebics-url https://isotest.postfinance.ch/ebicsweb/ebicsweb \
+ --host-id PFEBICS \
+ --partner-id $EBICS_PARTNER_ID \
+ --ebics-user-id $EBICS_USER_ID \
+ --dialect pf \
+ postfinanceconn || true
+
+# 1, send the keys (INI, HIA)
+# NOTE: these keys will ONLY be considered if the user
+# is in a NEW state, any previous uploaded keys should be reset.
+echo "If that is the case, reset any previous keys via the bank Web UI. Press Enter to continue.. "
+read -s
+echo -n "Sending the new keys to the bank..."
+libeufin-cli connections connect postfinanceconn
+echo DONE
+
+# 2, invite the user to unblock them in the Web UI
+echo "Please enable the new client keys via the bank Web UI, then press Enter.. "
+read -s
+
+# 3, download the bank keys (HPB).
+# That's achieved with another 'connect' action (#7880).
+echo -n "Downloading the bank keys..."
+libeufin-cli connections connect postfinanceconn
+echo DONE
+echo "Found the following bank keys:"
+libeufin-cli connections show-connection postfinanceconn | jq -r '.details | "Auth: \(.bankAuthKeyHash)\nEnc: \(.bankEncKeyHash)"'
+
+echo
+echo "If any bank keys showed up, please check in the bank Web UI if they match."
+echo "If they match, press Enter to continue, otherwise CTRL-C to end."
+read -s
+
+echo -n "Preparing the local keys bundle.."
+libeufin-cli connections export-backup \
+ --passphrase secret \
+ --output-file /tmp/pofi.json \
+ postfinanceconn > /dev/null
+echo DONE
diff --git a/nlnet/task3/salted-incoming-payment-template.csv b/nlnet/task3/salted-incoming-payment-template.csv
new file mode 100644
index 0000000..c539939
--- /dev/null
+++ b/nlnet/task3/salted-incoming-payment-template.csv
@@ -0,0 +1,2 @@
+Product;Channel;Account;Currency;Amount;Reference;Name;Street;Number;Postcode;City;Country;DebtorAddressLine;DebtorAddressLine;DebtorAccount;ReferenceType;UltimateDebtorName;UltimateDebtorStreet;UltimateDebtorNumber;UltimateDebtorPostcode;UltimateDebtorTownName;UltimateDebtorCountry;UltimateDebtorAddressLine;UltimateDebtorAddressLine;RemittanceInformationText
+ QRR;PO;__PAYEE_IBAN__;CHF;33;;D009;Musterstrasse;1;1111;Musterstadt;CH;;;;NON;D009;Musterstrasse;1;1111;Musterstadt;CH;;;__PAYMENT_SALT__
diff --git a/nlnet/task3/start.sh b/nlnet/task3/start.sh
new file mode 100755
index 0000000..2f8b2a2
--- /dev/null
+++ b/nlnet/task3/start.sh
@@ -0,0 +1,79 @@
+#!/bin/bash
+
+set -eu
+
+function finish() {
+ exit 1
+}
+
+trap finish SIGINT
+
+# Expected arguments are:
+#
+# $1 := EBICS user ID
+# $2 := EBICS partner ID (a.k.a. customer ID)
+# $3 := IBAN as assigned by the PostFinance test platform.
+
+# Suggested invocation via 'docker':
+#
+# docker run -it $IMAGE_TAG $EBICS_USER_ID $EBICS_PARTNER_ID
+service postgresql start
+sudo -u postgres createuser -s root
+createdb libeufincheck
+# This script conducts the key exchange with the bank
+# and guides the user to download and upload documents
+# to the bank. It pauses the execution to let the user
+# check and set the Web UI as a double-check mean.
+
+# Setting the EBICS keys. It'll place them in the container's
+# /tmp/pofi.json, where Kotlin expects them.
+export EBICS_USER_ID=$1
+export EBICS_PARTNER_ID=$2
+/keys.sh
+
+# If the keys are ready, it proceeds to invoke the uploading
+# and downloading logic.
+
+# Upload test.
+
+# The test runner will upload one pain.001 document to
+# the bank. Thereafter, the user can check the existence
+# of such document via the bank Web UI. Moreover, the user
+# is offered the possibility to specify a custom payment
+# subject.
+
+MY_IBAN=$3
+PAIN_SALT=$RANDOM
+echo
+echo "Now preparing the pain.001 to upload to the bank via LibEuFin."
+echo "This document instructs the bank to send money to an arbitrary"
+echo "IBAN by debiting the test platform bank account."
+echo "The outgoing payment defaults to have this subject: $PAIN_SALT".
+echo "Please enter any value in this prompt, in case you want to"
+echo -n "change the default subject: "
+read MAYBE_PAIN_SALT
+
+if ! test "x" = "x$MAYBE_PAIN_SALT"; then
+ PAIN_SALT=$MAYBE_PAIN_SALT
+fi
+
+echo "The pain.001 will have this subject: '$PAIN_SALT', now calling"
+echo "LibEuFin to upload it via EBICS.."
+cd /libeufin; ./gradlew -q :nexus:pofi --args="--my-iban \"$MY_IBAN\" upload --subject \"$PAIN_SALT\""; cd -
+echo DONE
+
+echo
+echo "Please check the bank Web UI to find the pain.001 document"
+echo "whose subject is '$PAIN_SALT'. If that is found, then LibEuFin"
+echo "has successfully uploaded it. In the next step, LibEuFin"
+echo "will download the new banking records. If '$PAIN_SALT' is found"
+echo "in the logs, then it succeeded. Press Enter to continue.. "
+read -s
+
+# Download test.
+
+# The test runner proceeds with downloading the banking
+# records that witness the payment that was uploaded shortly
+# ago. If the logs show the payment subject that belongs
+# to such payment, then the download went through.
+cd /libeufin; ./gradlew -q :nexus:pofi --args="--my-iban \"$MY_IBAN\" download"; cd -
diff --git a/nlnet/task4/Dockerfile b/nlnet/task4/Dockerfile
new file mode 100644
index 0000000..0a3be9a
--- /dev/null
+++ b/nlnet/task4/Dockerfile
@@ -0,0 +1,42 @@
+FROM debian:stable
+
+RUN apt-get update
+RUN apt-get install -y \
+ git \
+ openjdk-17-jre \
+ python3-pip \
+ curl \
+ jq \
+ postgresql \
+ python3-requests \
+ python3-click \
+ sudo \
+ time \
+ autoconf \
+ autopoint \
+ libtool \
+ texinfo \
+ libgcrypt-dev \
+ libidn11-dev \
+ zlib1g-dev \
+ libunistring-dev \
+ libjansson-dev \
+ recutils \
+ libsqlite3-dev \
+ libpq-dev \
+ libcurl4-openssl-dev \
+ libsodium-dev \
+ libqrencode-dev \
+ zip
+
+# Installation
+RUN git clone git://git.taler.net/libeufin
+WORKDIR /libeufin
+RUN git fetch && git checkout 736c3998648ad249577f8930b616e1f27647f938
+RUN ./bootstrap
+RUN ./configure --prefix=/usr/local
+RUN make install
+RUN make install-nexus
+WORKDIR /
+COPY start.sh /
+ENTRYPOINT ["/start.sh"]
diff --git a/nlnet/task4/launch.sh b/nlnet/task4/launch.sh
new file mode 100755
index 0000000..bc1508e
--- /dev/null
+++ b/nlnet/task4/launch.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+# Binds the container internal port 8080 to the host's.
+
+set -eu
+
+docker run -p 8080:8080 -it monitor
diff --git a/nlnet/task4/start.sh b/nlnet/task4/start.sh
new file mode 100755
index 0000000..3b45d57
--- /dev/null
+++ b/nlnet/task4/start.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+set -eu
+
+service postgresql start
+sudo -u postgres createuser -s root
+createdb libeufinbank
+cat << EOF > /usr/bin/taler-config
+#!/bin/bash
+
+echo postgresql:///libeufinbank
+EOF
+chmod +x /usr/bin/taler-config
+sed -i 's/ALLOW_CONVERSION = no/ALLOW_CONVERSION = yes/' \
+ /libeufin/contrib/libeufin-bank.conf
+cat << EOF >> /libeufin/contrib/libeufin-bank.conf
+
+[nexus-ebics]
+currency = EUR
+[nexus-postgres]
+config = postgresql:///libeufinbank
+EOF
+libeufin-bank dbinit -c /libeufin/contrib/libeufin-bank.conf
+libeufin-nexus dbinit -c /libeufin/contrib/libeufin-bank.conf
+/libeufin/contrib/populate-stats.sh /libeufin/contrib/libeufin-bank.conf --one
+libeufin-bank passwd admin nlnet
+libeufin-bank serve -c /libeufin/contrib/libeufin-bank.conf
diff --git a/nlnet/task5/date-range/Dockerfile b/nlnet/task5/date-range/Dockerfile
new file mode 100644
index 0000000..8d1224f
--- /dev/null
+++ b/nlnet/task5/date-range/Dockerfile
@@ -0,0 +1,15 @@
+FROM debian:stable
+
+RUN apt-get update
+RUN apt-get install -y openjdk-17-jre git python3-pip curl jq sqlite3 postgresql python3-requests python3-click sudo
+
+# Installation
+RUN git clone git://git.taler.net/libeufin
+WORKDIR /libeufin
+RUN git fetch && git checkout a614d433a8307468f1074114086ae0a47b848472
+RUN ./bootstrap
+RUN ./configure --prefix=/usr/local
+RUN make install
+COPY start-libeufin.sh /
+COPY start.sh /
+ENTRYPOINT ["/start.sh"]
diff --git a/nlnet/task5/date-range/start-libeufin.sh b/nlnet/task5/date-range/start-libeufin.sh
new file mode 100644
index 0000000..8f000a4
--- /dev/null
+++ b/nlnet/task5/date-range/start-libeufin.sh
@@ -0,0 +1,35 @@
+DB_CONN="postgresql:///libeufincheck"
+export LIBEUFIN_SANDBOX_DB_CONNECTION=$DB_CONN
+export LIBEUFIN_NEXUS_DB_CONNECTION=$DB_CONN
+
+echo -n Delete previous data...
+libeufin-sandbox reset-tables
+libeufin-nexus reset-tables
+echo DONE
+echo -n Configure the default demobank with MANA...
+libeufin-sandbox config --with-signup-bonus --currency MANA default
+echo DONE
+echo -n Setting the default exchange at Sandbox...
+libeufin-sandbox \
+ default-exchange \
+ "https://exchange.example.com/" \
+ "payto://iban/NOTUSED"
+echo DONE
+echo -n Start the bank...
+export LIBEUFIN_SANDBOX_ADMIN_PASSWORD=foo
+libeufin-sandbox serve > sandbox.log 2>&1 &
+SANDBOX_PID=$!
+echo DONE
+echo -n Wait for the bank...
+curl --max-time 4 --retry-all-errors --retry-connrefused --retry-delay 1 --retry 10 http://localhost:5000/ &> /dev/null
+echo DONE
+echo -n Make one superuser at Nexus...
+libeufin-nexus superuser test-user --password x
+echo DONE
+echo -n Launching Nexus...
+libeufin-nexus serve &> nexus.log &
+NEXUS_PID=$!
+echo DONE
+echo -n Waiting for Nexus...
+curl --max-time 4 --retry-all-errors --retry-connrefused --retry-delay 1 --retry 10 http://localhost:5001/ &> /dev/null
+echo DONE
diff --git a/nlnet/task5/date-range/start.sh b/nlnet/task5/date-range/start.sh
new file mode 100755
index 0000000..c61cfee
--- /dev/null
+++ b/nlnet/task5/date-range/start.sh
@@ -0,0 +1,155 @@
+#!/bin/bash
+
+# This script shows how Nexus can request histories from
+# a particular time frame. Such request must succeed via
+# two connection types: EBICS and x-libeufin-bank. EBICS
+# ensures the fetching of fiat payments made to the regional
+# currency authority, whereas x-libeufin-bank does it for
+# the regional currency circuit. Note: the time-framed
+# request is exceptional: it's used only after a complaint
+# from a user where they didn't get their funds as expected.
+
+set -eu
+
+service postgresql start
+sudo -u postgres createuser -s root
+createdb libeufincheck
+
+echo -n Launching and configuring LibEuFin..
+source /start-libeufin.sh &> /dev/null
+# Register the Sandbox account.
+export LIBEUFIN_SANDBOX_USERNAME=sandbox-user
+export LIBEUFIN_SANDBOX_PASSWORD=foo
+libeufin-cli \
+ sandbox --sandbox-url http://localhost:5000/ \
+ demobank \
+ register
+# x-libeufin-bank connection.
+# Creating the x-libeufin-bank connection at Nexus.
+export LIBEUFIN_NEXUS_USERNAME=test-user
+export LIBEUFIN_NEXUS_PASSWORD=x
+export LIBEUFIN_NEXUS_URL=http://localhost:5001
+libeufin-cli connections new-xlibeufinbank-connection \
+ --bank-url "http://localhost:5000/demobanks/default/access-api" \
+ --username sandbox-user \
+ --password foo \
+ xlibeufinbankconn
+# Connecting the x-libeufin-bank connection...
+libeufin-cli connections connect xlibeufinbankconn
+# Importing the bank account under a local name at Nexus.
+# Importing the x-libeufin-bank account locally..
+libeufin-cli connections import-bank-account \
+ --offered-account-id sandbox-user \
+ --nexus-bank-account-id foo-at-nexus xlibeufinbankconn
+
+# EBICS connection.
+## Sandbox side.
+export LIBEUFIN_SANDBOX_USERNAME=admin
+# "Create EBICS host at Sandbox..."
+libeufin-cli sandbox \
+ --sandbox-url http://localhost:5000 \
+ ebicshost create --host-id wwwebics
+# Create nlnet EBICS subscriber at Sandbox
+libeufin-cli sandbox \
+ --sandbox-url http://localhost:5000 \
+ demobank new-ebicssubscriber --host-id wwwebics \
+ --user-id nlnet --partner-id nlnet \
+ --bank-account sandbox-user # that's a username _and_ a bank account name
+## Nexus side.
+export LIBEUFIN_NEXUS_USERNAME=test-user
+export LIBEUFIN_NEXUS_PASSWORD=x
+export LIBEUFIN_NEXUS_URL=http://localhost:5001
+# Creating the EBICS connection at Nexus...
+libeufin-cli connections new-ebics-connection \
+ --ebics-url "http://localhost:5000/ebicsweb" \
+ --host-id wwwebics \
+ --partner-id nlnet \
+ --ebics-user-id nlnet \
+ ebicsconn
+# Setup EBICS keying...
+libeufin-cli connections connect ebicsconn > /dev/null
+# Download bank account name from Sandbox...
+libeufin-cli connections download-bank-accounts ebicsconn
+# Importing bank account info into Nexus...
+libeufin-cli connections import-bank-account \
+ --offered-account-id sandbox-user \
+ --nexus-bank-account-id bar-at-nexus ebicsconn
+echo DONE
+
+FIRST_JAN_2020="1577833200000" # in milliseconds
+END_DEC_2019="2019-12-30"
+MID_JAN_2020="2020-01-15"
+
+echo Make sample transaction..
+# 0, setup and start services.
+libeufin-sandbox make-transaction \
+ --credit-account=admin \
+ --debit-account=sandbox-user MANA:2 \
+ "task5" # subject.
+echo DONE
+
+echo -n Artificially set the transaction date to $FIRST_JAN_2020..
+# 1, set artificial time for the transaction at January, 1st 2020.
+echo "UPDATE bankaccounttransactions SET date='$FIRST_JAN_2020' WHERE subject='task5'" | psql -q -d libeufincheck
+echo DONE
+
+# 2, retrieve the transaction via Nexus, for both
+# connections and by asking for a (narrow) time frame
+# that includes the 2020-01-01 payment.
+
+echo -n Nexus: syncing banking records for the time frame $END_DEC_2019-$MID_JAN_2020 via EBICS..
+# Fetch time-framed payments via EBICS.
+libeufin-cli \
+ accounts \
+ fetch-transactions \
+ --level=report \
+ --range-type=time-range \
+ --start=$END_DEC_2019 \
+ --end=$MID_JAN_2020 \
+ bar-at-nexus > /dev/null # EBICS
+echo DONE
+
+echo Showing the synced data..
+# Now checks if Nexus ingested and shows the
+# expected payment.
+libeufin-cli \
+ accounts \
+ transactions \
+ bar-at-nexus
+echo DONE
+
+echo Resetting the Nexus database..
+# Bring the database state so that Nexus does not hold any payment.
+echo "DELETE FROM nexusbanktransactions" | psql -d libeufincheck
+echo "DELETE FROM nexusbankmessages" | psql -d libeufincheck
+echo DONE
+
+echo Checking that no payment data appears after the reset..
+# Double-checking that the future steps start
+# without the previous transactions.
+libeufin-cli \
+ accounts \
+ transactions \
+ foo-at-nexus # FIXME: put a 204 No Content check?
+echo DONE
+
+# Fetch time-framed payments via x-libeufin-bank.
+echo Nexus: syncing banking records for the time frame ${END_DEC_2019}_${MID_JAN_2020} via x-libeufin-bank..
+libeufin-cli \
+ accounts \
+ fetch-transactions \
+ --level=statement \
+ --range-type=time-range \
+ --start=$END_DEC_2019 \
+ --end=$MID_JAN_2020 \
+ foo-at-nexus
+echo DONE
+
+# As in the previous case, now Nexus should show
+# the 2020-01-01 the payment.
+echo Showing the synced data..
+libeufin-cli \
+ accounts \
+ transactions \
+ foo-at-nexus # FIXME: put a 200 OK check?
+echo DONE
diff --git a/nlnet/task5/long-poll/Dockerfile b/nlnet/task5/long-poll/Dockerfile
new file mode 100644
index 0000000..713e47e
--- /dev/null
+++ b/nlnet/task5/long-poll/Dockerfile
@@ -0,0 +1,14 @@
+FROM debian:stable
+
+RUN apt-get update
+RUN apt-get install -y openjdk-17-jre git python3-pip curl jq sqlite3 postgresql python3-requests python3-click sudo libgnunet0.19
+
+# Installation
+RUN git clone git://git.taler.net/libeufin
+WORKDIR /libeufin
+RUN git fetch && git checkout 934a73b09b9e9abba348e15ddc058df5bb9cd6a3
+RUN ./bootstrap
+RUN ./configure --prefix=/usr/local
+RUN make install
+COPY start.sh /
+ENTRYPOINT ["/start.sh"]
diff --git a/nlnet/task5/long-poll/start.sh b/nlnet/task5/long-poll/start.sh
new file mode 100755
index 0000000..46a0af2
--- /dev/null
+++ b/nlnet/task5/long-poll/start.sh
@@ -0,0 +1,134 @@
+#!/bin/bash
+
+set -eu
+
+service postgresql start
+sudo -u postgres createuser -s root
+createdb libeufincheck
+
+wire_transfer () {
+ RESERVE_PUB=$(gnunet-ecc -g1 /tmp/www &> /dev/null && gnunet-ecc -p /tmp/www)
+ DB_CONN="postgresql:///libeufincheck"
+ libeufin-sandbox \
+ make-transaction \
+ --credit-account=sandbox-user \
+ --debit-account=admin MANA:2 \
+ $RESERVE_PUB
+}
+
+WITH_TASKS=1
+echo RUNNING SANDBOX-NEXUS EBICS PAIR
+jq --version &> /dev/null || (echo "'jq' command not found"; exit 77)
+curl --version &> /dev/null || (echo "'curl' command not found"; exit 77)
+
+DB_CONN="postgresql:///libeufincheck"
+export LIBEUFIN_SANDBOX_DB_CONNECTION=$DB_CONN
+export LIBEUFIN_NEXUS_DB_CONNECTION=$DB_CONN
+
+echo -n Delete previous data...
+libeufin-sandbox reset-tables
+libeufin-nexus reset-tables
+echo DONE
+echo -n Configure the default demobank with MANA...
+libeufin-sandbox config --with-signup-bonus --currency MANA default
+echo DONE
+echo -n Setting the default exchange at Sandbox...
+libeufin-sandbox \
+ default-exchange \
+ "https://exchange.example.com/" \
+ "payto://iban/NOTUSED"
+echo DONE
+echo -n Start the bank...
+export LIBEUFIN_SANDBOX_ADMIN_PASSWORD=foo
+libeufin-sandbox serve > sandbox.log 2>&1 &
+SANDBOX_PID=$!
+echo DONE
+echo -n Wait for the bank...
+curl --max-time 4 --retry-all-errors --retry-connrefused --retry-delay 1 --retry 10 http://localhost:5000/ &> /dev/null
+echo DONE
+echo -n Make one superuser at Nexus...
+libeufin-nexus superuser test-user --password x
+echo DONE
+echo -n Launching Nexus...
+libeufin-nexus serve &> nexus.log &
+NEXUS_PID=$!
+echo DONE
+echo -n Waiting for Nexus...
+curl --max-time 4 --retry-all-errors --retry-connrefused --retry-delay 1 --retry 10 http://localhost:5001/ &> /dev/null
+echo DONE
+
+echo -n "Register the Sandbox account..."
+export LIBEUFIN_SANDBOX_USERNAME=sandbox-user
+export LIBEUFIN_SANDBOX_PASSWORD=foo
+libeufin-cli \
+ sandbox --sandbox-url http://localhost:5000/ \
+ demobank \
+ register
+echo DONE
+echo -n Creating the x-libeufin-bank connection at Nexus...
+export LIBEUFIN_NEXUS_USERNAME=test-user
+export LIBEUFIN_NEXUS_PASSWORD=x
+export LIBEUFIN_NEXUS_URL=http://localhost:5001
+# echoing the password to STDIN, as that is a "prompt" option.
+libeufin-cli connections new-xlibeufinbank-connection \
+ --bank-url "http://localhost:5000/demobanks/default/access-api" \
+ --username sandbox-user \
+ --password foo \
+ wwwconn
+echo DONE
+echo -n Connecting the x-libeufin-bank connection...
+libeufin-cli connections connect wwwconn
+echo DONE
+# Importing the bank account under a local name at Nexus.
+echo -n Importing the x-libeufin-bank account locally..
+libeufin-cli connections import-bank-account \
+ --offered-account-id sandbox-user \
+ --nexus-bank-account-id foo-at-nexus wwwconn
+echo DONE
+echo -n Create the Taler facade at Nexus...
+libeufin-cli facades \
+ new-taler-wire-gateway-facade \
+ --currency TESTKUDOS --facade-name test-facade \
+ wwwconn foo-at-nexus
+echo DONE
+if test 1 = $WITH_TASKS; then
+ echo -n Creating submit transactions task..
+ libeufin-cli accounts task-schedule \
+ --task-type submit \
+ --task-name www-payments \
+ --task-cronspec "* * *" \
+ foo-at-nexus || true
+ # Tries every second. Ask C52
+ echo DONE
+ echo -n Creating fetch transactions task..
+ # Not idempotent, FIXME #7739
+ libeufin-cli accounts task-schedule \
+ --task-type fetch \
+ --task-name www-history \
+ --task-cronspec "* * *" \
+ --task-param-level statement \
+ --task-param-range-type since-last \
+ foo-at-nexus || true
+ echo DONE
+else
+ echo NOT creating background tasks!
+fi
+
+echo
+echo Services are online! The following shell offers a 'wire_transfer'
+echo command that wires money to Nexus 'test-user'. Give it after having
+echo connected an HTTP client that long-polls to Nexus. As an example, a
+echo 100 seconds long-poller to Nexus is the following command:
+echo curl -v -u test-user:x "'http://localhost:5001/facades/test-facade/taler-wire-gateway/history/incoming?delta=5&long_poll_ms=100000'"
+echo
+echo Hint: after having issued the previous command and having observed
+echo that it actually long-polls, press CTRL-Z to send it in the background,
+echo "then wire the funds to the long-poller with 'wire_transfer',"
+echo "and finally give 'fg 1' to bring the long-poller in the foreground."
+echo If the client now shows a response, then the long-polling mechanism
+echo worked.
+echo
+
+cd /
+export -f wire_transfer
+bash
diff --git a/nlnet/task5/performance/Dockerfile b/nlnet/task5/performance/Dockerfile
new file mode 100644
index 0000000..4daeaf0
--- /dev/null
+++ b/nlnet/task5/performance/Dockerfile
@@ -0,0 +1,70 @@
+FROM debian:stable
+
+RUN apt-get update
+RUN apt-get install -y \
+ git \
+ openjdk-17-jre \
+ python3-pip \
+ curl \
+ jq \
+ postgresql \
+ python3-requests \
+ python3-click \
+ sudo \
+ time \
+ autoconf \
+ autopoint \
+ libtool \
+ texinfo \
+ libgcrypt-dev \
+ libidn11-dev \
+ zlib1g-dev \
+ libunistring-dev \
+ libjansson-dev \
+ recutils \
+ libsqlite3-dev \
+ libpq-dev \
+ libcurl4-openssl-dev \
+ libsodium-dev \
+ libqrencode-dev \
+ zip
+
+# Installation
+RUN git clone git://git.taler.net/libeufin
+WORKDIR /libeufin
+RUN git fetch && git checkout 4bc5f38f571a45d427f73813ec3846bf59413afa
+RUN ./bootstrap
+RUN ./configure --prefix=/usr/local
+RUN make install
+WORKDIR /
+RUN git clone git://git.gnunet.org/libmicrohttpd
+WORKDIR /libmicrohttpd
+RUN ./bootstrap
+RUN ./configure --disable-doc
+RUN make install
+WORKDIR /
+RUN git clone git://git.gnunet.org/gnunet
+WORKDIR /gnunet
+RUN apt-get install -y python3-sphinx python3-sphinx-rtd-theme # Move up?
+RUN ./bootstrap
+RUN ./configure
+RUN pip3 install --break-system-packages htmlark
+RUN make install
+WORKDIR /
+RUN git clone git://git.taler.net/exchange
+WORKDIR /exchange
+RUN ./bootstrap
+RUN ./configure
+RUN make install
+WORKDIR /
+RUN git clone git://git.taler.net/merchant
+WORKDIR /merchant
+RUN ./bootstrap
+RUN ./configure
+RUN make install
+WORKDIR /
+
+COPY start.sh /
+RUN apt-get install -y wget
+RUN apt-get install -y bc
+ENTRYPOINT ["/start.sh"]
diff --git a/nlnet/task5/performance/start.sh b/nlnet/task5/performance/start.sh
new file mode 100755
index 0000000..2cc9175
--- /dev/null
+++ b/nlnet/task5/performance/start.sh
@@ -0,0 +1,107 @@
+#!/bin/bash
+
+# This script shows, via running the benchmark, how
+# the LibEuFin database connections are significantly
+# shorter than the benchmark total time.
+
+# For this reason, it can only be that LibEuFin opens
+# and closes many PostgreSQL connections, as it is required
+# by milestone #3.
+
+set -eu
+
+export HOW_MANY_WITHDRAWALS=100
+
+service postgresql start
+sudo -u postgres createuser -s root
+
+# Activating the disconnection logs.
+sudo -u postgres psql -q -c "ALTER SYSTEM SET log_disconnections = 'on'" -c "SELECT pg_reload_conf()" > /dev/null
+
+# Converts AA:BB:CC.DDD to milliseconds.
+convert_pg_time_to_ms () {
+ awk -F[.:] '{SECS=(60*60*$1)+(60*$2)+$3; MILLI=$4; TOTAL_MS=(SECS*1000)+MILLI; print TOTAL_MS}'
+}
+
+createdb talercheck
+export LD_LIBRARY_PATH=/usr/local/lib
+
+prepare_and_run () {
+ taler-unified-setup.sh \
+ -Wwemtns \
+ -c /exchange/src/benchmark/benchmark-cs.conf \
+ -u exchange-account-2 &> /check_ready.txt &
+ # Wait until the preparation has gone through.
+ echo -n Waiting the unified setup to complete..
+ READY="NO"
+ for i in `seq 100` true; do
+ if grep -q "<<READY>>" /check_ready.txt; then
+ READY="YES"
+ break
+ fi
+ echo -n "."; sleep 1
+ done
+
+ if test $READY = "YES"; then
+ echo "DONE"
+ else
+ cat /check_ready.txt
+ echo FAIL
+ exit 1
+ fi
+
+ echo Running the benchmark..
+ taler-exchange-benchmark \
+ -c /exchange/src/benchmark/benchmark-cs.conf.edited \
+ -u exchange-account-2 \
+ -L WARNING \
+ -n 1 \
+ -r $HOW_MANY_WITHDRAWALS
+}
+
+export -f prepare_and_run
+/usr/bin/time -o /benchmark-wall-clock-time.txt --format=%e bash -c "prepare_and_run"
+
+NEXUS_PID=$(cat /libeufin-nexus.pid)
+SANDBOX_PID=$(cat /libeufin-sandbox.pid)
+
+if test -z $NEXUS_PID; then
+ echo Could not find Nexus PID, failing.
+ exit 1
+fi
+
+if test -z $SANDBOX_PID; then
+ echo Could not find Sandbox PID, failing.
+ exit 1
+fi
+
+# Convert the wall clock time to milliseconds, to make
+# it compatible with the format as GREPped through Postgres logs.
+BENCHMARK_TOT_MS=$(awk -F. '{t=($1 * 1000 + $2 * 10)} END {print t}' /benchmark-wall-clock-time.txt)
+
+NEXUS_LONGEST_DB_SESSION_MS=$(grep disconnection < /var/log/postgresql/postgresql-15-main.log | grep $NEXUS_PID | grep -o "session time:.*$" | grep -o [0-9]:[0-9][0-9]:[0-9][0-9]\.[0-9][0-9][0-9] | convert_pg_time_to_ms | sort -n | tail -n 1)
+
+SANDBOX_LONGEST_DB_SESSION_MS=$(grep disconnection < /var/log/postgresql/postgresql-15-main.log | grep $SANDBOX_PID | grep -o "session time:.*$" | grep -o [0-9]:[0-9][0-9]:[0-9][0-9]\.[0-9][0-9][0-9] | convert_pg_time_to_ms | sort -n | tail -n 1)
+
+if test $NEXUS_LONGEST_DB_SESSION_MS -gt $BENCHMARK_TOT_MS; then
+ echo Nexus had a DB session longer than the benchmark itself, failing.
+ exit 1
+fi
+
+if test $SANDBOX_LONGEST_DB_SESSION_MS -gt $BENCHMARK_TOT_MS; then
+ echo Sandbox had a DB session longer than the benchmark itself, failing.
+ exit 1
+fi
+
+NEXUS_TIME_PORTION=$(echo "($NEXUS_LONGEST_DB_SESSION_MS / $BENCHMARK_TOT_MS) * 100" | bc -lq | sed 's/^\./0./')
+SANDBOX_TIME_PORTION=$(echo "($SANDBOX_LONGEST_DB_SESSION_MS / $BENCHMARK_TOT_MS) * 100" | bc -lq | sed 's/^\./0./')
+
+# Here: the further from 1 the better.
+echo Nexus longest DB session is $NEXUS_TIME_PORTION percent of the total benchmark time.
+echo Sandbox longest DB session is $SANDBOX_TIME_PORTION percent of the total benchmark time.
+
+# Now show the total space occupied by the database.
+# Although that's a _total_ estimate, it'll anyhow show
+# that _also_ libeufin has reasonable data usage.
+TOTAL_DB_SPACE=$(echo "SELECT pg_size_pretty(pg_database_size('talercheck'))" | psql -d talercheck | grep "^ [0-9]" | tr -d "[:blank:]")
+echo "The total space occupied by the database for $HOW_MANY_WITHDRAWALS withdrawals is $TOTAL_DB_SPACE"
diff --git a/packaging/.gitignore b/packaging/.gitignore
new file mode 100644
index 0000000..7b3eef0
--- /dev/null
+++ b/packaging/.gitignore
@@ -0,0 +1,2 @@
+debian-docker/dist
+ubuntu-mantic-docker/dist
diff --git a/packaging/debian-bookworm/Dockerfile b/packaging/debian-bookworm/Dockerfile
new file mode 100644
index 0000000..0d6c739
--- /dev/null
+++ b/packaging/debian-bookworm/Dockerfile
@@ -0,0 +1,56 @@
+FROM debian:bookworm
+# This file is in the public domain.
+#
+# Docker image to build Debian packages of
+# GNUnet, GNU Taler and GNU Anastasis.
+#
+# We need 'bookworm' for a reasonably recent NodeJS version.
+
+ARG DEBIAN_FRONTEND=noninteractive
+
+# Install dependencies
+RUN apt-get update
+RUN apt-get -y upgrade
+RUN apt-get update
+RUN apt-get -y dist-upgrade
+RUN apt-get update
+RUN apt-get -y install build-essential zip jq python3 python3-pip autoconf automake gcc make libtool libltdl-dev libmicrohttpd-dev libpq-dev libsqlite3-dev libunistring-dev libqrencode-dev libgcrypt-dev libsodium-dev libargon2-dev libjansson-dev recutils libgmp-dev texinfo pkgconf zlib1g-dev libopus-dev libextractor-dev libnss3-dev libcurl4-gnutls-dev autopoint linux-libc-dev python3-sphinx python3-sphinxcontrib.httpdomain policykit-1 libzbar-dev default-libmysqlclient-dev mandoc libpulse-dev libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev libbluetooth-dev iptables miniupnpc libpng-dev python3-jinja2 doxygen libjose-dev iproute2 sudo python3-sphinx-rtd-theme wget zile libogg-dev gettext net-tools po-debconf debhelper-compat dbconfig-pgsql nginx libgtk-3-dev libgladeui-dev libmagic-dev policykit-1 libnfc-dev python3-click python3-requests apt-utils nodejs npm openjdk-17-jdk-headless default-jre-headless pandoc groff
+RUN npm install -g node pnpm
+
+ADD "https://www.random.org/cgi-bin/randbyte?nbytes=10&format=h" skipcache
+
+COPY libeufin-build.sh /root/
+RUN chmod +x /root/libeufin-build.sh
+RUN /root/libeufin-build.sh master
+
+COPY gnunet-build.sh /root/
+RUN chmod +x /root/gnunet-build.sh
+RUN /root/gnunet-build.sh master
+
+COPY gnunet-gtk-build.sh /root/
+RUN chmod +x /root/gnunet-gtk-build.sh
+RUN /root/gnunet-gtk-build.sh master
+
+COPY libeufin-build.sh /root/
+RUN chmod +x /root/libeufin-build.sh
+RUN /root/libeufin-build.sh master
+
+COPY exchange-build.sh /root/
+RUN chmod +x /root/exchange-build.sh
+RUN /root/exchange-build.sh master
+
+COPY merchant-build.sh /root/
+RUN chmod +x /root/merchant-build.sh
+RUN /root/merchant-build.sh master
+
+COPY sync-build.sh /root/
+RUN chmod +x /root/sync-build.sh
+RUN /root/sync-build.sh master
+
+COPY anastasis-build.sh /root/
+RUN chmod +x /root/anastasis-build.sh
+RUN /root/anastasis-build.sh master
+
+COPY wallet-build.sh /root/
+RUN chmod +x /root/wallet-build.sh
+RUN /root/wallet-build.sh master
diff --git a/packaging/debian-bookworm/README b/packaging/debian-bookworm/README
new file mode 100644
index 0000000..0a092e5
--- /dev/null
+++ b/packaging/debian-bookworm/README
@@ -0,0 +1,16 @@
+Scripts to build Debian packages from source.
+
+TODO:
+- break up into separate build for GNUnet/Taler/Anastasis
+ => might be good to not run the entire pipeline only
+ because something changes in anastasis/wallet, as
+ that's not a good reason to re-build GNUnet ;-).
+- integrate with buildbot (integrationtests?)
+ to build-on-tag / build nightly and upload resulting
+ Deb packages to reprepro (fully automated for nightly,
+ but with explicit password-protected signature for tagged builds)
+- support other CPU architectures (by running in VM that emulates
+ other CPU architectures)
+- eventually: try to improve scripts to support older
+ Debian versions
+
diff --git a/packaging/ubuntu-docker/anastasis-build.sh b/packaging/debian-bookworm/anastasis-build.sh
index 2f1d533..24643e1 100644
--- a/packaging/ubuntu-docker/anastasis-build.sh
+++ b/packaging/debian-bookworm/anastasis-build.sh
@@ -5,22 +5,23 @@
set -eu
unset LD_LIBRARY_PATH
-mkdir -p /build
-cd /build
+mkdir -p /build/anastasis
+cd /build/anastasis
# Fetch source
-rm -rf anastasis anastasis-gtk
-git clone git://git.taler.net/anastasis
-git clone git://git.taler.net/anastasis-gtk
+rm -rf *
for n in anastasis anastasis-gtk
do
+ git clone git://git.taler.net/$n
cd $n
+ git checkout $1
./bootstrap
dpkg-buildpackage -rfakeroot -b -uc -us
cd ..
dpkg -i *.deb
done
-rm -f packages.tgz
-tar cvf packages.tgz *.deb
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/debian-bookworm/exchange-build.sh b/packaging/debian-bookworm/exchange-build.sh
new file mode 100644
index 0000000..b4a0115
--- /dev/null
+++ b/packaging/debian-bookworm/exchange-build.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+pip3 install --break-system-packages htmlark
+
+git clone git://git.taler.net/exchange
+cd exchange
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-docker/gnunet-build.sh b/packaging/debian-bookworm/gnunet-build.sh
index a37bff7..614c5e6 100644
--- a/packaging/ubuntu-docker/gnunet-build.sh
+++ b/packaging/debian-bookworm/gnunet-build.sh
@@ -5,22 +5,25 @@
set -eu
unset LD_LIBRARY_PATH
-mkdir -p /build
-cd /build
+mkdir -p /build/gnunet
+cd /build/gnunet
# Fetch source
-rm -rf gnunet gnunet-gtk
-git clone git://git.gnunet.org/gnunet
-git clone git://git.gnunet.org/gnunet-gtk
+rm -rf *
-for n in gnunet gnunet-gtk
+pip3 install --break-system-packages sphinx-book-theme sphinx-multiversion
+
+for n in gnunet
do
+ git clone git://git.gnunet.org/$n
cd $n
+ git checkout $1
./bootstrap
dpkg-buildpackage -rfakeroot -b -uc -us
cd ..
dpkg -i *.deb
done
-rm -rf packages.tgz
-tar cvf packages.tgz *.deb
+tar cvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/debian-docker/taler-build.sh b/packaging/debian-bookworm/gnunet-gtk-build.sh
index a7bfc62..4414c3f 100644
--- a/packaging/debian-docker/taler-build.sh
+++ b/packaging/debian-bookworm/gnunet-gtk-build.sh
@@ -5,22 +5,23 @@
set -eu
unset LD_LIBRARY_PATH
-mkdir -p /build
-cd /build
+mkdir -p /build/gnunet
+cd /build/gnunet
# Fetch source
-rm -rf exchange merchant
-git clone git://git.taler.net/exchange
-git clone git://git.taler.net/merchant
+rm -rf *
-for n in exchange merchant
+for n in gnunet-gtk
do
+ git clone git://git.gnunet.org/$n
cd $n
+ git checkout $1
./bootstrap
dpkg-buildpackage -rfakeroot -b -uc -us
cd ..
dpkg -i *.deb
done
-rm -f packages.tgz
-tar cvf packages.tgz *.deb
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/debian-bookworm/libeufin-build.sh b/packaging/debian-bookworm/libeufin-build.sh
new file mode 100644
index 0000000..eb440f2
--- /dev/null
+++ b/packaging/debian-bookworm/libeufin-build.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/libeufin
+cd /build/libeufin
+
+# Fetch source
+rm -rf *
+git clone git://git.taler.net/libeufin
+cd libeufin
+git checkout $1
+./bootstrap
+export JAVA_HOME=/usr
+./configure --prefix=/usr
+make install
+make deb
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/debian-bookworm/mdb-build.sh b/packaging/debian-bookworm/mdb-build.sh
new file mode 100644
index 0000000..d097240
--- /dev/null
+++ b/packaging/debian-bookworm/mdb-build.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+git clone git://git.taler.net/taler-mdb
+cd taler-mdb
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/debian-bookworm/merchant-build.sh b/packaging/debian-bookworm/merchant-build.sh
new file mode 100644
index 0000000..24f5f9d
--- /dev/null
+++ b/packaging/debian-bookworm/merchant-build.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+# pip3 install --break-system-packages htmlark
+
+git clone git://git.taler.net/merchant
+cd merchant
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/debian-bookworm/run.sh b/packaging/debian-bookworm/run.sh
new file mode 100755
index 0000000..2689a33
--- /dev/null
+++ b/packaging/debian-bookworm/run.sh
@@ -0,0 +1,25 @@
+#!/bin/sh
+set -eu
+CONTAINER=$USER/debbuilder:latest
+
+# If we don't specify the ulimit here, fakeroot is extremely slow.
+# See https://github.com/moby/moby/issues/45436
+docker build --ulimit "nofile=1024:1048576" -t $CONTAINER .
+
+rm -rf dist
+mkdir dist
+docker run --read-only $CONTAINER sleep 100 &
+sleep 1
+docker container ls
+ID=$(docker container ls | grep $CONTAINER | head -n1 | awk '{print $1}')
+echo "Extracting files from $ID"
+docker cp "$ID:/build/packages.tgz" .
+echo "Stopping $CONTAINER ($ID)"
+docker container stop $ID
+echo "Removing $CONTAINER"
+docker container rm $ID
+docker image rm $USER/debbuilder
+cd dist
+tar xvf ../packages.tgz
+cd ..
+rm packages.tgz
diff --git a/packaging/ubuntu-docker/taler-build.sh b/packaging/debian-bookworm/sync-build.sh
index a7bfc62..2f5d9df 100644
--- a/packaging/ubuntu-docker/taler-build.sh
+++ b/packaging/debian-bookworm/sync-build.sh
@@ -5,22 +5,23 @@
set -eu
unset LD_LIBRARY_PATH
-mkdir -p /build
-cd /build
+rm -rf /build/sync
-# Fetch source
-rm -rf exchange merchant
-git clone git://git.taler.net/exchange
-git clone git://git.taler.net/merchant
+mkdir -p /build/sync
+cd /build/sync
-for n in exchange merchant
+# Fetch source
+for n in sync
do
+ git clone git://git.taler.net/$n
cd $n
+ git checkout $1
./bootstrap
dpkg-buildpackage -rfakeroot -b -uc -us
cd ..
dpkg -i *.deb
done
-rm -f packages.tgz
-tar cvf packages.tgz *.deb
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-docker/wallet-build.sh b/packaging/debian-bookworm/wallet-build.sh
index f8ad8d4..6d807be 100644
--- a/packaging/ubuntu-docker/wallet-build.sh
+++ b/packaging/debian-bookworm/wallet-build.sh
@@ -5,19 +5,24 @@
set -eu
unset LD_LIBRARY_PATH
-mkdir -p /build
-cd /build
+mkdir -p /build/wallet
+cd /build/wallet
# Fetch source
-rm -rf wallet-core
+rm -rf *
git clone git://git.taler.net/wallet-core
cd wallet-core
+git checkout $1
./bootstrap
-./configure --prefix=/usr
-make install
+
+cd packages/taler-wallet-cli
+
dpkg-buildpackage -rfakeroot -b -uc -us
-cd ..
-rm -f packages.tgz
-tar cvf packages.tgz *.deb
+cd ../taler-harness
+dpkg-buildpackage -rfakeroot -b -uc -us
+
+cd ../
+
+tar uvf ../../../packages.tgz *.deb
diff --git a/packaging/debian-docker/run.sh b/packaging/debian-docker/run.sh
deleted file mode 100755
index c03607b..0000000
--- a/packaging/debian-docker/run.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/sh
-set -eu
-CONTAINER=$USER/debbuilder:latest
-docker build -t $CONTAINER .
-rm -rf dist
-mkdir dist
-docker run --read-only $CONTAINER sleep 5 &
-sleep 1
-ID=`docker container ls | grep $CONTAINER | awk '{print $1}`
-docker cp "$ID:/build/packages.tgz" .
-docker stop $CONTAINER
-docker container rm $ID
-cd dist
-tar xvf ../packages.tgz
-cd ..
-rm packages.tgz
diff --git a/packaging/docker-alpine/.gitignore b/packaging/docker-alpine/.gitignore
new file mode 100644
index 0000000..8135b62
--- /dev/null
+++ b/packaging/docker-alpine/.gitignore
@@ -0,0 +1,3 @@
+.env
+anastasis-data
+anastasis-config
diff --git a/packaging/docker-alpine/Makefile b/packaging/docker-alpine/Makefile
new file mode 100644
index 0000000..cbfb75a
--- /dev/null
+++ b/packaging/docker-alpine/Makefile
@@ -0,0 +1,48 @@
+author = nullptrderef
+
+all: build
+
+build-builder:
+ ./contrib/build-image.sh $(author) gnunet-builder builder
+
+build-libmicrohttpd-builder:
+ ./contrib/build-image.sh $(author) libmicrohttpd libmicrohttpd-builder builder
+
+build-libmicrohttpd: build-libmicrohttpd-builder
+ ./contrib/build-image.sh $(author) libmicrohttpd libmicrohttpd
+
+build-libgnunet-builder:
+ ./contrib/build-image.sh $(author) libgnunet libgnunet-builder builder
+
+build-libgnunet: build-libgnunet-builder
+ ./contrib/build-image.sh $(author) libgnunet libgnunet
+
+build-taler-exchange-builder:
+ ./contrib/build-image.sh $(author) taler-exchange taler-exchange-builder builder
+
+# TODO: regular non-lib taler exchange image
+
+build-libtalerexchange: build-taler-exchange-builder
+ ./contrib/build-image.sh $(author) libtalerexchange libtalerexchange
+
+build-taler-merchant-builder:
+ ./contrib/build-image.sh $(author) taler-merchant taler-merchant-builder builder
+
+build-libtalermerchant: build-taler-merchant-builder
+ ./contrib/build-image.sh $(author) libtalermerchant libtalermerchant
+
+# TODO: regular non-lib taler merchant image
+
+build-anastasis-builder:
+ ./contrib/build-image.sh $(author) anastasis anastasis-builder builder
+
+build-anastasis: build-anastasis-builder
+ ./contrib/build-image.sh $(author) anastasis anastasis
+
+clean:
+ ./contrib/clean.sh
+
+publish: build
+ ./contrib/publish.sh $(author)
+
+build: build-builder build-libmicrohttpd build-libgnunet build-libtalerexchange build-libtalermerchant build-anastasis
diff --git a/packaging/docker-alpine/README.anastasis.md b/packaging/docker-alpine/README.anastasis.md
new file mode 100644
index 0000000..6e01cf7
--- /dev/null
+++ b/packaging/docker-alpine/README.anastasis.md
@@ -0,0 +1,73 @@
+# nullptrderef/anastasis
+
+An [Anastasis](https://anastasis.lu) Image
+
+## Tags
+
+- [latest](https://hub.docker.com/repository/docker/nullptrderef/anastasis/tags?page=&page_size=&ordering=&name=latest): An image built from [Anastasis Master](https://git.taler.net/anastasis.git) at some point in time, hopefully recently.
+- [builder](https://hub.docker.com/repository/docker/nullptrderef/anastasis/tags?page=&page_size=&ordering=&name=builder): The image used to build `latest`'s binaries.
+
+## Usage
+
+The Image exposes a `/etc/anastasis` volume, which contains the anastasis configuration. By default, Anastasis listens on port `9977` in the container, however this behaviour can naturally be changed.
+
+### Docker CLI
+
+An example usage via the docker CLI may look like:
+
+`docker run -d -v './anastasis-config:/etc/anastasis' -p '127.0.0.1:9977:9977' nullptrderef/anastasis:latest`
+
+### Docker Compose
+
+```yml
+services:
+ database:
+ container_name: anastasis-db
+ image: postgres:alpine
+ hostname: database
+ ports:
+ - 127.0.0.1:15432:5432
+ networks:
+ anastasis-pg-net:
+ ipv4_address: 172.168.111.10
+ environment:
+ - POSTGRES_DB=postgres
+ - POSTGRES_USER=postgres
+ - POSTGRES_PASSWORD=password
+ - PGDATA=/var/lib/postgresql/data/
+ volumes:
+ - ./anastasis-data/:/var/lib/postgresql/data/:rw
+
+ anastasis:
+ container_name: anastasis
+ image: nullptrderef/anastasis:latest
+ depends_on:
+ - database
+ networks:
+ - anastasis-pg-net
+ ports:
+ - 127.0.0.1:9977:9977
+ volumes:
+ - ./anastasis-config:/etc/anastasis
+ command: sh -c "while true; do sleep 1; done"
+
+networks:
+ anastasis-pg-net:
+ driver: bridge
+ ipam:
+ driver: default
+ config:
+ - subnet: "172.168.111.0/24"
+```
+
+(Change the password ofc)
+
+### First Execution
+
+Upon first execution, it will exit before getting anywhere, as you'll need to edit the configuration to specify the correct postgres URL.
+
+The config file is created on this first execution.
+
+## Source
+
+This image was packaged from [deployment.git:packaging/docker-alpine](https://git.taler.net/deployment.git/tree/packaging/docker-alpine). It is licensed under the AGPL-3.0-OR-LATER
diff --git a/packaging/docker-alpine/anastasis-builder.Containerfile b/packaging/docker-alpine/anastasis-builder.Containerfile
new file mode 100644
index 0000000..7479d61
--- /dev/null
+++ b/packaging/docker-alpine/anastasis-builder.Containerfile
@@ -0,0 +1,24 @@
+FROM taler-alpine-imaging/taler-merchant:builder
+
+WORKDIR /builder
+RUN git clone https://git.taler.net/anastasis.git anastasis --recursive -j 8
+WORKDIR /builder/anastasis
+RUN apk add py3-pip
+RUN pip install htmlark --break-system-packages
+RUN ./bootstrap
+RUN ./configure \
+ --prefix=/prefix/usr \
+ --sysconfdir=/prefix/etc \
+ --mandir=/prefix/usr/share/man \
+ --localstatedir=/prefix/var \
+ --with-microhttpd=/prefix/usr \
+ --with-gnunet=/prefix/usr \
+ --enable-logging=verbose
+
+RUN make -j
+RUN make install
+
+LABEL org.opencontainers.image.title="Anastasis: Builder"
+LABEL org.opencontainers.image.description="The image used to compile Anastasis. Please don't directly use this tag, it won't work as expected on its own without libraries being properly installed."
+LABEL org.opencontainers.image.source="https://git.taler.net/deployment.git/tree/packaging/docker-alpine/anastasis-builder.Containerfile"
+LABEL org.opencontainers.image.authors="Nullptrderef <nullptrderef@proton.me>"
diff --git a/packaging/docker-alpine/anastasis-compose.yml b/packaging/docker-alpine/anastasis-compose.yml
new file mode 100644
index 0000000..fc90745
--- /dev/null
+++ b/packaging/docker-alpine/anastasis-compose.yml
@@ -0,0 +1,38 @@
+services:
+ database:
+ container_name: anastasis-db
+ image: postgres:alpine
+ hostname: database
+ ports:
+ - 127.0.0.1:15432:5432
+ networks:
+ anastasis-pg-net:
+ ipv4_address: 172.168.111.10
+ environment:
+ - POSTGRES_DB=postgres
+ - POSTGRES_USER=postgres
+ - POSTGRES_PASSWORD=password
+ - PGDATA=/var/lib/postgresql/data/
+ volumes:
+ - ./anastasis-data/:/var/lib/postgresql/data/:rw
+
+ anastasis:
+ container_name: anastasis
+ image: nullptrderef/anastasis:latest
+ depends_on:
+ - database
+ networks:
+ - anastasis-pg-net
+ ports:
+ - 127.0.0.1:9977:9977
+ volumes:
+ - ./anastasis-config:/etc/anastasis
+ command: sh -c "while true; do sleep 1; done"
+
+networks:
+ anastasis-pg-net:
+ driver: bridge
+ ipam:
+ driver: default
+ config:
+ - subnet: "172.168.111.0/24"
diff --git a/packaging/docker-alpine/anastasis.Containerfile b/packaging/docker-alpine/anastasis.Containerfile
new file mode 100644
index 0000000..b65757e
--- /dev/null
+++ b/packaging/docker-alpine/anastasis.Containerfile
@@ -0,0 +1,16 @@
+FROM taler-alpine-imaging/anastasis:builder AS builder
+COPY ./build-files/anastasis-entrypoint.sh /prefix/usr/bin/anastasis-docker-entrypoint
+
+FROM taler-alpine-imaging/libtalermerchant:latest
+COPY --from=builder /prefix/* /
+
+VOLUME [ "/etc/anastasis" ]
+EXPOSE 9977
+
+CMD [ "anastasis-docker-entrypoint" ]
+
+LABEL org.opencontainers.image.title="Anastasis"
+LABEL org.opencontainers.image.description="A 'keep-your-own-key' crypto-custody solution with password-less key recovery via multi-factor multi-party authentication."
+LABEL org.opencontainers.image.source="https://git.taler.net/deployment.git/tree/packaging/docker-alpine/anastasis.Containerfile"
+LABEL org.opencontainers.image.documentation="https://git.taler.net/deployment.git/tree/packaging/docker-alpine/README.anastasis.md"
+LABEL org.opencontainers.image.authors="Nullptrderef <nullptrderef@proton.me>"
diff --git a/packaging/docker-alpine/build-files/anastasis-entrypoint.sh b/packaging/docker-alpine/build-files/anastasis-entrypoint.sh
new file mode 100755
index 0000000..ec72716
--- /dev/null
+++ b/packaging/docker-alpine/build-files/anastasis-entrypoint.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env sh
+set -e
+
+if ! [ -d "/etc/anastasis" ]; then
+ echo -e "
+\x1b[0;31;1mFATAL\x1b[0m Anastasis-Docker needs /etc/anastasis mounted.
+ If you're calling docker directly, pass '-v ./anastasis-config:/etc/anastasis' to docker run
+ If you're using a docker compose, add the below to the service:
+ volumes:
+ - ./anastasis-config:/etc/anastasis:rw" 1>&2;
+ exit 1;
+fi;
+if ! [ -f "/etc/anastasis/anastasis.conf" ]; then
+ echo -e "\x1b[0;33;1mWARN\x1b[0m Anastasis-Docker needs /etc/anastasis/anastasis.conf.
+ If you're seeing this for the first time after setting up a volume binding, simply continue by modifying your config. A config was just created for you! :)
+ Otherwise, see below for information on adding a volume binding:
+ If you're calling docker directly, pass '-v ./anastasis-config:/etc/anastasis' to docker run
+ If you're using a docker compose, add the below to the service:
+ volumes:
+ - ./anastasis-config:/etc/anastasis:rw" 1>&2;
+ echo '[anastasis]
+SERVE = tcp
+PORT = 9977
+DB = postgres
+
+[stasis-postgres]
+CONFIG = postgres://admin:password@172.168.111.10:5432/postgres
+' > /etc/anastasis/anastasis.conf
+fi;
+
+set -ax
+
+anastasis-dbinit -c /etc/anastasis/anastasis.conf
+anastasis-httpd -c /etc/anastasis/anastasis.conf $@
diff --git a/packaging/docker-alpine/builder.Containerfile b/packaging/docker-alpine/builder.Containerfile
new file mode 100644
index 0000000..20e8488
--- /dev/null
+++ b/packaging/docker-alpine/builder.Containerfile
@@ -0,0 +1,17 @@
+FROM alpine:latest
+
+RUN <<EOT
+apk update && \
+ apk add --no-cache git make cmake automake autoconf libtool clang texinfo gnutls-dev curl-dev coreutils && \
+ mkdir /prefix -p
+EOT
+
+ENV CC=clang
+ENV CFLAGS="-Os"
+
+WORKDIR /builder
+
+LABEL org.opencontainers.image.title="GNUNet Builder"
+LABEL org.opencontainers.image.description="A baseline Alpine builder for LibGNUNet & Taler Alpine Images."
+LABEL org.opencontainers.image.source="https://git.taler.net/deployment.git/tree/packaging/docker-alpine/builder.Containerfile"
+LABEL org.opencontainers.image.authors="Nullptrderef <nullptrderef@proton.me>"
diff --git a/packaging/docker-alpine/contrib/build-image.sh b/packaging/docker-alpine/contrib/build-image.sh
new file mode 100755
index 0000000..6780587
--- /dev/null
+++ b/packaging/docker-alpine/contrib/build-image.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+set -eax
+
+docker buildx build -t "taler-alpine-imaging/$2:${4:-latest}" -f "$3".Containerfile .
+docker image tag "taler-alpine-imaging/$2:${4:-latest}" "$1/$2":${4:-latest}
diff --git a/packaging/docker-alpine/contrib/clean.sh b/packaging/docker-alpine/contrib/clean.sh
new file mode 100755
index 0000000..94c8414
--- /dev/null
+++ b/packaging/docker-alpine/contrib/clean.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+set -eax
+docker image rm $(docker image ls -a | awk '{print $1 " " $3}' | awk 'NR > 1' | grep taler-alpine-imaging/ | awk '{print $2}') --force
diff --git a/packaging/docker-alpine/contrib/publish.sh b/packaging/docker-alpine/contrib/publish.sh
new file mode 100755
index 0000000..2e98a0b
--- /dev/null
+++ b/packaging/docker-alpine/contrib/publish.sh
@@ -0,0 +1,10 @@
+#!/usr/bin/env bash
+set -e
+AUTHOR="$1";
+IMAGES="$(docker image ls -a | grep -v '<none>' | awk '{print $1 ":" $2 " " $3}' | awk 'NR > 1' | grep "$AUTHOR/")"
+echo "Pushing Images:
+$IMAGES"
+while IFS= read -r IMAGE; do
+ awk '{print "Deploying " $1 " (" $2 ") ..."}' <<< "$IMAGE"
+ docker push $(awk '{print $1}' <<< "$IMAGE")
+done <<< "$IMAGES" \ No newline at end of file
diff --git a/packaging/docker-alpine/libgnunet-builder.Containerfile b/packaging/docker-alpine/libgnunet-builder.Containerfile
new file mode 100644
index 0000000..667bffa
--- /dev/null
+++ b/packaging/docker-alpine/libgnunet-builder.Containerfile
@@ -0,0 +1,34 @@
+FROM taler-alpine-imaging/libmicrohttpd:builder
+
+WORKDIR /builder
+RUN git clone https://git.gnunet.org/gnunet.git gnunet --recursive -j 8
+WORKDIR /builder/gnunet
+# instead of bootstrap for a more minimal build
+RUN git submodule update --init --force --remote
+
+RUN apk add gettext gettext-dev gettext-static gettext-libs py3-gettext libgcrypt libgcrypt-dev jansson jansson-dev libsodium libsodium-dev libunistring libunistring-dev libpq libpq-dev
+ENV AUTOPOINT=autopoint
+
+RUN mkdir -p doc/handbook/texinfo/
+RUN touch doc/handbook/texinfo/gnunet.texi
+
+RUN autoreconf -fi
+RUN ./configure \
+ --prefix=/prefix/usr \
+ --with-microhttpd=/prefix/usr \
+ --sysconfdir=/prefix/etc \
+ --mandir=/prefix/usr/share/man \
+ --localstatedir=/prefix/var \
+ --disable-poisoning \
+ --enable-logging=verbose
+
+WORKDIR /builder/gnunet/src/include
+RUN make install
+WORKDIR /builder/gnunet/src/lib
+RUN make -j
+RUN make install
+
+LABEL org.opencontainers.image.title="LibGNUNet: Builder"
+LABEL org.opencontainers.image.description="The image that was used to build the matching :latest tag"
+LABEL org.opencontainers.image.source="https://git.taler.net/deployment.git/tree/packaging/docker-alpine/libgnunet-builder.Containerfile"
+LABEL org.opencontainers.image.authors="Nullptrderef <nullptrderef@proton.me>"
diff --git a/packaging/docker-alpine/libgnunet.Containerfile b/packaging/docker-alpine/libgnunet.Containerfile
new file mode 100644
index 0000000..8f04b34
--- /dev/null
+++ b/packaging/docker-alpine/libgnunet.Containerfile
@@ -0,0 +1,10 @@
+FROM taler-alpine-imaging/libgnunet:builder AS builder
+
+FROM taler-alpine-imaging/libmicrohttpd:latest
+RUN apk add --no-cache libgcrypt jansson gettext libsodium libunistring libpq curl gnutls libtool
+COPY --from=builder /prefix/* /
+
+LABEL org.opencontainers.image.title="LibGNUNet"
+LABEL org.opencontainers.image.description="An alpine-based image containing LibGNUNet"
+LABEL org.opencontainers.image.source="https://git.taler.net/deployment.git/tree/packaging/docker-alpine/libgnunet.Containerfile"
+LABEL org.opencontainers.image.authors="Nullptrderef <nullptrderef@proton.me>"
diff --git a/packaging/docker-alpine/libmicrohttpd-builder.Containerfile b/packaging/docker-alpine/libmicrohttpd-builder.Containerfile
new file mode 100644
index 0000000..00cb2bf
--- /dev/null
+++ b/packaging/docker-alpine/libmicrohttpd-builder.Containerfile
@@ -0,0 +1,23 @@
+FROM taler-alpine-imaging/gnunet-builder:latest
+
+WORKDIR /builder/libmicrohttpd
+RUN git clone https://git.gnunet.org/libmicrohttpd.git .
+RUN ./bootstrap
+RUN ./configure \
+ --prefix=/prefix/usr \
+ --sysconfdir=/prefix/etc \
+ --mandir=/prefix/usr/share/man \
+ --infodir=/prefix/usr/share/info \
+ --disable-thread-names \
+ --enable-largefile \
+ --enable-curl \
+ --enable-https \
+ --enable-messages
+RUN make -j
+RUN make check
+RUN make install
+
+LABEL org.opencontainers.image.title="LibMicroHTTPD: Builder"
+LABEL org.opencontainers.image.description="An alpine-based image containing LibMicroHTTPD"
+LABEL org.opencontainers.image.source="https://git.taler.net/deployment.git/tree/packaging/docker-alpine/libmicrohttpd.Containerfile"
+LABEL org.opencontainers.image.authors="Nullptrderef <nullptrderef@proton.me>"
diff --git a/packaging/docker-alpine/libmicrohttpd.Containerfile b/packaging/docker-alpine/libmicrohttpd.Containerfile
new file mode 100644
index 0000000..100e38c
--- /dev/null
+++ b/packaging/docker-alpine/libmicrohttpd.Containerfile
@@ -0,0 +1,9 @@
+FROM taler-alpine-imaging/libmicrohttpd:builder AS builder
+
+FROM alpine:latest
+COPY --from=builder /prefix/* /
+
+LABEL org.opencontainers.image.title="LibMicroHTTPD"
+LABEL org.opencontainers.image.description="An alpine-based image containing LibMicroHTTPD"
+LABEL org.opencontainers.image.source="https://git.taler.net/deployment.git/tree/packaging/docker-alpine/libmicrohttpd.Containerfile"
+LABEL org.opencontainers.image.authors="Nullptrderef <nullptrderef@proton.me>"
diff --git a/packaging/docker-alpine/libtalerexchange.Containerfile b/packaging/docker-alpine/libtalerexchange.Containerfile
new file mode 100644
index 0000000..eb938c3
--- /dev/null
+++ b/packaging/docker-alpine/libtalerexchange.Containerfile
@@ -0,0 +1,13 @@
+FROM taler-alpine-imaging/taler-exchange:builder AS builder
+
+FROM alpine AS builder2
+COPY --from=builder /prefix/usr/lib/* /prefix/usr/lib
+# RUN rm -rf /prefix/lib/taler_plugin_*
+
+FROM taler-alpine-imaging/libgnunet:latest
+COPY --from=builder2 /prefix/* /
+
+LABEL org.opencontainers.image.title="LibTalerExchange"
+LABEL org.opencontainers.image.description="An alpine-based image containing the libraries from the Taler Exchange"
+LABEL org.opencontainers.image.source="https://git.taler.net/deployment.git/tree/packaging/docker-alpine/libtalerexchange.Containerfile"
+LABEL org.opencontainers.image.authors="Nullptrderef <nullptrderef@proton.me>"
diff --git a/packaging/docker-alpine/libtalermerchant.Containerfile b/packaging/docker-alpine/libtalermerchant.Containerfile
new file mode 100644
index 0000000..bf5dc9f
--- /dev/null
+++ b/packaging/docker-alpine/libtalermerchant.Containerfile
@@ -0,0 +1,12 @@
+FROM taler-alpine-imaging/taler-merchant:builder AS builder
+
+FROM alpine AS builder2
+COPY --from=builder /prefix/usr/lib/* /prefix/usr/lib
+
+FROM taler-alpine-imaging/libtalerexchange:latest
+COPY --from=builder2 /prefix/* /
+
+LABEL org.opencontainers.image.title="LibTalerMerchant"
+LABEL org.opencontainers.image.description="An alpine-based image containing the libraries from the Taler Merchant"
+LABEL org.opencontainers.image.source="https://git.taler.net/deployment.git/tree/packaging/docker-alpine/libtalermerchant.Containerfile"
+LABEL org.opencontainers.image.authors="Nullptrderef <nullptrderef@proton.me>"
diff --git a/packaging/docker-alpine/taler-exchange-builder.Containerfile b/packaging/docker-alpine/taler-exchange-builder.Containerfile
new file mode 100644
index 0000000..329116f
--- /dev/null
+++ b/packaging/docker-alpine/taler-exchange-builder.Containerfile
@@ -0,0 +1,22 @@
+FROM taler-alpine-imaging/libgnunet:builder
+
+WORKDIR /builder
+RUN git clone https://git.taler.net/exchange.git exchange --recursive -j 8
+WORKDIR /builder/exchange
+RUN apk add pandoc recutils py3-jinja2 jq
+RUN ./bootstrap
+RUN ./configure \
+ --prefix=/prefix/usr \
+ --sysconfdir=/prefix/etc \
+ --mandir=/prefix/usr/share/man \
+ --localstatedir=/prefix/var \
+ --with-microhttpd=/prefix/usr \
+ --enable-logging=verbose
+
+RUN make -j
+RUN make install
+
+LABEL org.opencontainers.image.title="Taler Exchange: Builder"
+LABEL org.opencontainers.image.description="An alpine-based image for building the Taler Exchange"
+LABEL org.opencontainers.image.source="https://git.taler.net/deployment.git/tree/packaging/docker-alpine/taler-exchange-builder.Containerfile"
+LABEL org.opencontainers.image.authors="Nullptrderef <nullptrderef@proton.me>"
diff --git a/packaging/docker-alpine/taler-merchant-builder.Containerfile b/packaging/docker-alpine/taler-merchant-builder.Containerfile
new file mode 100644
index 0000000..9c92bb6
--- /dev/null
+++ b/packaging/docker-alpine/taler-merchant-builder.Containerfile
@@ -0,0 +1,22 @@
+FROM taler-alpine-imaging/taler-exchange:builder
+
+WORKDIR /builder
+RUN git clone https://git.taler.net/merchant.git merchant --recursive -j 8
+WORKDIR /builder/merchant
+RUN apk add libqrencode libqrencode-dev
+RUN ./bootstrap
+RUN ./configure \
+ --prefix=/prefix/usr \
+ --sysconfdir=/prefix/etc \
+ --mandir=/prefix/usr/share/man \
+ --localstatedir=/prefix/var \
+ --with-microhttpd=/prefix/usr \
+ --enable-logging=verbose
+
+RUN make -j
+RUN make install
+
+LABEL org.opencontainers.image.title="Taler Merchant: Builder"
+LABEL org.opencontainers.image.description="An alpine-based image for building the Taler Merchant"
+LABEL org.opencontainers.image.source="https://git.taler.net/deployment.git/tree/packaging/docker-alpine/taler-merchant-builder.Containerfile"
+LABEL org.opencontainers.image.authors="Nullptrderef <nullptrderef@proton.me>"
diff --git a/packaging/ng/.gitignore b/packaging/ng/.gitignore
new file mode 100644
index 0000000..681bab4
--- /dev/null
+++ b/packaging/ng/.gitignore
@@ -0,0 +1,2 @@
+packages/
+cache/
diff --git a/packaging/ng/README.md b/packaging/ng/README.md
new file mode 100644
index 0000000..50ec7cb
--- /dev/null
+++ b/packaging/ng/README.md
@@ -0,0 +1,25 @@
+# taler-packaging ng
+
+This directory contains the improved implementation of container-based
+packaging for GNU Taler and associated packages.
+
+The major improvement is that a component can be built *without* having to
+rebuild every single package.
+
+Instead, all dependencies are managed via apt. Each package is built in a
+fresh environment, with build dependencies pulled in via apt. Previously built
+packages are available via a file-based apt source.
+
+Build-time dependencies are installed automatically, ensuring that
+missing build-time dependencies are detected.
+
+The packaging logic is also the same for Debian and Ubuntu.
+
+
+## Structure
+
+* `packages/$DISTRO-$DISTRO_VERNAME`: Output folder for debian packages.
+Also contains a `Packages.xz` metadata file generated by `dpkg-scanpackages`
+so that this folder can be directly consumed as a trusted package source.
+
+* `buildscripts/*`: Build scripts used during the package build steps.
diff --git a/packaging/ng/build.sh b/packaging/ng/build.sh
new file mode 100755
index 0000000..2523319
--- /dev/null
+++ b/packaging/ng/build.sh
@@ -0,0 +1,67 @@
+#!/usr/bin/env bash
+
+set -eu
+
+usage() {
+ echo Usage: $0 DISTRO >&2
+ exit 1
+}
+
+if [[ $# != 1 ]]; then
+ usage
+fi
+
+LABEL=$1
+IMAGE_TAG=taler-packaging-$LABEL:latest
+DOCKERFILE=distros/Dockerfile.$LABEL
+PKGDIR=packages/$LABEL
+
+if [[ ! -e "$DOCKERFILE" ]]; then
+ echo Need $DOCKERFILE to build $LABEL >&2
+ exit 1
+fi
+
+
+function build_base() {
+ echo "Building $IMAGE_TAG from $DOCKERFILE"
+ # Build the base image. Usually fast because it's cached.
+ podman build -t $IMAGE_TAG -f $DOCKERFILE .
+}
+
+function run() {
+ SCRIPT=$1
+ shift
+ mkdir -p $PKGDIR
+ mkdir -p cache
+ podman run -it --entrypoint=/bin/bash \
+ --mount type=bind,source="$(pwd)"/buildscripts,target=/buildscripts,readonly \
+ --mount type=bind,source="$(pwd)"/buildconfig,target=/buildconfig,readonly \
+ --mount type=bind,source="$(pwd)"/$PKGDIR,target=/pkgdir \
+ $IMAGE_TAG "/buildscripts/$SCRIPT" "$@"
+}
+
+function debug() {
+ mkdir -p $PKGDIR
+ podman run -it --entrypoint=/bin/bash \
+ --mount type=bind,source="$(pwd)"/buildscripts,target=/buildscripts,readonly \
+ --mount type=bind,source="$(pwd)"/buildconfig,target=/buildconfig,readonly \
+ --mount type=bind,source="$(pwd)"/$PKGDIR,target=/pkgdir \
+ $IMAGE_TAG -i
+}
+
+function build_all() {
+ run generic.sh gnunet
+ run generic.sh gnunet-gtk
+ run generic.sh taler-exchange
+ run generic.sh taler-merchant
+ run generic.sh sync
+ run generic.sh anastasis
+ run generic.sh anastasis-gtk
+ run generic.sh libeufin
+ run generic.sh taler-merchant-demos
+ run generic.sh taler-wallet-cli packages/taler-wallet-cli
+ run generic.sh taler-harness packages/taler-harness
+ # run generic.sh libeufin  # duplicate: libeufin is already built above
+}
+
+build_all
diff --git a/packaging/ng/buildconfig/README b/packaging/ng/buildconfig/README
new file mode 100644
index 0000000..005630f
--- /dev/null
+++ b/packaging/ng/buildconfig/README
@@ -0,0 +1,5 @@
+These files determine the git tag from which the respective components are
+built in the base Docker image.
+
+They are in separate files to make modification checking with
+staged Docker builds work nicely.
diff --git a/packaging/ng/buildconfig/anastasis-gtk.giturl b/packaging/ng/buildconfig/anastasis-gtk.giturl
new file mode 100644
index 0000000..83634d2
--- /dev/null
+++ b/packaging/ng/buildconfig/anastasis-gtk.giturl
@@ -0,0 +1 @@
+git://git.taler.net/anastasis-gtk.git
diff --git a/packaging/ng/buildconfig/anastasis-gtk.tag b/packaging/ng/buildconfig/anastasis-gtk.tag
new file mode 100644
index 0000000..5aff472
--- /dev/null
+++ b/packaging/ng/buildconfig/anastasis-gtk.tag
@@ -0,0 +1 @@
+v0.4.1
diff --git a/packaging/ng/buildconfig/anastasis.giturl b/packaging/ng/buildconfig/anastasis.giturl
new file mode 100644
index 0000000..b29317f
--- /dev/null
+++ b/packaging/ng/buildconfig/anastasis.giturl
@@ -0,0 +1 @@
+git://git.taler.net/anastasis.git
diff --git a/packaging/ng/buildconfig/anastasis.tag b/packaging/ng/buildconfig/anastasis.tag
new file mode 100644
index 0000000..e1be908
--- /dev/null
+++ b/packaging/ng/buildconfig/anastasis.tag
@@ -0,0 +1 @@
+v0.4.2-dev.1
diff --git a/packaging/ng/buildconfig/gnunet-gtk.giturl b/packaging/ng/buildconfig/gnunet-gtk.giturl
new file mode 100644
index 0000000..7610ee3
--- /dev/null
+++ b/packaging/ng/buildconfig/gnunet-gtk.giturl
@@ -0,0 +1 @@
+git://git.gnunet.org/gnunet-gtk.git
diff --git a/packaging/ng/buildconfig/gnunet-gtk.tag b/packaging/ng/buildconfig/gnunet-gtk.tag
new file mode 100644
index 0000000..759e855
--- /dev/null
+++ b/packaging/ng/buildconfig/gnunet-gtk.tag
@@ -0,0 +1 @@
+v0.21.0
diff --git a/packaging/ng/buildconfig/gnunet.giturl b/packaging/ng/buildconfig/gnunet.giturl
new file mode 100644
index 0000000..8764364
--- /dev/null
+++ b/packaging/ng/buildconfig/gnunet.giturl
@@ -0,0 +1 @@
+git://git.gnunet.org/gnunet.git
diff --git a/packaging/ng/buildconfig/gnunet.tag b/packaging/ng/buildconfig/gnunet.tag
new file mode 100644
index 0000000..40c8500
--- /dev/null
+++ b/packaging/ng/buildconfig/gnunet.tag
@@ -0,0 +1 @@
+v0.21.1
diff --git a/packaging/ng/buildconfig/libeufin.giturl b/packaging/ng/buildconfig/libeufin.giturl
new file mode 100644
index 0000000..b794afb
--- /dev/null
+++ b/packaging/ng/buildconfig/libeufin.giturl
@@ -0,0 +1 @@
+git://git.taler.net/libeufin.git
diff --git a/packaging/ng/buildconfig/libeufin.tag b/packaging/ng/buildconfig/libeufin.tag
new file mode 100644
index 0000000..c91125d
--- /dev/null
+++ b/packaging/ng/buildconfig/libeufin.tag
@@ -0,0 +1 @@
+v0.10.1
diff --git a/packaging/ng/buildconfig/sync.giturl b/packaging/ng/buildconfig/sync.giturl
new file mode 100644
index 0000000..21cb5d7
--- /dev/null
+++ b/packaging/ng/buildconfig/sync.giturl
@@ -0,0 +1 @@
+git://git.taler.net/sync.git
diff --git a/packaging/ng/buildconfig/sync.tag b/packaging/ng/buildconfig/sync.tag
new file mode 100644
index 0000000..c91125d
--- /dev/null
+++ b/packaging/ng/buildconfig/sync.tag
@@ -0,0 +1 @@
+v0.10.1
diff --git a/packaging/ng/buildconfig/taler-exchange.giturl b/packaging/ng/buildconfig/taler-exchange.giturl
new file mode 100644
index 0000000..19309f9
--- /dev/null
+++ b/packaging/ng/buildconfig/taler-exchange.giturl
@@ -0,0 +1 @@
+git://git.taler.net/exchange.git
diff --git a/packaging/ng/buildconfig/taler-exchange.tag b/packaging/ng/buildconfig/taler-exchange.tag
new file mode 100644
index 0000000..0a39b20
--- /dev/null
+++ b/packaging/ng/buildconfig/taler-exchange.tag
@@ -0,0 +1 @@
+v0.10.3-dev.1
diff --git a/packaging/ng/buildconfig/taler-harness.giturl b/packaging/ng/buildconfig/taler-harness.giturl
new file mode 100644
index 0000000..0d713ec
--- /dev/null
+++ b/packaging/ng/buildconfig/taler-harness.giturl
@@ -0,0 +1 @@
+git://git.taler.net/wallet-core.git
diff --git a/packaging/ng/buildconfig/taler-harness.tag b/packaging/ng/buildconfig/taler-harness.tag
new file mode 100644
index 0000000..129170b
--- /dev/null
+++ b/packaging/ng/buildconfig/taler-harness.tag
@@ -0,0 +1 @@
+v0.10.8-dev.1
diff --git a/packaging/ng/buildconfig/taler-merchant-demos.giturl b/packaging/ng/buildconfig/taler-merchant-demos.giturl
new file mode 100644
index 0000000..8fd4286
--- /dev/null
+++ b/packaging/ng/buildconfig/taler-merchant-demos.giturl
@@ -0,0 +1 @@
+git://git.taler.net/taler-merchant-demos
diff --git a/packaging/ng/buildconfig/taler-merchant-demos.tag b/packaging/ng/buildconfig/taler-merchant-demos.tag
new file mode 100644
index 0000000..c91125d
--- /dev/null
+++ b/packaging/ng/buildconfig/taler-merchant-demos.tag
@@ -0,0 +1 @@
+v0.10.1
diff --git a/packaging/ng/buildconfig/taler-merchant.giturl b/packaging/ng/buildconfig/taler-merchant.giturl
new file mode 100644
index 0000000..cffa220
--- /dev/null
+++ b/packaging/ng/buildconfig/taler-merchant.giturl
@@ -0,0 +1 @@
+git://git.taler.net/merchant.git
diff --git a/packaging/ng/buildconfig/taler-merchant.tag b/packaging/ng/buildconfig/taler-merchant.tag
new file mode 100644
index 0000000..3123ff9
--- /dev/null
+++ b/packaging/ng/buildconfig/taler-merchant.tag
@@ -0,0 +1 @@
+v0.10.2
diff --git a/packaging/ng/buildconfig/taler-wallet-cli.giturl b/packaging/ng/buildconfig/taler-wallet-cli.giturl
new file mode 100644
index 0000000..0d713ec
--- /dev/null
+++ b/packaging/ng/buildconfig/taler-wallet-cli.giturl
@@ -0,0 +1 @@
+git://git.taler.net/wallet-core.git
diff --git a/packaging/ng/buildconfig/taler-wallet-cli.tag b/packaging/ng/buildconfig/taler-wallet-cli.tag
new file mode 100644
index 0000000..b434d4c
--- /dev/null
+++ b/packaging/ng/buildconfig/taler-wallet-cli.tag
@@ -0,0 +1 @@
+v0.10.6
diff --git a/packaging/ng/buildscripts/generic.sh b/packaging/ng/buildscripts/generic.sh
new file mode 100644
index 0000000..85235c6
--- /dev/null
+++ b/packaging/ng/buildscripts/generic.sh
@@ -0,0 +1,54 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+PACKAGE=$1
+# Path of the debian/ folder in the repository
+DEBIANPATH=${2:-.}
+
+echo Building $1 with generic build logic >&2
+
+cd /pkgdir
+dpkg-scanpackages . | xz - > /pkgdir/Packages.xz
+echo "deb [trusted=yes] file:/pkgdir ./" >/etc/apt/sources.list.d/taler-packaging-local.list
+apt-get update
+
+mkdir -p /build
+cd /build
+
+TAG=$(cat /buildconfig/$PACKAGE.tag)
+GITURL=$(cat /buildconfig/$PACKAGE.giturl)
+
+git config --global advice.detachedHead false
+git clone --depth=1 --branch=$TAG "$GITURL" "$PACKAGE"
+
+cd "/build/$PACKAGE/$DEBIANPATH"
+
+# Get current version from debian/control file.
+DEB_VERSION=$(dpkg-parsechangelog -S Version)
+
+echo "Current version of $PACKAGE/$DEBIANPATH is $DEB_VERSION"
+
+apt-cache show "$PACKAGE" | grep "Version: $DEB_VERSION" >/dev/null && found=true || found=false
+if [ $found = true ]; then
+ echo "$PACKAGE version $DEB_VERSION already built, skipping"
+ exit 0
+fi
+
+cd "/build/$PACKAGE"
+./bootstrap
+
+cd "/build/$PACKAGE/$DEBIANPATH"
+
+# Install build-time dependencies.
+mk-build-deps --install --tool='apt-get -o Debug::pkgProblemResolver=yes --no-install-recommends --yes' debian/control
+
+# We do a sparse checkout, so we need to hint
+# the version to the build system.
+echo $DEB_VERSION > .version
+dpkg-buildpackage -rfakeroot -b -uc -us
+
+cp ../*.deb /pkgdir/
diff --git a/packaging/ng/distros/Dockerfile.debian-bookworm b/packaging/ng/distros/Dockerfile.debian-bookworm
new file mode 100644
index 0000000..b6aade0
--- /dev/null
+++ b/packaging/ng/distros/Dockerfile.debian-bookworm
@@ -0,0 +1,37 @@
+FROM debian:bookworm
+# This file is in the public domain.
+#
+# Docker image to build Debian packages of
+# GNUnet, GNU Taler and GNU Anastasis.
+#
+ARG DEBIAN_FRONTEND=noninteractive
+
+# Install dependencies
+RUN apt-get update
+RUN apt-get -y upgrade
+RUN apt-get -y install build-essential zip jq python3 python3-pip nodejs npm
+RUN apt-get -y install autoconf automake gcc make libtool libltdl-dev libmicrohttpd-dev libpq-dev libsqlite3-dev libunistring-dev libqrencode-dev libgcrypt-dev libsodium-dev libargon2-dev libjansson-dev recutils libgmp-dev texinfo pkgconf zlib1g-dev libopus-dev libextractor-dev libnss3-dev libcurl4-gnutls-dev autopoint
+RUN apt-get -y install libzbar-dev libmariadb-dev-compat libmariadb-dev mandoc libpulse-dev libgstreamer1.0-dev libbluetooth-dev iptables miniupnpc libpng-dev
+RUN apt-get -y install python3-jinja2 doxygen libjose-dev iproute2 sudo
+RUN apt-get -y install wget zile
+RUN apt-get -y install libogg-dev gettext net-tools po-debconf debhelper-compat dbconfig-pgsql nginx
+RUN apt-get -y install libgtk-3-dev libgladeui-dev libmagic-dev policykit-1
+RUN apt-get -y install dbconfig-no-thanks
+RUN apt-get -y install devscripts equivs
+# For libeufin:
+RUN apt-get -y install python3-click python3-requests python3
+
+RUN apt-get -y install \
+ openjdk-17-jre-headless \
+ openjdk-17-jdk-headless \
+ default-jre-headless \
+ ;
+
+
+RUN pip install sphinx_rtd_theme --break-system-packages
+#RUN npm install -g npm
+RUN npm install -g pnpm
+
+RUN apt-get update
+RUN apt-get -y upgrade
+RUN apt-get -y dist-upgrade
diff --git a/packaging/ubuntu-docker/Dockerfile b/packaging/ng/distros/Dockerfile.ubuntu-kinetic
index dd68501..0a90fd7 100644
--- a/packaging/ubuntu-docker/Dockerfile
+++ b/packaging/ng/distros/Dockerfile.ubuntu-kinetic
@@ -1,8 +1,10 @@
-FROM ubuntu:jammy
+FROM ubuntu:kinetic
# This file is in the public domain.
#
# Docker image to build Ubuntu packages of
# GNUnet, GNU Taler and GNU Anastasis.
+#
+# We need 'kinetic' for a reasonably recent NodeJS version.
ARG DEBIAN_FRONTEND=noninteractive
@@ -15,33 +17,16 @@ RUN apt-get -y install libzbar-dev libmysqlclient-dev mandoc libpulse-dev libgst
RUN apt-get -y install python3-jinja2 doxygen libjose-dev iproute2 sudo
RUN apt-get -y install wget zile
RUN apt-get -y install libogg-dev gettext net-tools po-debconf debhelper-compat dbconfig-pgsql nginx
-RUN apt-get -y install libgtk-3-dev libgladeui-dev libmagic-dev
+RUN apt-get -y install libgtk-3-dev libgladeui-dev libmagic-dev policykit-1
RUN apt-get -y install dbconfig-no-thanks
+RUN apt-get -y install devscripts equivs
+# For libeufin:
+RUN apt-get -y install openjdk-17-jdk python3-click python3-requests python3
RUN pip install sphinx_rtd_theme
RUN npm install -g npm
RUN /usr/local/bin/npm install -g npm pnpm node
-COPY taler.list /etc/apt/sources.list.d/
-COPY taler-systems.gpg.key /root
-RUN apt-key add /root/taler-systems.gpg.key
-
RUN apt-get update
RUN apt-get -y upgrade
RUN apt-get -y dist-upgrade
-
-COPY gnunet-build.sh /root/
-RUN chmod +x /root/gnunet-build.sh
-RUN /root/gnunet-build.sh
-COPY taler-build.sh /root/
-RUN chmod +x /root/taler-build.sh
-RUN /root/taler-build.sh
-COPY sync-build.sh /root/
-RUN chmod +x /root/sync-build.sh
-RUN /root/sync-build.sh
-COPY anastasis-build.sh /root/
-RUN chmod +x /root/anastasis-build.sh
-RUN /root/anastasis-build.sh
-COPY wallet-build.sh /root/
-RUN chmod +x /root/wallet-build.sh
-RUN /root/wallet-build.sh
diff --git a/packaging/ng/print-latest-versions b/packaging/ng/print-latest-versions
new file mode 100755
index 0000000..ff6668e
--- /dev/null
+++ b/packaging/ng/print-latest-versions
@@ -0,0 +1,22 @@
+#!/usr/bin/bash
+
+function getver() {
+ ver=$(git -c 'versionsort.suffix=-' \
+ ls-remote --exit-code --refs --sort='version:refname' --tags $2 '*.*.*' \
+ | tail --lines=1 \
+ | cut --delimiter='/' --fields=3)
+ curr=$(cat buildconfig/$1.tag)
+ if [[ "$curr" != "$ver" ]]; then
+ echo -n "[!] "
+ fi
+ echo $1 "curr: $curr" latest: $ver
+}
+
+getver taler-exchange git://git.taler.net/exchange
+getver taler-merchant git://git.taler.net/merchant
+getver taler-merchant-demos git://git.taler.net/taler-merchant-demos
+getver libeufin git://git.taler.net/libeufin
+getver taler-wallet-cli git://git.taler.net/wallet-core
+getver gnunet git://git.gnunet.org/gnunet
+getver sync git://git.taler.net/sync
+getver anastasis git://git.taler.net/anastasis
diff --git a/packaging/ubuntu-docker/README b/packaging/ubuntu-docker/README
deleted file mode 100644
index b10636a..0000000
--- a/packaging/ubuntu-docker/README
+++ /dev/null
@@ -1,9 +0,0 @@
-Scripts to build Ubuntu packages from source.
-
-TODO:
-- check build for warnings/missing dependencies
- (espcially GNUnet!)
-- break up into separate build for GNUnet/Taler/Anastasis
-- integrate with buildbot (integrationtests?)
- to build-on-tag
-- support other architectures!
diff --git a/packaging/ubuntu-docker/run.sh b/packaging/ubuntu-docker/run.sh
deleted file mode 100755
index 2152cce..0000000
--- a/packaging/ubuntu-docker/run.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/sh
-set -eu
-CONTAINER=$USER/ububuilder:latest
-docker build -t $CONTAINER .
-rm -rf dist
-mkdir dist
-docker start $CONTAINER
-#sleep 1
-ID=`docker container ls -n1 | grep $CONTAINER | awk '{print $1}`
-docker cp "$ID:/build/packages.tgz" .
-docker stop $CONTAINER
-docker container rm $ID
-cd dist
-tar xvf ../packages.tgz
-cd ..
-rm packages.tgz
diff --git a/packaging/ubuntu-docker/taler-systems.gpg.key b/packaging/ubuntu-docker/taler-systems.gpg.key
deleted file mode 100644
index 149997b..0000000
--- a/packaging/ubuntu-docker/taler-systems.gpg.key
+++ /dev/null
@@ -1,41 +0,0 @@
------BEGIN PGP PUBLIC KEY BLOCK-----
-
-mQGNBF/uHpABDAC+fBnKB5ohWM69Sw7qbQqbMOq7TeZ8XyFp+j4hTQi0Fwef9bcx
-w8SSgQYBmp73VhkbxxDfbxqVLnpMZRI1UGgr35rbflBZJ29KOh+mgOpo2y21M3xh
-8UwfzgliCN5kRBq7u7nH+MGgsNNZacT6r5icS8FMX509Wcj/k3a7pTtOCInZUeOU
-tpjot0QADqV5fUlEwbFnHxbTS9hZpVFH4knKrzwiDDlw/qO4DCM8w2G8Bs+Jh8VF
-eldKYB9H07/ppOFt88vY660bireC1B3TRdMVYoRqBbcQ8lCVlQzBm1OGodbWq+XH
-G8XciXFoiNBexwTL9ve+8e9omsQ6MnHOEw/d4jlOLp7gMDTIq54/OvrQQmFcagb7
-R4WiC8C78EXEwRxq7f3Jb3pNs1atWurJa7YXkqdJNgcGM6d/Do90RIDPFxVkwokV
-0glnNPmeX2v5Xt77as35tpvE7WiItKDMiOrZ4epwhjrxhJPBYYs9ruTQxmancTSA
-YHHLOckuj/jDY+8AEQEAAbQkVGFsZXIgU3lzdGVtcyBTQSA8Y29udGFjdEB0YWxl
-ci5uZXQ+iQHUBBMBCgA+FiEEAISZPCxs30cafX79JuVGpf5+AmYFAl/uHpACGwMF
-CQPCZwAFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AACgkQJuVGpf5+AmYYgQwAngdY
-msJf8NlKqM2uYh1woi7T3BCfHbdehhhLQHNIuZtLt41ZugPYZNYzQNkaxIiGHAUr
-TPvWTiGOHDH5i4ra5gIuuCqv7Cu9zTbcFvgcp6TKcN4M/eAWtbQ/CKUjbKnYOSf4
-h0M4w78v1nX0SzhwyF/3t+MwEUhmc6ERbtboVl4UskVPfrbUaYvez/W11kmd3ViU
-LqKLuhat3KsjCkX2GbOAGS/rYvkrjUopu5Isoeq+dWgy8Pf4i0Y9VmDUyq7fJYZI
-+dSVzxFsnX8h7V404wZcsNm4TBgJLm5umzYjDYJRSvgkBiPdsF5W6piAgvXHKt93
-cmVkOXhaGNFX7Vjp2/CYlW+CweWV3bgpeCeP+ikoqQf8BMTE+KdgY69yVRfJZAf2
-3K712TvnOW0XOSpcU4Zds+4B9kOkYtsGs1NYd5EyhyL7xin+Xh4u751mRdtEOavE
-6gBcie/PBnqyy2CC1Cz67fIcXfNcsIMy31A65Lfbc2zuWEavEU1yU9WtnNjluQGN
-BF/uHpABDAC04B19Mlu7CKn1Xd2j9UGJdcTsE4Dl3dIw4wWwGx9RNau7uG2ov2Mr
-gvglW9EfJnCqD6QaDjqzi0bKqN1o1e8rEP67T4Zc/EyXIYWaSfeB2Okfry06BJV+
-lf5wc4BnQTAKIkE+zrD+4pKMfugotItCBxXSI5K7BBjTHFdtHTkM4ehJnWPu24Nc
-sP8QZXAWH5a+pUyNe50Z9tTFC0OQGPohyztzPXnA/0GNFL8Nw+ZbZLviaePZc/cs
-ptsDAMX1d1wZrTim56yF58JmKpg/qSAoTu5VVQGOjRV2aL4UUi2ps/23JzWV8O7J
-rKQY9QIXD+zuZ1p55eKjrC2HLV/u/UCwwc+8T+DBZOZgkEbHEUP+tWwBnD5jQJUB
-Wb7xZF1IPSjkP1lVkKLfPSaZIJTDMDqg26TuvBP+hjGoWACycjHowr+GHTv5izKZ
-oKDhY/y0d+0T+lcjBTHyrnJRLdGrVu/LIG9EW+UGPQkjhduSr/ITwdPC8p6Xm2OJ
-+R7wYr+j+7EAEQEAAYkBvAQYAQoAJhYhBACEmTwsbN9HGn1+/SblRqX+fgJmBQJf
-7h6QAhsMBQkDwmcAAAoJECblRqX+fgJmOakL/jXOTEdorszaWIrIvhlEn9TVy+yr
-u57Ze0y8cGvc1mOpxkgZpJgLkOp/u8vt5eSmu96rzXS/lfvJBx69/hMuS6WWnUM9
-hlfKtwhze1JDVFigBeYMkuCC8FnwtpTNRtYisB7ZxEW7WPGXUhmarEaZ/s2zzJEi
-KJLeJAA/TgBRGWmJJUXvnmGqW1FLrHwveDPtg3UBkD81dufpsuo7g+Ab2pB0NzzR
-E47ChCYUI6qCWgFD0GpbH3dp+ZrWG+R/XLQKaxCvPhnTbrna8AzH/yRXv8WpJHom
-SCbzvun4WkBHf1L6XnUf2de+0Om2FXx5HAbR4Gl87GOP/OggCErJXnstsHiifYe/
-CyQ0Gfn9xwzGblPChrjQCyu9cTZb71Lio3qZpc84xPK/xi6LtCJfQrWAVHaXH9XG
-Wxh819zslZ4SsVSHZSYgPlnwDQ9xdmjdxA+xlcFpuT7v8p49ZmrimOouAn0/12E1
-H4p6tTVrJ0X8GPAs9cgCzkFVp3ZUh1zoJl2Z3A==
-=uVVb
------END PGP PUBLIC KEY BLOCK-----
diff --git a/packaging/ubuntu-lunar/.gitignore b/packaging/ubuntu-lunar/.gitignore
new file mode 100644
index 0000000..849ddff
--- /dev/null
+++ b/packaging/ubuntu-lunar/.gitignore
@@ -0,0 +1 @@
+dist/
diff --git a/packaging/ubuntu-lunar/Dockerfile b/packaging/ubuntu-lunar/Dockerfile
new file mode 100644
index 0000000..f6e39c0
--- /dev/null
+++ b/packaging/ubuntu-lunar/Dockerfile
@@ -0,0 +1,57 @@
+FROM ubuntu:lunar
+# This file is in the public domain.
+#
+# Docker image to build Ubuntu packages of
+# GNUnet, GNU Taler and GNU Anastasis.
+#
+# We need 'lunar' for a reasonably recent NodeJS version.
+
+ARG DEBIAN_FRONTEND=noninteractive
+
+# Install dependencies
+RUN apt-get update
+RUN apt-get -y upgrade
+RUN apt-get -y install build-essential zip jq python3 python3-pip nodejs npm autoconf automake gcc make libtool libltdl-dev libmicrohttpd-dev libpq-dev libsqlite3-dev libunistring-dev libqrencode-dev libgcrypt-dev libsodium-dev libargon2-dev libjansson-dev recutils libgmp-dev texinfo pkgconf zlib1g-dev libopus-dev libextractor-dev libnss3-dev libcurl4-gnutls-dev autopoint libzbar-dev libmysqlclient-dev mandoc libpulse-dev libgstreamer1.0-dev libgstreamer-plugins-good1.0-dev libbluetooth-dev iptables miniupnpc libpng-dev python3-jinja2 doxygen libjose-dev iproute2 sudo wget zile libogg-dev gettext net-tools po-debconf debhelper-compat dbconfig-pgsql nginx libgtk-3-dev libgladeui-dev libmagic-dev policykit-1 libnfc-dev python3-click python3-requests python3-sphinx-rtd-theme pandoc groff
+
+# For libeufin:
+RUN apt-get -y install openjdk-17-jdk default-jre-headless
+# For wallet-core:
+RUN npm install -g node pnpm
+
+RUN apt-get update
+RUN apt-get -y upgrade
+RUN apt-get -y dist-upgrade
+
+ADD "https://www.random.org/cgi-bin/randbyte?nbytes=10&format=h" skipcache
+
+COPY gnunet-build.sh /root/
+RUN chmod +x /root/gnunet-build.sh
+RUN /root/gnunet-build.sh master
+
+COPY gnunet-gtk-build.sh /root/
+RUN chmod +x /root/gnunet-gtk-build.sh
+RUN /root/gnunet-gtk-build.sh master
+
+COPY exchange-build.sh /root/
+RUN chmod +x /root/exchange-build.sh
+RUN /root/exchange-build.sh master
+
+COPY merchant-build.sh /root/
+RUN chmod +x /root/merchant-build.sh
+RUN /root/merchant-build.sh master
+
+COPY sync-build.sh /root/
+RUN chmod +x /root/sync-build.sh
+RUN /root/sync-build.sh master
+
+COPY anastasis-build.sh /root/
+RUN chmod +x /root/anastasis-build.sh
+RUN /root/anastasis-build.sh master
+
+COPY libeufin-build.sh /root/
+RUN chmod +x /root/libeufin-build.sh
+RUN /root/libeufin-build.sh master
+
+COPY wallet-build.sh /root/
+RUN chmod +x /root/wallet-build.sh
+RUN /root/wallet-build.sh master
diff --git a/packaging/ubuntu-lunar/README b/packaging/ubuntu-lunar/README
new file mode 100644
index 0000000..f4a4824
--- /dev/null
+++ b/packaging/ubuntu-lunar/README
@@ -0,0 +1,19 @@
+Scripts to build Ubuntu packages from source.
+
+
+TODO:
+- check build for warnings/missing dependencies
+ (especially GNUnet!)
+- break up into separate build for GNUnet/Taler/Anastasis
+ => might be good to not run the entire pipeline only
+ because something changes in anastasis/wallet, as
+ that's not a good reason to re-build GNUnet ;-).
+- integrate with buildbot (integrationtests?)
+ to build-on-tag / build nightly and upload resulting
+ Deb packages to reprepro (fully automated for nightly,
+ but with explicit password-protected signature for tagged builds)
+- support other CPU architectures (by running in VM that emulates
+ other CPU architectures)
+- eventually: try to improve scripts to support older
+ Debian versions
+
diff --git a/packaging/debian-docker/anastasis-build.sh b/packaging/ubuntu-lunar/anastasis-build.sh
index 2f1d533..24643e1 100644
--- a/packaging/debian-docker/anastasis-build.sh
+++ b/packaging/ubuntu-lunar/anastasis-build.sh
@@ -5,22 +5,23 @@
set -eu
unset LD_LIBRARY_PATH
-mkdir -p /build
-cd /build
+mkdir -p /build/anastasis
+cd /build/anastasis
# Fetch source
-rm -rf anastasis anastasis-gtk
-git clone git://git.taler.net/anastasis
-git clone git://git.taler.net/anastasis-gtk
+rm -rf *
for n in anastasis anastasis-gtk
do
+ git clone git://git.taler.net/$n
cd $n
+ git checkout $1
./bootstrap
dpkg-buildpackage -rfakeroot -b -uc -us
cd ..
dpkg -i *.deb
done
-rm -f packages.tgz
-tar cvf packages.tgz *.deb
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-lunar/exchange-build.sh b/packaging/ubuntu-lunar/exchange-build.sh
new file mode 100644
index 0000000..b4a0115
--- /dev/null
+++ b/packaging/ubuntu-lunar/exchange-build.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+pip3 install --break-system-packages htmlark
+
+git clone git://git.taler.net/exchange
+cd exchange
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/debian-docker/gnunet-build.sh b/packaging/ubuntu-lunar/gnunet-build.sh
index a37bff7..614c5e6 100644
--- a/packaging/debian-docker/gnunet-build.sh
+++ b/packaging/ubuntu-lunar/gnunet-build.sh
@@ -5,22 +5,25 @@
set -eu
unset LD_LIBRARY_PATH
-mkdir -p /build
-cd /build
+mkdir -p /build/gnunet
+cd /build/gnunet
# Fetch source
-rm -rf gnunet gnunet-gtk
-git clone git://git.gnunet.org/gnunet
-git clone git://git.gnunet.org/gnunet-gtk
+rm -rf *
-for n in gnunet gnunet-gtk
+pip3 install --break-system-packages sphinx-book-theme sphinx-multiversion
+
+for n in gnunet
do
+ git clone git://git.gnunet.org/$n
cd $n
+ git checkout $1
./bootstrap
dpkg-buildpackage -rfakeroot -b -uc -us
cd ..
dpkg -i *.deb
done
-rm -rf packages.tgz
-tar cvf packages.tgz *.deb
+tar cvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-lunar/gnunet-gtk-build.sh b/packaging/ubuntu-lunar/gnunet-gtk-build.sh
new file mode 100644
index 0000000..4414c3f
--- /dev/null
+++ b/packaging/ubuntu-lunar/gnunet-gtk-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/gnunet
+cd /build/gnunet
+
+# Fetch source
+rm -rf *
+
+for n in gnunet-gtk
+do
+ git clone git://git.gnunet.org/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-lunar/libeufin-build.sh b/packaging/ubuntu-lunar/libeufin-build.sh
new file mode 100644
index 0000000..75713ab
--- /dev/null
+++ b/packaging/ubuntu-lunar/libeufin-build.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/libeufin
+cd /build/libeufin
+
+# Fetch source
+rm -rf *
+git clone git://git.taler.net/libeufin
+cd libeufin
+git checkout $1
+./bootstrap
+export JAVA_HOME=/usr
+./configure --prefix=/usr
+make install
+make deb
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-lunar/mdb-build.sh b/packaging/ubuntu-lunar/mdb-build.sh
new file mode 100644
index 0000000..d097240
--- /dev/null
+++ b/packaging/ubuntu-lunar/mdb-build.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+git clone git://git.taler.net/taler-mdb
+cd taler-mdb
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-lunar/merchant-build.sh b/packaging/ubuntu-lunar/merchant-build.sh
new file mode 100644
index 0000000..24f5f9d
--- /dev/null
+++ b/packaging/ubuntu-lunar/merchant-build.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+# pip3 install --break-system-packages htmlark
+
+git clone git://git.taler.net/merchant
+cd merchant
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-lunar/run.sh b/packaging/ubuntu-lunar/run.sh
new file mode 100755
index 0000000..a6df3f0
--- /dev/null
+++ b/packaging/ubuntu-lunar/run.sh
@@ -0,0 +1,21 @@
+#!/bin/sh
+set -eu
+CONTAINER=$USER/debbuilder:latest
+docker build -t $CONTAINER .
+rm -rf dist
+mkdir dist
+docker run --read-only $CONTAINER sleep 100 &
+sleep 1
+docker container ls
+ID=`docker container ls | grep $CONTAINER | head -n1 | awk '{print $1}'`
+echo "Extracting files from $ID"
+docker cp "$ID:/build/packages.tgz" .
+echo "Stopping $CONTAINER ($ID)"
+docker container stop $ID
+echo "Removing $CONTAINER"
+docker container rm $ID
+docker image rm $USER/debbuilder
+cd dist
+tar xvf ../packages.tgz
+cd ..
+rm packages.tgz
diff --git a/packaging/ubuntu-docker/sync-build.sh b/packaging/ubuntu-lunar/sync-build.sh
index 342a702..e38a0ee 100644
--- a/packaging/ubuntu-docker/sync-build.sh
+++ b/packaging/ubuntu-lunar/sync-build.sh
@@ -5,21 +5,23 @@
set -eu
unset LD_LIBRARY_PATH
-mkdir -p /build
-cd /build
+mkdir -p /build/sync
+cd /build/sync
# Fetch source
-rm -rf sync
-git clone git://git.taler.net/sync
+rm -rf *
for n in sync
do
+ git clone git://git.taler.net/$n
cd $n
+ git checkout $1
./bootstrap
dpkg-buildpackage -rfakeroot -b -uc -us
cd ..
dpkg -i *.deb
done
-rm -f packages.tgz
-tar cvf packages.tgz *.deb
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-docker/taler.list b/packaging/ubuntu-lunar/taler.list
index 29cea42..29cea42 100644
--- a/packaging/ubuntu-docker/taler.list
+++ b/packaging/ubuntu-lunar/taler.list
diff --git a/packaging/debian-docker/wallet-build.sh b/packaging/ubuntu-lunar/wallet-build.sh
index f8ad8d4..6d807be 100644
--- a/packaging/debian-docker/wallet-build.sh
+++ b/packaging/ubuntu-lunar/wallet-build.sh
@@ -5,19 +5,24 @@
set -eu
unset LD_LIBRARY_PATH
-mkdir -p /build
-cd /build
+mkdir -p /build/wallet
+cd /build/wallet
# Fetch source
-rm -rf wallet-core
+rm -rf *
git clone git://git.taler.net/wallet-core
cd wallet-core
+git checkout $1
./bootstrap
-./configure --prefix=/usr
-make install
+
+cd packages/taler-wallet-cli
+
dpkg-buildpackage -rfakeroot -b -uc -us
-cd ..
-rm -f packages.tgz
-tar cvf packages.tgz *.deb
+cd ../taler-harness
+dpkg-buildpackage -rfakeroot -b -uc -us
+
+cd ../
+
+tar uvf ../../../packages.tgz *.deb
diff --git a/packaging/ubuntu-mantic/Dockerfile b/packaging/ubuntu-mantic/Dockerfile
new file mode 100644
index 0000000..6967b42
--- /dev/null
+++ b/packaging/ubuntu-mantic/Dockerfile
@@ -0,0 +1,54 @@
+FROM ubuntu:mantic
+# This file is in the public domain.
+#
+# Docker image to build Ubuntu packages of
+# GNUnet, GNU Taler and GNU Anastasis.
+#
+# We need 'mantic' for a reasonably recent NodeJS version.
+
+ARG DEBIAN_FRONTEND=noninteractive
+
+# Install dependencies
+RUN apt-get update
+RUN apt-get -y upgrade
+RUN apt-get update
+RUN apt-get -y dist-upgrade
+RUN apt-get update
+RUN apt-get -y install build-essential zip jq python3 python3-pip nodejs npm autoconf automake gcc-12 make libtool libltdl-dev libmicrohttpd-dev libpq-dev libsqlite3-dev libunistring-dev libqrencode-dev libgcrypt-dev libsodium-dev libargon2-dev libjansson-dev recutils libgmp-dev texinfo pkgconf zlib1g-dev libopus-dev libextractor-dev libnss3-dev libcurl4-gnutls-dev autopoint libzbar-dev libmysqlclient-dev mandoc libpulse-dev libgstreamer1.0-dev libgstreamer-plugins-good1.0-dev libbluetooth-dev iptables miniupnpc libpng-dev python3-jinja2 doxygen libjose-dev iproute2 sudo wget zile libogg-dev gettext net-tools po-debconf debhelper-compat dbconfig-pgsql nginx libgtk-3-dev libgladeui-dev libmagic-dev policykit-1 libnfc-dev python3-click python3-requests python3-sphinx-rtd-theme openjdk-17-jdk pandoc groff
+RUN npm install -g node pnpm
+
+RUN pip install --break-system-packages sphinx_multiversion
+
+# ADD "https://www.random.org/cgi-bin/randbyte?nbytes=10&format=h" skipcache
+COPY gnunet-build.sh /root/
+RUN chmod +x /root/gnunet-build.sh
+RUN /root/gnunet-build.sh master
+
+COPY gnunet-gtk-build.sh /root/
+RUN chmod +x /root/gnunet-gtk-build.sh
+RUN /root/gnunet-gtk-build.sh master
+
+COPY exchange-build.sh /root/
+RUN chmod +x /root/exchange-build.sh
+RUN /root/exchange-build.sh master
+
+COPY merchant-build.sh /root/
+RUN chmod +x /root/merchant-build.sh
+RUN /root/merchant-build.sh master
+
+COPY sync-build.sh /root/
+RUN chmod +x /root/sync-build.sh
+RUN /root/sync-build.sh master
+
+COPY anastasis-build.sh /root/
+RUN chmod +x /root/anastasis-build.sh
+RUN /root/anastasis-build.sh master
+
+# No wallet on Jammy
+COPY wallet-build.sh /root/
+RUN chmod +x /root/wallet-build.sh
+RUN /root/wallet-build.sh master
+
+COPY libeufin-build.sh /root/
+RUN chmod +x /root/libeufin-build.sh
+RUN /root/libeufin-build.sh master
diff --git a/packaging/ubuntu-mantic/README b/packaging/ubuntu-mantic/README
new file mode 100644
index 0000000..f4a4824
--- /dev/null
+++ b/packaging/ubuntu-mantic/README
@@ -0,0 +1,19 @@
+Scripts to build Ubuntu packages from source.
+
+
+TODO:
+- check build for warnings/missing dependencies
+  (especially GNUnet!)
+- break up into separate build for GNUnet/Taler/Anastasis
+ => might be good to not run the entire pipeline only
+ because something changes in anastasis/wallet, as
+ that's not a good reason to re-build GNUnet ;-).
+- integrate with buildbot (integrationtests?)
+ to build-on-tag / build nightly and upload resulting
+ Deb packages to reprepro (fully automated for nightly,
+ but with explicit password-protected signature for tagged builds)
+- support other CPU architectures (by running in VM that emulates
+ other CPU architectures)
+- eventually: try to improve scripts to support older
+ Debian versions
+
diff --git a/packaging/ubuntu-mantic/anastasis-build.sh b/packaging/ubuntu-mantic/anastasis-build.sh
new file mode 100644
index 0000000..24643e1
--- /dev/null
+++ b/packaging/ubuntu-mantic/anastasis-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/anastasis
+cd /build/anastasis
+
+# Fetch source
+rm -rf *
+
+for n in anastasis anastasis-gtk
+do
+ git clone git://git.taler.net/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-mantic/exchange-build.sh b/packaging/ubuntu-mantic/exchange-build.sh
new file mode 100644
index 0000000..a94a003
--- /dev/null
+++ b/packaging/ubuntu-mantic/exchange-build.sh
@@ -0,0 +1,25 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+export CC=gcc-12
+# Fetch source
+rm -rf *
+
+pip3 install --break-system-packages htmlark
+
+git clone git://git.taler.net/exchange
+cd exchange
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-mantic/gnunet-build.sh b/packaging/ubuntu-mantic/gnunet-build.sh
new file mode 100644
index 0000000..614c5e6
--- /dev/null
+++ b/packaging/ubuntu-mantic/gnunet-build.sh
@@ -0,0 +1,29 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/gnunet
+cd /build/gnunet
+
+# Fetch source
+rm -rf *
+
+pip3 install --break-system-packages sphinx-book-theme sphinx-multiversion
+
+for n in gnunet
+do
+ git clone git://git.gnunet.org/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar cvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-mantic/gnunet-gtk-build.sh b/packaging/ubuntu-mantic/gnunet-gtk-build.sh
new file mode 100644
index 0000000..4414c3f
--- /dev/null
+++ b/packaging/ubuntu-mantic/gnunet-gtk-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/gnunet
+cd /build/gnunet
+
+# Fetch source
+rm -rf *
+
+for n in gnunet-gtk
+do
+ git clone git://git.gnunet.org/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-mantic/libeufin-build.sh b/packaging/ubuntu-mantic/libeufin-build.sh
new file mode 100644
index 0000000..7229221
--- /dev/null
+++ b/packaging/ubuntu-mantic/libeufin-build.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/libeufin
+cd /build/libeufin
+
+# Fetch source
+rm -rf *
+git clone git://git.taler.net/libeufin
+cd libeufin
+git checkout $1
+./bootstrap
+./configure --prefix=/usr/local
+make deb
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-mantic/mdb-build.sh b/packaging/ubuntu-mantic/mdb-build.sh
new file mode 100644
index 0000000..d097240
--- /dev/null
+++ b/packaging/ubuntu-mantic/mdb-build.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+git clone git://git.taler.net/taler-mdb
+cd taler-mdb
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-mantic/merchant-build.sh b/packaging/ubuntu-mantic/merchant-build.sh
new file mode 100644
index 0000000..24f5f9d
--- /dev/null
+++ b/packaging/ubuntu-mantic/merchant-build.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+# pip3 install --break-system-packages htmlark
+
+git clone git://git.taler.net/merchant
+cd merchant
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-mantic/run.sh b/packaging/ubuntu-mantic/run.sh
new file mode 100755
index 0000000..a6df3f0
--- /dev/null
+++ b/packaging/ubuntu-mantic/run.sh
@@ -0,0 +1,21 @@
+#!/bin/sh
+set -eu
+CONTAINER=$USER/debbuilder:latest
+docker build -t $CONTAINER .
+rm -rf dist
+mkdir dist
+docker run --read-only $CONTAINER sleep 100 &
+sleep 1
+docker container ls
+ID=`docker container ls | grep $CONTAINER | head -n1 | awk '{print $1}'`
+echo "Extracting files from $ID"
+docker cp "$ID:/build/packages.tgz" .
+echo "Stopping $CONTAINER ($ID)"
+docker container stop $ID
+echo "Removing $CONTAINER"
+docker container rm $ID
+docker image rm $USER/debbuilder
+cd dist
+tar xvf ../packages.tgz
+cd ..
+rm packages.tgz
diff --git a/packaging/debian-docker/sync-build.sh b/packaging/ubuntu-mantic/sync-build.sh
index 342a702..e38a0ee 100644
--- a/packaging/debian-docker/sync-build.sh
+++ b/packaging/ubuntu-mantic/sync-build.sh
@@ -5,21 +5,23 @@
set -eu
unset LD_LIBRARY_PATH
-mkdir -p /build
-cd /build
+mkdir -p /build/sync
+cd /build/sync
# Fetch source
-rm -rf sync
-git clone git://git.taler.net/sync
+rm -rf *
for n in sync
do
+ git clone git://git.taler.net/$n
cd $n
+ git checkout $1
./bootstrap
dpkg-buildpackage -rfakeroot -b -uc -us
cd ..
dpkg -i *.deb
done
-rm -f packages.tgz
-tar cvf packages.tgz *.deb
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-mantic/taler.list b/packaging/ubuntu-mantic/taler.list
new file mode 100644
index 0000000..29cea42
--- /dev/null
+++ b/packaging/ubuntu-mantic/taler.list
@@ -0,0 +1 @@
+deb https://deb.taler.net/apt/ubuntu jammy main
diff --git a/packaging/ubuntu-mantic/wallet-build.sh b/packaging/ubuntu-mantic/wallet-build.sh
new file mode 100644
index 0000000..6d807be
--- /dev/null
+++ b/packaging/ubuntu-mantic/wallet-build.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/wallet
+cd /build/wallet
+
+# Fetch source
+rm -rf *
+git clone git://git.taler.net/wallet-core
+
+cd wallet-core
+git checkout $1
+./bootstrap
+
+cd packages/taler-wallet-cli
+
+dpkg-buildpackage -rfakeroot -b -uc -us
+
+cd ../taler-harness
+dpkg-buildpackage -rfakeroot -b -uc -us
+
+cd ../
+
+tar uvf ../../../packages.tgz *.deb
diff --git a/packaging/ubuntu-numbat/Dockerfile b/packaging/ubuntu-numbat/Dockerfile
new file mode 100644
index 0000000..d59d238
--- /dev/null
+++ b/packaging/ubuntu-numbat/Dockerfile
@@ -0,0 +1,53 @@
+FROM ubuntu:numbat
+# This file is in the public domain.
+#
+# Docker image to build Ubuntu packages of
+# GNUnet, GNU Taler and GNU Anastasis.
+#
+
+ARG DEBIAN_FRONTEND=noninteractive
+
+# Install dependencies
+RUN apt-get update
+RUN apt-get -y upgrade
+RUN apt-get update
+RUN apt-get -y dist-upgrade
+RUN apt-get update
+RUN apt-get -y install build-essential zip jq python3 python3-pip nodejs npm autoconf automake gcc-12 make libtool libltdl-dev libmicrohttpd-dev libpq-dev libsqlite3-dev libunistring-dev libqrencode-dev libgcrypt-dev libsodium-dev libargon2-dev libjansson-dev recutils libgmp-dev texinfo pkgconf zlib1g-dev libopus-dev libextractor-dev libnss3-dev libcurl4-gnutls-dev autopoint libzbar-dev libmysqlclient-dev mandoc libpulse-dev libgstreamer1.0-dev libgstreamer-plugins-good1.0-dev libbluetooth-dev iptables miniupnpc libpng-dev python3-jinja2 doxygen libjose-dev iproute2 sudo wget zile libogg-dev gettext net-tools po-debconf debhelper-compat dbconfig-pgsql nginx libgtk-3-dev libgladeui-dev libmagic-dev policykit-1 libnfc-dev python3-click python3-requests python3-sphinx-rtd-theme openjdk-17-jdk pandoc groff
+RUN npm install -g node pnpm
+
+RUN pip install --break-system-packages sphinx_multiversion
+
+# ADD "https://www.random.org/cgi-bin/randbyte?nbytes=10&format=h" skipcache
+COPY gnunet-build.sh /root/
+RUN chmod +x /root/gnunet-build.sh
+RUN /root/gnunet-build.sh master
+
+COPY gnunet-gtk-build.sh /root/
+RUN chmod +x /root/gnunet-gtk-build.sh
+RUN /root/gnunet-gtk-build.sh master
+
+COPY exchange-build.sh /root/
+RUN chmod +x /root/exchange-build.sh
+RUN /root/exchange-build.sh master
+
+COPY merchant-build.sh /root/
+RUN chmod +x /root/merchant-build.sh
+RUN /root/merchant-build.sh master
+
+COPY sync-build.sh /root/
+RUN chmod +x /root/sync-build.sh
+RUN /root/sync-build.sh master
+
+COPY anastasis-build.sh /root/
+RUN chmod +x /root/anastasis-build.sh
+RUN /root/anastasis-build.sh master
+
+# No wallet on Jammy
+COPY wallet-build.sh /root/
+RUN chmod +x /root/wallet-build.sh
+RUN /root/wallet-build.sh master
+
+COPY libeufin-build.sh /root/
+RUN chmod +x /root/libeufin-build.sh
+RUN /root/libeufin-build.sh master
diff --git a/packaging/ubuntu-numbat/README b/packaging/ubuntu-numbat/README
new file mode 100644
index 0000000..0f8c821
--- /dev/null
+++ b/packaging/ubuntu-numbat/README
@@ -0,0 +1,16 @@
+Scripts to build Ubuntu packages from source.
+
+
+TODO:
+- check build for warnings/missing dependencies
+  (especially GNUnet!)
+- break up into separate build for GNUnet/Taler/Anastasis
+ => might be good to not run the entire pipeline only
+ because something changes in anastasis/wallet, as
+ that's not a good reason to re-build GNUnet ;-).
+- integrate with buildbot (integrationtests?)
+ to build-on-tag / build nightly and upload resulting
+ Deb packages to reprepro (fully automated for nightly,
+ but with explicit password-protected signature for tagged builds)
+- support other CPU architectures (by running in VM that emulates
+ other CPU architectures)
diff --git a/packaging/ubuntu-numbat/anastasis-build.sh b/packaging/ubuntu-numbat/anastasis-build.sh
new file mode 100644
index 0000000..24643e1
--- /dev/null
+++ b/packaging/ubuntu-numbat/anastasis-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/anastasis
+cd /build/anastasis
+
+# Fetch source
+rm -rf *
+
+for n in anastasis anastasis-gtk
+do
+ git clone git://git.taler.net/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-numbat/exchange-build.sh b/packaging/ubuntu-numbat/exchange-build.sh
new file mode 100644
index 0000000..a94a003
--- /dev/null
+++ b/packaging/ubuntu-numbat/exchange-build.sh
@@ -0,0 +1,25 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+export CC=gcc-12
+# Fetch source
+rm -rf *
+
+pip3 install --break-system-packages htmlark
+
+git clone git://git.taler.net/exchange
+cd exchange
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-numbat/gnunet-build.sh b/packaging/ubuntu-numbat/gnunet-build.sh
new file mode 100644
index 0000000..614c5e6
--- /dev/null
+++ b/packaging/ubuntu-numbat/gnunet-build.sh
@@ -0,0 +1,29 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/gnunet
+cd /build/gnunet
+
+# Fetch source
+rm -rf *
+
+pip3 install --break-system-packages sphinx-book-theme sphinx-multiversion
+
+for n in gnunet
+do
+ git clone git://git.gnunet.org/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar cvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-numbat/gnunet-gtk-build.sh b/packaging/ubuntu-numbat/gnunet-gtk-build.sh
new file mode 100644
index 0000000..4414c3f
--- /dev/null
+++ b/packaging/ubuntu-numbat/gnunet-gtk-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/gnunet
+cd /build/gnunet
+
+# Fetch source
+rm -rf *
+
+for n in gnunet-gtk
+do
+ git clone git://git.gnunet.org/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-numbat/libeufin-build.sh b/packaging/ubuntu-numbat/libeufin-build.sh
new file mode 100644
index 0000000..7229221
--- /dev/null
+++ b/packaging/ubuntu-numbat/libeufin-build.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/libeufin
+cd /build/libeufin
+
+# Fetch source
+rm -rf *
+git clone git://git.taler.net/libeufin
+cd libeufin
+git checkout $1
+./bootstrap
+./configure --prefix=/usr/local
+make deb
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-numbat/mdb-build.sh b/packaging/ubuntu-numbat/mdb-build.sh
new file mode 100644
index 0000000..d097240
--- /dev/null
+++ b/packaging/ubuntu-numbat/mdb-build.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+git clone git://git.taler.net/taler-mdb
+cd taler-mdb
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-numbat/merchant-build.sh b/packaging/ubuntu-numbat/merchant-build.sh
new file mode 100644
index 0000000..24f5f9d
--- /dev/null
+++ b/packaging/ubuntu-numbat/merchant-build.sh
@@ -0,0 +1,24 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+mkdir -p /build/taler
+cd /build/taler
+
+# Fetch source
+rm -rf *
+
+# pip3 install --break-system-packages htmlark
+
+git clone git://git.taler.net/merchant
+cd merchant
+git checkout $1
+./bootstrap
+dpkg-buildpackage -rfakeroot -b -uc -us
+cd ..
+dpkg -i *.deb
+
+tar uvf ../packages.tgz *.deb
+cd ..
diff --git a/packaging/ubuntu-numbat/run.sh b/packaging/ubuntu-numbat/run.sh
new file mode 100755
index 0000000..a6df3f0
--- /dev/null
+++ b/packaging/ubuntu-numbat/run.sh
@@ -0,0 +1,21 @@
+#!/bin/sh
+set -eu
+CONTAINER=$USER/debbuilder:latest
+docker build -t $CONTAINER .
+rm -rf dist
+mkdir dist
+docker run --read-only $CONTAINER sleep 100 &
+sleep 1
+docker container ls
+ID=`docker container ls | grep $CONTAINER | head -n1 | awk '{print $1}'`
+echo "Extracting files from $ID"
+docker cp "$ID:/build/packages.tgz" .
+echo "Stopping $CONTAINER ($ID)"
+docker container stop $ID
+echo "Removing $CONTAINER"
+docker container rm $ID
+docker image rm $USER/debbuilder
+cd dist
+tar xvf ../packages.tgz
+cd ..
+rm packages.tgz
diff --git a/packaging/ubuntu-numbat/sync-build.sh b/packaging/ubuntu-numbat/sync-build.sh
new file mode 100644
index 0000000..e38a0ee
--- /dev/null
+++ b/packaging/ubuntu-numbat/sync-build.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/sync
+cd /build/sync
+
+# Fetch source
+rm -rf *
+
+for n in sync
+do
+ git clone git://git.taler.net/$n
+ cd $n
+ git checkout $1
+ ./bootstrap
+ dpkg-buildpackage -rfakeroot -b -uc -us
+ cd ..
+ dpkg -i *.deb
+done
+
+tar uvf ../packages.tgz *.deb
+
+cd ..
diff --git a/packaging/ubuntu-numbat/wallet-build.sh b/packaging/ubuntu-numbat/wallet-build.sh
new file mode 100644
index 0000000..6d807be
--- /dev/null
+++ b/packaging/ubuntu-numbat/wallet-build.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+# This file is in the public domain.
+# Helper script to build the latest DEB packages in the container.
+
+set -eu
+unset LD_LIBRARY_PATH
+
+mkdir -p /build/wallet
+cd /build/wallet
+
+# Fetch source
+rm -rf *
+git clone git://git.taler.net/wallet-core
+
+cd wallet-core
+git checkout $1
+./bootstrap
+
+cd packages/taler-wallet-cli
+
+dpkg-buildpackage -rfakeroot -b -uc -us
+
+cd ../taler-harness
+dpkg-buildpackage -rfakeroot -b -uc -us
+
+cd ../
+
+tar uvf ../../../packages.tgz *.deb
diff --git a/regional-currency/.gitignore b/regional-currency/.gitignore
new file mode 100644
index 0000000..26790f8
--- /dev/null
+++ b/regional-currency/.gitignore
@@ -0,0 +1,2 @@
+config/
+setup.log \ No newline at end of file
diff --git a/regional-currency/.shellcheckrc b/regional-currency/.shellcheckrc
new file mode 100644
index 0000000..e170f39
--- /dev/null
+++ b/regional-currency/.shellcheckrc
@@ -0,0 +1 @@
+disable=SC2018,SC2019
diff --git a/regional-currency/ChangeLog b/regional-currency/ChangeLog
new file mode 100644
index 0000000..b2310dd
--- /dev/null
+++ b/regional-currency/ChangeLog
@@ -0,0 +1,7 @@
+Sun Mar 10 12:15:15 PM CET 2024
+ Changed the scripts to enable (!) taler-merchant.target
+ instead of just the taler-merchant-httpd service.
+
+ Added automatically setting the wire-fee for IBAN.
+
+ Added code to automatically run taler-exchange-offline daily (#8623).
diff --git a/regional-currency/README b/regional-currency/README
new file mode 100644
index 0000000..599336a
--- /dev/null
+++ b/regional-currency/README
@@ -0,0 +1,2 @@
+Refer to the following document:
+https://docs.taler.net/libeufin/regional-manual.html#guided-basic-setup
diff --git a/regional-currency/config.py b/regional-currency/config.py
new file mode 100755
index 0000000..e382927
--- /dev/null
+++ b/regional-currency/config.py
@@ -0,0 +1,491 @@
+#!/usr/bin/env python3
+"""Python script to ask questions using an interactive prompt"""
+
+import base64
+import os
+import re
+import subprocess
+import urllib.parse
+import uuid
+import getpass
+from base64 import b64decode, b64encode
+from typing import Callable, Dict, TypeVar
+
+import argon2
+from Crypto.Cipher import ChaCha20_Poly1305
+from Crypto.Hash import SHA512
+from Crypto.Protocol.KDF import PBKDF2
+from Crypto.Random import get_random_bytes
+
+# Early exit if already loaded
+if os.environ.get("CONFIG_LOADED") == "y":
+ exit(0)
+
+log = open("setup.log", "ab", buffering=0)
+CONFIG_FILE = "config/user.conf"
+BIC_PATTERN = re.compile("[A-Z0-9]{4}[A-Z]{2}[A-Z0-9]{2}(?:[A-Z0-9]{3})?")
+IBAN_PATTERN = re.compile("[A-Z]{2}[0-9]{2}[A-Z0-9]{,28}")
+
+
+def load_conf() -> Dict[str, str]:
+ """Load user configuration file"""
+ conf = {}
+ with open(CONFIG_FILE, "r") as f:
+ for kv in f.read().splitlines():
+ if len(kv) != 0:
+ [k, v] = [part.strip() for part in kv.split("=", 1)]
+ if v.startswith('"') and v.endswith('"'):
+ conf[k] = v.strip('"').replace('\\"', '"')
+ elif v.startswith("'") and v.endswith("'"):
+ conf[k] = v.strip("'").replace("'\\''", "'").replace("\\'", "'")
+ else:
+ conf[k] = v
+ return conf
+
+
+conf = load_conf()
+result_conf = {**conf, "CONFIG_LOADED": "y"}
+
+def store_conf():
+ """Update the configuration file"""
+ content = ""
+ for key, value in conf.items():
+ escaped = value.replace("'", "'\\''")
+ content += f"{key}='{escaped}'\n"
+ with open(CONFIG_FILE, "w") as f:
+ f.write(content)
+
+def add_conf(name: str, value: str):
+ """Update a user configuration value and update the configuration file"""
+ conf[name] = value
+ result_conf[name] = value
+ store_conf()
+
+def run_cmd(
+ cmd: list[str], input: str | None = None, env: Dict[str, str] | None = None
+) -> int:
+ """Run a command in a child process and return its exit code"""
+ result = subprocess.run(
+ cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ input=input.encode() if input is not None else None,
+ stdin=subprocess.DEVNULL if input is None else None,
+ env=env,
+ )
+ log.write(result.stdout)
+ if result.returncode != 0:
+ print(result.stdout.decode("utf-8"), end="")
+ return result.returncode
+
+
+def try_cmd(
+ cmd: list[str], input: str | None = None, env: Dict[str, str] | None = None
+) -> bool:
+ """Run a command in a child process and return if successful"""
+ return run_cmd(cmd, input, env) == 0
+
+
+A = TypeVar("A")
+T = TypeVar("T")
+
+
+def conf_value(
+ name: str | None,
+ action: Callable[[], str | None],
+ default: T | None = None,
+ check: Callable[[str], T | None] = lambda it: it,
+ fmt: Callable[[T], str] = lambda it: str(it),
+) -> T:
+ """
+ Logic to configure a value
+
+ :param name: if present will try to fetch the current value and will store the new value
+ :param action: how a value will be obtained
+ :param default: default value to use if no value is given
+ :param check: check and normalize the value
+ :param fmt: format value for storage
+ :return: the configuration value
+ """
+ value = None
+
+ # Fetch current value
+ if name is not None:
+ curr = conf.get(name)
+ if curr is not None:
+ # Check the current value and ask again if invalid
+ value = check(curr)
+
+ # Ask for a new value until we get a valid one
+ while value is None:
+ new = action()
+ # Use default if no value was provided else check the new value
+ value = check(new) if new is not None else default
+
+ # Store the new value
+ if name is not None:
+ add_conf(name, fmt(value))
+
+ return value
+
+
+def ask(
+ name: str | None,
+ msg: str,
+ default: T | None = None,
+ check: Callable[[str], T | None] = lambda it: it,
+ fmt: Callable[[T], str] = lambda it: str(it),
+ secret: bool = False
+) -> T:
+ """
+ Prompt the user to configure a value
+ :param name: if present will try to fetch the current value and will store the new value
+ :param msg: the message to prompt the user with
+ :param default: default value to use if no value is obtained
+ :param check: check and normalize the value
+ :param fmt: format value for storage
+ :param secret: hide the input content
+ :return: the configuration value
+ """
+
+ def do_ask() -> str | None:
+ # Log the prompt
+ log.write(msg.encode() + "\n".encode())
+ # Actual prompt
+ if secret:
+ raw = getpass.getpass(msg).strip()
+ else:
+ raw = input(msg).strip()
+ if raw == "":
+ if default is None:
+ print("You must enter a value")
+ return None
+ return raw
+
+ return conf_value(name, do_ask, default, check, fmt)
+
+
+def ask_str(name: str | None, msg: str, default: str | None = None, secret: bool = False) -> str:
+ "Prompt the user to configure a string"
+ return ask(name, msg, default, secret=secret)
+
+
+def ask_bic(name: str | None, msg: str, default: str | None = None) -> str:
+ "Prompt the user to configure a BIC"
+
+ def check_bic(raw: str) -> str | None:
+ raw = raw.translate({ord(i): None for i in " -"})
+ if not BIC_PATTERN.fullmatch(raw):
+ print("Invalid BIC")
+ return None
+ else:
+ return raw
+
+ return ask(name, msg, default, check_bic)
+
+
+def ask_iban(name: str | None, msg: str, default: str | None = None) -> str:
+ "Prompt the user to configure a IBAN"
+
+ def check_iban(raw: str) -> str | None:
+ raw = raw.translate({ord(i): None for i in " -"})
+ if not IBAN_PATTERN.fullmatch(raw):
+ print("Invalid IBAN") # Checksum check ?
+ return None
+ else:
+ return raw
+
+ return ask(name, msg, default, check_iban)
+
+
+def ask_currency(name: str, msg: str, default: str | None = None) -> str:
+ "Prompt the user to configure a currency name"
+
+ def check_currency(currency: str) -> str | None:
+ currency = currency.upper()
+ if not all([c.isascii() and c.isalpha() for c in currency]):
+ print("The currency name must be an ASCII alphabetic string")
+ elif len(currency) < 3 or 11 < len(currency):
+ print("The currency name had to be between 3 and 11 characters long")
+ else:
+ return currency
+ return None
+
+ return ask(name, msg, default, check_currency)
+
+
+def ask_host(name: str, msg: str, default: str | None = None) -> str:
+ "Prompt the user to configure the installation hostname"
+
+ def check_host(host: str) -> str | None:
+ success = True
+ for subdomain in ["backend", "bank", "exchange"]:
+ success = try_cmd(["ping", "-c", "1", f"{subdomain}.{host}"]) and success
+ if success:
+ return host
+ else:
+ return None
+
+ return ask(name, msg, default, check_host)
+
+
+def ask_terms(name: str, msg: str, kind: str) -> str:
+ "Prompt the user to select a ToS/privacy policy"
+
+ # msg = "9.1. Enter the filename of the ToS. Some available options are:\n"
+ tos_msg = msg
+
+ # Recollect example ToS files
+ tos_path = "/usr/share/taler/terms"
+ for f in os.listdir(tos_path):
+ tos_file = os.path.join(tos_path, f)
+ if os.path.isfile(tos_file) and f.endswith(".rst") and kind in f:
+ tos_msg += f"- {tos_file}\n"
+
+ tos_msg += "=> "
+
+ def check_file(path: str) -> str | None:
+ if not os.path.isfile(path):
+ print("Not a file") # Checksum check ?
+ return None
+ else:
+ return path
+
+ return ask(name, tos_msg, None, check_file)
+
+
+def ask_yes_no(name: str | None, msg: str, default: bool | None = None) -> bool:
+ "Prompt the user to configure a boolean"
+
+ def check_yes_no(raw: str) -> bool | None:
+ raw = raw.lower()
+ if raw == "y" or raw == "yes":
+ return True
+ elif raw == "n" or raw == "no":
+ return False
+ else:
+ print("Expected 'y' or 'n'")
+ return None
+
+ return ask(name, msg, default, check_yes_no, lambda it: "y" if it else "n")
+
+
+# ----- Crypto ----- #
+
+
+def ask_config_password() -> str:
+ "Prompt the user to configure a password stored hashed with argon2id"
+ ph = argon2.PasswordHasher()
+ hash = conf.get("CONFIG_PASSWORD")
+ passwd = None
+ if hash is not None:
+ while True:
+ passwd = ask_str(None, "Enter the config password : ", secret=True)
+ try:
+ ph.verify(hash, passwd)
+ break
+ except argon2.exceptions.VerifyMismatchError:
+ print("invalid password")
+ else:
+ passwd = ask_str(None, "1.1 Choose a config password : ", secret=True)
+
+ if hash is None or ph.check_needs_rehash(hash):
+ add_conf("CONFIG_PASSWORD", ph.hash(passwd))
+
+ return passwd
+
+
+def ask_secret(
+ name: str, msg: str, passwd: str | None, default: str | None = None
+) -> str:
+ "Prompt the user to configure a string stored encryped using pbkdf2_sha512 and chacha20_poly1305"
+ if passwd is None:
+ return ask_str(name, msg, default)
+ else:
+ raw = conf.get(name)
+ plaintext = None
+ if raw is not None:
+ method = "$pbkdf2_sha512_chacha20_poly1305$1000000$"
+ if raw.startswith(method):
+ salt, nonce, tag, ciphertext = [
+ b64decode(it) for it in raw.removeprefix(method).split("$", 3)
+ ]
+ key = PBKDF2(passwd, salt, 32, count=1000000, hmac_hash_module=SHA512)
+ cipher = ChaCha20_Poly1305.new(key=key, nonce=nonce)
+ cipher.update(name.encode())
+ plaintext = cipher.decrypt_and_verify(ciphertext, tag).decode()
+ else:
+ salt = get_random_bytes(16)
+ key = PBKDF2(passwd, salt, 32, count=1000000, hmac_hash_module=SHA512)
+ cipher = ChaCha20_Poly1305.new(key=key)
+ cipher.update(name.encode())
+ ciphertext, tag = cipher.encrypt_and_digest(raw.encode())
+ add_conf(
+ name,
+ f"$pbkdf2_sha512_chacha20_poly1305$1000000${base64.b64encode(salt).decode()}${base64.b64encode(cipher.nonce).decode()}${base64.b64encode(tag).decode()}${base64.b64encode(ciphertext).decode()}",
+ )
+ else:
+ plaintext = ask_str(None, msg, default, True)
+ salt = get_random_bytes(16)
+ key = PBKDF2(passwd, salt, 32, count=1000000, hmac_hash_module=SHA512)
+ cipher = ChaCha20_Poly1305.new(key=key)
+ cipher.update(name.encode())
+ ciphertext, tag = cipher.encrypt_and_digest(plaintext.encode())
+ add_conf(
+ name,
+ f"$pbkdf2_sha512_chacha20_poly1305$1000000${base64.b64encode(salt).decode()}${base64.b64encode(cipher.nonce).decode()}${base64.b64encode(tag).decode()}${base64.b64encode(ciphertext).decode()}",
+ )
+ result_conf[name] = plaintext
+ return plaintext
+
+
+# ----- Prompt ----- #
+
+# Ask whether sensitive values should be encrypted at rest; if so, obtain
+# the config password used to derive the encryption key (None = cleartext).
+config_passwd = (
+    ask_config_password()
+    if ask_yes_no(
+        "DO_CONFIG_ENCRYPTION",
+        "1. Do you want to encrypt sensitive config values (Y/n): ",
+        True,
+    )
+    else None
+)
+ask_currency(
+    "CURRENCY",
+    "2. Enter the name of the regional currency (e.g. 'NETZBON'): ",
+    "NETZBON",
+)
+do_conversion = ask_yes_no(
+    "DO_CONVERSION",
+    "3. Do you want setup regional currency conversion to fiat currency (Y/n): ",
+    True,
+)
+if do_conversion:
+    # Details of the fiat bank account used for cash-in/cash-out.
+    ask_currency(
+        "FIAT_CURRENCY",
+        "3.1. Enter the name of the fiat currency (e.g. 'CHF'): ",
+        "CHF",
+    )
+    ask_str(
+        "FIAT_BANK_NAME",
+        "3.2. Enter the name of your fiat bank (e.g. POSTFINANCE AG): ",
+    )
+    iban = ask_iban(
+        "FIAT_ACCOUNT_IBAN",
+        "3.3. Enter the IBAN of your fiat bank account (e.g. 'CH7789144474425692816'): ",
+    )
+    bic = ask_bic(
+        "FIAT_ACCOUNT_BIC",
+        "3.4. Enter the BIC of your fiat bank account (e.g. 'POFICHBEXXX'): ",
+    )
+    name = ask_str(
+        "FIAT_ACCOUNT_NAME", "3.5. Enter the legal name of your fiat bank account: "
+    )
+    # Build the payto URI of the conversion account; urlencode protects
+    # against special characters in the account name.
+    params = urllib.parse.urlencode({"receiver-name": name})
+    add_conf("CONVERSION_PAYTO", f"payto://iban/{bic}/{iban}?{params}")
+bank_name = ask_str(
+    "BANK_NAME",
+    "4. Enter the human-readable name of the bank (e.g. 'Taler Bank'): ",
+    "Taler Bank",
+)
+ask_host("DOMAIN_NAME", "5. Enter the domain name (e.g. 'example.com'): ")
+if ask_yes_no("ENABLE_TLS", "6. Setup TLS using Let's Encrypt? (Y/n): ", True):
+    ask_str("TLS_EMAIL", "6.1. Enter an email address for Let's Encrypt: ")
+
+    def ask_tos():
+        # Returns "y" once the user agrees, or None so conf_value() re-asks.
+        print(
+            "6.2. Please read the Terms of Service at https://letsencrypt.org/documents/LE-SA-v1.3-September-21-2022.pdf."
+        )
+        if not ask_yes_no(
+            None,
+            "6.2. You must agree in order to register with the ACME server. Do you agree? (y/n): ",
+            False,
+        ):
+            print("You must agree in order to register with the ACME server")
+            return None
+        else:
+            return "y"
+
+    conf_value("TLS_TOS", ask_tos)
+    add_conf("PROTO", "https")
+else:
+    add_conf("PROTO", "http")
+
+add_conf(
+    "DO_OFFLINE", "y"
+)  # TODO support offline setup again when the documentation is ready
+if ask_yes_no(
+ "DO_TELESIGN",
+ "7. Setup SMS two-factor authentication using Telesign https://www.telesign.com? (Y/n): ",
+ True,
+):
+
+ def ask_telesign():
+ customer_id = ask_str(None, "7.1. Enter your Telesign Customer ID: ")
+ api_key = ask_str(None, "7.2. Enter your Telesign API Key: ")
+ phone_number = ask_str(
+ None,
+ "6.3. Enter a phone number to test your API key (e.g. '+447911123456'): ",
+ )
+ auth_token = base64.b64encode(f"{customer_id}:{api_key}".encode()).decode()
+ if not try_cmd(
+ ["libeufin-tan-sms.sh", phone_number],
+ f"T-12345 is your verification code for {bank_name} setup",
+ {**os.environ, "AUTH_TOKEN": auth_token},
+ ):
+ print(
+ "Failed to send an SMS using Telesign API, check your credentials and phone number"
+ )
+ return None
+ code = ask_str(None, f"7.4. Enter the code received by {phone_number} : ")
+ if code != "12345" and code != "T-12345":
+ print(
+ f"Wrong code got '{code}' expected '12345', check your credentials and phone number"
+ )
+ return None
+ return auth_token
+
+ conf_value("TELESIGN_AUTH_TOKEN", ask_telesign)
+generated_password= str(uuid.uuid4())
+admin_password = ask_secret(
+ "BANK_ADMIN_PASSWORD",
+ "8. Enter the admin password for the bank (or press enter to autogenerate password): ",
+ config_passwd,
+ generated_password,
+)
+add_conf("BANK_ADMIN_PASSWORD_GENERATED", "y" if generated_password==admin_password else "n")
+
+if ask_yes_no(
+    "DO_EXCHANGE_TERMS",
+    "9. Do you wish to configure terms of service (ToS) for the exchange? (Y/n): ",
+    True,
+):
+    ask_terms(
+        "EXCHANGE_TERMS_FILE",
+        "9.1. Enter the filename of the ToS. Some available options are:\n",
+        "-tos-",  # only suggest files whose name marks them as ToS documents
+    )
+
+if ask_yes_no(
+    "DO_EXCHANGE_PRIVACY",
+    "10. Do you wish to configure a privacy policy for the exchange? (Y/n): ",
+    True,
+):
+    ask_terms(
+        "EXCHANGE_PRIVACY_FILE",
+        "10.1. Enter the filename of the privacy policy. Some available options are:\n",
+        "-pp-",  # only suggest files whose name marks them as privacy policies
+    )
+
+# Update the on-disk format even if nothing has changed
+store_conf()
+
+# ----- Return conf ----- #
+
+# Emit the collected values as shell 'export' statements on fd 3 so the
+# calling shell script can source them; embedded single quotes are escaped
+# ('\'') so arbitrary values survive shell quoting.
+content = ""
+for key, value in result_conf.items():
+    escaped = value.replace("'", "'\\''")
+    content += f"export {key}='{escaped}'\n"
+with os.fdopen(3, "w") as f:
+    f.write(content)
diff --git a/regional-currency/config_nginx.sh b/regional-currency/config_nginx.sh
new file mode 100755
index 0000000..84df1e8
--- /dev/null
+++ b/regional-currency/config_nginx.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+# Render the nginx vhosts for backend/bank/exchange from the templates in
+# nginx-conf/, enable them, and optionally obtain TLS certificates.
+
+source functions.sh
+source config/user.conf
+source config/internal.conf
+
+# Variables substituted by envsubst into the nginx templates.
+export PROTO
+export DOMAIN_NAME
+export BANK_PORT
+
+envsubst <nginx-conf/backend.taler-nginx.conf >"/etc/nginx/sites-available/backend.${DOMAIN_NAME}"
+envsubst <nginx-conf/bank.taler-nginx.conf >"/etc/nginx/sites-available/bank.${DOMAIN_NAME}"
+envsubst <nginx-conf/exchange.taler-nginx.conf >"/etc/nginx/sites-available/exchange.${DOMAIN_NAME}"
+
+# Create nginx symlinks
+
+ln -sf /etc/nginx/sites-available/backend."${DOMAIN_NAME}" /etc/nginx/sites-enabled/backend."${DOMAIN_NAME}"
+ln -sf /etc/nginx/sites-available/bank."${DOMAIN_NAME}" /etc/nginx/sites-enabled/bank."${DOMAIN_NAME}"
+ln -sf /etc/nginx/sites-available/exchange."${DOMAIN_NAME}" /etc/nginx/sites-enabled/exchange."${DOMAIN_NAME}"
+
+if test "${ENABLE_TLS}" == "y"; then
+
+  # Replace http with https in the demobank-ui configuration
+
+  sed -i "s/http:\/\/bank./https:\/\/bank./g" /etc/libeufin/settings.json
+
+  # Certbot
+
+  say "Obtaining TLS certificates using Let's Encrypt"
+
+  # Quote ${TLS_EMAIL} to avoid word splitting on unusual addresses.
+  certbot --nginx -n --agree-tos -m "${TLS_EMAIL}" \
+    -d backend."${DOMAIN_NAME}" \
+    -d bank."${DOMAIN_NAME}" \
+    -d exchange."${DOMAIN_NAME}" &>> setup.log
+else
+  sed -i "s/https:\/\/bank./http:\/\/bank./g" /etc/libeufin/settings.json
+fi
+
+say "Restarting Nginx with new configuration"
+systemctl reload nginx &>> setup.log
diff --git a/regional-currency/diagnose.sh b/regional-currency/diagnose.sh
new file mode 100755
index 0000000..a0c513b
--- /dev/null
+++ b/regional-currency/diagnose.sh
@@ -0,0 +1,125 @@
+#!/usr/bin/env bash
+
+# This file is in the public domain.
+
+# Script for basic diagnostics of a Taler regio deployment.
+# @author Florian Dold <dold@taler.net>
+
+if [ "$(id -u)" -ne 0 ]; then
+  echo "FATAL: Please run as root." >&2
+  exit 1
+fi
+
+# Resolve the postgres connection strings from the service configurations.
+libeufin_bank_db=$(libeufin-bank config get libeufin-bankdb-postgres config)
+libeufin_nexus_db=$(libeufin-nexus config get libeufin-nexusdb-postgres config)
+exchange_db=$(taler-config -s exchangedb-postgres -o config)
+
+# Quote the right-hand side: inside [[ ]] an unquoted RHS is treated as a
+# glob pattern rather than a literal string.
+if [[ $libeufin_nexus_db != "$libeufin_bank_db" ]]; then
+  echo "FATAL: libeufin-bank and libeufin-nexus don't share the same database" >&2
+  exit 1
+fi
+
+libeufin_db=$libeufin_bank_db
+
+# runsql db RESNAME < query
+# Run the SQL read from stdin against database $1 as the postgres user and
+# store the CSV-formatted result into the variable named by $2.
+function runsql() {
+  local sql
+  # read -r -d '' consumes all of stdin (up to EOF) into $sql.
+  read -r -d '' sql
+  # cd / avoids 'could not change directory' noise from sudo -u postgres.
+  res=$(cd / && sudo -u postgres psql "$1" -t --csv -c "$sql")
+  # printf -v assigns to the dynamically-named result variable.
+  printf -v "$2" '%s' "$res"
+}
+
+#
+# Check for conversion trigger
+#
+
+# Presence of the cashin_link trigger indicates the conversion wiring
+# between libeufin-nexus and libeufin-bank is installed.
+runsql "$libeufin_db" have_conversion_triggers <<EOF
+select count(*) from information_schema.triggers
+  where trigger_schema='libeufin_nexus'
+  and trigger_name='cashin_link';
+EOF
+
+echo "have_conversion_triggers" $have_conversion_triggers
+
+#
+# Check for transactions
+#
+# The counters below follow the incoming-money pipeline: nexus incoming ->
+# talerable / bounced -> bank exchange incoming -> exchange reserves_in ->
+# exchange reserves (presumed ordering; confirm against the schema docs).
+runsql "$libeufin_db" num_nexus_incoming_transactions <<EOF
+select count(*) from libeufin_nexus.incoming_transactions;
+EOF
+echo num_nexus_incoming_transactions: $num_nexus_incoming_transactions
+
+runsql "$libeufin_db" num_nexus_talerable_transactions <<EOF
+select count(*) from libeufin_nexus.talerable_incoming_transactions;
+EOF
+echo "num_nexus_talerable_transactions:" $num_nexus_talerable_transactions
+
+runsql "$libeufin_db" num_nexus_bounced_transactions <<EOF
+select count(*) from libeufin_nexus.bounced_transactions;
+EOF
+echo "num_nexus_bounced_transactions:" $num_nexus_bounced_transactions
+
+runsql "$libeufin_db" num_bank_exchange_incoming <<EOF
+select count(*) from libeufin_bank.taler_exchange_incoming;
+EOF
+echo "num_bank_exchange_incoming:" $num_bank_exchange_incoming
+
+runsql "$exchange_db" num_exchange_reserves_in <<EOF
+select count(*) from exchange.reserves_in;
+EOF
+echo num_exchange_reserves_in: $num_exchange_reserves_in
+
+runsql "$exchange_db" num_exchange_reserves <<EOF
+select count(*) from exchange.reserves;
+EOF
+echo num_exchange_reserves: $num_exchange_reserves
+
+
+# Warn (but do not fail) when the given systemd unit is not active.
+function expect_unit_active() {
+  if ! systemctl --quiet is-active "$1"; then
+    echo "WARNING: expected unit $1 to be active, but it is not active"
+  fi
+}
+
+# Units that make up the deployment, grouped by component.
+libeufin_units=(
+  libeufin-bank.service
+  libeufin-nexus-ebics-fetch.service
+  libeufin-nexus-ebics-submit.service
+)
+
+exchange_units=(
+  taler-exchange-aggregator.service
+  taler-exchange-closer.service
+  taler-exchange-expire.service
+  taler-exchange-httpd.service
+  taler-exchange-secmod-cs.service
+  taler-exchange-secmod-eddsa.service
+  taler-exchange-secmod-rsa.service
+  taler-exchange-transfer.service
+  taler-exchange-wirewatch.service
+)
+
+merchant_units=(
+  taler-merchant-httpd.service
+)
+
+all_units=()
+all_units+=( "${libeufin_units[@]}" "${exchange_units[@]}" "${merchant_units[@]}" )
+
+# Quote the array expansion so unit names are never word-split.
+for unit in "${all_units[@]}"; do
+  expect_unit_active "$unit"
+done
+
+SINCE="7 days ago"
+echo "analysing logs since $SINCE"
+
+# Count WARNING/ERROR lines in each unit's journal; grep -c replaces the
+# 'grep | wc -l' pipeline.
+for unit in "${all_units[@]}"; do
+  num_warnings=$(journalctl -u "$unit" --since "$SINCE" | grep -c WARNING)
+  num_errors=$(journalctl -u "$unit" --since "$SINCE" | grep -c ERROR)
+  if [[ ( $num_errors -eq 0 ) && ( $num_warnings -eq 0 ) ]]; then
+    continue
+  fi
+  echo "Please check logs for $unit ($num_warnings warnings, $num_errors errors)"
+done
diff --git a/regional-currency/functions.sh b/regional-currency/functions.sh
new file mode 100755
index 0000000..0663fec
--- /dev/null
+++ b/regional-currency/functions.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+
+# ERR-trap handler: report the failing line (via caller), point the user
+# at setup.log, and abort.
+notify_err() {
+  say "errexit on line $(caller)"
+  say "Error messages can be found at the end of setup.log"
+  exit 1
+}
+
+trap notify_err ERR
+
+# Message
+# Print a "TALER:"-prefixed message, both appended to setup.log and echoed
+# to stdout for the operator.
+function say() {
+  echo "TALER: " "$@" >> setup.log
+  echo "TALER: " "$@"
+}
+
+# Abort the setup unless the script is running as root.
+function check_user() {
+  local current_user
+  current_user="$(whoami)"
+  if [ "$current_user" != "root" ]; then
+    say "Please run this script as root"
+    exit 1
+  fi
+}
+
+# Set DISTRO to the detected distro (ubuntu or debian) or return non-zero
+# status if the distro is not supported.
+function detect_distro() {
+  unset DISTRO
+  [[ -f /etc/os-release ]] && source /etc/os-release
+  # ${NAME:-} guards against 'set -u' in callers (e.g. install_packages.sh)
+  # when /etc/os-release is missing or does not define NAME.
+  case "${NAME:-}" in
+  *Ubuntu*)
+    # shellcheck disable=SC2034
+    DISTRO=ubuntu
+    ;;
+  *Debian*)
+    # shellcheck disable=SC2034
+    DISTRO=debian
+    ;;
+  *)
+    echo "Unsupported distro, should be either ubuntu or debian" >&2
+    return 1
+    ;;
+  esac
+}
diff --git a/regional-currency/install_packages.sh b/regional-currency/install_packages.sh
new file mode 100755
index 0000000..3c3f2a5
--- /dev/null
+++ b/regional-currency/install_packages.sh
@@ -0,0 +1,84 @@
+#!/bin/bash
+# This file is in the public domain.
+#
+# Install all distro and GNU Taler packages needed by the regional-currency
+# setup; all command output is appended to setup.log.
+
+set -eu
+
+source functions.sh
+
+detect_distro
+
+# Program versions
+PG_VERSION=15
+
+say "Installing necessary packages (this may take a while)..."
+
+## Update
+
+apt update &>> setup.log
+
+## General requirements
+
+apt install \
+  uuid-runtime \
+  make \
+  sudo \
+  curl \
+  jq \
+  wget \
+  nginx \
+  gettext-base \
+  postgresql-${PG_VERSION} \
+  postgresql-client-${PG_VERSION} \
+  dbconfig-pgsql \
+  certbot \
+  python3-sphinx \
+  python3-pip \
+  python3-certbot-nginx -y &>> setup.log
+
+# Python modules used by config.py (argon2-cffi, pycryptodome) plus docs
+# tooling; --break-system-packages is needed to install via pip on
+# Debian/Ubuntu systems with an externally-managed Python environment.
+pip3 install --break-system-packages \
+  sphinx-markdown-builder \
+  htmlark \
+  argon2-cffi \
+  pycryptodome &>> setup.log
+
+## Add GNU Taler deb.taler.net to /etc/apt/sources.list
+
+say "Adding GNU Taler apt repository"
+say "Detected distro $DISTRO"
+
+case $DISTRO in
+debian)
+  # APT_NIGHTLY=y selects the (unsigned) nightly package repository.
+  if test ${APT_NIGHTLY:-n} == y; then
+    say "Setup nightly packages"
+    echo "deb [trusted=yes] https://deb.taler.net/apt-nightly bookworm main" >/etc/apt/sources.list.d/taler.list
+  else
+    echo "deb [signed-by=/etc/apt/keyrings/taler-systems.gpg] https://deb.taler.net/apt/debian bookworm main" >/etc/apt/sources.list.d/taler.list
+  fi
+  ;;
+ubuntu)
+  echo "deb [signed-by=/etc/apt/keyrings/taler-systems.gpg] https://deb.taler.net/apt/ubuntu mantic taler-mantic" >/etc/apt/sources.list.d/taler.list
+  ;;
+*)
+  say "Unsupported distro: $DISTRO"
+  exit 1
+  ;;
+esac
+
+# Fetch the repository signing key referenced by the sources.list entries.
+wget -P /etc/apt/keyrings https://taler.net/taler-systems.gpg &>> setup.log
+
+## Specific GNU Taler packages
+
+say "Installing GNU Taler packages (this may take a while)..."
+
+apt update &>> setup.log
+apt install \
+  taler-exchange \
+  taler-terms-generator \
+  taler-merchant \
+  taler-harness \
+  taler-wallet-cli \
+  taler-exchange-offline \
+  libeufin-bank \
+  libeufin-nexus \
+  -y \
+  &>> setup.log
diff --git a/regional-currency/list-incoming.sh b/regional-currency/list-incoming.sh
new file mode 100755
index 0000000..bb3a67d
--- /dev/null
+++ b/regional-currency/list-incoming.sh
@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+
+# This file is in the public domain.
+
+# List the reserve public keys of all incoming transfers recorded by the
+# exchange, printed in GNUnet base32 encoding (one per line).
+# @author Florian Dold <dold@taler.net>
+
+if [ "$(id -u)" -ne 0 ]; then
+  echo "FATAL: Please run as root." >&2
+  exit 1
+fi
+
+exchange_db=$(taler-config -s exchangedb-postgres -o config)
+
+# runsql db RESNAME < query
+# Run the SQL from stdin against database $1 as the postgres user and
+# store the CSV result into the variable named by $2.
+function runsql() {
+  local sql
+  read -r -d '' sql
+  res=$(cd / && sudo -u postgres psql "$1" -t --csv -c "$sql")
+  printf -v "$2" '%s' "$res"
+}
+
+runsql "$exchange_db" reserves_in <<EOF
+select reserve_pub from exchange.reserves_in;
+EOF
+
+# One reserve_pub per line of the query output.
+mapfile -t lines <<<$reserves_in
+
+for line in "${lines[@]}"; do
+  # psql prints bytea values as hex with a leading "\x"; sys.argv[1][2:]
+  # strips that prefix before hex -> raw bytes -> gnunet-base32.
+  python3 -c "import binascii; import sys; sys.stdout.buffer.write(binascii.a2b_hex(sys.argv[1][2:]))" "$line" | gnunet-base32
+  echo
+done
+
diff --git a/regional-currency/main.sh b/regional-currency/main.sh
new file mode 100755
index 0000000..a88ac3d
--- /dev/null
+++ b/regional-currency/main.sh
@@ -0,0 +1,104 @@
+#!/bin/bash
+# This file is in the public domain.
+
+# main.sh is the main script that asks the questions and
+# puts the answers into environment variables located at "config/taler-internal.conf or config/taler.conf" files
+# Nginx configuration - Reads values directly from these "config files".
+
+set -eu
+
+# include functions source file
+
+source functions.sh
+
+# Clear logs
+
+> setup.log
+
+# include variables from configuration
+mkdir -p config/
+touch config/user.conf config/internal.conf
+# Values we generated
+source config/internal.conf
+
+# Ask questions to user
+# START USER INTERACTION
+say "Welcome to the GNU Taler regional currency setup!"
+say ""
+say "All configuration values asked during the setup script"
+say "can be changed in config/user.conf."
+say "Logs are written in setup.log."
+say ""
+
+# END USER INTERACTION
+
+# Check if the user is root, otherwise EXIT.
+check_user
+
+# Installation of deb packages required
+say ""
+say "Installing packages (step 1 of 6)"
+. install_packages.sh
+
+say ""
+say "Interactive configuration (step 2 of 6)"
+# config.py writes shell 'export' lines on fd 3.  fd 3 is redirected into
+# the process substitution (so the exports can be sourced) while fd 4
+# temporarily carries the script's real stdout for the interactive prompts.
+{ source <(./config.py 3>&1 >&4 4>&-); } 4>&1
+
+# Remove when libeufin currencies.conf is in sync with exchange
+cat >>/usr/share/libeufin/config.d/netzbon.conf <<EOF
+[CURRENCY-NETZBON]
+enabled=yes
+name=NetzBon
+code=NETZBON
+fractional_input_digits=2
+fractional_normal_digits=2
+fractional_trailing_zero_digits=2
+alt_unit_names={"0":"NETZBON"}
+EOF
+
+# Generate the bank password for the exchange account once and persist it.
+if test -z "${BANK_EXCHANGE_PASSWORD:-}"; then
+  BANK_EXCHANGE_PASSWORD=$(uuidgen)
+  echo "BANK_EXCHANGE_PASSWORD=\"${BANK_EXCHANGE_PASSWORD}\"" >>config/internal.conf
+fi
+
+# Default the bank's TCP port when not configured.
+if test -z "${BANK_PORT:-}"; then
+  echo "BANK_PORT=8080" >>config/user.conf
+  export BANK_PORT=8080
+fi
+
+say ""
+say "Configuring nginx (step 3 of 6)"
+./config_nginx.sh
+
+say ""
+say "Setting up libeufin (step 4 of 6)"
+./setup-libeufin.sh
+
+say ""
+say "Setting up exchange (step 5 of 6)"
+./setup-exchange.sh
+
+say ""
+say "Setting up merchant (step 6 of 6)"
+./setup-merchant.sh
+
+# Final message to the user
+say ""
+say "Congratulations, you have successfully installed GNU Taler"
+say "Your bank is at ${PROTO}://bank.${DOMAIN_NAME}/"
+# Only echo the password when it was autogenerated; never echo one the
+# operator typed in (flag set by config.py).
+if test ${BANK_ADMIN_PASSWORD_GENERATED} == y; then
+  say "You can connect to the bank web UI as 'admin' using '${BANK_ADMIN_PASSWORD}'"
+else
+  say "You can connect to the bank web UI as 'admin' using the password you entered earlier"
+fi
+say "A merchant is at ${PROTO}://backend.${DOMAIN_NAME}/"
+say "You should set credentials for the merchant soon."
+say "The exchange withdraw URI is taler://withdraw-exchange/exchange.${DOMAIN_NAME}/"
+
+if test ${DO_CONVERSION} == y; then
+  say "For currency conversion to work, you must manually complete"
+  say "the EBICS configuration."
+fi
+
+exit 0
+# END INSTALLATION
+# END INSTALLATION
diff --git a/regional-currency/nginx-conf/backend.taler-nginx.conf b/regional-currency/nginx-conf/backend.taler-nginx.conf
new file mode 100644
index 0000000..ea267df
--- /dev/null
+++ b/regional-currency/nginx-conf/backend.taler-nginx.conf
@@ -0,0 +1,19 @@
+# Vhost for the Taler merchant backend; the placeholders are filled in by
+# envsubst from config_nginx.sh.
+server {
+
+    listen 80;
+    listen [::]:80;
+
+    server_name backend.${DOMAIN_NAME};
+
+    # Bigger than default timeout to support long polling
+    proxy_read_timeout 6500s;
+    keepalive_requests 1000000;
+    keepalive_timeout 6500s;
+
+    location / {
+        # The merchant httpd listens on a unix domain socket.
+        proxy_pass http://unix:/var/run/taler/merchant-httpd/merchant-http.sock;
+        proxy_set_header X-Forwarded-Proto "${PROTO}";
+        proxy_set_header X-Forwarded-Host "backend.${DOMAIN_NAME}";
+        proxy_set_header X-Forwarded-Prefix /;
+    }
+}
diff --git a/regional-currency/nginx-conf/bank.taler-nginx.conf b/regional-currency/nginx-conf/bank.taler-nginx.conf
new file mode 100644
index 0000000..1c6a6d3
--- /dev/null
+++ b/regional-currency/nginx-conf/bank.taler-nginx.conf
@@ -0,0 +1,23 @@
+# Vhost for libeufin-bank; the placeholders are filled in by envsubst from
+# config_nginx.sh.
+server {
+    listen 80;
+    listen [::]:80;
+
+    server_name bank.${DOMAIN_NAME};
+
+    access_log /var/log/nginx/libeufin-sandbox.log;
+    error_log /var/log/nginx/libeufin-sandbox.err;
+
+    # Bigger than default timeout to support long polling
+    proxy_read_timeout 6500s;
+    keepalive_requests 1000000;
+    keepalive_timeout 6500s;
+
+    # TODO should we proxy SPA with nginx for perf and fallback to bank server on 404 ?
+    location / {
+        # libeufin-bank serves over TCP (SERVE=tcp, PORT=BANK_PORT).
+        proxy_pass http://localhost:${BANK_PORT};
+        #Fixes withdrawal http request
+        proxy_set_header X-Forwarded-Proto "${PROTO}";
+        proxy_set_header X-Forwarded-Host "bank.${DOMAIN_NAME}";
+        proxy_set_header X-Forwarded-Prefix /;
+    }
+}
diff --git a/regional-currency/nginx-conf/exchange.taler-nginx.conf b/regional-currency/nginx-conf/exchange.taler-nginx.conf
new file mode 100644
index 0000000..b1e9d0a
--- /dev/null
+++ b/regional-currency/nginx-conf/exchange.taler-nginx.conf
@@ -0,0 +1,16 @@
+# Vhost for the Taler exchange; the placeholders are filled in by envsubst
+# from config_nginx.sh.
+server {
+
+    listen 80;
+    listen [::]:80;
+
+    server_name exchange.${DOMAIN_NAME};
+
+    # Bigger than default timeout to support long polling
+    proxy_read_timeout 6500s;
+    keepalive_requests 1000000;
+    keepalive_timeout 6500s;
+
+    location / {
+        # The exchange httpd listens on a unix domain socket.
+        proxy_pass http://unix:/var/run/taler/exchange-httpd/exchange-http.sock;
+    }
+}
diff --git a/regional-currency/setup-exchange.sh b/regional-currency/setup-exchange.sh
new file mode 100755
index 0000000..91f916c
--- /dev/null
+++ b/regional-currency/setup-exchange.sh
@@ -0,0 +1,242 @@
+#!/bin/bash
+# This file is in the public domain.
+#
+# This script configures and launches the Taler exchange.
+#
+# The environment must provide the following variables:
+#
+# - BANK_EXCHANGE_PASSWORD (exchange password for libeufin-bank)
+# - EXCHANGE_WIRE_GATEWAY_URL (where is the exchange wire gateway / libeufin-nexus)
+# - EXCHANGE_PAYTO (exchange account PAYTO)
+# - ENABLE_TLS (http or https?)
+# - DOMAIN_NAME: DNS domain name to use for the setup
+#
+
+set -eu
+
+# Report the script's exit code on termination (EXIT trap below).
+notify_exit() {
+  [[ $1 == 0 ]] || echo Script "$0" failed, exit code "$1"
+}
+
+# Report the line that triggered the ERR trap.
+notify_err() {
+  echo "errexit on line $(caller)" >&2
+}
+
+# Map INT/TERM to the conventional 130/143 exit codes so the EXIT trap
+# reports them through notify_exit.
+trap '(exit 130)' INT
+trap '(exit 143)' TERM
+trap notify_err ERR
+# shellcheck disable=SC2154
+trap 'rc=$?; notify_exit $rc; exit $rc' EXIT
+
+# End of error handling setup
+
+source functions.sh
+source config/user.conf
+source config/internal.conf
+
+EXCHANGE_DB="taler-exchange"
+
+say "Beginning Exchange setup"
+
+# Fail fast when the required values from config/*.conf are missing.
+if test -z "${BANK_EXCHANGE_PASSWORD:-}"; then
+  say "Failure: BANK_EXCHANGE_PASSWORD not set"
+  exit 1
+fi
+if test -z "${EXCHANGE_PAYTO:-}"; then
+  say "Failure: EXCHANGE_PAYTO not set"
+  exit 1
+fi
+
+# Log the message and abort the setup.
+function die() {
+  say "$1"
+  exit 1
+}
+
+# Just try if sudo works for diagnostics
+sudo -i -u taler-exchange-offline id >/dev/null || die "Error: Unable to switch to taler-exchange-offline user"
+
+# Create master key as taler-exchange-offline *unless* user already
+# set the MASTER_PUBLIC_KEY to some value we can use.
+export MASTER_PRIV_DIR=.local/share/taler/exchange/offline-keys
+export MASTER_PRIV_FILE=${MASTER_PRIV_DIR}/master.priv
+export SECMOD_TOFU_FILE=${MASTER_PRIV_DIR}/secm_tofus.pub
+if test -z "${MASTER_PUBLIC_KEY:-}"; then
+  if test "${DO_OFFLINE:-y}" == n; then
+    say "Error: No MASTER_PUBLIC_KEY but DO_OFFLINE set to NO"
+    exit 1
+  fi
+  say "Setting up offline key"
+  # Point the offline tool at key files inside the
+  # taler-exchange-offline user's home directory.
+  echo -e "[exchange-offline]\n"\
+    "MASTER_PRIV_FILE=\$HOME/${MASTER_PRIV_FILE}\n"\
+    "SECM_TOFU_FILE=\$HOME/${SECMOD_TOFU_FILE}\n"\
+    >/etc/taler/conf.d/offline-setup.conf
+
+  MASTER_PUBLIC_KEY=$(sudo -i -u taler-exchange-offline taler-exchange-offline -c /etc/taler/taler.conf -LDEBUG setup 2>> setup.log)
+  echo "MASTER_PUBLIC_KEY=\"${MASTER_PUBLIC_KEY}\"" >>config/user.conf
+  if test -z "${DO_OFFLINE:-}"; then
+    # Set 'DO_OFFLINE'
+    DO_OFFLINE=y
+    echo "DO_OFFLINE=y" >>config/user.conf
+  fi
+else
+  say "Master public key is $MASTER_PUBLIC_KEY"
+  if test ${DO_OFFLINE:-y} == y; then
+    # Re-run 'setup' to verify the local offline key matches the one
+    # configured by the user.
+    MASTER_PUBLIC_KEY2=$(sudo -i -u taler-exchange-offline taler-exchange-offline -c /etc/taler/taler.conf setup 2>> setup.log)
+    if test "${MASTER_PUBLIC_KEY2}" != "${MASTER_PUBLIC_KEY}"; then
+      # Typo fix in the error message: "missmatch" -> "mismatch".
+      say "Error: master public key mismatch ${MASTER_PUBLIC_KEY2} does not match ${MASTER_PUBLIC_KEY}"
+      exit 1
+    fi
+  fi
+fi
+
+say "Stopping running exchange before reconfiguration"
+systemctl stop taler-exchange.target &>> setup.log
+
+say "Configuring exchange"
+
+# Generate terms of service (ToS)
+# The ETag is the file's basename; taler-terms-generator renders the .rst
+# source into the formats served by the exchange.
+TERMS_ETAG=
+if test ${DO_EXCHANGE_TERMS} == y; then
+  if test -z "${EXCHANGE_TERMS_FILE:-}"; then
+    say "Error: No EXCHANGE_TERMS_FILE set but DO_EXCHANGE_TERMS set to YES"
+    exit 1
+  fi
+
+  TERMS_ETAG="$(basename "$EXCHANGE_TERMS_FILE" .rst)"
+
+  say "Setting up terms of service (ToS)"
+  taler-terms-generator -i "${EXCHANGE_TERMS_FILE}" &>> setup.log
+fi
+
+# Generate privacy policy
+PRIVACY_ETAG=
+if test ${DO_EXCHANGE_PRIVACY} == y; then
+  if test -z "${EXCHANGE_PRIVACY_FILE:-}"; then
+    say "Error: No EXCHANGE_PRIVACY_FILE set but DO_EXCHANGE_PRIVACY set to YES"
+    exit 1
+  fi
+
+  PRIVACY_ETAG="$(basename "$EXCHANGE_PRIVACY_FILE" .rst)"
+
+  say "Setting up the privacy policy"
+  taler-terms-generator -i "${EXCHANGE_PRIVACY_FILE}" &>> setup.log
+fi
+
+export EXCHANGE_BASE_URL="$PROTO://exchange.${DOMAIN_NAME}/"
+
+# Main exchange configuration: currency, master key, ToS, and account.
+cat << EOF > /etc/taler/conf.d/setup.conf
+[taler]
+CURRENCY=${CURRENCY}
+CURRENCY_ROUND_UNIT=${CURRENCY}:0.01
+
+[exchange]
+AML_THRESHOLD=${CURRENCY}:1000000
+MASTER_PUBLIC_KEY=${MASTER_PUBLIC_KEY}
+BASE_URL=${EXCHANGE_BASE_URL}
+STEFAN_ABS=${CURRENCY}:0
+STEFAN_LOG=${CURRENCY}:0
+STEFAN_LIN=0
+
+TERMS_ETAG=${TERMS_ETAG}
+PRIVACY_ETAG=${PRIVACY_ETAG}
+
+[merchant-exchange-${DOMAIN_NAME}]
+MASTER_KEY=${MASTER_PUBLIC_KEY}
+CURRENCY=${CURRENCY}
+EXCHANGE_BASE_URL=${EXCHANGE_BASE_URL}
+
+[exchange-account-default]
+PAYTO_URI=${EXCHANGE_PAYTO}
+ENABLE_DEBIT=YES
+ENABLE_CREDIT=YES
+@inline-secret@ exchange-accountcredentials-default ../secrets/exchange-accountcredentials-default.secret.conf
+EOF
+
+cat << EOF > /etc/taler/secrets/exchange-db.secret.conf
+[exchangedb-postgres]
+CONFIG=postgres:///exchange
+EOF
+
+# Database credentials readable only by root and the exchange DB group.
+chmod 440 /etc/taler/secrets/exchange-db.secret.conf
+chown root:taler-exchange-db /etc/taler/secrets/exchange-db.secret.conf
+
+cat << EOF > /etc/taler/secrets/exchange-accountcredentials-default.secret.conf
+
+[exchange-accountcredentials-default]
+WIRE_GATEWAY_URL=${PROTO}://bank.$DOMAIN_NAME/accounts/exchange/taler-wire-gateway/
+WIRE_GATEWAY_AUTH_METHOD=basic
+USERNAME=exchange
+PASSWORD=${BANK_EXCHANGE_PASSWORD}
+EOF
+
+chmod 400 /etc/taler/secrets/exchange-accountcredentials-default.secret.conf
+chown taler-exchange-wire:taler-exchange-db /etc/taler/secrets/exchange-accountcredentials-default.secret.conf
+
+# Generate the denomination (coin) configuration and zero the deposit fee.
+taler-harness deployment gen-coin-config \
+  --min-amount "${CURRENCY}":0.01 \
+  --max-amount "${CURRENCY}":100 |
+  sed -e "s/FEE_DEPOSIT = ${CURRENCY}:0.01/FEE_DEPOSIT = ${CURRENCY}:0/" \
+    >/etc/taler/conf.d/"${CURRENCY}"-coins.conf
+
+say "Initializing exchange database"
+taler-exchange-dbconfig -c /etc/taler/taler.conf &>> setup.log
+
+say "Launching exchange"
+systemctl enable taler-exchange.target &>> setup.log
+systemctl restart taler-exchange.target &>> setup.log
+
+say "Waiting for exchange HTTP service (/config)..."
+curl -sS --max-time 2 \
+  --retry-all-errors \
+  --retry-delay 2 \
+  --retry 10 \
+  "${EXCHANGE_BASE_URL}"config &>> setup.log
+
+say "Waiting for exchange management keys (this may take a while)..."
+curl -sS --max-time 30 \
+  --retry-delay 2 \
+  --retry 60 \
+  "${EXCHANGE_BASE_URL}"management/keys &>> setup.log
+
+if test ${DO_OFFLINE} == y; then
+  # Sign the exchange's keys with the offline master key.
+  say "Offline interaction..."
+  sudo -i -u taler-exchange-offline \
+    taler-exchange-offline \
+    -c /etc/taler/taler.conf \
+    download \
+    sign \
+    upload &>> setup.log
+
+  say "Exchange account setup..."
+  # All wire/global fees are zero; "1h 6a 0" are presumably the remaining
+  # global-fee arguments ("6a" = six years in GNUnet duration syntax) --
+  # confirm against taler-exchange-offline(1).
+  sudo -i -u taler-exchange-offline \
+    taler-exchange-offline \
+    enable-account "${EXCHANGE_PAYTO}" \
+    display-hint 0 "${CURRENCY} Exchange" \
+    wire-fee now x-taler-bank "${CURRENCY}":0 "${CURRENCY}":0 \
+    global-fee now "${CURRENCY}":0 "${CURRENCY}":0 "${CURRENCY}":0 1h 6a 0 \
+    upload &>> setup.log
+
+  say "Enabling timer to automate renewals..."
+  systemctl enable taler-exchange-offline.timer &>> setup.log
+  systemctl restart taler-exchange-offline.timer &>> setup.log
+
+  if test ${DO_CONVERSION} == y; then
+    say "Conversion account setup (restricted to CH-only)..."
+    sudo -i -u taler-exchange-offline taler-exchange-offline \
+      enable-account "${CONVERSION_PAYTO}" \
+      display-hint 10 "${FIAT_BANK_NAME}" \
+      conversion-url "${PROTO}://bank.$DOMAIN_NAME/conversion-info/" \
+      debit-restriction deny \
+      wire-fee now iban "${CURRENCY}":0 "${CURRENCY}":0 \
+      upload &>> setup.log
+  fi
+fi
+
+say "Waiting for exchange /keys..."
+curl -sS --max-time 2 \
+  --retry-connrefused \
+  --retry-delay 2 \
+  --retry 10 \
+  "${EXCHANGE_BASE_URL}"keys &>> setup.log
+
+say "Exchange setup finished"
diff --git a/regional-currency/setup-libeufin.sh b/regional-currency/setup-libeufin.sh
new file mode 100755
index 0000000..47d8725
--- /dev/null
+++ b/regional-currency/setup-libeufin.sh
@@ -0,0 +1,138 @@
+#!/bin/bash
+# This file is in the public domain.
+#
+# This script configures libeufin-bank and libeufin-nexus.
+
+
+set -eu
+
+source functions.sh
+# Re-run config.py: it emits the stored answers (decrypting secrets when
+# needed) as 'export' lines on fd 3, which are sourced here.
+{ source <(./config.py 3>&1 >&4 4>&-); } 4>&1
+source config/internal.conf
+
+say "Beginning LibEuFin setup"
+
+# Fail fast when required configuration values are missing.
+if test -z "${BANK_NAME:-}"; then
+  say "Error: config/user.conf does not specify BANK_NAME"
+  exit 1
+fi
+if test -z "${DOMAIN_NAME:-}"; then
+  say "Error: config/user.conf does not specify DOMAIN_NAME"
+  exit 1
+fi
+if test -z "${BANK_ADMIN_PASSWORD:-}"; then
+  say "Error: config/user.conf does not specify BANK_ADMIN_PASSWORD"
+  exit 1
+fi
+if test -z "${BANK_EXCHANGE_PASSWORD:-}"; then
+  say "Error: config/user.conf does not specify BANK_EXCHANGE_PASSWORD"
+  exit 1
+fi
+
+if test ${DO_CONVERSION} == y; then
+  say "Configuring libeufin-nexus with ${FIAT_CURRENCY}..."
+
+  # Point nexus' EBICS section at the fiat account entered in config.py.
+  taler-config -s nexus-ebics -o CURRENCY \
+    -V "$FIAT_CURRENCY" -c /etc/libeufin/libeufin-nexus.conf
+  taler-config -s nexus-ebics -o IBAN \
+    -V "$FIAT_ACCOUNT_IBAN" -c /etc/libeufin/libeufin-nexus.conf
+  taler-config -s nexus-ebics -o BIC \
+    -V "$FIAT_ACCOUNT_BIC" -c /etc/libeufin/libeufin-nexus.conf
+  taler-config -s nexus-ebics -o NAME \
+    -V "$FIAT_ACCOUNT_NAME" -c /etc/libeufin/libeufin-nexus.conf
+fi
+
+
+say "Configuring libeufin-bank with ${CURRENCY}..."
+
+cat >/etc/libeufin/libeufin-bank.conf <<EOF
+[libeufin-bank]
+CURRENCY=${CURRENCY}
+NAME="${BANK_NAME}"
+BASE_URL=bank.${DOMAIN_NAME}
+WIRE_TYPE=x-taler-bank
+X_TALER_BANK_PAYTO_HOSTNAME=bank.${DOMAIN_NAME}
+SUGGESTED_WITHDRAWAL_EXCHANGE=${PROTO}://exchange.${DOMAIN_NAME}/
+SERVE=tcp
+PORT=${BANK_PORT}
+EOF
+
+# Enable regional<->fiat conversion features when configured.
+if test ${DO_CONVERSION} == y; then
+  cat >>/etc/libeufin/libeufin-bank.conf <<EOF
+ALLOW_CONVERSION=yes
+FIAT_CURRENCY=${FIAT_CURRENCY}
+ALLOW_EDIT_CASHOUT_PAYTO_URI=yes
+EOF
+fi
+
+# Enable SMS TAN when Telesign credentials were configured in config.py.
+if test -n "${TELESIGN_AUTH_TOKEN:-}"; then
+  cat >>/etc/libeufin/libeufin-bank.conf <<EOF
+TAN_SMS=libeufin-tan-sms.sh
+TAN_SMS_ENV={"AUTH_TOKEN":"$TELESIGN_AUTH_TOKEN"}
+EOF
+fi
+
+say "Setting up libeufin database..."
+
+libeufin-dbconfig &>> setup.log
+
+say "Setting up libeufin-bank..."
+
+
+say "Setting up libeufin-bank admin account..."
+sudo -u libeufin-bank \
+  libeufin-bank passwd \
+  -c /etc/libeufin/libeufin-bank.conf \
+  admin "${BANK_ADMIN_PASSWORD}" &>> setup.log
+
+say "Setting up admin's debt limit..."
+# NOTE(review): presumably the admin account must be able to go deeply
+# negative to issue regional currency -- confirm against libeufin docs.
+sudo -u libeufin-bank \
+  libeufin-bank edit-account \
+  -c /etc/libeufin/libeufin-bank.conf \
+  admin --debit_threshold=${CURRENCY}:200000000 &>> setup.log
+
+say "Setting up SPA configuration..."
+echo "settings = { bankName: \"${BANK_NAME}\" }" >/etc/libeufin/settings.js
+
+say "Create exchange account..."
+if test -z "${EXCHANGE_PAYTO:-}"; then
+  # FIXME create-account should have a way to update the password if the account already exists
+  EXCHANGE_PAYTO_NEW="$(sudo -u libeufin-bank libeufin-bank create-account -c /etc/libeufin/libeufin-bank.conf --username exchange --password "${BANK_EXCHANGE_PASSWORD}" --name Exchange --exchange 2>> setup.log)?receiver-name=Exchange"
+  echo "EXCHANGE_PAYTO=\"${EXCHANGE_PAYTO_NEW}\"" >> config/user.conf
+fi
+
+
+say "Start the bank..."
+systemctl enable libeufin-bank &>> setup.log
+systemctl restart libeufin-bank &>> setup.log
+
+say "Waiting for the bank (/config)..."
+curl -sS --max-time 2 \
+  --retry-all-errors \
+  --retry-delay 2 \
+  --retry 10 \
+  ${PROTO}://bank.${DOMAIN_NAME}/config &>> setup.log
+
+if test ${DO_CONVERSION} == y; then
+say "Setting conversion rates to 1:1 ..."
+# TODO only set conversion rates if none have already been set
+curl -sS -u "admin:${BANK_ADMIN_PASSWORD}" \
+  -H 'Content-Type: application/json; charset=utf-8' \
+  ${PROTO}://bank.${DOMAIN_NAME}/conversion-info/conversion-rate \
+--data-binary @- &>> setup.log << EOF
+{
+  "cashin_ratio": "1",
+  "cashin_fee": "${CURRENCY}:0",
+  "cashin_tiny_amount": "${CURRENCY}:0.01",
+  "cashin_rounding_mode": "nearest",
+  "cashin_min_amount": "${FIAT_CURRENCY}:1",
+  "cashout_ratio": "1",
+  "cashout_fee": "${FIAT_CURRENCY}:0",
+  "cashout_tiny_amount": "${FIAT_CURRENCY}:0.01",
+  "cashout_rounding_mode": "nearest",
+  "cashout_min_amount": "${CURRENCY}:1"
+}
+EOF
+fi
+
+say "LibEuFin setup finished"
diff --git a/regional-currency/setup-merchant.sh b/regional-currency/setup-merchant.sh
new file mode 100755
index 0000000..a892b7a
--- /dev/null
+++ b/regional-currency/setup-merchant.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+set -eu
+
+source functions.sh
+source config/user.conf
+source config/internal.conf
+
+say "Setting up merchant database"
+taler-merchant-dbconfig &>> setup.log
+
+say "Launching taler-merchant-httpd"
+systemctl enable taler-merchant.target &>> setup.log
+systemctl restart taler-merchant.target &>> setup.log
diff --git a/regional-currency/upgrade.sh b/regional-currency/upgrade.sh
new file mode 100755
index 0000000..8924a77
--- /dev/null
+++ b/regional-currency/upgrade.sh
@@ -0,0 +1,30 @@
+#!/bin/bash
+# This file is in the public domain.
+set -eu
+
+echo "Fetching package list..."
+apt-get update
+
+echo -n "Stopping Taler services..."
+systemctl disable --now taler-exchange.target &>> upgrade.log
+systemctl disable --now taler-merchant.target &>> upgrade.log
+systemctl disable --now libeufin-bank &>> upgrade.log
+systemctl disable --now libeufin-nexus.target &>> upgrade.log
+echo " OK"
+
+echo "Upgrading packages..."
+apt-get upgrade
+
+echo "Upgrading databases..."
+libeufin-dbconfig &>> upgrade.log
+taler-exchange-dbconfig &>> upgrade.log
+taler-merchant-dbconfig &>> upgrade.log
+
+echo -n "Restarting Taler services..."
+systemctl enable --now taler-exchange.target &>> upgrade.log
+systemctl enable --now taler-merchant.target &>> upgrade.log
+systemctl enable --now libeufin-bank &>> upgrade.log
+systemctl enable --now libeufin-nexus.target &>> upgrade.log
+echo " OK"
+
+exit 0
diff --git a/regional-currency/vagrant/.gitignore b/regional-currency/vagrant/.gitignore
new file mode 100644
index 0000000..8000dd9
--- /dev/null
+++ b/regional-currency/vagrant/.gitignore
@@ -0,0 +1 @@
+.vagrant
diff --git a/regional-currency/vagrant/README b/regional-currency/vagrant/README
new file mode 100644
index 0000000..e9387d3
--- /dev/null
+++ b/regional-currency/vagrant/README
@@ -0,0 +1,2 @@
+This folder contains a vagrant configuration (https://developer.hashicorp.com/vagrant)
+that allows us to easily spin up a virtual machine to test the setup instructions.
diff --git a/regional-currency/vagrant/Vagrantfile b/regional-currency/vagrant/Vagrantfile
new file mode 100644
index 0000000..7cb3574
--- /dev/null
+++ b/regional-currency/vagrant/Vagrantfile
@@ -0,0 +1,77 @@
+# -*- mode: ruby -*-
+# vi: set ft=ruby :
+
+# All Vagrant configuration is done below. The "2" in Vagrant.configure
+# configures the configuration version (we support older styles for
+# backwards compatibility). Please don't change it unless you know what
+# you're doing.
+Vagrant.configure("2") do |config|
+ # The most common configuration options are documented and commented below.
+ # For a complete reference, please see the online documentation at
+ # https://docs.vagrantup.com.
+
+ # Every Vagrant development environment requires a box. You can search for
+ # boxes at https://vagrantcloud.com/search.
+ config.vm.box = "ubuntu/kinetic64"
+
+ config.ssh.forward_agent = true
+ config.ssh.forward_x11 = true
+
+ # Disable automatic box update checking. If you disable this, then
+ # boxes will only be checked for updates when the user runs
+ # `vagrant box outdated`. This is not recommended.
+ # config.vm.box_check_update = false
+
+ # Create a forwarded port mapping which allows access to a specific port
+ # within the machine from a port on the host machine. In the example below,
+ # accessing "localhost:8080" will access port 80 on the guest machine.
+ # NOTE: This will enable public access to the opened port
+ # config.vm.network "forwarded_port", guest: 80, host: 8080
+
+ # Create a forwarded port mapping which allows access to a specific port
+ # within the machine from a port on the host machine and only allow access
+ # via 127.0.0.1 to disable public access
+ # config.vm.network "forwarded_port", guest: 80, host: 8080, host_ip: "127.0.0.1"
+
+ # Create a private network, which allows host-only access to the machine
+ # using a specific IP.
+ # config.vm.network "private_network", ip: "192.168.33.10"
+
+ # Create a public network, which generally matched to bridged network.
+ # Bridged networks make the machine appear as another physical device on
+ # your network.
+ # config.vm.network "public_network"
+
+ # Share an additional folder to the guest VM. The first argument is
+ # the path on the host to the actual folder. The second argument is
+ # the path on the guest to mount the folder. And the optional third
+ # argument is a set of non-required options.
+ # config.vm.synced_folder "../data", "/vagrant_data"
+
+ # Provider-specific configuration so you can fine-tune various
+ # backing providers for Vagrant. These expose provider-specific options.
+ # Example for VirtualBox:
+ #
+ config.vm.provider "virtualbox" do |vb|
+ # Display the VirtualBox GUI when booting the machine
+ vb.gui = true
+
+ # Customize the amount of memory on the VM:
+ vb.memory = "4096"
+
+ # Required, or wayland doesn't seem to work
+ vb.customize ['modifyvm', :id, '--graphicscontroller', 'vmsvga']
+ end
+ #
+ # View the documentation for the provider you are using for more
+ # information on available options.
+
+ # Enable provisioning with a shell script. Additional provisioners such as
+ # Ansible, Chef, Docker, Puppet and Salt are also available. Please see the
+ # documentation for more information about their specific syntax and use.
+ config.vm.provision "shell", reboot: true, inline: <<-SHELL
+ apt-get update
+ apt-get upgrade -y
+ apt-get install -y ubuntu-desktop gnome-shell firefox virtualbox-guest-additions-iso git
+ SHELL
+end
diff --git a/regional-currency/withdraw.sh b/regional-currency/withdraw.sh
new file mode 100755
index 0000000..c0896e5
--- /dev/null
+++ b/regional-currency/withdraw.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+if test "$1" = "--help" || test "$1" = "-h"; then
+ echo "./withdraw [RESERVE_PUB]"
+ echo
+ echo "Injects one incoming CHF payment into nexus database"
+ echo "in order to trigger a Taler withdrawal. The reserve"
+ echo "pub can be passed either as the first parameter, or"
+ echo "it'll be generated by the CLI wallet. In both cases,"
+ echo "the exchange to withdraw from is \$PROTO://exchange.\$DOMAIN"
+
+ exit 0
+fi
+
+RESERVE_PUB="$1" # maybe passed
+set -eu
+
+. config/user.conf # DOMAIN_NAME, CURRENCY & FIAT_CURRENCY
+. config/internal.conf # PROTO
+
+NEXUS_CONFIG_FILE=/etc/libeufin/libeufin-nexus.conf
+if test -z "$RESERVE_PUB"; then
+ RESERVE_PUB=$(taler-wallet-cli \
+ api 'acceptManualWithdrawal' \
+ '{"exchangeBaseUrl":"'${PROTO}'://exchange.'$DOMAIN_NAME'",
+ "amount":"'$CURRENCY':5"
+ }' | jq -r .result.reservePub)
+fi
+DEBTOR_IBAN="CH8389144317421994586"
+sudo -i -u libeufin-nexus libeufin-nexus testing fake-incoming -L DEBUG --subject "$RESERVE_PUB" --amount "$FIAT_CURRENCY:5" "payto://iban/$DEBTOR_IBAN"
+
+taler-wallet-cli run-until-done
diff --git a/selenium/launch_selenium_test b/selenium/launch_selenium_test
deleted file mode 100755
index 12e35ca..0000000
--- a/selenium/launch_selenium_test
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/bash
-
-ulimit -v 6000000
-
-set -eu
-
-# clean /tmp
-rm -fr /tmp/.org.chromium*
-
-cd $HOME/wallet-webex
-git clean -fdx
-
-git fetch
-# reset to updated upstream branch, but only if we're tracking a branch
-branch=$(git rev-parse --abbrev-ref --symbolic-full-name @{u} 2>/dev/null || echo HEAD)
-git reset --hard "$branch"
-
-git submodule update --remote
-
-./configure && make
-
-# call python3 selenium script
-python3 $HOME/wallet-webex/selenium/withdraw_buy.py --ext-unpacked=$HOME/wallet-webex
diff --git a/splitops/README.md b/splitops/README.md
new file mode 100644
index 0000000..7a92b9b
--- /dev/null
+++ b/splitops/README.md
@@ -0,0 +1,128 @@
+# Splitops
+
+
+Splitops is a script to allow execution of commands only after the approval of
+multiple users.
+
+It is intended to be used with OpenSSH by specifying it as the "command" option
+for authorized users in `~/.ssh/authorized_keys` (explained below).
+
+Server requirements:
+=====================
+
+GNU/linux server.
+The Splitops program is written in the Python programming language, so you will need the
+python3 package installed on your server; check by typing "python3 -V" in your terminal to see if it is already installed.
+
+Please do before using the program
+=====================================
+
+1) From your local computer git clone the deployment.git repository, and within the "splitops" folder,
+copy the file "splitops" from your computer to the remote server (path:/usr/local/bin/). There is no need for you
+to clone the whole deployment.git repository from the remote server to grab this program.
+
+You can use the command "scp" for this.
+
+scp /home/user/deployment/splitops/splitops root@server:/usr/local/bin/
+
+2) In your remote server SSH configuration (/etc/ssh/sshd_config),
+please make sure the option "PubkeyAuthentication yes" is
+uncommented. If it is not, uncomment the line and reload your ssh service.
+
+3) In your /root/.ssh/authorized_keys, please add the next lines:
+
+command="/usr/local/bin/splitops alice" [... key of alice ...]
+command="/usr/local/bin/splitops bob" [... key of bob ...]
+
+(one for each user, that you want to have approval from for each command request)
+
+"Alice" and "Bob" are just usernames, they don't need to be real user system accounts in the remote server.
+
+The way the splitops program works, is by associating a public SSH key to a username, but this username doesn't need to match,
+with the real username you are using for a specific SSH public key.
+
+Having these 2 lines in the .ssh/authorized_keys file will *force* anyone trying to log in as root through SSH
+to execute the program "splitops", so any other user not listed in this root/.ssh/authorized_keys file won't be able to do anything.
+
+4) Logout from the remote server, and from your client machine, in order to start using the program Splitops,
+try to "request" your very first command. This is done by using the splitops sub-command "propose".
+
+e.g: ssh root@server propose rm -rf /opt/something
+
+After executing this, you will have some sort of answer from the server such as next:
+
+- authenticated as: bob
+- requested command: ['rm', '-rf', '/opt/something']
+- assigned id: ccafbd
+
+That means, your new command request, is waiting for approval.
+
+Now, if you are either Alice or Bob, you can "approve" this command request, and afterwards wait for
+others to do the same (until the approval count reaches the required number of users listed in the .ssh/authorized_keys of the remote server).
+
+Let's say you are Bob for now,
+---------------------------------
+
+bob@computer:~$ ssh root@server get # To see if you have anything pending to approve
+
+And you will see a list of pending requests. Now get the ID of any request you want to approve,
+let's say "ccafbd", then type:
+
+bob@computer:~$ ssh root@server approve ccafbd
+
+Now you have to ask Alice, to approve the same request "ccafbd"
+
+Alice will do,
+
+alice@computer:~$ ssh root@server approve ccafbd
+
+And finally you will be able to remotely execute, as root, that specific approved command:
+
+bob@computer:~$ ssh root@server run ccafbd
+
+Summary
+========
+
+Once the production remote server is up and running using the "splitops" command, a routine for you as "double-check systems administrator"
+would be to use the following splitops sub-commands (get, propose, approve, discard, run...)
+
+alice@computer:~$ ssh root@server get # To see if you have any pending commands to approve
+alice@computer:~$ ssh root@server approve "some ID" # To approve a specific command
+alice@computer:~$ ssh root@server run "some ID" # To run a specific approved command
+alice@computer:~$ ssh root@server propose cp /opt/something /usr/local/something # (and wait)
+
+e.g 1) IF you try to "run" a command, but you don't have enough approvals, you will receive some output as this:
+
+- authenticated as: javier
+- running command with ID bcb6a5
+- not enough approvals, got 1 but need 2
+
+e.g 2) If you try to connect to the remote server as root, without using any Splitops sub-command, you will receive the
+next answer from the server:
+
+alice@computer:~$ ssh root@server
+- authenticated as: alice
+- no command provided, try help
+- Connection to 78.141.227.64 closed.
+
+
+Full command list
+===================
+CMDID= The hash assigned to each command request.
+
+whoami: Check authentication
+propose CMD ...: Propose a new command
+get: Get the currently proposed command
+approve CMDID: Approve a command
+run CMDID: Run a sufficiently approved command
+discard: Discard the currently proposed command
+
+Please remember you have to use this sub-command remotely from your computer, towards the remote server, by using the "root" user.
+Or with the user of your choice, who has on its .ssh/authorized_key file, the users list, with the OpenSSH "command= ..." option.
+
+[*]- Also take into account that if any other user "proposes" a new command, the last proposed command will be discarded. The program doesn't use a stack
+to store all requests; it works with just a single request. So even if a specific CMDID has been approved by several users, you won't be able
+to run it if a new command proposal has been requested since.
+
+[*] We encourage all users interested in using the Splitops command to use it first with a normal user, and not with the root user.
+So this way you can learn how the program behaves, without losing the connection with your server as the root user.
diff --git a/splitops/splitops b/splitops/splitops
new file mode 100755
index 0000000..5972887
--- /dev/null
+++ b/splitops/splitops
@@ -0,0 +1,143 @@
+#!/usr/bin/env python3
+
+"""
+This script is intended to be used as a SSH command wrapper.
+
+It allows users to propose a command that should be run.
+The command will only be executed after a threshold of
+other users has approved the command.
+"""
+
+import os
+import shlex
+import sys
+import json
+from pathlib import Path
+import uuid
+from dataclasses import dataclass
+import subprocess
+
+# Approval threshold, including the approval
+# of the proposer.
+APPROVAL_THRESHOLD = 2
+
+cmdpath = Path.home() / "cmd.json"
+
+def write_cmd(d):
+ with open(cmdpath, "w") as f:
+ f.write(json.dumps(d))
+
+def read_cmd():
+ try:
+ with open(cmdpath, "r") as f:
+ return json.load(f)
+ except FileNotFoundError:
+ return None
+
+def propose(cmd):
+ request_id = uuid.uuid4().hex.lower()[0:6]
+ for x in cmd:
+ if not x.isascii():
+ print("requested command not ascii")
+ sys.exit(4)
+ print(f"requested command: {cmd}")
+ write_cmd({"cmd": cmd, "request_id": request_id})
+ print(f"assigned id: {request_id}")
+
+def approve(my_user, request_id):
+ print(f"approving command {request_id} as {my_user}")
+ d = read_cmd()
+ if d is None:
+ print("no command proposed")
+ sys.exit(1)
+ if d["request_id"] != request_id:
+ print("request ID does not match")
+ sys.exit(1)
+ approved_by = d.get("approved_by", [])
+ if my_user not in approved_by:
+ approved_by.append(my_user)
+ d["approved_by"] = approved_by
+ write_cmd(d)
+
+def run(request_id):
+ print(f"running command with ID {request_id}")
+ d = read_cmd()
+ if d is None:
+ print("no command proposed")
+ sys.exit(1)
+ if d["request_id"] != request_id:
+ print("request ID does not match")
+ sys.exit(1)
+ approved_by = d.get("approved_by", [])
+ num_approvals = len(approved_by)
+ if num_approvals < APPROVAL_THRESHOLD:
+ print(f"not enough approvals, got {num_approvals} but need {APPROVAL_THRESHOLD}")
+ sys.exit(1)
+ if d.get("executed", False):
+ print("command has already been executed once, please request again")
+ sys.exit(1)
+ cmd = d["cmd"]
+ d["executed"] = True
+ # Mark as executed, can only execute once!
+ write_cmd(d)
+ print("running command", cmd)
+ res = subprocess.run(cmd, capture_output=True, encoding="utf-8")
+ print(f"==stdout==\n{res.stdout}====")
+ print(f"==stderr==\n{res.stderr}====")
+ print(f"exit code: {res.returncode}")
+ # FIXME: Write log to disk?
+
+
+def usage():
+ print("Commands:")
+ print(" whoami: Check authentication.")
+ print(" propose CMD...: Propose a new command.")
+ print(" get: Get the currently proposed command.")
+ print(" approve CMDID: Approve a command.")
+ print(" run CMDID: Run a sufficiently approved command.")
+ print(" discard: Discard the currently proposed command.")
+ sys.exit(1)
+
+def die(msg):
+ print(msg)
+ sys.exit(2)
+
+def main():
+ if len(sys.argv) != 2:
+ die("unexpected usage")
+ user = sys.argv[1]
+ os_user = os.environ["USER"]
+ print(f"authenticated as: {user}")
+ inner_cmd = os.environ.get("SSH_ORIGINAL_COMMAND")
+ if inner_cmd is None:
+ print("no command provided, try help")
+ sys.exit(3)
+ inner_args = shlex.split(inner_cmd)
+ if len(inner_args) < 1:
+ usage()
+ subcommand = inner_args[0]
+ if subcommand == "discard":
+ cmdpath.unlink()
+ elif subcommand == "whoami":
+ print(f"you are {user} on {os_user}")
+ elif subcommand == "propose":
+ propose(inner_args[1:])
+ elif subcommand == "get":
+ print(read_cmd())
+ elif subcommand == "help":
+ usage()
+ elif subcommand == "run":
+ if len(inner_args) != 2:
+ usage()
+ run(inner_args[1])
+ elif subcommand == "approve":
+ if len(inner_args) != 2:
+ usage()
+ approve(user, inner_args[1])
+ else:
+ print(f"unknown subcommand {subcommand}")
+ usage()
+
+if __name__ == '__main__':
+ main()
+
diff --git a/systemd-services/buildbot-worker-wallet.service b/systemd-services/buildbot-worker-codespell.service
index 8cd9647..bd3151b 100644
--- a/systemd-services/buildbot-worker-wallet.service
+++ b/systemd-services/buildbot-worker-codespell.service
@@ -1,9 +1,9 @@
[Unit]
-Description=Buildbot worker service for wallet
-AssertPathExists=/home/walletbuilder/worker
+Description=Buildbot worker service for doc
+AssertPathExists=/home/codespell/worker
[Service]
-WorkingDirectory=/home/walletbuilder/
+WorkingDirectory=/home/codespell/
ExecStart=/usr/bin/buildbot-worker start --nodaemon worker
ExecReload=/usr/bin/buildbot-worker restart --nodaemon worker
ExecStop=/usr/bin/buildbot-worker stop worker
diff --git a/systemd-services/buildbot-worker-auditor.service b/systemd-services/buildbot-worker-compilecheck.service
index 44254b4..d0df3d7 100644
--- a/systemd-services/buildbot-worker-auditor.service
+++ b/systemd-services/buildbot-worker-compilecheck.service
@@ -1,5 +1,5 @@
[Unit]
-Description=Buildbot worker service for auditor
+Description=Buildbot worker service for compile and check
AssertPathExists=%h/worker
[Service]
diff --git a/systemd-services/buildbot-worker-container.service b/systemd-services/buildbot-worker-container.service
new file mode 100644
index 0000000..70d57c4
--- /dev/null
+++ b/systemd-services/buildbot-worker-container.service
@@ -0,0 +1,13 @@
+[Unit]
+Description=Buildbot worker service for container worker
+AssertPathExists=/home/container-worker/worker
+
+[Service]
+WorkingDirectory=/home/container-worker/
+ExecStart=/usr/bin/buildbot-worker start --nodaemon worker
+ExecReload=/usr/bin/buildbot-worker restart --nodaemon worker
+ExecStop=/usr/bin/buildbot-worker stop worker
+Restart=always
+
+[Install]
+WantedBy=default.target
diff --git a/systemd-services/buildbot-worker-linkchecker.service b/systemd-services/buildbot-worker-linkchecker.service
new file mode 100644
index 0000000..5e4a58f
--- /dev/null
+++ b/systemd-services/buildbot-worker-linkchecker.service
@@ -0,0 +1,13 @@
+[Unit]
+Description=Buildbot worker service for doc
+AssertPathExists=/home/linkchecker/worker
+
+[Service]
+WorkingDirectory=/home/linkchecker/
+ExecStart=/usr/bin/buildbot-worker start --nodaemon worker
+ExecReload=/usr/bin/buildbot-worker restart --nodaemon worker
+ExecStop=/usr/bin/buildbot-worker stop worker
+Restart=always
+
+[Install]
+WantedBy=default.target
diff --git a/systemd-services/buildbot-worker-packaging.service b/systemd-services/buildbot-worker-packaging.service
new file mode 100644
index 0000000..4d907f2
--- /dev/null
+++ b/systemd-services/buildbot-worker-packaging.service
@@ -0,0 +1,13 @@
+[Unit]
+Description=Buildbot worker service for creating Debian and Ubuntu packages
+AssertPathExists=%h/packaging-worker
+
+[Service]
+WorkingDirectory=%h
+ExecStart=/usr/bin/buildbot-worker start --nodaemon packaging-worker
+ExecReload=/usr/bin/buildbot-worker restart --nodaemon packaging-worker
+ExecStop=/usr/bin/buildbot-worker stop packaging-worker
+Restart=always
+
+[Install]
+WantedBy=default.target
diff --git a/systemd-services/tips-checker.service b/systemd-services/tips-checker.service
deleted file mode 100644
index 22322be..0000000
--- a/systemd-services/tips-checker.service
+++ /dev/null
@@ -1,13 +0,0 @@
-[Unit]
-Description=Buildbot worker to check tip reserves
-AssertPathExists=%h/tips-checker-dir
-
-[Service]
-WorkingDirectory=%h
-ExecStart=/usr/bin/buildbot-worker start --nodaemon tips-checker-dir
-ExecReload=/usr/bin/buildbot-worker restart --nodaemon tips-checker-dir
-ExecStop=/usr/bin/buildbot-worker stop tips-checker-dir
-Restart=always
-
-[Install]
-WantedBy=default.target
diff --git a/taler-arm/arm.conf b/taler-arm/arm.conf
deleted file mode 100644
index 8c6363d..0000000
--- a/taler-arm/arm.conf
+++ /dev/null
@@ -1,19 +0,0 @@
-[arm]
-# PORT = 2087
-HOSTNAME = localhost
-BINARY = gnunet-service-arm
-ACCEPT_FROM = 127.0.0.1;
-ACCEPT_FROM6 = ::1;
-
-# Special case, uses user runtime dir even for per-system service.
-UNIXPATH = $GNUNET_USER_RUNTIME_DIR/gnunet-service-arm.sock
-UNIX_MATCH_UID = YES
-UNIX_MATCH_GID = YES
-
-# In the "-l" option, format characters from 'strftime' are allowed;
-# In the GLOBAL_POSTFIX, "{}" stands for the name of the respective
-# service. Thus the following option would introduce per-service
-# logging with a new log file each day. Note that only the last 3
-# log files are preserved.
-# GLOBAL_POSTFIX = -l $GNUNET_CACHE_HOME/{}-%Y-%m-%d.log
-GLOBAL_PREFIX =
diff --git a/taler-arm/defaults.conf b/taler-arm/defaults.conf
deleted file mode 100644
index f12f805..0000000
--- a/taler-arm/defaults.conf
+++ /dev/null
@@ -1,20 +0,0 @@
-[PATHS]
-GNUNET_HOME = $HOME
-
-# Persistant data storage
-GNUNET_DATA_HOME = ${XDG_DATA_HOME:-$GNUNET_HOME/.local/share}/gnunet/
-
-# Configuration files
-GNUNET_CONFIG_HOME = ${XDG_CONFIG_HOME:-$GNUNET_HOME/.config}/gnunet/
-
-# Cached data, no big deal if lost
-GNUNET_CACHE_HOME = ${XDG_CACHE_HOME:-$GNUNET_HOME/.cache}/gnunet/
-
-GNUNET_RUNTIME_DIR = ${TMPDIR:-${TMP:-/tmp}}/gnunet-system-runtime/
-
-# Runtime data for per-user services
-GNUNET_USER_RUNTIME_DIR = ${TMPDIR:-${TMP:-/tmp}}/gnunet-${USERHOME:-${USER:-user}}-runtime/
-
-[arm]
-# FIXME: does this work? (if not, need to fix arm_api.c...)
-OPTIONS = -l $HOME/logs/arm-%Y-%m-%d.log
diff --git a/taler-arm/libeufin-nexus.conf b/taler-arm/libeufin-nexus.conf
deleted file mode 100644
index d5dff80..0000000
--- a/taler-arm/libeufin-nexus.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[libeufin-nexus]
-TYPE = simple
-BINARY = taler-log-adapter
-OPTIONS = $HOME/logs/nexus-%Y-%m-%d.log libeufin-nexus serve --port=5222
diff --git a/taler-arm/libeufin-sandbox.conf b/taler-arm/libeufin-sandbox.conf
deleted file mode 100644
index b13337a..0000000
--- a/taler-arm/libeufin-sandbox.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[libeufin-sandbox]
-TYPE = simple
-BINARY = taler-log-adapter
-OPTIONS = $HOME/logs/sandbox-%Y-%m-%d.log libeufin-sandbox serve --port=5111 --no-auth
diff --git a/taler-arm/taler-aggregator.conf b/taler-arm/taler-aggregator.conf
deleted file mode 100644
index d071f2d..0000000
--- a/taler-arm/taler-aggregator.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-aggregator]
-TYPE = simple
-BINARY = taler-exchange-aggregator
-OPTIONS = -y -l $HOME/logs/aggregator-%Y-%m-%d.log
diff --git a/taler-arm/taler-auditor.conf b/taler-arm/taler-auditor.conf
deleted file mode 100644
index cf8ba14..0000000
--- a/taler-arm/taler-auditor.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-auditor]
-TYPE = simple
-BINARY = taler-auditor-httpd
-OPTIONS = -l $HOME/logs/auditor-%Y-%m-%d.log
diff --git a/taler-arm/taler-blog.conf b/taler-arm/taler-blog.conf
deleted file mode 100644
index 9a5bcc5..0000000
--- a/taler-arm/taler-blog.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-blog]
-TYPE = simple
-BINARY = taler-log-adapter
-OPTIONS = $HOME/logs/blog-%Y-%m-%d.log taler-merchant-demos blog
diff --git a/taler-arm/taler-closer.conf b/taler-arm/taler-closer.conf
deleted file mode 100644
index 51efbff..0000000
--- a/taler-arm/taler-closer.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-closer]
-TYPE = simple
-BINARY = taler-exchange-closer
-OPTIONS = -l $HOME/logs/closer-%Y-%m-%d.log
diff --git a/taler-arm/taler-donations.conf b/taler-arm/taler-donations.conf
deleted file mode 100644
index 222f26f..0000000
--- a/taler-arm/taler-donations.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-donations]
-TYPE = simple
-BINARY = taler-log-adapter
-OPTIONS = $HOME/logs/donations-%Y-%m-%d.log taler-merchant-demos donations
diff --git a/taler-arm/taler-exchange-secmod-cs.conf b/taler-arm/taler-exchange-secmod-cs.conf
deleted file mode 100644
index de4044a..0000000
--- a/taler-arm/taler-exchange-secmod-cs.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-exchange-secmod-cs]
-TYPE = simple
-BINARY = taler-exchange-secmod-cs
-OPTIONS = -l $HOME/logs/secmod-cs-%Y-%m-%d.log
diff --git a/taler-arm/taler-exchange-secmod-eddsa.conf b/taler-arm/taler-exchange-secmod-eddsa.conf
deleted file mode 100644
index b83c6cf..0000000
--- a/taler-arm/taler-exchange-secmod-eddsa.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-exchange-secmod-eddsa]
-TYPE = simple
-BINARY = taler-exchange-secmod-eddsa
-OPTIONS = -l $HOME/logs/crypto-eddsa-%Y-%m-%d.log
diff --git a/taler-arm/taler-exchange-secmod-rsa.conf b/taler-arm/taler-exchange-secmod-rsa.conf
deleted file mode 100644
index 32fcc56..0000000
--- a/taler-arm/taler-exchange-secmod-rsa.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-exchange-secmod-rsa]
-TYPE = simple
-BINARY = taler-exchange-secmod-rsa
-OPTIONS = -l $HOME/logs/crypto-rsa-%Y-%m-%d.log
diff --git a/taler-arm/taler-exchange-wirewatch.conf b/taler-arm/taler-exchange-wirewatch.conf
deleted file mode 100644
index 9595f88..0000000
--- a/taler-arm/taler-exchange-wirewatch.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-exchange-wirewatch]
-TYPE = simple
-BINARY = taler-exchange-wirewatch
-OPTIONS = -L INFO -l $HOME/logs/wirewatch-%Y-%m-%d.log
diff --git a/taler-arm/taler-exchange.conf b/taler-arm/taler-exchange.conf
deleted file mode 100644
index 08d5a0d..0000000
--- a/taler-arm/taler-exchange.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-exchange]
-TYPE = simple
-BINARY = taler-exchange-httpd
-OPTIONS = -l $HOME/logs/exchange-%Y-%m-%d.log
diff --git a/taler-arm/taler-landing.conf b/taler-arm/taler-landing.conf
deleted file mode 100644
index 6517125..0000000
--- a/taler-arm/taler-landing.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-landing]
-TYPE = simple
-BINARY = taler-log-adapter
-OPTIONS = $HOME/logs/landing-%Y-%m-%d.log taler-merchant-demos landing
diff --git a/taler-arm/taler-merchant.conf b/taler-arm/taler-merchant.conf
deleted file mode 100644
index acfd354..0000000
--- a/taler-arm/taler-merchant.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-merchant]
-TYPE = simple
-BINARY = taler-merchant-httpd
-OPTIONS = -l $HOME/logs/merchant-%Y-%m-%d.log
diff --git a/taler-arm/taler-postgres-standalone.conf b/taler-arm/taler-postgres-standalone.conf
deleted file mode 100644
index 053d4df..0000000
--- a/taler-arm/taler-postgres-standalone.conf
+++ /dev/null
@@ -1,8 +0,0 @@
-[taler-postgres-standalone]
-TYPE = simple
-BINARY = /usr/lib/postgresql/9.6/bin/postgres
-
-# -D: database configuration files
-# -k: directory hosting the database's listening domain sockets
-# -h "": turns off the TCP/IP layer
-OPTIONS = -D $HOME/talerdb -k $HOME/sockets -h ""
diff --git a/taler-arm/taler-survey.conf b/taler-arm/taler-survey.conf
deleted file mode 100644
index a6de783..0000000
--- a/taler-arm/taler-survey.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-survey]
-TYPE = simple
-BINARY = taler-log-adapter
-OPTIONS = $HOME/logs/survey-%Y-%m-%d.log taler-merchant-demos survey
diff --git a/taler-arm/taler-sync.conf b/taler-arm/taler-sync.conf
deleted file mode 100644
index 0ea8d7e..0000000
--- a/taler-arm/taler-sync.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-sync]
-TYPE = simple
-BINARY = sync-httpd
-OPTIONS = -l $HOME/logs/sync-%Y-%m-%d.log
diff --git a/taler-arm/taler-transfer.conf b/taler-arm/taler-transfer.conf
deleted file mode 100644
index bb97c2d..0000000
--- a/taler-arm/taler-transfer.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-transfer]
-TYPE = simple
-BINARY = taler-exchange-transfer
-OPTIONS = -l $HOME/logs/transfer-%Y-%m-%d.log
diff --git a/taler-arm/taler-twister-bank.conf b/taler-arm/taler-twister-bank.conf
deleted file mode 100644
index f542064..0000000
--- a/taler-arm/taler-twister-bank.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-twister-bank]
-TYPE = simple
-BINARY = taler-twister-service
-OPTIONS = -l $HOME/logs/twister-bank-%Y-%m-%d.log -c $HOME/.config/twister-bank.conf
diff --git a/taler-arm/taler-twister-exchange.conf b/taler-arm/taler-twister-exchange.conf
deleted file mode 100644
index 8150f57..0000000
--- a/taler-arm/taler-twister-exchange.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-twister-exchange]
-TYPE = simple
-BINARY = taler-twister-service
-OPTIONS = -l $HOME/logs/twister-exchange-%Y-%m-%d.log -c $HOME/.config/twister-exchange.conf
diff --git a/taler-arm/taler-twister.conf b/taler-arm/taler-twister.conf
deleted file mode 100644
index fed6b1f..0000000
--- a/taler-arm/taler-twister.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-[taler-twister]
-TYPE = simple
-BINARY = taler-twister-service
-OPTIONS = -l $HOME/logs/twister-%Y-%m-%d.log -c $HOME/.config/taler.conf
diff --git a/taler-sitesbuild/invalidate.sh b/taler-sitesbuild/invalidate.sh
index fbd5321..bb8c22f 100755
--- a/taler-sitesbuild/invalidate.sh
+++ b/taler-sitesbuild/invalidate.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
set -eu
diff --git a/taler-sitesbuild/update_buywith_page.sh b/taler-sitesbuild/update_buywith_page.sh
index 14980a8..a865a6b 100755
--- a/taler-sitesbuild/update_buywith_page.sh
+++ b/taler-sitesbuild/update_buywith_page.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
set -eu
diff --git a/taler-sitesbuild/update_stage.sh b/taler-sitesbuild/update_stage.sh
index 4c9a2b2..19dce15 100755
--- a/taler-sitesbuild/update_stage.sh
+++ b/taler-sitesbuild/update_stage.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
set -eu
@@ -18,7 +18,7 @@ fetch
git submodule update --init --force
# Generate current version under 'work in progress' (wip) folder
./bootstrap
-./configure --variant=stage.taler.net.wip --baseurl="https://stage.taler.net/" --prefix=$HOME
+./configure --baseurl="https://stage.taler.net/" --prefix=$HOME/stage.taler.net.wip
make install
chmod -R g+rx $HOME/stage.taler.net.wip/
# Clean up 'ancient' version (before previous)
diff --git a/taler-sitesbuild/update_twister_page.sh b/taler-sitesbuild/update_twister_page.sh
index 0bbb4e4..6cc3c8c 100755
--- a/taler-sitesbuild/update_twister_page.sh
+++ b/taler-sitesbuild/update_twister_page.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
set -eu
diff --git a/taler-sitesbuild/update_www.sh b/taler-sitesbuild/update_www.sh
index 014c2a9..62acd95 100755
--- a/taler-sitesbuild/update_www.sh
+++ b/taler-sitesbuild/update_www.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
set -eu
@@ -17,7 +17,7 @@ git checkout stable -f
fetch
git submodule update --init --force
./bootstrap
-./configure --variant=www.taler.net.wip --baseurl="https://taler.net/" --prefix=$HOME
+./configure --baseurl="https://taler.net/" --prefix=$HOME/www.taler.net.wip
make install
chmod -R g+rx $HOME/www.taler.net.wip/
diff --git a/typescript/README b/typescript/README
deleted file mode 100644
index 996ec7a..0000000
--- a/typescript/README
+++ /dev/null
@@ -1,9 +0,0 @@
-Building and running the image.
-
-'cd' into 'container/' and run:
-$ podman build -t $tag .
-
-Run it, passing a configuration expression:
-$ podman run -it [-v /host/path/to/config-file:/config.ts] $tag
-
-Please, kill running container with 'podman kill'.
diff --git a/typescript/config.ts b/typescript/config.ts
deleted file mode 100644
index 6b5719a..0000000
--- a/typescript/config.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-var h = require("@gnu-taler/taler-config-lib");
-h()
diff --git a/typescript/container/Dockerfile b/typescript/container/Dockerfile
deleted file mode 100644
index 31a71ba..0000000
--- a/typescript/container/Dockerfile
+++ /dev/null
@@ -1,51 +0,0 @@
-FROM debian:testing
-RUN apt-get update
-RUN apt-get install -y autoconf autopoint libtool texinfo \
- libgcrypt-dev libidn11-dev zlib1g-dev libunistring-dev \
- libjansson-dev python3-pip git recutils libsqlite3-dev \
- libpq-dev postgresql libcurl4-openssl-dev libsodium-dev git \
- libqrencode-dev zip jq nodejs npm openjdk-17-jre nginx procps curl
-RUN pip3 install qrcode click requests jinja2 poetry babel
-
-# NOTE: taler-local is a _copy_ of the official
-# deployment/bin/taler-local, needed due to problems
-# referencing files outside of the Dockerfile's directory.
-COPY taler-local .
-
-# Use taler-local to build from sources for now.
-# This step will be optional, offering to install
-# from Debian packages.
-RUN python3 /taler-local bootstrap --without-repos wallet-core
-RUN python3 /taler-local build
-# Setup the PNPM/TypeScript/Node environment.
-RUN npm install -g pnpm
-RUN pnpm config set global-bin-dir /usr/local/bin
-RUN pnpm install -g typescript
-RUN pnpm install --save-dev @types/node
-
-# Disable logins:
-RUN systemctl mask console-getty
-RUN systemctl mask systemd-logind
-
-# This unit file will start along the boot process.
-# It'll create the database, and finally call the config
-# interpreter.
-COPY prepare.service /etc/systemd/system
-RUN chmod 664 /etc/systemd/system/prepare.service
-RUN systemctl enable prepare
-
-# Install 'taler-config-lib'.
-RUN git clone git://git.taler.net/wallet-core
-RUN cd /wallet-core && ./bootstrap && ./configure && make config-lib
-
-# Can be moved up (next to its unit file); here to
-# avoid huge re-buildings. prepare.sh creates the
-# database and finally calls the configuration interpreter
-# / generator: taler-config.js.
-COPY prepare.sh .
-
-# Compiles the TypeScript file passed in by the user,
-# sets NODE_PATH, and finally triggers the configuration.
-COPY taler-config.sh .
-
-CMD ["/sbin/init"]
diff --git a/typescript/container/prepare.service b/typescript/container/prepare.service
deleted file mode 100644
index 560be16..0000000
--- a/typescript/container/prepare.service
+++ /dev/null
@@ -1,10 +0,0 @@
-[Unit]
-Description=PrepareDatabase
-After=postgresql.service
-
-[Service]
-ExecStart=/prepare.sh
-StandardOutput=journal+console
-
-[Install]
-WantedBy=multi-user.target
diff --git a/typescript/container/prepare.sh b/typescript/container/prepare.sh
deleted file mode 100755
index 7be811e..0000000
--- a/typescript/container/prepare.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-set -e
-
-su -c "createuser --superuser root && createdb taler" postgres
-
-/taler-config.sh
diff --git a/typescript/container/taler-config.sh b/typescript/container/taler-config.sh
deleted file mode 100755
index 74c199a..0000000
--- a/typescript/container/taler-config.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-set -eu
-
-tsc /config.ts
-export NODE_PATH=$(pnpm root -g)
-node /config.js
diff --git a/typescript/container/taler-local b/typescript/container/taler-local
deleted file mode 100755
index bd683eb..0000000
--- a/typescript/container/taler-local
+++ /dev/null
@@ -1,1889 +0,0 @@
-#!/usr/bin/env python3
-
-# This file is part of GNU Taler.
-#
-# GNU Taler is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# GNU Taler is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with GNU Taler. If not, see <https://www.gnu.org/licenses/>.
-
-import qrcode
-import signal
-import socket
-import shutil
-import atexit
-import click
-import types
-import os
-import sys
-import os.path
-import subprocess
-import time
-import random
-import logging
-import json
-from os import listdir
-from os.path import isdir, join, basename
-from pathlib import Path
-from typing import List, Callable
-from shutil import copy
-from multiprocessing import Process
-from string import ascii_letters, ascii_uppercase
-from sys import exit
-from urllib.parse import urljoin, quote
-from os import remove
-import requests
-from collections import OrderedDict
-import errno
-from pathlib import Path
-from subprocess import Popen, DEVNULL, PIPE
-from datetime import datetime
-
-
-TALER_ROOT_DIR = Path.home() / ".taler"
-TALER_PREFIX = Path.home() / ".local"
-
-# Print No Newline.
-def print_nn(msg):
- print(msg, end="")
- sys.stdout.flush()
-
-class Repo:
- def __init__(self, name, url, deps, builder, version="master"):
- self.name = name
- self.url = url
- self.deps = deps
- self.builder = builder
- self.version = version
-
-@click.group()
-def cli():
- pass
-
-# Parses the command-line-given and comma-separated repos list
-# into a list of names.
-def split_repos_list(repos):
- return [repo for repo in repos.split(",") if repo != ""]
-
-# fetch the remote. No timestamp deletion here
-def update_checkout(r: Repo, p: Path):
- """Clean the repository's working directory and
- update it to the match the latest version of the upstream branch
- that we are tracking."""
- subprocess.run(["git", "-C", str(p), "clean", "-fdx"], check=True) # remove unversioned files.
-
- # Equivalent to "git pull". Does nothing if in detached HEAD
- # but pulls new code into the local copy otherwise.
- subprocess.run(["git", "-C", str(p), "fetch"], check=True)
- subprocess.run(["git", "-C", str(p), "reset"], check=True)
-
- # Makes the last step "--hard", namely removes files not
- # belonging to the current version.
- res = subprocess.run(
- [
- "git",
- "-C",
- str(p),
- "rev-parse",
- "--abbrev-ref",
- "--symbolic-full-name",
- "@{u}",
- ],
- stderr=subprocess.DEVNULL,
- stdout=subprocess.PIPE,
- encoding="utf-8",
- )
- if res.returncode != 0:
- ref = "HEAD"
- else:
- ref = res.stdout.strip("\n ")
- print(f"resetting {r.name} to ref {ref}")
- subprocess.run(["git", "-C", str(p), "reset", "--hard", ref], check=True)
-
-
-def default_configure(*extra):
- extra_list = list(extra)
- subprocess.run(["./configure", f"--prefix={TALER_PREFIX}"] + extra_list, check=True)
-
-def pyconfigure(*extra):
- """For python programs, --prefix doesn't work."""
- subprocess.run(["./configure"] + list(extra), check=True)
-
-def build_libeufin(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_libmicrohttpd(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure("--disable-doc")
- subprocess.run(["make"], check=True)
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_gnunet(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- "--disable-documentation",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_exchange(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_wallet(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_twister(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-exchange={pfx}",
- f"--with-gnunet={pfx}",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_merchant(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-exchange={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_sync(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-exchange={pfx}",
- f"--with-merchant={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-
-def build_demos(r, p):
- update_checkout(r, p)
- pfx = Path.home() / ".local"
- pyconfigure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_backoffice(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"])
- subprocess.run(["./configure"])
- subprocess.run(["make", "build-single"])
- (p / "taler-buildstamp").touch()
-
-repos = {
- "libmicrohttpd": Repo(
- "libmicrohttpd",
- "git://git.gnunet.org/libmicrohttpd.git",
- [],
- build_libmicrohttpd,
- ),
- "gnunet": Repo(
- "gnunet",
- "git://git.gnunet.org/gnunet.git",
- ["libmicrohttpd"],
- build_gnunet
- ),
- "exchange": Repo(
- "exchange",
- "git://git.taler.net/exchange",
- ["gnunet", "libmicrohttpd"],
- build_exchange,
- ),
- "merchant": Repo(
- "merchant",
- "git://git.taler.net/merchant",
- ["exchange","libmicrohttpd","gnunet"],
- build_merchant,
- ),
- "sync": Repo(
- "sync",
- "git://git.taler.net/sync",
- ["exchange",
- "merchant",
- "gnunet",
- "libmicrohttpd"],
- build_sync,
- ),
- "wallet-core": Repo(
- "wallet-core",
- "git://git.taler.net/wallet-core",
- [],
- build_wallet,
- ),
- "libeufin": Repo(
- "libeufin",
- "git://git.taler.net/libeufin.git",
- [],
- build_libeufin,
- ),
- "taler-merchant-demos": Repo(
- "taler-merchant-demos",
- "git://git.taler.net/taler-merchant-demos",
- [],
- build_demos,
- ),
- "twister": Repo(
- "twister",
- "git://git.taler.net/twister",
- ["gnunet", "libmicrohttpd"],
- build_twister,
- ),
-}
-
-def get_repos_names() -> List[str]:
- r_dir = TALER_ROOT_DIR / "sources"
- if not r_dir.is_dir():
- print(f"'{r_dir}' not found. Did bootstrap run?")
- return []
- return [el for el in listdir(r_dir) if isdir(join(r_dir, el)) and repos.get(el)]
-
-# Get 'Repo' objects (globally defined),
-# using their names as index.
-def load_repos(reposNames) -> List[Repo]:
- ret = []
- for repo in repos.keys():
- if repo in reposNames:
- ret.append(repos[repo])
- return ret
-
-# Return the list of repos (equipped with their version)
-# to install.
-def load_repos_with_envcfg(envcfg_path) -> List[Repo]:
- envcfg_path = Path(envcfg_path)
- if not os.path.isfile(envcfg_path):
- print(f"{envcfg_path} is not a file")
- sys.exit(1)
- cfgtext = envcfg_path.read_text()
- cfg = types.ModuleType("taler_deployment_cfg")
- try:
- exec(cfgtext, cfg.__dict__)
- except SyntaxError:
- print(f"{envcfg_path} is not Python.")
- exit(1)
- ret = []
- for repo in repos.keys():
- try:
- envcfg_entry = getattr(cfg, "tag_" + repo.replace("-", "_"))
- except AttributeError:
- # 'env' files doesn't have this repo, continue looping.
- continue
- repos[repo].version = envcfg_entry
- ret.append(repos[repo])
- return ret
-
-# Flag as stale the projects set on 'master' that
-# aren't in line with upstream. Detached head projects
-# aren't affected.
-def update_repos(repos: List[Repo], force) -> None:
- for r in repos:
- r_dir = TALER_ROOT_DIR / "sources" / r.name
- subprocess.run(["git", "-C", str(r_dir), "fetch"], check=True)
- res = subprocess.run(
- ["git", "-C", str(r_dir), "status", "-sb"],
- check=True,
- stdout=subprocess.PIPE,
- encoding="utf-8",
- )
- if "behind" in res.stdout or force:
- print(f"{r.name} will be compiled")
- s = r_dir / "taler-buildstamp"
- if s.exists():
- s.unlink()
-
-# projects without the build timestamp are considered stale,
-# even if one of their dependencies _got_ marked as stale.
-def get_stale_repos(repos: List[Repo]) -> List[Repo]:
- timestamps = {}
- stale = []
- for r in repos:
- r_dir = TALER_ROOT_DIR / "sources" / r.name
- s = r_dir / "taler-buildstamp"
- if not s.exists():
- timestamps[r.name] = time.time()
- stale.append(r)
- continue
- ts = timestamps[r.name] = s.stat().st_mtime
- for dep in r.deps:
- # When 'dep' in not found, it has been
- # excluded from the compilation.
- if timestamps.get("dep", 0) > ts:
- stale.append(r)
- break
- return stale
-
-@cli.command()
-@click.option(
- "--without-repos", metavar="WITHOUT REPOS",
- help="WITHOUT REPOS is a unspaced and comma-separated list \
-of the repositories to _exclude_ from compilation",
- default="")
-@click.option(
- "--only-repos", metavar="ONLY REPOS",
- help="ONLY REPOS is a unspaced and comma-separated exclusive list \
-of the repositories to include in the compilation",
- default="")
-@click.option(
- "--dry/--no-dry", default=False,
- help="Only getting changes, without actual build."
-)
-@click.option(
- "--with-envcfg", metavar="PATH",
- help="python file pinning each codebase version.",
-)
-# Normally, we don't rebuild dependent projects when one
-# of their dependency changed. This lets check whether non
-# breaking changes are really so; this option invalidates
-# this policy by letting all the codebases be compiled.
-@click.option(
- "--force/--no-force", default=False,
- help="build all the projects.",
-)
-def build(without_repos, only_repos, dry, with_envcfg, force) -> None:
- """Build the deployment from source."""
- if only_repos != "" and without_repos != "":
- print("Either use --only-repos or --without-repos")
- exit(1)
- repos_names = get_repos_names()
- if only_repos != "":
- repos_names = list(filter(
- lambda x: x in split_repos_list(only_repos),
- repos_names
- ))
- if without_repos != "":
- repos_names = list(filter(
- lambda x: x not in split_repos_list(without_repos),
- repos_names
- ))
- if with_envcfg:
- target_repos = load_repos_with_envcfg(with_envcfg)
- else:
- target_repos = load_repos(repos_names)
- # enforce version here.
- sources = TALER_ROOT_DIR / "sources"
- for r in target_repos:
- subprocess.run(
- ["git", "-C", str(sources / r.name),
- "checkout", "-q", "-f",
- r.version, "--"], check=True
- )
- update_repos(target_repos, force)
- stale = get_stale_repos(target_repos)
- print(f"found stale repos: {[r.name for r in stale]}")
- for r in stale:
- # Inform, if a dependency is not being built:
- diff = set(r.deps) - set(repos_names)
- if len(diff) > 0:
- print(f"Info: those dependencies are not being built: {diff}")
- p = TALER_ROOT_DIR / "sources" / r.name
- os.chdir(str(p))
- if dry:
- print("dry running")
- continue
- r.builder(r, p)
-
-# Only git-clone the codebases. The 'build' step
-# will run all the update logic. At this point, a
-# 'env' file - as well as the --repos option - will
-# only express which codebases are to clone.
-@cli.command()
-@click.option(
- "--repos", "-r",
- metavar="REPOS",
- help="REPOS is a unspaced and comma-separated list of the repositories to clone.",
- default="libmicrohttpd,gnunet,exchange,merchant,wallet-core,taler-merchant-demos,sync,libeufin",
- show_default=True,
-)
-@click.option(
- "--without-repos",
- metavar="REPOS",
- help="REPOS is a unspaced and comma-separated list of the repositories NOT to clone."
-)
-@click.option(
- "--list-repos/--no-list-repos", default=False,
- help="Lists the repositories that were bootstrapped.",
-)
-@click.option(
- "--with-envcfg", metavar="PATH",
- help="python file pinning each codebase version.",
-)
-@click.option(
- "--dry/--no-dry", default=False,
- help="Print steps, without downloading any repository.",
-)
-def bootstrap(list_repos, repos, with_envcfg, dry, without_repos) -> None:
- """Clone all the specified repositories."""
- # Only saying _which_ repo were installed. No further action
- if list_repos:
- for repo in get_repos_names():
- print(repo)
- return
-
- # Download the repositories.
- def clone_repos(repos: List[Repo]):
- if len(repos) == 0:
- print("No repositories can be checked out. Spelled correctly?")
- return
- sources = TALER_ROOT_DIR / "sources"
- for r in repos:
- print(f"Bootstrapping '{r.name}', at version '{r.version}'")
- if dry:
- print("dry running")
- continue
- r_dir = sources / r.name
- if not r_dir.exists():
- r_dir.mkdir(parents=True, exist_ok=True)
- subprocess.run(
- ["git", "-C", str(sources),
- "clone", r.url], check=True
- )
- subprocess.run(
- ["git", "-C", str(r_dir),
- "checkout", "-q", "-f",
- r.version, "--"], check=True
- )
-
- # Get list of to-be-cloned repos from the 'env' file.
- if with_envcfg:
- # 'with_envcfg' is a path to a "envcfg.py" file.
- preparedRepos = load_repos_with_envcfg(with_envcfg)
- # Get list of to-be-cloned repos from the command line
- # (or its default)
- else:
- # 'repos' is here "repo1,repo2,.."
- reposList = split_repos_list(repos)
- # 'reposList' is here ["repo1", "repo2", ...]
- preparedRepos = load_repos(reposList)
- if without_repos:
- for exclude_repo in split_repos_list(without_repos):
- preparedRepos = [el for el in preparedRepos if el.name != exclude_repo]
- clone_repos(preparedRepos)
-
-# Globals sharead accross multiple sub-commands:
-# needed to configure and launch the reverse proxy.
-REV_PROXY_HOSTNAME = "localhost"
-REV_PROXY_PORT = "8080"
-REV_PROXY_NETLOC = REV_PROXY_HOSTNAME + ":" + REV_PROXY_PORT
-REV_PROXY_PROTO = "http"
-REV_PROXY_URL = f"{REV_PROXY_PROTO}://{REV_PROXY_NETLOC}"
-UNIX_SOCKETS_DIR = TALER_ROOT_DIR / "sockets"
-LOG_DIR = TALER_ROOT_DIR / "logs"
-# needed to create the customer's bank account and
-# to let them subsequently withdraw via the Access API.
-CUSTOMER_BANK_ACCOUNT = "sandbox-account-customer"
-CUSTOMER_BANK_PASSWORD = "secret"
-# needed along preparation and later to withdraw via
-# the Access API.
-CURRENCY = "EUR"
-
-@cli.command()
-@click.option(
- "--x-forwarded-host", metavar="HOST",
- help="Instruct Nginx to set HOST as the X-Forwarded-Host.",
- default=REV_PROXY_NETLOC
-)
-@click.option(
- "--x-forwarded-proto", metavar="PROTO",
- help="Instruct Nginx to set PROTO as the X-Forwarded-Proto.",
- default="http"
-)
-@click.option(
- "--postgres-db-name", metavar="DBNAME",
- help="Set postgres database name for all the services.",
- default="taler"
-)
-def prepare(x_forwarded_host, x_forwarded_proto, postgres_db_name):
- """Generate configuration, run-time blobs, instances, euFin accounts."""
- def is_serving(check_url, tries=10):
- for i in range(tries):
- try:
- print_nn(".")
- # Raises if the service is not reachable.
- response = requests.get(
- check_url,
- timeout=1
- )
- # The reverse proxy may return 500 if the
- # end service is not ready, therefore this
- # case should be tolerated.
- response.raise_for_status()
- except:
- time.sleep(0.5)
- if i == tries - 1:
- return False
- continue
- break
- return True
-
- def fail(reason=None):
- if reason:
- print("ERROR:", reason)
- exit(1)
-
- def kill(proc):
- proc.terminate()
- proc.wait()
-
- def get_nexus_cli_env(
- username,
- password,
- nexus_url
- ):
- env = os.environ.copy()
- env["LIBEUFIN_NEXUS_USERNAME"] = username
- env["LIBEUFIN_NEXUS_PASSWORD"] = password
- env["LIBEUFIN_NEXUS_URL"] = nexus_url
- return env
-
- def get_sandbox_cli_env(
- username, password
- ):
- env = os.environ.copy()
- env["LIBEUFIN_SANDBOX_USERNAME"] = username
- env["LIBEUFIN_SANDBOX_PASSWORD"] = password
- return env
-
- # Will be extended to include a SANDBOX_ADMIN_TOKEN
- # that will obsolete the 'superuser' flag of ordinary
- # user accounts. Likewise, the client side will be
- # modified to use such token.
- def get_sandbox_server_env(db_file, base_url, admin_password):
- env = os.environ.copy()
- env["LIBEUFIN_SANDBOX_DB_CONNECTION"] = f"jdbc:sqlite:{db_file}"
- env["LIBEUFIN_SANDBOX_BASE_URL"] = base_url
- env["LIBEUFIN_SANDBOX_ADMIN_PASSWORD"] = admin_password
- return env
-
- def get_nexus_server_env(db_file, base_url):
- env = os.environ.copy()
- env["LIBEUFIN_NEXUS_DB_CONNECTION"] = f"jdbc:sqlite:{db_file}"
- env["LIBEUFIN_NEXUS_BASE_URL"] = base_url
- return env
-
- def urljoin_nodrop(a, b):
- a = a + "/" # urljoin will drop extra trailing slashes.
- b = "/".join([x for x in b.split("/") if x != ""]) # remove leading slashes.
- return urljoin(a, b)
-
- def prepare_nexus_account(
- ebics_url,
- ebics_host_id,
- ebics_partner_id,
- ebics_user_id,
- bank_connection_name,
- bank_account_name_sandbox,
- bank_account_name_nexus,
- env
- ):
- # make connection
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "new-ebics-connection",
- "--ebics-url", ebics_url,
- "--host-id", ebics_host_id,
- "--partner-id", ebics_partner_id,
- "--ebics-user-id", ebics_user_id,
- bank_connection_name
- ],
- env
- ).run()
- # connect
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "connect", bank_connection_name
- ],
- env
- ).run()
- # Import bank account
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "download-bank-accounts",
- bank_connection_name
- ],
- env
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "import-bank-account",
- "--offered-account-id",
- bank_account_name_sandbox,
- "--nexus-bank-account-id",
- bank_account_name_nexus,
- bank_connection_name
- ],
- env
- ).run()
- # Set background tasks.
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "accounts",
- "task-schedule", bank_account_name_nexus,
- "--task-type", "submit",
- "--task-name", "submit-payments-each-second",
- "--task-cronspec", "* * *"
- ],
- env
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "accounts",
- "task-schedule", bank_account_name_nexus,
- "--task-type", "fetch",
- "--task-name", "fetch-reports-each-second",
- "--task-cronspec", "* * *",
- "--task-param-level", "report",
- "--task-param-range-type", "latest"
- ],
- env
- ).run()
-
- def get_sandbox_account_info(
- sandbox_url,
- bank_account_label,
- password,
- ):
- customer_env = os.environ.copy()
- customer_env["LIBEUFIN_SANDBOX_USERNAME"] = bank_account_label
- customer_env["LIBEUFIN_SANDBOX_PASSWORD"] = password
- demobank_url = urljoin_nodrop(sandbox_url, "/demobanks/default")
- r = Command([
- f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
- "--sandbox-url", demobank_url,
- "demobank", "info",
- "--bank-account", bank_account_label],
- env = customer_env,
- capture_stdout=True
- ).run()
- return json.loads(r)
-
- def prepare_sandbox_account(
- sandbox_url,
- ebics_host_id,
- ebics_partner_id,
- ebics_user_id,
- person_name,
- # This value is BOTH a username
- # and a bank account label.
- bank_account_name,
- password,
- is_public=False
- ):
- demobank_url = urljoin_nodrop(sandbox_url, "/demobanks/default")
- user_env = os.environ.copy()
- user_env["LIBEUFIN_SANDBOX_USERNAME"] = bank_account_name
- user_env["LIBEUFIN_SANDBOX_PASSWORD"] = password
- register_cmd = [
- f"{TALER_PREFIX}/bin/libeufin-cli",
- "sandbox", "--sandbox-url", demobank_url,
- "demobank", "register"
- ]
- if is_public:
- register_cmd.append("--public")
- Command(register_cmd, env = user_env).run()
- admin_env = os.environ.copy()
- admin_env["LIBEUFIN_SANDBOX_USERNAME"] = SANDBOX_ADMIN_USERNAME
- admin_env["LIBEUFIN_SANDBOX_PASSWORD"] = SANDBOX_ADMIN_PASSWORD
- Command([
- f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
- "--sandbox-url", demobank_url,
- "demobank", "new-ebicssubscriber",
- "--host-id", ebics_host_id,
- "--partner-id", ebics_partner_id,
- "--user-id", ebics_user_id,
- "--bank-account", bank_account_name
- ],
- env = admin_env
- ).run()
-
-
- WIRE_METHOD = "iban"
- # euFin URLs
- SANDBOX_URL = REV_PROXY_URL + "/sandbox"
- NEXUS_URL = REV_PROXY_URL + "/nexus"
-
- # Filesystem's paths
- CFG_OUTDIR = TALER_ROOT_DIR / "config"
- TALER_RUNTIME_DIR = TALER_ROOT_DIR / "runtime"
- TALER_DATA_DIR = TALER_ROOT_DIR / "data"
- TALER_UNIT_FILES_DIR = systemd_user_dir = Path.home() / ".config" / "systemd" / "user"
-
- def get_link(path = ""):
- return x_forwarded_proto + "://" + x_forwarded_host + path
-
- def create_tip_reserve():
- payto = Command([
- f"{TALER_PREFIX}/bin/taler-merchant-setup-reserve",
- "--amount", f"{CURRENCY}:20",
- "--exchange-url", get_link("/exchange/"),
- "--merchant-url", get_link("/merchant-backend/instances/survey/"),
- "--apikey", f"Bearer {FRONTENDS_API_TOKEN}",
- "--wire-method", WIRE_METHOD],
- capture_stdout=True
- ).run()
-
- Command([
- f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox", "--sandbox-url",
- SANDBOX_URL + "/demobanks/default/", "demobank",
- "new-transaction", "--bank-account", "sandbox-account-survey",
- "--payto-with-subject", payto, "--amount", f"{CURRENCY}:20"],
- env = get_sandbox_cli_env(
- username = "sandbox-account-survey",
- password = ALL_INSTANCES_BANK_PASSWORD
- )).run()
-
- def get_random_iban():
- cc_no_check = 131400 # is "DE00"
- bban = "".join(random.choices("0123456789", k=4))
- check_digits = 98 - (int(f"{bban}{cc_no_check}") % 97)
- return "DE" + (f"0{check_digits}"[-2:]) + bban
-
- # IBANs
-
- IBAN_MERCHANT_DEFAULT = get_random_iban()
- IBAN_MERCHANT_DEMOSHOP = get_random_iban()
-
- # Instances
- INSTANCES = [
- dict(name="GNUnet", isPublic=True),
- dict(name="Taler", isPublic=True),
- dict(name="Tor", isPublic=True),
- dict(name="survey"),
- dict(name="blog"),
- ]
-
- # Credentials / API keys
- EXCHANGE_NEXUS_USERNAME = "exchange-nexus-user"
- EXCHANGE_NEXUS_PASSWORD = "exchange-nexus-password"
- FRONTENDS_API_TOKEN = "secret-token:secret"
- TALER_MERCHANT_TOKEN = "secret-token:secret"
- ALL_INSTANCES_BANK_PASSWORD = "secret"
- EXCHANGE_BANK_ACCOUNT_SANDBOX = "sandbox-account-exchange"
- EXCHANGE_BANK_ACCOUNT_PASSWORD = "secret"
-
- # EBICS
- EBICS_HOST_ID = "ebicsDeployedHost"
- EXCHANGE_EBICS_USER_ID = "exchangeEbicsUserId"
- EXCHANGE_EBICS_PARTNER_ID = "exchangeEbicsPartnerId"
- EBICS_URL = REV_PROXY_URL + "/sandbox/ebicsweb"
-
- # euFin
- EXCHANGE_BANK_ACCOUNT_NEXUS = "exchange-imported-account-nexus"
- EXCHANGE_BANK_CONNECTION = "exchange-ebics-connection"
- NEXUS_DB_FILE = "/tmp/nexus.sqlite"
- SANDBOX_DB_FILE = "/tmp/sandbox.sqlite"
- EXCHANGE_FACADE_NAME = "exchange-taler-facade"
- SANDBOX_ADMIN_USERNAME = "admin"
- SANDBOX_ADMIN_PASSWORD = "secret"
-
- class Command:
- def __init__(
- self, cmd, env=os.environ, log_dir=LOG_DIR,
- custom_name=None, capture_stdout=False
- ):
- if len(cmd) == 0:
- fail("Command to execute was given empty.")
- self.name = custom_name if custom_name else basename(cmd[0])
- self.cmd = cmd
- self.capture_stdout = capture_stdout
- self.log_dir = log_dir
- self.env = env
-
- def run(self):
- self.do()
- return_code = self.handle.wait()
- self.cleanup() # Mainly closes the log file.
- if return_code != 0:
- fail(f"Command {self.name} failed. Logs in {self.log_dir}")
- if self.capture_stdout:
- return self.handle.communicate()[0].decode("utf-8").rstrip()
-
- def get_log_filename(self):
- return self.log_file.name
-
- def cleanup(self):
- self.log_file.flush()
- self.log_file.close()
-
- def do(self):
- if not self.log_dir.is_dir():
- os.makedirs(self.log_dir)
- try:
- log_filename = self.log_dir / f"{self.name}.log"
- self.log_file = open(log_filename, "a+")
- except Exception as error:
- fail(f"Could not open log file: {log_filename}: {error}")
- try:
- self.handle = Popen(
- self.cmd, # list
- stdin=DEVNULL,
- stdout=self.log_file if not self.capture_stdout else PIPE,
- stderr=self.log_file,
- env=self.env
- )
- except Exception as error:
- fail(f"Could not execute: {' '.join(self.cmd)}: {error}")
-
- class ConfigFile:
- def __init__(self, filename):
- self.sections = OrderedDict()
- self.filename = filename
-
- def destroy(self):
- del self.sections
- self.sections = OrderedDict()
-
- def cfg_put(self, section_name, key, value):
- s = self.sections[section_name] = self.sections.get(section_name, OrderedDict())
- s[key] = value
-
- def cfg_write(self, outdir):
- if outdir:
- if not os.path.isdir(outdir):
- os.makedirs(outdir)
- fstream = open(os.path.join(outdir, self.filename), "w")
- else:
- fstream = open(sys.stdout)
-
- for section_name, section in self.sections.items():
- fstream.write("[" + section_name + "]" + "\n")
- for key, value in section.items():
- fstream.write(key + " = " + value + "\n")
- fstream.write("\n")
- fstream.close()
-
- def config_specify_master_pub(
- filename,
- currency,
- exchange_master_pub
- ):
- Command([
- f"{TALER_PREFIX}/bin/taler-config", "-c", filename,
- "-s", "exchange", "-o", "master_public_key",
- "-V", exchange_master_pub
- ]).run()
- Command([
- f"{TALER_PREFIX}/bin/taler-config", "-c", filename,
- "-s", f"merchant-exchange-{currency}",
- "-o", "master_key",
- "-V", exchange_master_pub
- ]).run()
-
- # When called, there is no exchange master pub yet.
- # taler-exchange-offline will prouce the key _after_
- # taler.conf is generated. Only after that, we'll
- # specify the master key where it is missing; namely
- # in the merchant backend and exchange HTTP daemon sections.
-
- def config_main(
- filename,
- outdir,
- unix_sockets_dir,
- currency,
- rev_proxy_url,
- wire_method,
- exchange_wire_gateway_username,
- exchange_wire_gateway_password,
- frontend_api_key,
- taler_runtime_dir,
- postgres_db_name
- ):
- def coin(
- obj,
- currency,
- name,
- value,
- d_withdraw="3 years",
- d_spend="5 years",
- d_legal="10 years",
- f_withdraw="0.01",
- f_deposit="0.01",
- f_refresh="0.01",
- f_refund="0.01",
- rsa_keysize="2048",
- ):
- sec = "coin_" + currency + "_" + name
- obj.cfg_put(sec, "cipher", "RSA")
- obj.cfg_put(sec, "value", currency + ":" + value)
- obj.cfg_put(sec, "duration_withdraw", d_withdraw)
- obj.cfg_put(sec, "duration_spend", d_spend)
- obj.cfg_put(sec, "duration_legal", d_legal)
- obj.cfg_put(sec, "fee_withdraw", currency + ":" + f_withdraw)
- obj.cfg_put(sec, "fee_refresh", currency + ":" + f_refresh)
- obj.cfg_put(sec, "fee_refund", currency + ":" + f_refund)
- obj.cfg_put(sec, "fee_deposit", currency + ":" + f_deposit)
- obj.cfg_put(sec, "rsa_keysize", rsa_keysize)
-
- obj = ConfigFile("taler.conf")
- obj.cfg_put("paths", "TALER_DATA_HOME", str(TALER_DATA_DIR))
- if not taler_runtime_dir.is_dir():
- os.makedirs(taler_runtime_dir)
- obj.cfg_put("paths", "TALER_RUNTIME_DIR", str(taler_runtime_dir))
- obj.cfg_put("taler", "CURRENCY", currency)
- obj.cfg_put("taler", "CURRENCY_ROUND_UNIT", f"{currency}:0.01")
-
- obj.cfg_put("bank", "serve", "uwsgi")
- obj.cfg_put("bank", "uwsgi_serve", "unix")
- obj.cfg_put("bank", "uwsgi_unixpath", str(unix_sockets_dir / "bank.sock"))
- obj.cfg_put("bank", "uwsgi_unixpath_mode", "660")
- obj.cfg_put("bank", "database", "taler")
- obj.cfg_put("bank", "max_debt", "%s:500.0" % currency)
- obj.cfg_put("bank", "max_debt_bank", "%s:1000000000.0" % currency)
- obj.cfg_put("bank", "allow_registrations", "YES")
- obj.cfg_put("bank", "base_url", rev_proxy_url + "/bank/")
- obj.cfg_put("bank", "database", f"postgres:///{postgres_db_name}")
- obj.cfg_put("bank", "suggested_exchange", rev_proxy_url + "/exchange/")
-
- obj.cfg_put("donations", "serve", "http")
- obj.cfg_put("donations", "http_serve", "unix")
- obj.cfg_put("donations", "http_unixpath", str(unix_sockets_dir / "donations.sock"))
- obj.cfg_put("donations", "http_unixpath_mode", "660")
-
- obj.cfg_put("landing", "serve", "http")
- obj.cfg_put("landing", "http_serve", "unix")
- obj.cfg_put("landing", "http_unixpath", str(unix_sockets_dir / "landing.sock"))
- obj.cfg_put("landing", "http_unixpath_mode", "660")
-
- obj.cfg_put("blog", "serve", "http")
- obj.cfg_put("blog", "http_serve", "unix")
- obj.cfg_put("blog", "http_unixpath", str(unix_sockets_dir / "blog.sock"))
- obj.cfg_put("blog", "http_unixpath_mode", "660")
-
- obj.cfg_put("survey", "serve", "http")
- obj.cfg_put("survey", "http_serve", "unix")
- obj.cfg_put("survey", "http_unixpath", str(unix_sockets_dir / "survey.sock"))
- obj.cfg_put("survey", "http_unixpath_mode", "660")
- obj.cfg_put("survey", "bank_password", "x")
-
- obj.cfg_put("merchant", "serve", "unix")
- obj.cfg_put("merchant", "unixpath", str(unix_sockets_dir / "merchant-backend.sock"))
- obj.cfg_put("merchant", "wire_transfer_delay", "0 s")
- obj.cfg_put("merchant", "default_max_wire_fee", currency + ":" + "0.01")
- obj.cfg_put("merchant", "default_max_deposit_fee", currency + ":" + "0.05")
- obj.cfg_put("merchantdb-postgres", "config", f"postgres:///{postgres_db_name}")
-
- obj.cfg_put("frontends", "backend", rev_proxy_url + "/merchant-backend/")
- obj.cfg_put(
- "merchant-exchange-{}".format(currency),
- "exchange_base_url", rev_proxy_url + "/exchange/",
- )
- obj.cfg_put(
- "merchant-exchange-{}".format(currency),
- "currency", currency
- )
- obj.cfg_put("auditor", "serve", "unix")
- # FIXME: both below used?
- obj.cfg_put("auditor", "base_url", rev_proxy_url + "/auditor")
- obj.cfg_put("auditor", "auditor_url", rev_proxy_url + "/auditor")
- obj.cfg_put("auditor", "unixpath", str(unix_sockets_dir / "auditor.sock"))
- obj.cfg_put("auditor", "tiny_amount", currency + ":0.01")
-
- obj.cfg_put(
- "taler-exchange-secmod-eddsa",
- "unixpath",
- str(unix_sockets_dir / "exchange-secmod-eddsa.sock")
- )
- obj.cfg_put(
- "taler-exchange-secmod-cs",
- "unixpath",
- str(unix_sockets_dir / "exchange-secmod-cs.sock")
- )
- obj.cfg_put("taler-exchange-secmod-cs", "sm_priv_key",
- "${TALER_DATA_HOME}/taler-exchange-secmod-cs/secmod-private-key"
- )
- obj.cfg_put(
- "taler-exchange-secmod-rsa",
- "unixpath",
- str(unix_sockets_dir / "exchange-secmod-rsa.sock")
- )
- obj.cfg_put("taler-exchange-secmod-rsa", "sm_priv_key",
- "${TALER_DATA_HOME}/taler-exchange-secmod-rsa/secmod-private-key"
- )
- obj.cfg_put("exchange", "base_url", rev_proxy_url + "/exchange/")
- obj.cfg_put("exchange", "serve", "unix")
- obj.cfg_put("exchange", "unixpath", str(unix_sockets_dir / "exchange.sock"))
- obj.cfg_put("exchange", "terms_etag", "0")
- obj.cfg_put("exchange", "terms_dir", "$HOME/.local/share/taler-exchange/tos")
- obj.cfg_put("exchange", "privacy_etag", "0")
- obj.cfg_put("exchange", "privacy_dir", "$HOME/.local/share/taler-exchange/pp")
-
- obj.cfg_put("exchangedb-postgres", "db_conn_str", f"postgres:///{postgres_db_name}")
- obj.cfg_put("exchangedb-postgres", "config", f"postgres:///{postgres_db_name}")
- obj.cfg_put("auditordb-postgres", "db_conn_str", f"postgres:///{postgres_db_name}")
- obj.cfg_put("auditordb-postgres", "config", f"postgres:///{postgres_db_name}")
- obj.cfg_put("exchange-account-1", "enable_debit", "yes")
- obj.cfg_put("exchange-account-1", "enable_credit", "yes")
- obj.cfg_put("merchant-account-merchant",
- "wire_response",
- "${TALER_DATA_HOME}/merchant/wire/merchant.json",
- )
- obj.cfg_put("merchant-account-merchant", "wire_file_mode", "770")
-
- obj.cfg_put("frontends", "backend_apikey", f"{frontend_api_key}")
- coin(obj, currency, "ct_10", "0.10")
- coin(obj, currency, "1", "1")
- coin(obj, currency, "2", "2")
- coin(obj, currency, "5", "5")
- coin(obj, currency, "10", "10")
- coin(obj, currency, "1000", "1000")
- obj.cfg_write(outdir)
- return obj
-
- def config_sync(
- filename, outdir,
- unix_sockets_dir,
- currency, api_key,
- rev_proxy_url,
- postgres_db_name
- ):
- obj = ConfigFile(filename)
- obj.cfg_put("taler", "currency", currency)
- obj.cfg_put("sync", "serve", "unix")
- obj.cfg_put("sync", "unixpath", str(unix_sockets_dir / "sync.sock"))
- obj.cfg_put("sync", "apikey", f"Bearer secret-token:{api_key}")
- obj.cfg_put("sync", "annual_fee", f"{currency}:0.1")
- obj.cfg_put("sync", "fulfillment_url", "taler://fulfillment-success/")
- obj.cfg_put("sync", "payment_backend_url", rev_proxy_url + "merchant-backend/instances/Taler/")
- obj.cfg_put("syncdb-postgres", "config", f"postgres:///{postgres_db_name}")
- obj.cfg_write(outdir)
-
- def unit_file_content(description, cmd, env=None):
- executable_name = cmd.split(" ")[0].split("/")[-1]
- content = (
- "[Unit]\n"
- f"Description={description}\n"
- "[Service]\n"
- f"ExecStart={cmd}\n"
- f"StandardOutput=append:{LOG_DIR / executable_name}.log\n"
- f"StandardError=append:{LOG_DIR / executable_name}.log"
- )
- if env:
- content += f"\nEnvironmentFile={env}"
- return content
-
-
- print_nn("Ensure no service is running...")
- if is_serving(REV_PROXY_URL + "/", tries=3):
- fail("Reverse proxy is unexpectedly running!")
- if UNIX_SOCKETS_DIR.is_dir():
- for left_socket in os.listdir(UNIX_SOCKETS_DIR):
- s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- socket_file = str(UNIX_SOCKETS_DIR / left_socket)
- if s.connect_ex(socket_file.encode("utf-8")) == 0:
- fail(f"A service is unexpectedly running and bound to {socket_file}!")
- print(" OK")
-
- print_nn("Remove stale data and config...")
- if TALER_DATA_DIR.exists():
- shutil.rmtree(TALER_DATA_DIR)
- if TALER_RUNTIME_DIR.exists():
- shutil.rmtree(TALER_RUNTIME_DIR)
- if CFG_OUTDIR.exists():
- shutil.rmtree(CFG_OUTDIR)
- print(" OK")
-
- print_nn("Generate preliminary taler.conf...")
- mc = config_main(
- "taler.conf",
- outdir=CFG_OUTDIR,
- unix_sockets_dir=UNIX_SOCKETS_DIR,
- currency=CURRENCY,
- rev_proxy_url=get_link(), # Gets X-Forwarded-* compatible base URL.
- wire_method=WIRE_METHOD,
- exchange_wire_gateway_username=EXCHANGE_NEXUS_USERNAME,
- exchange_wire_gateway_password=EXCHANGE_NEXUS_PASSWORD,
- frontend_api_key=FRONTENDS_API_TOKEN,
- taler_runtime_dir=TALER_RUNTIME_DIR,
- postgres_db_name=postgres_db_name
- )
- print(" OK")
-
- print_nn("Installing SystemD unit files...")
- if not systemd_user_dir.exists():
- systemd_user_dir.mkdir(parents=True, exist_ok=True)
-
- if not TALER_UNIT_FILES_DIR.exists():
- TALER_UNIT_FILES_DIR.mkdir(parents=True, exist_ok=True)
-
- # Internal redirect of X-Forwarded-Host's port
- # to the port Nginx binds to. Allows clients
- # connecting from within a container to still
- # reach services at X-Forwarded-Host.
- try:
- x_forwarded_port = x_forwarded_host.split(":")[1]
- except IndexError:
- x_forwarded_port = None
-
- need_redirect = (x_forwarded_port) and (x_forwarded_port != REV_PROXY_PORT)
- with open(TALER_UNIT_FILES_DIR / "taler-local-port-redirect.service", "w") as port_redirect_unit:
- port_redirect_unit.write(unit_file_content(
- description = "Port redirect allowing configuration at X-Forwarded-Host",
- cmd = f"socat TCP4-LISTEN:{x_forwarded_port},fork TCP4:{REV_PROXY_NETLOC}" if need_redirect else "true",
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-httpd.service", "w") as exchange_unit:
- exchange_unit.write(unit_file_content(
- description = "Taler Exchange HTTP daemon",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-httpd -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-wirewatch.service", "w") as exchange_wirewatch_unit:
- exchange_wirewatch_unit.write(unit_file_content(
- description = "Taler Exchange Wirewatch",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-wirewatch -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-aggregator.service", "w") as exchange_aggregator_unit:
- exchange_aggregator_unit.write(unit_file_content(
- description = "Taler Exchange Aggregator",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-aggregator --kyc-off -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-transfer.service", "w") as exchange_transfer_unit:
- exchange_transfer_unit.write(unit_file_content(
- description = "Taler Exchange Transfer",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-transfer -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-cs.service", "w") as exchange_cs_unit:
- exchange_cs_unit.write(unit_file_content(
- description = "Taler Exchange CS security module",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-cs -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
- ))
-
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-rsa.service", "w") as exchange_rsa_unit:
- exchange_rsa_unit.write(unit_file_content(
- description = "Taler Exchange RSA security module",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-rsa -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-eddsa.service", "w") as exchange_eddsa_unit:
- exchange_eddsa_unit.write(unit_file_content(
- description = "Taler Exchange EDDSA security module",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-eddsa -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-merchant-backend.service", "w") as merchant_unit:
- merchant_unit.write(unit_file_content(
- description = "Taler Merchant backend",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-httpd -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-merchant-backend-token.service", "w") as merchant_token_unit:
- merchant_token_unit.write(unit_file_content(
- description = "Taler Merchant backend with auth token to allow default instance creation.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-httpd -a {TALER_MERCHANT_TOKEN} -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- # Custom Postgres connection.
- if os.environ.get("PGPORT"):
- with open(TALER_UNIT_FILES_DIR / "taler-local-postgres.env", "w") as postgres_env:
- postgres_env.write(f"PGPORT={os.environ.get('PGPORT')}")
-
- # euFin unit files.
- with open(TALER_UNIT_FILES_DIR / "taler-local-sandbox.service", "w") as sandbox_unit:
- sandbox_unit.write(unit_file_content(
- description = "euFin Sandbox",
- cmd = f"{TALER_PREFIX}/bin/libeufin-sandbox serve --with-unix-socket {UNIX_SOCKETS_DIR / 'sandbox.sock'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-sandbox.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-nexus.service", "w") as nexus_unit:
- nexus_unit.write(unit_file_content(
- description = "euFin Nexus",
- cmd = f"{TALER_PREFIX}/bin/libeufin-nexus serve --with-unix-socket {UNIX_SOCKETS_DIR / 'nexus.sock'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-nexus.env"
- ))
- # euFin env files.
- with open(TALER_UNIT_FILES_DIR / "taler-local-sandbox.env", "w") as sandbox_env:
- sandbox_env.write(f"LIBEUFIN_SANDBOX_DB_CONNECTION=jdbc:sqlite:{SANDBOX_DB_FILE}\n")
- sandbox_env.write(f"LIBEUFIN_SANDBOX_ADMIN_PASSWORD={SANDBOX_ADMIN_PASSWORD}\n")
- sandbox_env.write(f"TALER_ENV_URL_INTRO={get_link('/landing/')}\n")
- sandbox_env.write(f"TALER_ENV_URL_BANK={get_link('/sandbox/demobanks/default')}\n")
- sandbox_env.write(f"TALER_ENV_URL_MERCHANT_BLOG={get_link('/blog/')}\n")
- sandbox_env.write(f"TALER_ENV_URL_MERCHANT_DONATIONS={get_link('/donations/')}\n")
- sandbox_env.write(f"TALER_ENV_URL_MERCHANT_SURVEY={get_link('/survey/')}\n")
-
- with open(TALER_UNIT_FILES_DIR / "taler-local-nexus.env", "w") as nexus_env:
- nexus_env.write(f"LIBEUFIN_NEXUS_DB_CONNECTION=jdbc:sqlite:{NEXUS_DB_FILE}\n")
- nexus_env.write((
- f"TALER_ENV_URL_INTRO={get_link('/landing/')}\n"
- f"TALER_ENV_URL_BANK={get_link('/sandbox/demobanks/default')}\n"
- f"TALER_ENV_URL_MERCHANT_BLOG={get_link('/blog/')}\n"
- f"TALER_ENV_URL_MERCHANT_DONATIONS={get_link('/donations/')}\n"
- f"TALER_ENV_URL_MERCHANT_SURVEY={get_link('/survey/')}\n"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-donations.service", "w") as donations_unit:
- donations_unit.write(unit_file_content(
- description = "Donation Website that accepts Taler payments.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos donations -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-blog.service", "w") as blog_unit:
- blog_unit.write(unit_file_content(
- description = "Blog that accepts Taler payments.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos blog -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-survey.service", "w") as survey_unit:
- survey_unit.write(unit_file_content(
- description = "Survey Website awarding tips via Taler.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos survey -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-landing.service", "w") as landing_unit:
- landing_unit.write(unit_file_content(
- description = "Landing Website of Taler demo.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos landing -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-frontends.env", "w") as frontends_env:
- frontends_env.write((
- f"PATH={os.environ.get('PATH')}\n"
- f"TALER_CONFIG_FILE={CFG_OUTDIR / 'taler.conf'}\n"
- f"TALER_ENV_URL_INTRO={get_link('/landing/')}\n"
- f"TALER_ENV_URL_BANK={get_link('/sandbox/demobanks/default/')}\n"
- f"TALER_ENV_URL_MERCHANT_BLOG={get_link('/blog/')}\n"
- f"TALER_ENV_URL_MERCHANT_DONATIONS={get_link('/donations/')}\n"
- f"TALER_ENV_URL_MERCHANT_SURVEY={get_link('/survey/')}\n"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-nginx.service", "w") as nginx_unit:
- nginx_unit.write(unit_file_content(
- description = "Nginx: reverse proxy for taler-local.",
- cmd = f"nginx -c {CFG_OUTDIR / 'nginx.conf'}",
- ))
- print(" OK")
- print_nn("Reload SystemD...")
- Command(["systemctl", "--user", "daemon-reload"]).run()
- atexit.register(lambda: subprocess.run(
- ["systemctl", "--user", "stop", "taler-local-*.service"],
- check=True
- )
- )
- print(" OK")
- print_nn("Generate exchange's master key...")
- EXCHANGE_MASTER_PUB = Command(
- [
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "setup"
- ],
- capture_stdout=True
- ).run()
- print(" OK")
- print_nn("Specify exchange master pub in taler.conf...")
- config_specify_master_pub(
- CFG_OUTDIR / "taler.conf",
- CURRENCY,
- EXCHANGE_MASTER_PUB
- )
- print(" OK")
- print_nn("Generating sync.conf...")
- config_sync(
- "sync.conf",
- outdir=CFG_OUTDIR,
- unix_sockets_dir=UNIX_SOCKETS_DIR,
- currency=CURRENCY,
- api_key=FRONTENDS_API_TOKEN,
- rev_proxy_url=get_link(),
- postgres_db_name=postgres_db_name
- )
- print(" OK")
- print_nn("Reset and init exchange DB..")
- Command([
- f"{TALER_PREFIX}/bin/taler-exchange-dbinit",
- "-c", CFG_OUTDIR / "taler.conf",
- "--reset"]
- ).run()
- print(" OK")
-
- print_nn("Launching X-Forwarded-Host port redirect...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-port-redirect.service"], check=True)
- time.sleep(1)
- print(" OK")
- print_nn("Launching the reverse proxy...")
- with open(CFG_OUTDIR / "nginx.conf", "w") as nginx_conf:
- nginx_conf.write((
- f"error_log {LOG_DIR / 'nginx.log'};\n"
- f"pid {TALER_ROOT_DIR / 'nginx.pid'};\n"
- "daemon off;\n"
- "events {}\n"
- "http {\n"
- f"access_log {LOG_DIR / 'nginx.log'};\n"
- "server {\n"
- f"listen {REV_PROXY_PORT};\n"
- f"listen [::]:{REV_PROXY_PORT};\n"
- "location / {\n"
- "return 200 'Hello, I am Nginx - proxying taler-local\n';\n"
- "}\n"
- "location ~* ^/(?<component>[a-z\-]+)(/(?<taler_uri>.*))? {\n"
- "proxy_redirect off;\n"
- "proxy_set_header X-Forwarded-Prefix /$component;\n"
- f"proxy_set_header X-Forwarded-Host {x_forwarded_host};\n"
- f"proxy_set_header X-Forwarded-Proto {x_forwarded_proto};\n"
- f"client_body_temp_path /tmp/taler-local-nginx;\n"
- f"proxy_pass http://unix:{UNIX_SOCKETS_DIR}/$component.sock:/$taler_uri?$args;\n"
- "}\n"
- "}\n"
- "}\n"
- ))
- subprocess.run(["systemctl", "--user", "start", "taler-local-nginx.service"], check=True)
- if not is_serving(REV_PROXY_URL + "/"):
- fail(f"Reverse proxy did not start correctly")
- # Do check.
- print(" OK")
- print_nn("Launching the exchange RSA helper...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-rsa.service"])
- print(" OK")
- print_nn("Launching the exchange EDDSA helper...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-eddsa.service"])
- print(" OK")
- print_nn("Launching the exchange CS helper...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-cs.service"])
- print(" OK")
- print_nn("Launching the exchange...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-httpd.service"])
- if not is_serving(REV_PROXY_URL + "/exchange/"):
- fail(f"Exchange did not start correctly.")
- print(" OK")
- print_nn("exchange-offline: signing key material...")
- Command([
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "download", "sign", "upload"
- ]).run()
- print(" OK")
- # Set up wire fees for next 5 years
- NOW = datetime.now()
- YEAR = NOW.year
- print_nn("Setting wire fees for the next 5 years...")
- for year in range(YEAR, YEAR+5):
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "wire-fee",
- str(year),
- WIRE_METHOD,
- CURRENCY + ":0.01",
- CURRENCY + ":0.01",
- CURRENCY + ":0.01",
- "upload"
- ],
- custom_name="set-wire-fee"
- ).run()
- print(" OK")
- print_nn("Reset and init auditor DB..")
- Command([
- f"{TALER_PREFIX}/bin/taler-auditor-dbinit",
- "-c", CFG_OUTDIR / "taler.conf",
- "--reset"]
- ).run()
- print(" OK")
- print_nn("Add this exchange to the auditor...")
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-auditor-exchange",
- "-c", CFG_OUTDIR / "taler.conf",
- "-m", EXCHANGE_MASTER_PUB,
- "-u", REV_PROXY_URL + "/exchange/"
- ],
- ).run()
- print(" OK")
- ## Step 4: Set up euFin
- print_nn("Resetting euFin databases...")
- try:
- remove(SANDBOX_DB_FILE)
- remove(NEXUS_DB_FILE)
- except OSError as error:
- if error.errno != errno.ENOENT:
- raise error
- print(" OK")
- # Make the 'default' demobank at Sandbox. (No signup bonus)
- Command([
- f"{TALER_PREFIX}/bin/libeufin-sandbox",
- "config", "--currency", "CHF", "--without-signup-bonus", "default"],
- env={
- "PATH": os.environ["PATH"],
- "LIBEUFIN_SANDBOX_DB_CONNECTION": f"jdbc:sqlite:{SANDBOX_DB_FILE}"
- }).run()
- # This step transparantly creates a default demobank.
- print_nn("Launching Sandbox...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-sandbox.service"])
- if not is_serving(SANDBOX_URL):
- fail(f"Sandbox did not start correctly.")
- print(" OK")
- print_nn("Make Sandbox EBICS host...")
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
- "--sandbox-url", SANDBOX_URL,
- "ebicshost", "create",
- "--host-id", EBICS_HOST_ID,
- ],
- env=get_sandbox_cli_env(
- SANDBOX_ADMIN_USERNAME,
- SANDBOX_ADMIN_PASSWORD,
- ),
- custom_name="sandbox-create-ebicshost",
- ).run()
- print(" OK")
-
- print_nn("Create Exchange account at Sandbox...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id=EXCHANGE_EBICS_PARTNER_ID,
- ebics_user_id=EXCHANGE_EBICS_USER_ID,
- person_name="Exchange Owner",
- bank_account_name=EXCHANGE_BANK_ACCOUNT_SANDBOX,
- password=EXCHANGE_BANK_ACCOUNT_PASSWORD
- )
- print(" OK")
- print_nn("Getting exchange payto-URI from the bank...")
- exchange_bank_account_info = get_sandbox_account_info(
- SANDBOX_URL,
- EXCHANGE_BANK_ACCOUNT_SANDBOX,
- EXCHANGE_BANK_ACCOUNT_PASSWORD
- )
- EXCHANGE_PAYTO = exchange_bank_account_info["paytoUri"]
- print(" OK")
- print_nn("Specify own payto-URI to exchange's configuration..")
- Command([
- f"{TALER_PREFIX}/bin/taler-config", "-c", CFG_OUTDIR / 'taler.conf',
- "-s", "exchange-account-1", "-o", "payto_uri", "-V",
- EXCHANGE_PAYTO
- ]).run()
- print(" OK")
- print_nn(f"exchange-offline: enabling {EXCHANGE_PAYTO}...")
- Command([
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "enable-account", EXCHANGE_PAYTO, "upload"
- ]).run()
- print(" OK")
-
- # Give each instance a Sandbox account (note: 'default'
- # won't have one, as it should typically only manage other
- # instances).
- for instance in INSTANCES:
- instance_id = instance["name"]
- print_nn(f"Create account of {instance_id} at Sandbox...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id="unusedMerchantEbicsPartnerId",
- ebics_user_id=f"unused{instance_id}EbicsUserId",
- person_name=f"Shop Owner of {instance_id}",
- bank_account_name=f"sandbox-account-{instance_id.lower()}",
- password=ALL_INSTANCES_BANK_PASSWORD,
- is_public=instance.get("isPublic")
- )
- print(" OK")
- print_nn("Create Customer account at Sandbox...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id="unusedCustomerEbicsPartnerId",
- ebics_user_id="unusedCustomerEbicsUserId",
- person_name="Customer Person",
- bank_account_name=CUSTOMER_BANK_ACCOUNT,
- password=CUSTOMER_BANK_PASSWORD
- )
- print(" OK")
- print_nn("Make Nexus superuser ...")
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-nexus", "superuser",
- EXCHANGE_NEXUS_USERNAME,
- "--password", EXCHANGE_NEXUS_PASSWORD
- ],
- env=get_nexus_server_env(
- NEXUS_DB_FILE,
- NEXUS_URL
- ),
- custom_name="nexus-superuser",
- ).run()
- print(" OK")
-
- print_nn("Launching Nexus...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-nexus.service"])
- if not is_serving(NEXUS_URL):
- fail(f"Nexus did not start correctly")
- print(" OK")
-
- print_nn("Create Exchange account at Nexus...")
- prepare_nexus_account(
- ebics_url=EBICS_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id=EXCHANGE_EBICS_PARTNER_ID,
- ebics_user_id=EXCHANGE_EBICS_USER_ID,
- bank_connection_name=EXCHANGE_BANK_CONNECTION,
- bank_account_name_sandbox=EXCHANGE_BANK_ACCOUNT_SANDBOX,
- bank_account_name_nexus=EXCHANGE_BANK_ACCOUNT_NEXUS,
- env=get_nexus_cli_env(
- EXCHANGE_NEXUS_USERNAME,
- EXCHANGE_NEXUS_PASSWORD,
- NEXUS_URL
- )
- )
- print(" OK")
-
- print_nn("Create Taler facade ...")
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "facades",
- "new-taler-wire-gateway-facade",
- "--currency", CURRENCY,
- "--facade-name", EXCHANGE_FACADE_NAME,
- EXCHANGE_BANK_CONNECTION,
- EXCHANGE_BANK_ACCOUNT_NEXUS
- ],
- env=get_nexus_cli_env(
- EXCHANGE_NEXUS_USERNAME,
- EXCHANGE_NEXUS_PASSWORD,
- NEXUS_URL
- ),
- custom_name="create-taler-facade",
- ).run()
- print(" OK")
- try:
- response = requests.get(
- NEXUS_URL + "/facades",
- auth=requests.auth.HTTPBasicAuth(
- EXCHANGE_NEXUS_USERNAME,
- EXCHANGE_NEXUS_PASSWORD
- )
- )
- response.raise_for_status()
- except Exception as error:
- fail(error)
- FACADE_URL = response.json().get("facades")[0].get("baseUrl")
- print_nn("Set suggested exchange at Sandbox...")
- Command([
- f"{TALER_PREFIX}/bin/libeufin-sandbox",
- "default-exchange",
- get_link('/exchange/'),
- EXCHANGE_PAYTO],
- env={
- "PATH": os.environ["PATH"],
- "LIBEUFIN_SANDBOX_DB_CONNECTION": f"jdbc:sqlite:{SANDBOX_DB_FILE}"
- }).run()
- print(" OK")
-
- # Point the exchange to the facade.
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-config",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
- "-o" "wire_gateway_auth_method",
- "-V", "basic"
- ],
- custom_name="specify-wire-gateway-auth-method",
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-config",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
- "-o" "wire_gateway_url",
- "-V", FACADE_URL
- ],
- custom_name="specify-facade-url",
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-config",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
- "-o" "username",
- "-V", EXCHANGE_NEXUS_USERNAME
- ],
- custom_name="specify-username-for-facade",
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-config",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
- "-o" "password",
- "-V", EXCHANGE_NEXUS_PASSWORD
- ],
- custom_name="specify-password-for-facade",
- ).run()
-
- ## Step 6: Set up merchant
-
- print_nn("Reset and init merchant database...")
- Command([
- f"{TALER_PREFIX}/bin/taler-merchant-dbinit",
- "-c", CFG_OUTDIR / "taler.conf",
- "--reset"
- ]).run()
- print(" OK")
-
- def ensure_instance(
- currency,
- instance_id,
- backend_url,
- bank_hostname,
- wire_method,
- auth_token
- ):
- auth_header = {"Authorization": f"Bearer {auth_token}"}
- resp = requests.get(
- urljoin_nodrop(backend_url, f"management/instances/{instance_id}"),
- headers = auth_header
- )
- bankaccount_info = get_sandbox_account_info(
- SANDBOX_URL,
- f"sandbox-account-{instance_id.lower()}",
- ALL_INSTANCES_BANK_PASSWORD
- )
- req = dict(
- id=instance_id,
- name=f"Name of '{instance_id}'",
- payto_uris=[bankaccount_info["paytoUri"]],
- address=dict(),
- jurisdiction=dict(),
- default_max_wire_fee=f"{currency}:1",
- default_wire_fee_amortization=3,
- default_max_deposit_fee=f"{currency}:1",
- default_wire_transfer_delay=dict(d_us="forever"),
- default_pay_delay=dict(d_us="forever"),
- auth=dict(method="token", token=auth_token),
- )
- http_method = requests.post
- endpoint = "management/instances"
-
- # Instance exists, patching it.
- if resp.status_code == 200:
- print(f"Patching instance '{instance_id}'")
- http_method = requests.patch
- endpoint = f"management/instances/{instance_id}"
-
- resp = http_method(
- urljoin_nodrop(backend_url, endpoint),
- json=req,
- headers = auth_header
- )
- if resp.status_code < 200 or resp.status_code >= 300:
- print(f"Backend responds: {resp.status_code}/{resp.text}")
- fail(f"Could not create (or patch) instance '{instance_id}'")
-
- print_nn(f"Start merchant (with TALER_MERCHANT_TOKEN into the env)...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend-token.service"], check=True)
- if not is_serving(REV_PROXY_URL + "/merchant-backend/config"):
- fail(
- f"Merchant backend did not start correctly.",
- )
- print(" OK")
- print_nn("Give default instance a bank account...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id="unusedMerchantEbicsPartnerId",
- ebics_user_id=f"unusedDefaultInstanceEbicsUserId",
- person_name=f"Shop Owner of default instance",
- bank_account_name="sandbox-account-default",
- password=ALL_INSTANCES_BANK_PASSWORD
- )
- print(" OK")
- ensure_instance(
- currency=CURRENCY,
- instance_id="default",
- backend_url = REV_PROXY_URL + "/merchant-backend",
- bank_hostname = REV_PROXY_NETLOC + "/sandbox",
- wire_method = WIRE_METHOD,
- auth_token=FRONTENDS_API_TOKEN
- )
-
- print_nn("Restarting the merchant WITHOUT the auth-token in the env...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend.service"], check=True)
- if not is_serving(REV_PROXY_URL + "/merchant-backend/config"):
- # check_running logs errors already.
- fail(f"Merchant backend did not re start correctly.")
- print(" OK")
-
- for instance in INSTANCES:
- instance_id = instance["name"]
- print_nn(f"Creating the {instance_id} instance...")
- ensure_instance(
- currency=CURRENCY,
- instance_id=instance_id,
- backend_url = REV_PROXY_URL + "/merchant-backend",
- bank_hostname = REV_PROXY_NETLOC + "/sandbox",
- wire_method = WIRE_METHOD,
- auth_token=FRONTENDS_API_TOKEN
- )
- print(" OK")
- print_nn("Creating tip reserve...")
- create_tip_reserve()
- print(" OK")
- # 1 second to let Nexus read the payment from
- # Sandbox, 1 second to let the Exchange Wirewatch
- # to read the payment from Nexus.
- print_nn("Sleep 2 seconds to let the tip reserve settle...")
- time.sleep(2)
- print(" OK")
-
-@cli.command()
-def launch():
- subprocess.run(["systemctl", "--user", "start", "taler-local-port-redirect.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-nginx.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-rsa.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-eddsa.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-cs.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-httpd.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-wirewatch.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-aggregator.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-transfer.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-sandbox.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-nexus.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-donations.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-blog.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-survey.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-landing.service"], check=True)
-
- print((
- "\n"
- "Taler launched!\n\n"
- f"Serving {REV_PROXY_URL + '/$service'}\n\n"
- "Services:\n"
- " - landing\n"
- " - exchange\n"
- " - merchant-backend\n"
- " - sandbox\n"
- " - nexus\n"
- " - blog\n"
- " - survey\n"
- " - donations\n"
- ))
-
-@cli.command()
-def stop():
- subprocess.run(["systemctl", "--user", "stop", "taler-local-*.service"], check=True)
-
-@cli.command()
-@click.option(
- "--bank-host", metavar="HOST",
- help="Host serving the bank, defaults to 'localhost:8080'.",
- default="localhost:8080"
-)
-@click.option(
- "--bank-proto", metavar="PROTO",
- help="Protocol accepred by the bank, defaults to 'http'",
- default="http"
-)
-@click.option(
- "--with-qr-code", is_flag=True,
- help="""When specified, it prints the QR code on screen,
-and waits the user's input before confirming the operation
-at the bank.""",
- default=False
-)
-def withdraw(bank_host, bank_proto, with_qr_code):
- print_nn("Create withdrawal operation...")
- bank_base_url = bank_proto + "://" + bank_host
- resp = requests.post(bank_base_url +
- f"/sandbox/demobanks/default/access-api/accounts/{CUSTOMER_BANK_ACCOUNT}/withdrawals",
- json = dict(amount=CURRENCY + ":5"),
- auth = requests.auth.HTTPBasicAuth(CUSTOMER_BANK_ACCOUNT, CUSTOMER_BANK_PASSWORD)
- )
- try:
- resp.raise_for_status()
- except Exception as error:
- print("Could not create withdrawal")
- print(error)
- exit(1)
- withdrawal_id = resp.json()["withdrawal_id"]
- withdraw_uri = resp.json()["taler_withdraw_uri"]
- print(" OK")
- print("Let wallet specify the reserve public key at the bank...")
- # Let wallet give the reserve public key to the bank.
- if with_qr_code:
- withdraw_QR_code = qrcode.QRCode()
- withdraw_QR_code.add_data(withdraw_uri)
- withdraw_QR_code.print_ascii()
- print(withdraw_uri)
- input("After scanning the code, press ENTER to wire funds to the Exchange: ")
- else:
- subprocess.run(["taler-wallet-cli", "handle-uri", withdraw_uri], check=True)
- # Let the user confirm the withdrawal operation and
- # get the bank wire the funds.
- print_nn("Confirm withdrawal operation at the bank...")
- resp = requests.post(bank_base_url +
- f"/sandbox/demobanks/default/access-api/accounts/{CUSTOMER_BANK_ACCOUNT}/withdrawals/{withdrawal_id}/confirm",
- auth = requests.auth.HTTPBasicAuth(CUSTOMER_BANK_ACCOUNT, CUSTOMER_BANK_PASSWORD)
- )
- try:
- resp.raise_for_status()
- except Exception as error:
- print("Could not create withdrawal")
- print(error)
- exit(1)
- print(" OK")
- if not with_qr_code:
- print("Let wallet complete all pending operations")
- # FIXME: Why the following operation twice?
- subprocess.run(["taler-wallet-cli", "handle-uri", withdraw_uri], check=True)
- subprocess.run(["taler-wallet-cli", "run-until-done"], check=True)
-
-if __name__ == "__main__":
- cli()