summaryrefslogtreecommitdiff
path: root/bin
diff options
context:
space:
mode:
authorMS <ms@taler.net>2022-07-20 21:00:57 +0200
committerMS <ms@taler.net>2022-07-20 21:00:57 +0200
commitb4614bb577ecbfd083b78bc20b572cfece117c89 (patch)
tree076d7a3cf57ac57313cf9c3b230a1b1df03c946f /bin
parentff764c5de20a03fb6d248bf861094579f6932c2f (diff)
downloaddeployment-b4614bb577ecbfd083b78bc20b572cfece117c89.tar.gz
deployment-b4614bb577ecbfd083b78bc20b572cfece117c89.tar.bz2
deployment-b4614bb577ecbfd083b78bc20b572cfece117c89.zip
demo deployment with euFin
Diffstat (limited to 'bin')
-rwxr-xr-xbin/taler-tripwire1681
1 files changed, 1681 insertions, 0 deletions
diff --git a/bin/taler-tripwire b/bin/taler-tripwire
new file mode 100755
index 0000000..bed8089
--- /dev/null
+++ b/bin/taler-tripwire
@@ -0,0 +1,1681 @@
+#!/usr/bin/env python3
+
+# This file is part of GNU Taler.
+#
+# GNU Taler is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# GNU Taler is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Taler. If not, see <https://www.gnu.org/licenses/>.
+
+import qrcode
+import signal
+import socket
+import shutil
+import atexit
+import click
+import types
+import os
+import sys
+import os.path
+import subprocess
+import time
+import random
+import logging
+import json
+from os import listdir
+from os.path import isdir, join, basename
+from pathlib import Path
+from typing import List, Callable
+from shutil import copy
+from multiprocessing import Process
+from string import ascii_letters, ascii_uppercase
+from sys import exit
+from urllib.parse import urljoin, quote
+from os import remove
+import requests
+from collections import OrderedDict
+import errno
+from pathlib import Path
+from subprocess import Popen, DEVNULL, PIPE
+from datetime import datetime
+
+
+TALER_ROOT_DIR = Path.home()
+TALER_PREFIX = Path.home() / "local"
+
+# Print No Newline.
def print_nn(msg):
    """Print *msg* with no trailing newline, flushing stdout immediately."""
    print(msg, end="", flush=True)
+
class Repo:
    """One buildable Taler codebase: clone URL, build deps, builder hook.

    'version' is the git ref (branch, tag or commit) to check out;
    it defaults to tracking master.
    """

    def __init__(self, name, url, deps, builder, version="master"):
        self.name = name          # repository short name, e.g. "exchange"
        self.url = url            # git clone URL
        self.deps = deps          # names of repos this one builds against
        self.builder = builder    # callable(repo, checkout_path) doing the build
        self.version = version    # git ref to pin the checkout to
+
# Root click command group: every sub-command below (bootstrap, build,
# prepare, ...) registers itself on this via @cli.command().
# No docstring on purpose: click would surface it as --help text.
@click.group()
def cli():
    pass
+
+# Parses the command-line-given and comma-separated repos list
+# into a list of names.
def split_repos_list(repos):
    """Split a comma-separated repo list into names, dropping empty entries."""
    names = []
    for name in repos.split(","):
        if name != "":
            names.append(name)
    return names
+
+# fetch the remote. No timestamp deletion here
def update_checkout(r: Repo, p: Path):
    """Clean the repository's working directory and update it to match
    the latest version of the upstream branch that we are tracking.

    :param r: repository descriptor (only its name is used, for logging).
    :param p: path of the local git checkout.
    """
    subprocess.run(["git", "-C", str(p), "clean", "-fdx"], check=True) # remove unversioned files.

    # Equivalent to "git pull". Does nothing if in detached HEAD
    # but pulls new code into the local copy otherwise.
    subprocess.run(["git", "-C", str(p), "fetch"], check=True)
    subprocess.run(["git", "-C", str(p), "reset"], check=True)

    # Resolve the upstream tracking ref (e.g. "origin/master");
    # rev-parse exits non-zero when HEAD is detached or has no upstream.
    res = subprocess.run(
        [
            "git",
            "-C",
            str(p),
            "rev-parse",
            "--abbrev-ref",
            "--symbolic-full-name",
            "@{u}",
        ],
        stderr=subprocess.DEVNULL,
        stdout=subprocess.PIPE,
        encoding="utf-8",
    )
    if res.returncode != 0:
        # No upstream: hard-reset to the current HEAD instead.
        ref = "HEAD"
    else:
        ref = res.stdout.strip("\n ")
    print(f"resetting {r.name} to ref {ref}")
    # Makes the last step "--hard", namely removes files not
    # belonging to the current version.
    subprocess.run(["git", "-C", str(p), "reset", "--hard", ref], check=True)
+
+
def default_configure(*extra):
    """Run ./configure with the standard --prefix plus any *extra* flags."""
    cmd = ["./configure", f"--prefix={TALER_PREFIX}"]
    cmd.extend(extra)
    subprocess.run(cmd, check=True)
+
def pyconfigure(*extra):
    """Run a plain ./configure: python projects ignore --prefix."""
    subprocess.run(["./configure", *extra], check=True)
+
def build_libeufin(r: Repo, p: Path):
    """Bootstrap, configure and install libeufin, then stamp the tree."""
    update_checkout(r, p)
    subprocess.run(["./bootstrap"], check=True)
    default_configure()
    subprocess.run(["make", "install"], check=True)
    stamp = p / "taler-buildstamp"
    stamp.touch()
+
def build_libmicrohttpd(r: Repo, p: Path):
    """Build libmicrohttpd (docs disabled) and install it."""
    update_checkout(r, p)
    subprocess.run(["./bootstrap"], check=True)
    default_configure("--disable-doc")
    for target in ("make",), ("make", "install"):
        subprocess.run(list(target), check=True)
    stamp = p / "taler-buildstamp"
    stamp.touch()
+
def build_gnunet(r: Repo, p: Path):
    """Build GNUnet against the libmicrohttpd installed under ~/.local."""
    update_checkout(r, p)
    subprocess.run(["./bootstrap"], check=True)
    local_prefix = Path.home() / ".local"
    default_configure(
        "--enable-logging=verbose",
        f"--with-microhttpd={local_prefix}",
        "--disable-documentation",
    )
    subprocess.run(["make", "install"], check=True)
    stamp = p / "taler-buildstamp"
    stamp.touch()
+
def build_exchange(r: Repo, p: Path):
    """Build the Taler exchange with debug CFLAGS and install it."""
    update_checkout(r, p)
    subprocess.run(["./bootstrap"], check=True)
    deps_prefix = Path.home() / ".local"
    configure_flags = [
        "CFLAGS=-ggdb -O0",
        "--enable-logging=verbose",
        f"--with-microhttpd={deps_prefix}",
        f"--with-gnunet={deps_prefix}",
        "--disable-doc",
    ]
    default_configure(*configure_flags)
    subprocess.run(["make", "install"], check=True)
    (p / "taler-buildstamp").touch()
+
def build_wallet(r, p):
    """Bootstrap, configure and install wallet-core, then stamp the tree."""
    update_checkout(r, p)
    subprocess.run(["./bootstrap"], check=True)
    default_configure()
    subprocess.run(["make", "install"], check=True)
    stamp = p / "taler-buildstamp"
    stamp.touch()
+
def build_twister(r, p):
    """Build the Taler twister (fault-injection proxy) and install it."""
    update_checkout(r, p)
    subprocess.run(["./bootstrap"], check=True)
    local_prefix = Path.home() / ".local"
    configure_flags = [
        "CFLAGS=-ggdb -O0",
        "--enable-logging=verbose",
        f"--with-exchange={local_prefix}",
        f"--with-gnunet={local_prefix}",
    ]
    default_configure(*configure_flags)
    subprocess.run(["make", "install"], check=True)
    (p / "taler-buildstamp").touch()
+
+
def build_merchant(r, p):
    """Build the merchant backend against exchange/gnunet and install it."""
    update_checkout(r, p)
    subprocess.run(["./bootstrap"], check=True)
    deps_prefix = Path.home() / ".local"
    configure_flags = [
        "CFLAGS=-ggdb -O0",
        "--enable-logging=verbose",
        f"--with-microhttpd={deps_prefix}",
        f"--with-exchange={deps_prefix}",
        f"--with-gnunet={deps_prefix}",
        "--disable-doc",
    ]
    default_configure(*configure_flags)
    subprocess.run(["make", "install"], check=True)
    (p / "taler-buildstamp").touch()
+
def build_sync(r, p):
    """Build the sync (wallet backup) service and install it."""
    update_checkout(r, p)
    subprocess.run(["./bootstrap"], check=True)
    deps_prefix = Path.home() / ".local"
    configure_flags = [
        "CFLAGS=-ggdb -O0",
        "--enable-logging=verbose",
        f"--with-microhttpd={deps_prefix}",
        f"--with-exchange={deps_prefix}",
        f"--with-merchant={deps_prefix}",
        f"--with-gnunet={deps_prefix}",
        "--disable-doc",
    ]
    default_configure(*configure_flags)
    subprocess.run(["make", "install"], check=True)
    (p / "taler-buildstamp").touch()
+
+
+
def build_demos(r, p):
    """Build and install the merchant demo frontends (python project).

    Uses pyconfigure() because these builds do not honor --prefix.
    (Removed an unused local 'pfx' that the other builders needed but
    this one never passed to configure.)
    """
    update_checkout(r, p)
    pyconfigure()
    subprocess.run(["make", "install"], check=True)
    (p / "taler-buildstamp").touch()
+
def build_backoffice(r, p):
    """Build the back-office web application.

    Unlike the other builders this one runs "make build-single" and has
    no install step.  check=True added to all three commands so a failed
    bootstrap/configure/build aborts instead of being silently ignored —
    previously a failure still touched the build stamp.
    """
    update_checkout(r, p)
    subprocess.run(["./bootstrap"], check=True)
    subprocess.run(["./configure"], check=True)
    subprocess.run(["make", "build-single"], check=True)
    (p / "taler-buildstamp").touch()
+
# Registry of every codebase this tool can manage, keyed by repository
# name.  The dependency list (third Repo argument) drives staleness
# propagation in get_stale_repos(); the builder callable performs the
# actual compile + install.
repos = {
    "libmicrohttpd": Repo(
        "libmicrohttpd",
        "git://git.gnunet.org/libmicrohttpd.git",
        [],
        build_libmicrohttpd,
    ),
    "gnunet": Repo(
        "gnunet",
        "git://git.gnunet.org/gnunet.git",
        ["libmicrohttpd"],
        build_gnunet
    ),
    "exchange": Repo(
        "exchange",
        "git://git.taler.net/exchange",
        ["gnunet", "libmicrohttpd"],
        build_exchange,
    ),
    "merchant": Repo(
        "merchant",
        "git://git.taler.net/merchant",
        ["exchange","libmicrohttpd","gnunet"],
        build_merchant,
    ),
    "sync": Repo(
        "sync",
        "git://git.taler.net/sync",
        ["exchange",
        "merchant",
        "gnunet",
        "libmicrohttpd"],
        build_sync,
    ),
    "wallet-core": Repo(
        "wallet-core",
        "git://git.taler.net/wallet-core",
        [],
        build_wallet,
    ),
    "libeufin": Repo(
        "libeufin",
        "git://git.taler.net/libeufin.git",
        [],
        build_libeufin,
    ),
    "taler-merchant-demos": Repo(
        "taler-merchant-demos",
        "git://git.taler.net/taler-merchant-demos",
        [],
        build_demos,
    ),
    "twister": Repo(
        "twister",
        "git://git.taler.net/twister",
        ["gnunet", "libmicrohttpd"],
        build_twister,
    ),
}
+
def get_repos_names() -> List[str]:
    """Return the names of known repositories checked out under ~/sources.

    Only directories whose name matches an entry in the global 'repos'
    registry are reported.  Returns [] (after printing a hint) when the
    sources directory is missing, i.e. bootstrap never ran.
    """
    r_dir = TALER_ROOT_DIR / "sources"
    if not r_dir.is_dir():
        print(f"'{r_dir}' not found. Did bootstrap run?")
        return []
    # pathlib replaces the former listdir/isdir/join combination; the
    # membership test replaces a truthiness check on repos.get(name).
    return [p.name for p in r_dir.iterdir() if p.is_dir() and p.name in repos]
+
+# Get 'Repo' objects (globally defined),
+# using their names as index.
def load_repos(reposNames) -> List[Repo]:
    """Map repo names onto the global Repo objects, keeping registry order."""
    return [repos[name] for name in repos if name in reposNames]
+
+# Return the list of repos (equipped with their version)
+# to install.
def load_repos_with_envcfg(envcfg_path) -> List[Repo]:
    """Load the repo list — each pinned to a version — from an 'env' file.

    The file is executed as Python; every attribute named
    tag_<repo_name> (dashes turned into underscores) pins that repo's
    git ref in the global registry.

    NOTE(review): exec() runs arbitrary code from the given path; only
    point this at trusted deployment configuration files.
    """
    envcfg_path = Path(envcfg_path)
    if not os.path.isfile(envcfg_path):
        print(f"{envcfg_path} is not a file")
        sys.exit(1)
    cfgtext = envcfg_path.read_text()
    # Evaluate the config inside a scratch module namespace.
    cfg = types.ModuleType("taler_deployment_cfg")
    try:
        exec(cfgtext, cfg.__dict__)
    except SyntaxError:
        print(f"{envcfg_path} is not Python.")
        exit(1)
    ret = []
    for repo in repos.keys():
        try:
            envcfg_entry = getattr(cfg, "tag_" + repo.replace("-", "_"))
        except AttributeError:
            # The 'env' file doesn't mention this repo; skip it.
            continue
        # Pin the version directly on the global registry entry.
        repos[repo].version = envcfg_entry
        ret.append(repos[repo])
    return ret
+
+# Flag as stale the projects set on 'master' that
+# aren't in line with upstream. Detached head projects
+# aren't affected.
def update_repos(repos: List[Repo], force) -> None:
    """Fetch upstream and drop the build stamp of outdated repos.

    :param repos: repositories to check against their upstream branch.
    :param force: when True, mark every repo for rebuild regardless of
                  whether upstream moved.  Detached-HEAD checkouts never
                  show "behind", so they are unaffected unless forced.
    """
    for r in repos:
        r_dir = TALER_ROOT_DIR / "sources" / r.name
        subprocess.run(["git", "-C", str(r_dir), "fetch"], check=True)
        res = subprocess.run(
            ["git", "-C", str(r_dir), "status", "-sb"],
            check=True,
            stdout=subprocess.PIPE,
            encoding="utf-8",
        )
        # "git status -sb" prints "behind" when upstream has new commits.
        if "behind" in res.stdout or force:
            print(f"{r.name} will be compiled")
            s = r_dir / "taler-buildstamp"
            if s.exists():
                s.unlink()
+
+# projects without the build timestamp are considered stale,
+# even if one of their dependencies _got_ marked as stale.
def get_stale_repos(repos: List[Repo]) -> List[Repo]:
    """Return the subset of *repos* that needs rebuilding.

    A repo is stale when its 'taler-buildstamp' is missing, or when any
    of its dependencies carries a newer build timestamp than its own.

    :param repos: repositories to inspect, in dependency order (a repo's
                  deps must have been visited before the repo itself for
                  propagation to work).
    """
    timestamps = {}
    stale = []
    for r in repos:
        r_dir = TALER_ROOT_DIR / "sources" / r.name
        s = r_dir / "taler-buildstamp"
        if not s.exists():
            timestamps[r.name] = time.time()
            stale.append(r)
            continue
        ts = timestamps[r.name] = s.stat().st_mtime
        for dep in r.deps:
            # When 'dep' is not found, it has been excluded from the
            # compilation.  BUGFIX: the lookup previously used the
            # literal string "dep" instead of the loop variable, so
            # dependency staleness never propagated.
            if timestamps.get(dep, 0) > ts:
                stale.append(r)
                break
    return stale
+
@cli.command()
@click.option(
    "--without-repos", metavar="WITHOUT REPOS",
    help="WITHOUT REPOS is a unspaced and comma-separated list \
of the repositories to _exclude_ from compilation",
    default="")
@click.option(
    "--only-repos", metavar="ONLY REPOS",
    help="ONLY REPOS is a unspaced and comma-separated exclusive list \
of the repositories to include in the compilation",
    default="")
@click.option(
    "--dry/--no-dry", default=False,
    help="Only getting changes, without actual build."
)
@click.option(
    "--with-envcfg", metavar="PATH",
    help="python file pinning each codebase version.",
)
# Normally, we don't rebuild dependent projects when one of their
# dependencies changed.  This option lets us check whether allegedly
# non-breaking changes really are so: it overrides the policy and
# compiles all the codebases.
@click.option(
    "--force/--no-force", default=False,
    help="build all the projects.",
)
def build(without_repos, only_repos, dry, with_envcfg, force) -> None:
    """Build the deployment from source."""
    # The two filters are mutually exclusive.
    if only_repos != "" and without_repos != "":
        print("Either use --only-repos or --without-repos")
        exit(1)
    repos_names = get_repos_names()
    # Keep only the explicitly requested repos, if any.
    if only_repos != "":
        repos_names = list(filter(
            lambda x: x in split_repos_list(only_repos),
            repos_names
        ))
    # Drop the explicitly excluded repos, if any.
    if without_repos != "":
        repos_names = list(filter(
            lambda x: x not in split_repos_list(without_repos),
            repos_names
        ))
    if with_envcfg:
        target_repos = load_repos_with_envcfg(with_envcfg)
    else:
        target_repos = load_repos(repos_names)
    # enforce version here: check out each repo at its pinned ref
    # before looking at build stamps.
    sources = TALER_ROOT_DIR / "sources"
    for r in target_repos:
        subprocess.run(
            ["git", "-C", str(sources / r.name),
             "checkout", "-q", "-f",
             r.version, "--"], check=True
        )
    update_repos(target_repos, force)
    stale = get_stale_repos(target_repos)
    print(f"found stale repos: {[r.name for r in stale]}")
    for r in stale:
        # Inform, if a dependency is not being built:
        diff = set(r.deps) - set(repos_names)
        if len(diff) > 0:
            print(f"Info: those dependencies are not being built: {diff}")
        p = TALER_ROOT_DIR / "sources" / r.name
        # Builders expect the current directory to be the checkout.
        os.chdir(str(p))
        if dry:
            print("dry running")
            continue
        r.builder(r, p)
+
+# Only git-clone the codebases. The 'build' step
+# will run all the update logic. At this point, a
+# 'env' file - as well as the --repos option - will
+# only express which codebases are to clone.
# Only git-clone the codebases. The 'build' step will run all the
# update logic. At this point, an 'env' file - as well as the --repos
# option - will only express which codebases are to clone.
@cli.command()
@click.option(
    "--repos", "-r",
    metavar="REPOS",
    help="REPOS is a unspaced and comma-separated list of the repositories to clone.",
    default="libmicrohttpd,gnunet,exchange,merchant,wallet-core,taler-merchant-demos,sync,libeufin",
    show_default=True,
)
@click.option(
    "--without-repos",
    metavar="REPOS",
    help="REPOS is a unspaced and comma-separated list of the repositories NOT to clone."
)
@click.option(
    "--list-repos/--no-list-repos", default=False,
    help="Lists the repositories that were bootstrapped.",
)
@click.option(
    "--with-envcfg", metavar="PATH",
    help="python file pinning each codebase version.",
)
@click.option(
    "--dry/--no-dry", default=False,
    help="Print steps, without downloading any repository.",
)
def bootstrap(list_repos, repos, with_envcfg, dry, without_repos) -> None:
    """Clone all the specified repositories."""
    # Only saying _which_ repos were installed. No further action.
    if list_repos:
        for repo in get_repos_names():
            print(repo)
        return

    # Download the repositories.
    def clone_repos(repos: List[Repo]):
        # NOTE: this parameter shadows the outer 'repos' option string;
        # here it is a list of Repo objects.
        if len(repos) == 0:
            print("No repositories can be checked out. Spelled correctly?")
            return
        sources = TALER_ROOT_DIR / "sources"
        for r in repos:
            print(f"Bootstrapping '{r.name}', at version '{r.version}'")
            if dry:
                print("dry running")
                continue
            r_dir = sources / r.name
            # Clone only when the checkout does not exist yet; the
            # version checkout below runs in either case.
            if not r_dir.exists():
                r_dir.mkdir(parents=True, exist_ok=True)
                subprocess.run(
                    ["git", "-C", str(sources),
                     "clone", r.url], check=True
                )
            subprocess.run(
                ["git", "-C", str(r_dir),
                 "checkout", "-q", "-f",
                 r.version, "--"], check=True
            )

    # Get list of to-be-cloned repos from the 'env' file.
    if with_envcfg:
        # 'with_envcfg' is a path to a "envcfg.py" file.
        preparedRepos = load_repos_with_envcfg(with_envcfg)
    # Get list of to-be-cloned repos from the command line
    # (or its default)
    else:
        # 'repos' is here "repo1,repo2,.."
        reposList = split_repos_list(repos)
        # 'reposList' is here ["repo1", "repo2", ...]
        preparedRepos = load_repos(reposList)
    # Apply the NOT-to-clone filter last.
    if without_repos:
        for exclude_repo in split_repos_list(without_repos):
            preparedRepos = [el for el in preparedRepos if el.name != exclude_repo]
    clone_repos(preparedRepos)
+
# Globals shared across multiple sub-commands:
# needed to configure and launch the reverse proxy.
UNIX_SOCKETS_DIR = TALER_ROOT_DIR / "sockets"
LOG_DIR = TALER_ROOT_DIR / "logs"
# needed to create the customer's bank account and
# to let them subsequently withdraw via the Access API.
CUSTOMER_BANK_ACCOUNT = "sandbox-account-customer"
CUSTOMER_BANK_PASSWORD = "secret"
# needed along preparation and later to withdraw via
# the Access API.
CURRENCY = "CHF"
+
+@cli.command()
+@click.option(
+ "--postgres-db-name", metavar="DBNAME",
+ help="Set postgres database name for all the services.",
+ default="talerdemo"
+)
+def prepare(postgres_db_name):
+ """Generate configuration, run-time blobs, instances, euFin accounts."""
+
    def fail(reason=None):
        # Abort the whole preparation, optionally printing a reason first.
        if reason:
            print("ERROR:", reason)
        exit(1)
+
    def kill(proc):
        # Terminate a child process and reap it (avoid zombies).
        proc.terminate()
        proc.wait()
+
+ def get_nexus_cli_env(
+ username,
+ password,
+ nexus_url
+ ):
+ env = os.environ.copy()
+ env["LIBEUFIN_NEXUS_USERNAME"] = username
+ env["LIBEUFIN_NEXUS_PASSWORD"] = password
+ env["LIBEUFIN_NEXUS_URL"] = nexus_url
+ return env
+
+ def get_sandbox_cli_env(
+ username, password
+ ):
+ env = os.environ.copy()
+ env["LIBEUFIN_SANDBOX_USERNAME"] = username
+ env["LIBEUFIN_SANDBOX_PASSWORD"] = password
+ return env
+
+ # Will be extended to include a SANDBOX_ADMIN_TOKEN
+ # that will obsolete the 'superuser' flag of ordinary
+ # user accounts. Likewise, the client side will be
+ # modified to use such token.
+ def get_sandbox_server_env(db_file, base_url, admin_password):
+ env = os.environ.copy()
+ env["LIBEUFIN_SANDBOX_DB_CONNECTION"] = f"jdbc:sqlite:{db_file}"
+ env["LIBEUFIN_SANDBOX_BASE_URL"] = base_url
+ env["LIBEUFIN_SANDBOX_ADMIN_PASSWORD"] = admin_password
+ return env
+
+ def get_nexus_server_env(db_file, base_url):
+ env = os.environ.copy()
+ env["LIBEUFIN_NEXUS_DB_CONNECTION"] = f"jdbc:sqlite:{db_file}"
+ env["LIBEUFIN_NEXUS_BASE_URL"] = base_url
+ return env
+
+ def urljoin_nodrop(a, b):
+ a = a + "/" # urljoin will drop extra trailing slashes.
+ b = "/".join([x for x in b.split("/") if x != ""]) # remove leading slashes.
+ return urljoin(a, b)
+
    def prepare_nexus_account(
        ebics_url,
        ebics_host_id,
        ebics_partner_id,
        ebics_user_id,
        bank_connection_name,
        bank_account_name_sandbox,
        bank_account_name_nexus,
        env
    ):
        """Connect nexus to the sandbox over EBICS and wire up one account.

        Creates the EBICS bank connection, performs the EBICS handshake,
        imports the sandbox bank account under a nexus-local name, and
        schedules the recurring submit/fetch background tasks.
        *env* must carry the nexus CLI credentials (see get_nexus_cli_env).
        """
        # make connection
        Command(
            [
                f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
                "new-ebics-connection",
                "--ebics-url", ebics_url,
                "--host-id", ebics_host_id,
                "--partner-id", ebics_partner_id,
                "--ebics-user-id", ebics_user_id,
                bank_connection_name
            ],
            env
        ).run()
        # connect (EBICS key exchange with the bank)
        Command(
            [
                f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
                "connect", bank_connection_name
            ],
            env
        ).run()
        # Import bank account: first list what the bank offers ...
        Command(
            [
                f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
                "download-bank-accounts",
                bank_connection_name
            ],
            env
        ).run()
        # ... then import the offered account under a nexus-local name.
        Command(
            [
                f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
                "import-bank-account",
                "--offered-account-id",
                bank_account_name_sandbox,
                "--nexus-bank-account-id",
                bank_account_name_nexus,
                bank_connection_name
            ],
            env
        ).run()
        # Set background tasks: submit prepared payments ...
        Command(
            [
                f"{TALER_PREFIX}/bin/libeufin-cli", "accounts",
                "task-schedule", bank_account_name_nexus,
                "--task-type", "submit",
                "--task-name", "submit-payments-each-second",
                "--task-cronspec", "* * *"
            ],
            env
        ).run()
        # ... and fetch the latest account reports.
        Command(
            [
                f"{TALER_PREFIX}/bin/libeufin-cli", "accounts",
                "task-schedule", bank_account_name_nexus,
                "--task-type", "fetch",
                "--task-name", "fetch-reports-each-second",
                "--task-cronspec", "* * *",
                "--task-param-level", "report",
                "--task-param-range-type", "latest"
            ],
            env
        ).run()
+
    def get_sandbox_account_info(
        sandbox_url,
        bank_account_label,
        password,
    ):
        """Return the sandbox's 'info' record for one bank account, parsed
        from the CLI's JSON output.

        The query authenticates as the account owner itself
        (*bank_account_label* doubles as the username).
        """
        customer_env = os.environ.copy()
        customer_env["LIBEUFIN_SANDBOX_USERNAME"] = bank_account_label
        customer_env["LIBEUFIN_SANDBOX_PASSWORD"] = password
        demobank_url = urljoin_nodrop(sandbox_url, "/demobanks/default")
        r = Command([
            f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
            "--sandbox-url", demobank_url,
            "demobank", "info",
            "--bank-account", bank_account_label],
            env = customer_env,
            capture_stdout=True
        ).run()
        return json.loads(r)
+
    def prepare_sandbox_account(
        sandbox_url,
        ebics_host_id,
        ebics_partner_id,
        ebics_user_id,
        person_name,
        # This value is BOTH a username
        # and a bank account label.
        bank_account_name,
        password,
        is_public=False
    ):
        """Register one demobank account and attach an EBICS subscriber.

        NOTE(review): person_name is currently unused in this body —
        confirm whether it should be forwarded to the CLI.
        """
        demobank_url = urljoin_nodrop(sandbox_url, "/demobanks/default")
        # Self-service registration, authenticated as the new user.
        user_env = os.environ.copy()
        user_env["LIBEUFIN_SANDBOX_USERNAME"] = bank_account_name
        user_env["LIBEUFIN_SANDBOX_PASSWORD"] = password
        register_cmd = [
            f"{TALER_PREFIX}/bin/libeufin-cli",
            "sandbox", "--sandbox-url", demobank_url,
            "demobank", "register"
        ]
        if is_public:
            register_cmd.append("--public")
        Command(register_cmd, env = user_env).run()
        # Creating the EBICS subscriber requires admin credentials.
        admin_env = os.environ.copy()
        admin_env["LIBEUFIN_SANDBOX_USERNAME"] = SANDBOX_ADMIN_USERNAME
        admin_env["LIBEUFIN_SANDBOX_PASSWORD"] = SANDBOX_ADMIN_PASSWORD
        Command([
            f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
            "--sandbox-url", demobank_url,
            "demobank", "new-ebicssubscriber",
            "--host-id", ebics_host_id,
            "--partner-id", ebics_partner_id,
            "--user-id", ebics_user_id,
            "--bank-account", bank_account_name
        ],
        env = admin_env
        ).run()
+
+
+ WIRE_METHOD = "iban"
+ # euFin URLs
+ SANDBOX_URL = "https://bank.demo.taler.net/"
+ NEXUS_URL = "https://nexus.demo.taler.net/"
+
+ # Filesystem's paths
+ CFG_OUTDIR = TALER_ROOT_DIR / "config"
+ TALER_RUNTIME_DIR = TALER_ROOT_DIR / "runtime"
+ TALER_DATA_DIR = TALER_ROOT_DIR / "data"
+ TALER_UNIT_FILES_DIR = systemd_user_dir = Path.home() / ".config" / "systemd" / "user"
+
    def create_tip_reserve():
        """Create a merchant tip reserve and fund it from the survey account.

        taler-merchant-setup-reserve prints the payto URI (with reserve
        public key as subject) that must be credited to fill the reserve;
        we then make that wire transfer on the sandbox.
        """
        payto = Command([
            f"{TALER_PREFIX}/bin/taler-merchant-setup-reserve",
            "--amount", f"{CURRENCY}:20",
            "--exchange-url", "https://exchange.demo.taler.net/",
            "--merchant-url", "https://backend.demo.taler.net/",
            "--apikey", f"Bearer {FRONTENDS_API_TOKEN}",
            "--wire-method", WIRE_METHOD],
            capture_stdout=True
        ).run()

        # Wire 20 units from the survey account to the reserve's payto URI.
        Command([
            f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox", "--sandbox-url",
            SANDBOX_URL + "/demobanks/default/", "demobank",
            "new-transaction", "--bank-account", "sandbox-account-survey",
            "--payto-with-subject", payto, "--amount", "20"],
            env = get_sandbox_cli_env(
                username = "sandbox-account-survey",
                password = ALL_INSTANCES_BANK_PASSWORD
            )).run()
+
+ def get_random_iban():
+ cc_no_check = 131400 # is "DE00"
+ bban = "".join(random.choices("0123456789", k=4))
+ check_digits = 98 - (int(f"{bban}{cc_no_check}") % 97)
+ return "DE" + (f"0{check_digits}"[-2:]) + bban
+
+ # IBANs
+
+ IBAN_MERCHANT_DEFAULT = get_random_iban()
+ IBAN_MERCHANT_DEMOSHOP = get_random_iban()
+
+ # Instances
+ INSTANCES = [
+ dict(name="GNUnet", isPublic=True),
+ dict(name="Taler", isPublic=True),
+ dict(name="Tor", isPublic=True),
+ dict(name="survey"),
+ dict(name="blog"),
+ ]
+
+ # Credentials / API keys
+ EXCHANGE_NEXUS_USERNAME = "exchange-nexus-user"
+ EXCHANGE_NEXUS_PASSWORD = "exchange-nexus-password"
+ FRONTENDS_API_TOKEN = "secret-token:secret"
+ TALER_MERCHANT_TOKEN = "secret-token:secret"
+ ALL_INSTANCES_BANK_PASSWORD = "secret"
+ EXCHANGE_BANK_ACCOUNT_SANDBOX = "sandbox-account-exchange"
+ EXCHANGE_BANK_ACCOUNT_PASSWORD = "secret"
+
+ # EBICS
+ EBICS_HOST_ID = "ebicsDeployedHost"
+ EXCHANGE_EBICS_USER_ID = "exchangeEbicsUserId"
+ EXCHANGE_EBICS_PARTNER_ID = "exchangeEbicsPartnerId"
+ EBICS_URL = "https://bank.demo.taler.net/ebicsweb"
+
+ # euFin
+ EXCHANGE_BANK_ACCOUNT_NEXUS = "exchange-imported-account-nexus"
+ EXCHANGE_BANK_CONNECTION = "exchange-ebics-connection"
+ NEXUS_DB_FILE = "/tmp/nexus.sqlite"
+ SANDBOX_DB_FILE = "/tmp/sandbox.sqlite"
+ EXCHANGE_FACADE_NAME = "exchange-taler-facade"
+ SANDBOX_ADMIN_USERNAME = "admin"
+ SANDBOX_ADMIN_PASSWORD = "secret"
+
    class Command:
        """Run one external command, logging its output under log_dir.

        Thin wrapper around subprocess.Popen: stdout/stderr are appended
        to <log_dir>/<name>.log, unless capture_stdout is set, in which
        case stdout is piped and returned by run().
        """
        def __init__(
            self, cmd, env=os.environ, log_dir=LOG_DIR,
            custom_name=None, capture_stdout=False
        ):
            # NOTE(review): env defaults to the live os.environ mapping
            # (shared, not copied) — callers pass their own dict copies.
            # cmd: argv list; custom_name overrides the log file basename.
            if len(cmd) == 0:
                fail("Command to execute was given empty.")
            self.name = custom_name if custom_name else basename(cmd[0])
            self.cmd = cmd
            self.capture_stdout = capture_stdout
            self.log_dir = log_dir
            self.env = env

        def run(self):
            """Spawn the command, wait for it, and fail() on non-zero exit.

            Returns the stripped stdout text when capture_stdout is set,
            None otherwise.
            """
            self.do()
            return_code = self.handle.wait()
            self.cleanup() # Mainly closes the log file.
            if return_code != 0:
                fail(f"Command {self.name} failed. Logs in {self.log_dir}")
            if self.capture_stdout:
                return self.handle.communicate()[0].decode("utf-8").rstrip()

        def get_log_filename(self):
            # Path (as string) of the log file backing this command.
            return self.log_file.name

        def cleanup(self):
            # Flush and close the log file opened by do().
            self.log_file.flush()
            self.log_file.close()

        def do(self):
            """Open the append-mode log file and spawn the child (non-blocking)."""
            if not self.log_dir.is_dir():
                os.makedirs(self.log_dir)
            try:
                log_filename = self.log_dir / f"{self.name}.log"
                self.log_file = open(log_filename, "a+")
            except Exception as error:
                fail(f"Could not open log file: {log_filename}: {error}")
            try:
                self.handle = Popen(
                    self.cmd, # list
                    stdin=DEVNULL,
                    stdout=self.log_file if not self.capture_stdout else PIPE,
                    stderr=self.log_file,
                    env=self.env
                )
            except Exception as error:
                fail(f"Could not execute: {' '.join(self.cmd)}: {error}")
+
+ class ConfigFile:
+ def __init__(self, filename):
+ self.sections = OrderedDict()
+ self.filename = filename
+
+ def destroy(self):
+ del self.sections
+ self.sections = OrderedDict()
+
+ def cfg_put(self, section_name, key, value):
+ s = self.sections[section_name] = self.sections.get(section_name, OrderedDict())
+ s[key] = value
+
+ def cfg_write(self, outdir):
+ if outdir:
+ if not os.path.isdir(outdir):
+ os.makedirs(outdir)
+ fstream = open(os.path.join(outdir, self.filename), "w")
+ else:
+ fstream = open(sys.stdout)
+
+ for section_name, section in self.sections.items():
+ fstream.write("[" + section_name + "]" + "\n")
+ for key, value in section.items():
+ fstream.write(key + " = " + value + "\n")
+ fstream.write("\n")
+ fstream.close()
+
    def config_specify_master_pub(
        filename,
        currency,
        exchange_master_pub
    ):
        """Patch *filename* in place via taler-config, inserting the
        exchange master public key into the [exchange] and
        [merchant-exchange-<currency>] sections."""
        Command([
            f"{TALER_PREFIX}/bin/taler-config", "-c", filename,
            "-s", "exchange", "-o", "master_public_key",
            "-V", exchange_master_pub
        ]).run()
        Command([
            f"{TALER_PREFIX}/bin/taler-config", "-c", filename,
            "-s", f"merchant-exchange-{currency}",
            "-o", "master_key",
            "-V", exchange_master_pub
        ]).run()
+
    # When called, there is no exchange master pub yet.
    # taler-exchange-offline will produce the key _after_
    # taler.conf is generated. Only after that, we'll
    # specify the master key where it is missing; namely
    # in the merchant backend and exchange HTTP daemon sections.
+
    def config_main(
        filename,
        outdir,
        unix_sockets_dir,
        currency,
        wire_method,
        exchange_wire_gateway_username,
        exchange_wire_gateway_password,
        frontend_api_key,
        taler_runtime_dir,
        postgres_db_name
    ):
        """Generate the main taler.conf covering all Taler services.

        Writes the file into *outdir* and returns the ConfigFile object
        so the caller can post-process it.

        NOTE(review): wire_method and the wire-gateway credential
        parameters are not referenced in this body — confirm whether
        they were meant to be used.
        """
        def coin(
            obj,
            currency,
            name,
            value,
            d_withdraw="3 years",
            d_spend="5 years",
            d_legal="10 years",
            f_withdraw="0.01",
            f_deposit="0.01",
            f_refresh="0.01",
            f_refund="0.01",
            rsa_keysize="2048",
        ):
            # Add one RSA denomination section [coin_<CUR>_<name>] with
            # the given value, durations and fees.
            sec = "coin_" + currency + "_" + name
            obj.cfg_put(sec, "cipher", "RSA")
            obj.cfg_put(sec, "value", currency + ":" + value)
            obj.cfg_put(sec, "duration_withdraw", d_withdraw)
            obj.cfg_put(sec, "duration_spend", d_spend)
            obj.cfg_put(sec, "duration_legal", d_legal)
            obj.cfg_put(sec, "fee_withdraw", currency + ":" + f_withdraw)
            obj.cfg_put(sec, "fee_refresh", currency + ":" + f_refresh)
            obj.cfg_put(sec, "fee_refund", currency + ":" + f_refund)
            obj.cfg_put(sec, "fee_deposit", currency + ":" + f_deposit)
            obj.cfg_put(sec, "rsa_keysize", rsa_keysize)

        obj = ConfigFile("taler.conf")
        obj.cfg_put("paths", "TALER_DATA_HOME", str(TALER_DATA_DIR))
        if not taler_runtime_dir.is_dir():
            os.makedirs(taler_runtime_dir)
        obj.cfg_put("paths", "TALER_RUNTIME_DIR", str(taler_runtime_dir))
        obj.cfg_put("taler", "CURRENCY", currency)
        obj.cfg_put("taler", "CURRENCY_ROUND_UNIT", f"{currency}:0.01")

        # Demo frontends: each one serves HTTP over its own unix socket.
        obj.cfg_put("donations", "serve", "http")
        obj.cfg_put("donations", "http_serve", "unix")
        obj.cfg_put("donations", "http_unixpath", str(unix_sockets_dir / "donations.sock"))
        obj.cfg_put("donations", "http_unixpath_mode", "660")

        obj.cfg_put("landing", "serve", "http")
        obj.cfg_put("landing", "http_serve", "unix")
        obj.cfg_put("landing", "http_unixpath", str(unix_sockets_dir / "landing.sock"))
        obj.cfg_put("landing", "http_unixpath_mode", "660")

        obj.cfg_put("blog", "serve", "http")
        obj.cfg_put("blog", "http_serve", "unix")
        obj.cfg_put("blog", "http_unixpath", str(unix_sockets_dir / "blog.sock"))
        obj.cfg_put("blog", "http_unixpath_mode", "660")

        obj.cfg_put("survey", "serve", "http")
        obj.cfg_put("survey", "http_serve", "unix")
        obj.cfg_put("survey", "http_unixpath", str(unix_sockets_dir / "survey.sock"))
        obj.cfg_put("survey", "http_unixpath_mode", "660")
        obj.cfg_put("survey", "bank_password", "x")

        obj.cfg_put("merchant", "serve", "unix")
        obj.cfg_put("merchant", "unixpath", str(unix_sockets_dir / "merchant-backend.sock"))
        obj.cfg_put("merchant", "wire_transfer_delay", "0 s")
        obj.cfg_put("merchant", "default_max_wire_fee", currency + ":" + "0.01")
        obj.cfg_put("merchant", "default_max_deposit_fee", currency + ":" + "0.05")
        obj.cfg_put("merchantdb-postgres", "config", f"postgres:///{postgres_db_name}")

        obj.cfg_put("frontends", "backend", "https://backend.demo.taler.net/")
        obj.cfg_put(
            "merchant-exchange-{}".format(currency),
            "exchange_base_url", "https://exchange.demo.taler.net/",
        )
        obj.cfg_put(
            "merchant-exchange-{}".format(currency),
            "currency", currency
        )
        obj.cfg_put("auditor", "serve", "unix")
        # FIXME: both below used?
        obj.cfg_put("auditor", "base_url", "https://auditor.demo.taler.net/")
        obj.cfg_put("auditor", "auditor_url", "https://auditor.demo.taler.net/")
        obj.cfg_put("auditor", "unixpath", str(unix_sockets_dir / "auditor.sock"))
        obj.cfg_put("auditor", "tiny_amount", currency + ":0.01")

        # Exchange security modules (key-signing helpers), one per cipher.
        obj.cfg_put(
            "taler-exchange-secmod-eddsa",
            "unixpath",
            str(unix_sockets_dir / "exchange-secmod-eddsa.sock")
        )
        obj.cfg_put(
            "taler-exchange-secmod-cs",
            "unixpath",
            str(unix_sockets_dir / "exchange-secmod-cs.sock")
        )
        obj.cfg_put("taler-exchange-secmod-cs", "sm_priv_key",
            "${TALER_DATA_HOME}/taler-exchange-secmod-cs/secmod-private-key"
        )
        obj.cfg_put(
            "taler-exchange-secmod-rsa",
            "unixpath",
            str(unix_sockets_dir / "exchange-secmod-rsa.sock")
        )
        obj.cfg_put("taler-exchange-secmod-rsa", "sm_priv_key",
            "${TALER_DATA_HOME}/taler-exchange-secmod-rsa/secmod-private-key"
        )
        obj.cfg_put("exchange", "base_url", "https://exchange.demo.taler.net/")
        obj.cfg_put("exchange", "serve", "unix")
        obj.cfg_put("exchange", "unixpath", str(unix_sockets_dir / "exchange.http"))
        obj.cfg_put("exchange", "terms_etag", "0")
        obj.cfg_put("exchange", "terms_dir", "$HOME/.local/share/taler-exchange/tos")
        obj.cfg_put("exchange", "privacy_etag", "0")
        obj.cfg_put("exchange", "privacy_dir", "$HOME/.local/share/taler-exchange/pp")

        obj.cfg_put("exchangedb-postgres", "db_conn_str", f"postgres:///{postgres_db_name}")
        obj.cfg_put("exchangedb-postgres", "config", f"postgres:///{postgres_db_name}")
        obj.cfg_put("auditordb-postgres", "db_conn_str", f"postgres:///{postgres_db_name}")
        obj.cfg_put("auditordb-postgres", "config", f"postgres:///{postgres_db_name}")
        obj.cfg_put("exchange-account-1", "enable_debit", "yes")
        obj.cfg_put("exchange-account-1", "enable_credit", "yes")
        obj.cfg_put("merchant-account-merchant",
            "wire_response",
            "${TALER_DATA_HOME}/merchant/wire/merchant.json",
        )
        obj.cfg_put("merchant-account-merchant", "wire_file_mode", "770")

        obj.cfg_put("frontends", "backend_apikey", f"{frontend_api_key}")
        # Denomination set: 0.10, 1, 2, 5, 10 and 1000 <currency>.
        coin(obj, currency, "ct_10", "0.10")
        coin(obj, currency, "1", "1")
        coin(obj, currency, "2", "2")
        coin(obj, currency, "5", "5")
        coin(obj, currency, "10", "10")
        coin(obj, currency, "1000", "1000")
        obj.cfg_write(outdir)
        return obj
+
    def config_sync(
        filename, outdir,
        unix_sockets_dir,
        currency, api_key,
        postgres_db_name
    ):
        """Generate the sync (wallet backup) service configuration file
        and write it into *outdir*."""
        obj = ConfigFile(filename)
        obj.cfg_put("taler", "currency", currency)
        obj.cfg_put("sync", "serve", "unix")
        obj.cfg_put("sync", "unixpath", str(unix_sockets_dir / "sync.http"))
        obj.cfg_put("sync", "apikey", f"Bearer secret-token:{api_key}")
        obj.cfg_put("sync", "annual_fee", f"{currency}:0.1")
        obj.cfg_put("sync", "fulfillment_url", "taler://fulfillment-success/")
        # Payments for backup space go through the "Taler" instance.
        obj.cfg_put(
            "sync",
            "payment_backend_url",
            "https://backend.demo.taler.net/instances/Taler/"
        )
        obj.cfg_put("syncdb-postgres", "config", f"postgres:///{postgres_db_name}")
        obj.cfg_write(outdir)
+
def unit_file_content(description, cmd, env=None):
    """Render the text of a SystemD user unit file.

    *cmd* is the full ExecStart command line; its executable's basename
    names the log file (under the module-level LOG_DIR) to which both
    stdout and stderr are appended.  When *env* is given, an
    EnvironmentFile= line pointing at it is appended.  Returns the unit
    file body as a string (no trailing newline).
    """
    binary = cmd.split(" ")[0].split("/")[-1]
    log_file = f"{LOG_DIR / binary}.log"
    lines = [
        "[Unit]",
        f"Description={description}",
        "[Service]",
        f"ExecStart={cmd}",
        f"StandardOutput=append:{log_file}",
        f"StandardError=append:{log_file}",
    ]
    unit = "\n".join(lines)
    if env:
        unit += f"\nEnvironmentFile={env}"
    return unit
+
# Refuse to proceed if a service from a previous run is still bound to
# one of our Unix domain sockets: connect_ex() returning 0 means
# something accepted the connection.
if UNIX_SOCKETS_DIR.is_dir():
    for left_socket in os.listdir(UNIX_SOCKETS_DIR):
        s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        socket_file = str(UNIX_SOCKETS_DIR / left_socket)
        if s.connect_ex(socket_file.encode("utf-8")) == 0:
            fail(f"A service is unexpectedly running and bound to {socket_file}!")
print(" OK")

# Wipe data, runtime and generated-config directories from any earlier
# deployment so this run starts from a clean slate.
print_nn("Remove stale data and config...")
if TALER_DATA_DIR.exists():
    shutil.rmtree(TALER_DATA_DIR)
if TALER_RUNTIME_DIR.exists():
    shutil.rmtree(TALER_RUNTIME_DIR)
if CFG_OUTDIR.exists():
    shutil.rmtree(CFG_OUTDIR)
print(" OK")

# First version of taler.conf.  "Preliminary" because the exchange
# master public key and payto-URI are only patched in later, once known.
print_nn("Generate preliminary taler.conf...")
mc = config_main(
    "taler.conf",
    outdir=CFG_OUTDIR,
    unix_sockets_dir=UNIX_SOCKETS_DIR,
    currency=CURRENCY,
    wire_method=WIRE_METHOD,
    exchange_wire_gateway_username=EXCHANGE_NEXUS_USERNAME,
    exchange_wire_gateway_password=EXCHANGE_NEXUS_PASSWORD,
    frontend_api_key=FRONTENDS_API_TOKEN,
    taler_runtime_dir=TALER_RUNTIME_DIR,
    postgres_db_name=postgres_db_name
)
print(" OK")

# Ensure both the user's systemd unit directory and our own staging
# directory for generated unit files exist.
print_nn("Installing SystemD unit files...")
if not systemd_user_dir.exists():
    systemd_user_dir.mkdir(parents=True, exist_ok=True)

if not TALER_UNIT_FILES_DIR.exists():
    TALER_UNIT_FILES_DIR.mkdir(parents=True, exist_ok=True)
+
# Generate one SystemD user unit per Taler component.  Units that talk
# to Postgres get an EnvironmentFile carrying PGPORT, but only when a
# custom PGPORT is set in our own environment.
with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-httpd.service", "w") as exchange_unit:
    exchange_unit.write(unit_file_content(
        description = "Taler Exchange HTTP daemon",
        cmd = f"{TALER_PREFIX}/bin/taler-exchange-httpd -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-wirewatch.service", "w") as exchange_wirewatch_unit:
    exchange_wirewatch_unit.write(unit_file_content(
        description = "Taler Exchange Wirewatch",
        cmd = f"{TALER_PREFIX}/bin/taler-exchange-wirewatch -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
    ))
# Note: --kyc-off disables KYC checks for the demo aggregator.
with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-aggregator.service", "w") as exchange_aggregator_unit:
    exchange_aggregator_unit.write(unit_file_content(
        description = "Taler Exchange Aggregator",
        cmd = f"{TALER_PREFIX}/bin/taler-exchange-aggregator --kyc-off -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-transfer.service", "w") as exchange_transfer_unit:
    exchange_transfer_unit.write(unit_file_content(
        description = "Taler Exchange Transfer",
        cmd = f"{TALER_PREFIX}/bin/taler-exchange-transfer -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
    ))
# The three security modules (CS/RSA/EdDSA key helpers) need no
# Postgres access, hence no env file.
with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-cs.service", "w") as exchange_cs_unit:
    exchange_cs_unit.write(unit_file_content(
        description = "Taler Exchange CS security module",
        cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-cs -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
    ))

with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-rsa.service", "w") as exchange_rsa_unit:
    exchange_rsa_unit.write(unit_file_content(
        description = "Taler Exchange RSA security module",
        cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-rsa -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-eddsa.service", "w") as exchange_eddsa_unit:
    exchange_eddsa_unit.write(unit_file_content(
        description = "Taler Exchange EDDSA security module",
        cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-eddsa -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-merchant-backend.service", "w") as merchant_unit:
    merchant_unit.write(unit_file_content(
        description = "Taler Merchant backend",
        cmd = f"{TALER_PREFIX}/bin/taler-merchant-httpd -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
    ))
# Variant of the merchant unit that passes the admin auth token (-a);
# used once at bootstrap to create the default instance.
with open(TALER_UNIT_FILES_DIR / "taler-local-merchant-backend-token.service", "w") as merchant_token_unit:
    merchant_token_unit.write(unit_file_content(
        description = "Taler Merchant backend with auth token to allow default instance creation.",
        cmd = f"{TALER_PREFIX}/bin/taler-merchant-httpd -a {TALER_MERCHANT_TOKEN} -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
    ))
# Custom Postgres connection.
if os.environ.get("PGPORT"):
    with open(TALER_UNIT_FILES_DIR / "taler-local-postgres.env", "w") as postgres_env:
        postgres_env.write(f"PGPORT={os.environ.get('PGPORT')}")

# euFin unit files.
with open(TALER_UNIT_FILES_DIR / "taler-local-sandbox.service", "w") as sandbox_unit:
    sandbox_unit.write(unit_file_content(
        description = "euFin Sandbox",
        cmd = f"{TALER_PREFIX}/bin/libeufin-sandbox serve --with-unix-socket {UNIX_SOCKETS_DIR / 'sandbox.sock'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-sandbox.env"
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-nexus.service", "w") as nexus_unit:
    nexus_unit.write(unit_file_content(
        description = "euFin Nexus",
        cmd = f"{TALER_PREFIX}/bin/libeufin-nexus serve --with-unix-socket {UNIX_SOCKETS_DIR / 'nexus.sock'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-nexus.env"
    ))
# euFin env files.
with open(TALER_UNIT_FILES_DIR / "taler-local-sandbox.env", "w") as sandbox_env:
    sandbox_env.write(f"LIBEUFIN_SANDBOX_DB_CONNECTION=jdbc:sqlite:{SANDBOX_DB_FILE}\n")
    sandbox_env.write(f"LIBEUFIN_SANDBOX_ADMIN_PASSWORD={SANDBOX_ADMIN_PASSWORD}\n")
    # The following populates the bank UI navigation bar.
    sandbox_env.write(f"TALER_ENV_URL_INTRO=https://demo.taler.net/\n")
    sandbox_env.write(f"TALER_ENV_URL_BANK=https://bank.demo.taler.net/\n")
    sandbox_env.write(f"TALER_ENV_URL_MERCHANT_BLOG=https://shop.demo.taler.net/\n")
    sandbox_env.write(f"TALER_ENV_URL_MERCHANT_DONATIONS=https://donations.demo.taler.net/\n")
    sandbox_env.write(f"TALER_ENV_URL_MERCHANT_SURVEY=https://survey.demo.taler.net/\n")

with open(TALER_UNIT_FILES_DIR / "taler-local-nexus.env", "w") as nexus_env:
    nexus_env.write(f"LIBEUFIN_NEXUS_DB_CONNECTION=jdbc:sqlite:{NEXUS_DB_FILE}\n")
# Demo frontend units: all share one env file written just below.
with open(TALER_UNIT_FILES_DIR / "taler-local-donations.service", "w") as donations_unit:
    donations_unit.write(unit_file_content(
        description = "Donation Website that accepts Taler payments.",
        cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos donations -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-blog.service", "w") as blog_unit:
    blog_unit.write(unit_file_content(
        description = "Blog that accepts Taler payments.",
        cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos blog -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-survey.service", "w") as survey_unit:
    survey_unit.write(unit_file_content(
        description = "Survey Website awarding tips via Taler.",
        cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos survey -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-landing.service", "w") as landing_unit:
    landing_unit.write(unit_file_content(
        description = "Landing Website of Taler demo.",
        cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos landing -c {CFG_OUTDIR / 'taler.conf'}",
        env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
    ))
with open(TALER_UNIT_FILES_DIR / "taler-local-frontends.env", "w") as frontends_env:
    # To populate the navigation bar:
    frontends_env.write((
        f"PATH={os.environ.get('PATH')}\n"
        f"TALER_CONFIG_FILE={CFG_OUTDIR / 'taler.conf'}\n"
        f"TALER_ENV_URL_INTRO=https://demo.taler.net/\n"
        f"TALER_ENV_URL_BANK=https://bank.demo.taler.net/\n"
        f"TALER_ENV_URL_MERCHANT_BLOG=https://shop.demo.taler.net/\n"
        f"TALER_ENV_URL_MERCHANT_DONATIONS=https://donations.demo.taler.net/\n"
        f"TALER_ENV_URL_MERCHANT_SURVEY=https://survey.demo.taler.net/\n"
    ))
print(" OK")
# Make systemd pick up the freshly written units, and make sure every
# taler-local-* service is stopped again when this script exits.
print_nn("Reload SystemD...")
Command(["systemctl", "--user", "daemon-reload"]).run()
atexit.register(lambda: subprocess.run(
    ["systemctl", "--user", "stop", "taler-local-*.service"],
    check=True
    )
)
print(" OK")
# "setup" emits the exchange's master public key on stdout; capture it,
# then write it back into taler.conf so all components agree on it.
print_nn("Generate exchange's master key...")
EXCHANGE_MASTER_PUB = Command(
    [
        f"{TALER_PREFIX}/bin/taler-exchange-offline",
        "-c", CFG_OUTDIR / "taler.conf",
        "setup"
    ],
    capture_stdout=True
).run()
print(" OK")
print_nn("Specify exchange master pub in taler.conf...")
config_specify_master_pub(
    CFG_OUTDIR / "taler.conf",
    CURRENCY,
    EXCHANGE_MASTER_PUB
)
print(" OK")
# Configuration for the Sync (wallet backup) service.
print_nn("Generating sync.conf...")
config_sync(
    "sync.conf",
    outdir=CFG_OUTDIR,
    unix_sockets_dir=UNIX_SOCKETS_DIR,
    currency=CURRENCY,
    api_key=FRONTENDS_API_TOKEN,
    postgres_db_name=postgres_db_name
)
print(" OK")
# Drop and re-create the exchange database schema.
print_nn("Reset and init exchange DB..")
Command([
    f"{TALER_PREFIX}/bin/taler-exchange-dbinit",
    "-c", CFG_OUTDIR / "taler.conf",
    "--reset"]
).run()
print(" OK")
+
# Start the port redirect (checked) and the crypto helpers, then the
# exchange itself.  Only the first start uses check=True; the others
# are fire-and-forget.
print_nn("Launching X-Forwarded-Host port redirect...")
subprocess.run(["systemctl", "--user", "start", "taler-local-port-redirect.service"], check=True)
time.sleep(1)
print(" OK")
print_nn("Launching the exchange RSA helper...")
subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-rsa.service"])
print(" OK")
print_nn("Launching the exchange EDDSA helper...")
subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-eddsa.service"])
print(" OK")
print_nn("Launching the exchange CS helper...")
subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-cs.service"])
print(" OK")
print_nn("Launching the exchange...")
subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-httpd.service"])
print(" OK")
# Sign the freshly generated denomination/signing keys offline and
# upload the signatures to the running exchange.
print_nn("exchange-offline: signing key material...")
Command([
    f"{TALER_PREFIX}/bin/taler-exchange-offline",
    "-c", CFG_OUTDIR / "taler.conf",
    "download", "sign", "upload"
]).run()
print(" OK")
# Set up wire fees for next 5 years.
# NOTE(review): `datetime` is not among the imports visible at the top
# of this file excerpt -- presumably imported elsewhere; confirm.
NOW = datetime.now()
YEAR = NOW.year
print_nn("Setting wire fees for the next 5 years...")
for year in range(YEAR, YEAR+5):
    Command(
        [
            f"{TALER_PREFIX}/bin/taler-exchange-offline",
            "-c", CFG_OUTDIR / "taler.conf",
            "wire-fee",
            str(year),
            WIRE_METHOD,
            # Three 0.01 fee amounts -- check taler-exchange-offline's
            # wire-fee CLI for which fee each positional argument sets.
            CURRENCY + ":0.01",
            CURRENCY + ":0.01",
            CURRENCY + ":0.01",
            "upload"
        ],
        custom_name="set-wire-fee"
    ).run()
print(" OK")
print_nn("Reset and init auditor DB..")
Command([
    f"{TALER_PREFIX}/bin/taler-auditor-dbinit",
    "-c", CFG_OUTDIR / "taler.conf",
    "--reset"]
).run()
print(" OK")
# Register this exchange (by master pub and base URL) with the auditor.
print_nn("Add this exchange to the auditor...")
Command(
    [
        f"{TALER_PREFIX}/bin/taler-auditor-exchange",
        "-c", CFG_OUTDIR / "taler.conf",
        "-m", EXCHANGE_MASTER_PUB,
        "-u", "https://exchange.demo.taler.net/"
    ],
).run()
print(" OK")
+ print(" OK")
## Step 4: Set up euFin
# Delete any previous Sandbox/Nexus SQLite databases; a missing file
# (ENOENT) is fine, any other OSError is re-raised.
# NOTE(review): `remove` and `errno` are not among the imports visible
# at the top of this file excerpt -- presumably imported elsewhere; confirm.
print_nn("Resetting euFin databases...")
try:
    remove(SANDBOX_DB_FILE)
    remove(NEXUS_DB_FILE)
except OSError as error:
    if error.errno != errno.ENOENT:
        raise error
print(" OK")
# Make the 'default' demobank at Sandbox.
# NOTE(review): the original comment claimed "(No signup bonus)" but the
# command DOES pass --with-signup-bonus -- confirm which is intended.
Command([
    f"{TALER_PREFIX}/bin/libeufin-sandbox",
    "config", "--currency", CURRENCY, "--with-signup-bonus", "default"],
    env={
        "PATH": os.environ["PATH"],
        "LIBEUFIN_SANDBOX_DB_CONNECTION": f"jdbc:sqlite:{SANDBOX_DB_FILE}"
    }).run()
# This step transparently creates a default demobank.
print_nn("Launching Sandbox...")
subprocess.run(["systemctl", "--user", "start", "taler-local-sandbox.service"])
print(" OK")
# Register the EBICS host that all bank accounts below will live under.
print_nn("Make Sandbox EBICS host...")
Command(
    [
        f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
        "--sandbox-url", SANDBOX_URL,
        "ebicshost", "create",
        "--host-id", EBICS_HOST_ID,
    ],
    env=get_sandbox_cli_env(
        SANDBOX_ADMIN_USERNAME,
        SANDBOX_ADMIN_PASSWORD,
    ),
    custom_name="sandbox-create-ebicshost",
).run()
print(" OK")
+
# Create the exchange's bank account at Sandbox, discover its payto-URI,
# write that into taler.conf, and enable it via the offline tool.
print_nn("Create Exchange account at Sandbox...")
prepare_sandbox_account(
    sandbox_url=SANDBOX_URL,
    ebics_host_id=EBICS_HOST_ID,
    ebics_partner_id=EXCHANGE_EBICS_PARTNER_ID,
    ebics_user_id=EXCHANGE_EBICS_USER_ID,
    person_name="Exchange Owner",
    bank_account_name=EXCHANGE_BANK_ACCOUNT_SANDBOX,
    password=EXCHANGE_BANK_ACCOUNT_PASSWORD
)
print(" OK")
print_nn("Getting exchange payto-URI from the bank...")
exchange_bank_account_info = get_sandbox_account_info(
    SANDBOX_URL,
    EXCHANGE_BANK_ACCOUNT_SANDBOX,
    EXCHANGE_BANK_ACCOUNT_PASSWORD
)
EXCHANGE_PAYTO = exchange_bank_account_info["paytoUri"]
print(" OK")
print_nn("Specify own payto-URI to exchange's configuration..")
Command([
    f"{TALER_PREFIX}/bin/taler-config", "-c", CFG_OUTDIR / 'taler.conf',
    "-s", "exchange-account-1", "-o", "payto_uri", "-V",
    EXCHANGE_PAYTO
]).run()
print(" OK")
print_nn(f"exchange-offline: enabling {EXCHANGE_PAYTO}...")
Command([
    f"{TALER_PREFIX}/bin/taler-exchange-offline",
    "-c", CFG_OUTDIR / "taler.conf",
    "enable-account", EXCHANGE_PAYTO, "upload"
]).run()
print(" OK")

# Give each instance a Sandbox account (note: 'default'
# won't have one, as it should typically only manage other
# instances).
for instance in INSTANCES:
    instance_id = instance["name"]
    print_nn(f"Create account of {instance_id} at Sandbox...")
    prepare_sandbox_account(
        sandbox_url=SANDBOX_URL,
        ebics_host_id=EBICS_HOST_ID,
        # EBICS partner/user ids are unused placeholders for shops.
        ebics_partner_id="unusedMerchantEbicsPartnerId",
        ebics_user_id=f"unused{instance_id}EbicsUserId",
        person_name=f"Shop Owner of {instance_id}",
        bank_account_name=f"sandbox-account-{instance_id.lower()}",
        password=ALL_INSTANCES_BANK_PASSWORD,
        is_public=instance.get("isPublic")
    )
    print(" OK")
# One ordinary customer bank account at Sandbox.
print_nn("Create Customer account at Sandbox...")
prepare_sandbox_account(
    sandbox_url=SANDBOX_URL,
    ebics_host_id=EBICS_HOST_ID,
    ebics_partner_id="unusedCustomerEbicsPartnerId",
    ebics_user_id="unusedCustomerEbicsUserId",
    person_name="Customer Person",
    bank_account_name=CUSTOMER_BANK_ACCOUNT,
    password=CUSTOMER_BANK_PASSWORD
)
print(" OK")
# Nexus side: create the superuser, start the service, mirror the
# exchange's Sandbox account into Nexus over EBICS, and expose it as a
# Taler wire-gateway facade.
print_nn("Make Nexus superuser ...")
Command(
    [
        f"{TALER_PREFIX}/bin/libeufin-nexus", "superuser",
        EXCHANGE_NEXUS_USERNAME,
        "--password", EXCHANGE_NEXUS_PASSWORD
    ],
    env=get_nexus_server_env(
        NEXUS_DB_FILE,
        NEXUS_URL
    ),
    custom_name="nexus-superuser",
).run()
print(" OK")

print_nn("Launching Nexus...")
subprocess.run(["systemctl", "--user", "start", "taler-local-nexus.service"])
print(" OK")

print_nn("Create Exchange account at Nexus...")
prepare_nexus_account(
    ebics_url=EBICS_URL,
    ebics_host_id=EBICS_HOST_ID,
    ebics_partner_id=EXCHANGE_EBICS_PARTNER_ID,
    ebics_user_id=EXCHANGE_EBICS_USER_ID,
    bank_connection_name=EXCHANGE_BANK_CONNECTION,
    bank_account_name_sandbox=EXCHANGE_BANK_ACCOUNT_SANDBOX,
    bank_account_name_nexus=EXCHANGE_BANK_ACCOUNT_NEXUS,
    env=get_nexus_cli_env(
        EXCHANGE_NEXUS_USERNAME,
        EXCHANGE_NEXUS_PASSWORD,
        NEXUS_URL
    )
)
print(" OK")

# The facade is what the exchange's wirewatch/transfer talk to.
print_nn("Create Taler facade ...")
Command(
    [
        f"{TALER_PREFIX}/bin/libeufin-cli", "facades",
        "new-taler-wire-gateway-facade",
        "--currency", CURRENCY,
        "--facade-name", EXCHANGE_FACADE_NAME,
        EXCHANGE_BANK_CONNECTION,
        EXCHANGE_BANK_ACCOUNT_NEXUS
    ],
    env=get_nexus_cli_env(
        EXCHANGE_NEXUS_USERNAME,
        EXCHANGE_NEXUS_PASSWORD,
        NEXUS_URL
    ),
    custom_name="create-taler-facade",
).run()
print(" OK")
# Ask Nexus for the facade just created and take its base URL.  Only one
# facade exists at this point, hence index [0].
# NOTE(review): `requests` is not among the imports visible at the top
# of this file excerpt -- presumably imported elsewhere; confirm.
try:
    response = requests.get(
        NEXUS_URL + "/facades",
        auth=requests.auth.HTTPBasicAuth(
            EXCHANGE_NEXUS_USERNAME,
            EXCHANGE_NEXUS_PASSWORD
        )
    )
    response.raise_for_status()
except Exception as error:
    fail(error)
FACADE_URL = response.json().get("facades")[0].get("baseUrl")
# Tell Sandbox which exchange to suggest to wallets for withdrawals.
print_nn("Set suggested exchange at Sandbox...")
Command([
    f"{TALER_PREFIX}/bin/libeufin-sandbox",
    "default-exchange",
    "https://exchange.demo.taler.net/",
    EXCHANGE_PAYTO],
    env={
        "PATH": os.environ["PATH"],
        "LIBEUFIN_SANDBOX_DB_CONNECTION": f"jdbc:sqlite:{SANDBOX_DB_FILE}"
    }).run()
print(" OK")
+
# Point the exchange to the facade: write the wire-gateway credentials
# into the [exchange-accountcredentials-1] section of taler.conf.
#
# FIX: the original invocations spelled the option flag as
#   "-o" "wire_gateway_auth_method"
# (no comma), i.e. Python's implicit string concatenation produced the
# single argv element "-owire_gateway_auth_method".  getopt happens to
# accept that attached-argument form, so behavior was correct by luck;
# the explicit "-o", <option> form below is the intended spelling.
# The four near-identical calls are also deduplicated into one loop.
for option, value, task_name in [
    ("wire_gateway_auth_method", "basic", "specify-wire-gateway-auth-method"),
    ("wire_gateway_url", FACADE_URL, "specify-facade-url"),
    ("username", EXCHANGE_NEXUS_USERNAME, "specify-username-for-facade"),
    ("password", EXCHANGE_NEXUS_PASSWORD, "specify-password-for-facade"),
]:
    Command(
        [
            f"{TALER_PREFIX}/bin/taler-config",
            "-c", CFG_OUTDIR / "taler.conf",
            "-s", "exchange-accountcredentials-1",
            "-o", option,
            "-V", value,
        ],
        custom_name=task_name,
    ).run()
+
## Step 6: Set up merchant

# Drop and re-create the merchant database schema.
print_nn("Reset and init merchant database...")
Command([
    f"{TALER_PREFIX}/bin/taler-merchant-dbinit",
    "-c", CFG_OUTDIR / "taler.conf",
    "--reset"
]).run()
print(" OK")
+
def ensure_instance(
    currency,
    instance_id,
    backend_url,
    wire_method,
    auth_token
):
    """Create the merchant instance *instance_id*, or update it in place.

    Probes the backend's management API first: a 200 answer means the
    instance exists and gets PATCHed, otherwise it is POSTed anew.  The
    instance's payto-URI is looked up from its Sandbox bank account.
    Aborts the script (via fail()) on any non-2xx response.
    (*wire_method* is accepted for call-site symmetry but not used here.)
    """
    headers = {"Authorization": f"Bearer {auth_token}"}
    probe = requests.get(
        urljoin_nodrop(backend_url, f"management/instances/{instance_id}"),
        headers=headers
    )
    account_info = get_sandbox_account_info(
        SANDBOX_URL,
        f"sandbox-account-{instance_id.lower()}",
        ALL_INSTANCES_BANK_PASSWORD
    )
    body = {
        "id": instance_id,
        "name": f"Name of '{instance_id}'",
        "payto_uris": [account_info["paytoUri"]],
        "address": {},
        "jurisdiction": {},
        "default_max_wire_fee": f"{currency}:1",
        "default_wire_fee_amortization": 3,
        "default_max_deposit_fee": f"{currency}:1",
        "default_wire_transfer_delay": {"d_us": "forever"},
        "default_pay_delay": {"d_us": "forever"},
        "auth": {"method": "token", "token": auth_token},
    }
    if probe.status_code == 200:
        # Instance exists, patching it.
        print(f"Patching instance '{instance_id}'")
        send = requests.patch
        endpoint = f"management/instances/{instance_id}"
    else:
        send = requests.post
        endpoint = "management/instances"

    outcome = send(
        urljoin_nodrop(backend_url, endpoint),
        json=body,
        headers=headers
    )
    if not 200 <= outcome.status_code < 300:
        print(f"Backend responds: {outcome.status_code}/{outcome.text}")
        fail(f"Could not create (or patch) instance '{instance_id}'")
+
# Run the merchant once WITH the admin token in its environment so the
# default instance can be created, then bring up the token-less unit.
print_nn(f"Start merchant (with TALER_MERCHANT_TOKEN into the env)...")
subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend-token.service"], check=True)
print(" OK")
print_nn("Give default instance a bank account...")
prepare_sandbox_account(
    sandbox_url=SANDBOX_URL,
    ebics_host_id=EBICS_HOST_ID,
    ebics_partner_id="unusedMerchantEbicsPartnerId",
    ebics_user_id=f"unusedDefaultInstanceEbicsUserId",
    person_name=f"Shop Owner of default instance",
    bank_account_name="sandbox-account-default",
    password=ALL_INSTANCES_BANK_PASSWORD
)
print(" OK")
ensure_instance(
    currency=CURRENCY,
    instance_id="default",
    backend_url = "https://backend.demo.taler.net/",
    wire_method = WIRE_METHOD,
    auth_token=FRONTENDS_API_TOKEN
)

# NOTE(review): this only *starts* the token-less unit; whether that
# displaces the token-carrying one depends on the unit definitions
# (e.g. a Conflicts= relation) -- confirm.
print_nn("Restarting the merchant WITHOUT the auth-token in the env...")
subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend.service"], check=True)
print(" OK")
# Create (or patch) one merchant instance per INSTANCES entry.
for instance in INSTANCES:
    instance_id = instance["name"]
    print_nn(f"Creating the {instance_id} instance...")
    ensure_instance(
        currency=CURRENCY,
        instance_id=instance_id,
        backend_url = "https://backend.demo.taler.net/",
        wire_method = WIRE_METHOD,
        auth_token=FRONTENDS_API_TOKEN
    )
    print(" OK")
print_nn("Creating tip reserve...")
create_tip_reserve()
print(" OK")
# 1 second to let Nexus read the payment from
# Sandbox, 1 second to let the Exchange Wirewatch
# to read the payment from Nexus.
print_nn("Sleep 2 seconds to let the tip reserve settle...")
time.sleep(2)
print(" OK")
+
@cli.command()
def launch():
    """Start every SystemD user unit of the Taler demo, in dependency order.

    Any failed start aborts immediately (check=True).
    """
    service_names = [
        "port-redirect",
        "exchange-secmod-rsa",
        "exchange-secmod-eddsa",
        "exchange-secmod-cs",
        "exchange-httpd",
        "exchange-wirewatch",
        "exchange-aggregator",
        "exchange-transfer",
        "merchant-backend",
        "sandbox",
        "nexus",
        "donations",
        "blog",
        "survey",
        "landing",
    ]
    for name in service_names:
        subprocess.run(
            ["systemctl", "--user", "start", f"taler-local-{name}.service"],
            check=True
        )
+
@cli.command()
def stop():
    """Stop all Taler demo services at once via a unit-name glob."""
    subprocess.run(
        ["systemctl", "--user", "stop", "taler-local-*.service"],
        check=True
    )
+
# Script entry point: dispatch to the click command group defined above.
if __name__ == "__main__":
    cli()