author     Florian Dold <florian@dold.me>  2022-11-02 16:37:18 +0100
committer  Florian Dold <florian@dold.me>  2022-11-02 16:37:18 +0100
commit     b9d4d1f1aa8291525652a546c604eb86c674ca05 (patch)
tree       a9d3597cc6885e36a5bc1a892320ea193ca3e7d8 /bin
parent     5dc3d396529f3ee409b6fd6d235c235580e034f3 (diff)
download   deployment-b9d4d1f1aa8291525652a546c604eb86c674ca05.tar.gz
           deployment-b9d4d1f1aa8291525652a546c604eb86c674ca05.tar.bz2
           deployment-b9d4d1f1aa8291525652a546c604eb86c674ca05.zip
obsolete
Diffstat (limited to 'bin')
-rwxr-xr-x  bin/taler-gv     1789
-rwxr-xr-x  bin/taler-local  1840
2 files changed, 0 insertions, 3629 deletions
diff --git a/bin/taler-gv b/bin/taler-gv
deleted file mode 100755
index 0f9c098..0000000
--- a/bin/taler-gv
+++ /dev/null
@@ -1,1789 +0,0 @@
-#!/usr/bin/env python3
-
-# This file is part of GNU Taler.
-#
-# GNU Taler is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# GNU Taler is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with GNU Taler. If not, see <https://www.gnu.org/licenses/>.
-
-import socket
-import shutil
-import atexit
-import click
-import types
-import os
-import sys
-import os.path
-import subprocess
-import time
-import random
-import json
-from os import listdir
-from os.path import isdir, join, basename
-from pathlib import Path
-from typing import List
-from sys import exit
-from urllib.parse import urljoin
-from os import remove
-import requests
-from collections import OrderedDict
-import errno
-from pathlib import Path
-from subprocess import Popen, DEVNULL, PIPE
-from datetime import datetime
-
-
-TALER_ROOT_DIR = Path.home()
-TALER_PREFIX = Path.home() / "local"
-
-# Print No Newline.
-def print_nn(msg):
- print(msg, end="")
- sys.stdout.flush()
-
-class Repo:
- def __init__(self, name, url, deps, builder, version="master"):
- self.name = name
- self.url = url
- self.deps = deps
- self.builder = builder
- self.version = version
-
-@click.group()
-def cli():
- pass
-
-# Parse the comma-separated repos list given on the command
-# line into a list of names.
-def split_repos_list(repos):
- return [repo for repo in repos.split(",") if repo != ""]
-
-# fetch the remote. No timestamp deletion here
-def update_checkout(r: Repo, p: Path):
- """Clean the repository's working directory and
- update it to match the latest version of the upstream branch
- that we are tracking."""
- subprocess.run(["git", "-C", str(p), "clean", "-fdx"], check=True) # remove unversioned files.
-
- # Equivalent to "git pull". Does nothing if in detached HEAD
- # but pulls new code into the local copy otherwise.
- subprocess.run(["git", "-C", str(p), "fetch"], check=True)
- subprocess.run(["git", "-C", str(p), "reset"], check=True)
-
- # Make the last step "--hard", i.e. also remove files that
- # do not belong to the target version.
- res = subprocess.run(
- [
- "git",
- "-C",
- str(p),
- "rev-parse",
- "--abbrev-ref",
- "--symbolic-full-name",
- "@{u}",
- ],
- stderr=subprocess.DEVNULL,
- stdout=subprocess.PIPE,
- encoding="utf-8",
- )
- if res.returncode != 0:
- ref = "HEAD"
- else:
- ref = res.stdout.strip("\n ")
- print(f"resetting {r.name} to ref {ref}")
- subprocess.run(["git", "-C", str(p), "reset", "--hard", ref], check=True)
-
-
-def default_configure(*extra):
- extra_list = list(extra)
- subprocess.run(["./configure", f"--prefix={TALER_PREFIX}"] + extra_list, check=True)
-
-def pyconfigure(*extra):
- """For python programs, --prefix doesn't work."""
- subprocess.run(["./configure"] + list(extra), check=True)
-
-def build_libeufin(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_libmicrohttpd(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure("--disable-doc")
- subprocess.run(["make"], check=True)
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_gnunet(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = TALER_PREFIX
- default_configure(
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- "--disable-documentation",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_exchange(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = TALER_PREFIX
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_wallet(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_twister(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = TALER_PREFIX
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-exchange={pfx}",
- f"--with-gnunet={pfx}",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_merchant(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = TALER_PREFIX
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-exchange={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_sync(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = TALER_PREFIX
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-exchange={pfx}",
- f"--with-merchant={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-
-def build_demos(r, p):
- update_checkout(r, p)
- pfx = TALER_PREFIX
- pyconfigure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_backoffice(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- subprocess.run(["./configure"], check=True)
- subprocess.run(["make", "build-single"], check=True)
- (p / "taler-buildstamp").touch()
-
-repos = {
- "libmicrohttpd": Repo(
- "libmicrohttpd",
- "git://git.gnunet.org/libmicrohttpd.git",
- [],
- build_libmicrohttpd,
- ),
- "gnunet": Repo(
- "gnunet",
- "git://git.gnunet.org/gnunet.git",
- ["libmicrohttpd"],
- build_gnunet
- ),
- "exchange": Repo(
- "exchange",
- "git://git.taler.net/exchange",
- ["gnunet", "libmicrohttpd"],
- build_exchange,
- ),
- "merchant": Repo(
- "merchant",
- "git://git.taler.net/merchant",
- ["exchange","libmicrohttpd","gnunet"],
- build_merchant,
- ),
- "sync": Repo(
- "sync",
- "git://git.taler.net/sync",
- ["exchange",
- "merchant",
- "gnunet",
- "libmicrohttpd"],
- build_sync,
- ),
- "wallet-core": Repo(
- "wallet-core",
- "git://git.taler.net/wallet-core",
- [],
- build_wallet,
- ),
- "libeufin": Repo(
- "libeufin",
- "git://git.taler.net/libeufin.git",
- [],
- build_libeufin,
- ),
- "taler-merchant-demos": Repo(
- "taler-merchant-demos",
- "git://git.taler.net/taler-merchant-demos",
- [],
- build_demos,
- ),
- "twister": Repo(
- "twister",
- "git://git.taler.net/twister",
- ["gnunet", "libmicrohttpd"],
- build_twister,
- ),
-}
-
-def get_repos_names() -> List[str]:
- r_dir = TALER_ROOT_DIR / "sources"
- if not r_dir.is_dir():
- print(f"'{r_dir}' not found. Did bootstrap run?")
- return []
- return [el for el in listdir(r_dir) if isdir(join(r_dir, el)) and repos.get(el)]
-
-# Get 'Repo' objects (globally defined),
-# using their names as index.
-def load_repos(reposNames) -> List[Repo]:
- ret = []
- for repo in repos.keys():
- if repo in reposNames:
- ret.append(repos[repo])
- return ret
-
-# Return the list of repos (equipped with their version)
-# to install.
-def load_repos_with_envcfg(envcfg_path) -> List[Repo]:
- envcfg_path = Path(envcfg_path)
- if not os.path.isfile(envcfg_path):
- print(f"{envcfg_path} is not a file")
- sys.exit(1)
- cfgtext = envcfg_path.read_text()
- cfg = types.ModuleType("taler_deployment_cfg")
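- # Execute the env file as Python inside a fresh module namespace, so that
- # its 'tag_<repo>' assignments become attributes readable via getattr() below.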
- try:
- exec(cfgtext, cfg.__dict__)
- except SyntaxError:
- print(f"{envcfg_path} is not Python.")
- exit(1)
- ret = []
- for repo in repos.keys():
- try:
- envcfg_entry = getattr(cfg, "tag_" + repo.replace("-", "_"))
- except AttributeError:
- # The 'env' file doesn't pin this repo; keep looping.
- continue
- repos[repo].version = envcfg_entry
- ret.append(repos[repo])
- return ret
-
-# Flag as stale the projects set on 'master' that
-# aren't in line with upstream. Detached head projects
-# aren't affected.
-def update_repos(repos: List[Repo], force) -> None:
- for r in repos:
- r_dir = TALER_ROOT_DIR / "sources" / r.name
- subprocess.run(["git", "-C", str(r_dir), "fetch"], check=True)
- res = subprocess.run(
- ["git", "-C", str(r_dir), "status", "-sb"],
- check=True,
- stdout=subprocess.PIPE,
- encoding="utf-8",
- )
- if "behind" in res.stdout or force:
- print(f"{r.name} will be compiled")
- s = r_dir / "taler-buildstamp"
- if s.exists():
- s.unlink()
-
-# Projects without a build timestamp are considered stale, as are
-# projects with a dependency that was built more recently than they were.
-def get_stale_repos(repos: List[Repo]) -> List[Repo]:
- timestamps = {}
- stale = []
- for r in repos:
- r_dir = TALER_ROOT_DIR / "sources" / r.name
- s = r_dir / "taler-buildstamp"
- if not s.exists():
- timestamps[r.name] = time.time()
- stale.append(r)
- continue
- ts = timestamps[r.name] = s.stat().st_mtime
- for dep in r.deps:
- # When 'dep' is not found, it has been
- # excluded from the compilation.
- if timestamps.get(dep, 0) > ts:
- stale.append(r)
- break
- return stale
-
-@cli.command()
-@click.option(
- "--without-repos", metavar="WITHOUT REPOS",
- help="WITHOUT REPOS is a unspaced and comma-separated list \
-of the repositories to _exclude_ from compilation",
- default="")
-@click.option(
- "--only-repos", metavar="ONLY REPOS",
- help="ONLY REPOS is a unspaced and comma-separated exclusive list \
-of the repositories to include in the compilation",
- default="")
-@click.option(
- "--dry/--no-dry", default=False,
- help="Only getting changes, without actual build."
-)
-@click.option(
- "--with-envcfg", metavar="PATH",
- help="python file pinning each codebase version.",
-)
-# Normally, we don't rebuild dependent projects when one of
-# their dependencies changed; this lets us check whether
-# supposedly non-breaking changes really are so. This option
-# overrides that policy and rebuilds all the codebases.
-@click.option(
- "--force/--no-force", default=False,
- help="build all the projects.",
-)
-def build(without_repos, only_repos, dry, with_envcfg, force) -> None:
- """Build the deployment from source."""
- if only_repos != "" and without_repos != "":
- print("Either use --only-repos or --without-repos")
- exit(1)
- repos_names = get_repos_names()
- if only_repos != "":
- repos_names = list(filter(
- lambda x: x in split_repos_list(only_repos),
- repos_names
- ))
- if without_repos != "":
- repos_names = list(filter(
- lambda x: x not in split_repos_list(without_repos),
- repos_names
- ))
- if with_envcfg:
- target_repos = load_repos_with_envcfg(with_envcfg)
- else:
- target_repos = load_repos(repos_names)
- # enforce version here.
- sources = TALER_ROOT_DIR / "sources"
- for r in target_repos:
- subprocess.run(
- ["git", "-C", str(sources / r.name),
- "checkout", "-q", "-f",
- r.version, "--"], check=True
- )
- update_repos(target_repos, force)
- stale = get_stale_repos(target_repos)
- print(f"found stale repos: {[r.name for r in stale]}")
- for r in stale:
- # Inform if a dependency is not being built:
- diff = set(r.deps) - set(repos_names)
- if len(diff) > 0:
- print(f"Info: those dependencies are not being built: {diff}")
- p = TALER_ROOT_DIR / "sources" / r.name
- os.chdir(str(p))
- if dry:
- print("dry running")
- continue
- r.builder(r, p)
-
-# Only git-clone the codebases. The 'build' step
-# will run all the update logic. At this point, an
-# 'env' file - as well as the --repos option - only
-# expresses which codebases are to be cloned.
-@cli.command()
-@click.option(
- "--repos", "-r",
- metavar="REPOS",
- help="REPOS is a unspaced and comma-separated list of the repositories to clone.",
- default="libmicrohttpd,gnunet,exchange,merchant,wallet-core,taler-merchant-demos,sync,libeufin",
- show_default=True,
-)
-@click.option(
- "--without-repos",
- metavar="REPOS",
- help="REPOS is a unspaced and comma-separated list of the repositories NOT to clone."
-)
-@click.option(
- "--list-repos/--no-list-repos", default=False,
- help="Lists the repositories that were bootstrapped.",
-)
-@click.option(
- "--with-envcfg", metavar="PATH",
- help="python file pinning each codebase version.",
-)
-@click.option(
- "--dry/--no-dry", default=False,
- help="Print steps, without downloading any repository.",
-)
-def bootstrap(list_repos, repos, with_envcfg, dry, without_repos) -> None:
- """Clone all the specified repositories."""
- # Only report _which_ repos were bootstrapped. No further action.
- if list_repos:
- for repo in get_repos_names():
- print(repo)
- return
-
- # Download the repositories.
- def clone_repos(repos: List[Repo]):
- if len(repos) == 0:
- print("No repositories can be checked out. Spelled correctly?")
- return
- sources = TALER_ROOT_DIR / "sources"
- for r in repos:
- print(f"Bootstrapping '{r.name}', at version '{r.version}'")
- if dry:
- print("dry running")
- continue
- r_dir = sources / r.name
- if not r_dir.exists():
- r_dir.mkdir(parents=True, exist_ok=True)
- subprocess.run(
- ["git", "-C", str(sources),
- "clone", r.url], check=True
- )
- subprocess.run(
- ["git", "-C", str(r_dir),
- "checkout", "-q", "-f",
- r.version, "--"], check=True
- )
-
- # Get list of to-be-cloned repos from the 'env' file.
- if with_envcfg:
- # 'with_envcfg' is a path to a "envcfg.py" file.
- preparedRepos = load_repos_with_envcfg(with_envcfg)
- # Get list of to-be-cloned repos from the command line
- # (or its default)
- else:
- # 'repos' is here "repo1,repo2,.."
- reposList = split_repos_list(repos)
- # 'reposList' is here ["repo1", "repo2", ...]
- preparedRepos = load_repos(reposList)
- if without_repos:
- for exclude_repo in split_repos_list(without_repos):
- preparedRepos = [el for el in preparedRepos if el.name != exclude_repo]
- clone_repos(preparedRepos)
-
-# Globals shared across multiple sub-commands:
-# needed to configure and launch the reverse proxy.
-UNIX_SOCKETS_DIR = TALER_ROOT_DIR / "sockets"
-LOG_DIR = TALER_ROOT_DIR / "logs"
-# needed to create the customer's bank account and
-# to let them subsequently withdraw via the Access API.
-CUSTOMER_BANK_ACCOUNT = "sandbox-account-customer"
-CUSTOMER_BANK_PASSWORD = "secret"
-# needed during preparation and later to withdraw via
-# the Access API.
-CURRENCY = "KUDOS"
-
-@cli.command()
-@click.option(
- "--postgres-db-name", metavar="DBNAME",
- help="Set postgres database name for all the services.",
- default="talerdemo"
-)
-def prepare(postgres_db_name):
- """Generate configuration, run-time blobs, instances, euFin accounts."""
-
- def is_serving(check_url, tries=10):
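- # Poll check_url up to 'tries' times, sleeping 0.5s between attempts; any
- # exception (connection failure or error status) counts as "not serving yet".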
- for i in range(tries):
- try:
- print_nn(".")
- # Raises if the service is not reachable.
- response = requests.get(
- check_url,
- timeout=1
- )
- # The reverse proxy may return 500 if the
- # end service is not ready, therefore this
- # case should be tolerated.
- response.raise_for_status()
- except Exception:
- time.sleep(0.5)
- if i == tries - 1:
- return False
- continue
- break
- return True
-
- def fail(reason=None):
- if reason:
- print("ERROR:", reason)
- exit(1)
-
- def kill(proc):
- proc.terminate()
- proc.wait()
-
- def get_nexus_cli_env(
- username,
- password,
- nexus_url
- ):
- env = os.environ.copy()
- env["LIBEUFIN_NEXUS_USERNAME"] = username
- env["LIBEUFIN_NEXUS_PASSWORD"] = password
- env["LIBEUFIN_NEXUS_URL"] = nexus_url
- return env
-
- def get_sandbox_cli_env(
- username, password
- ):
- env = os.environ.copy()
- env["LIBEUFIN_SANDBOX_USERNAME"] = username
- env["LIBEUFIN_SANDBOX_PASSWORD"] = password
- return env
-
- # Will be extended to include a SANDBOX_ADMIN_TOKEN
- # that will obsolete the 'superuser' flag of ordinary
- # user accounts. Likewise, the client side will be
- # modified to use such token.
- def get_sandbox_server_env(db_file, base_url, admin_password):
- env = os.environ.copy()
- env["LIBEUFIN_SANDBOX_DB_CONNECTION"] = f"jdbc:sqlite:{db_file}"
- env["LIBEUFIN_SANDBOX_BASE_URL"] = base_url
- env["LIBEUFIN_SANDBOX_ADMIN_PASSWORD"] = admin_password
- return env
-
- def get_nexus_server_env(db_file, base_url):
- env = os.environ.copy()
- env["LIBEUFIN_NEXUS_DB_CONNECTION"] = f"jdbc:sqlite:{db_file}"
- env["LIBEUFIN_NEXUS_BASE_URL"] = base_url
- return env
-
- def urljoin_nodrop(a, b):
- a = a + "/" # urljoin will drop extra trailing slashes.
- b = "/".join([x for x in b.split("/") if x != ""]) # remove leading slashes.
- return urljoin(a, b)
-
- def prepare_nexus_account(
- ebics_url,
- ebics_host_id,
- ebics_partner_id,
- ebics_user_id,
- bank_connection_name,
- bank_account_name_sandbox,
- bank_account_name_nexus,
- env
- ):
- # make connection
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "new-ebics-connection",
- "--ebics-url", ebics_url,
- "--host-id", ebics_host_id,
- "--partner-id", ebics_partner_id,
- "--ebics-user-id", ebics_user_id,
- bank_connection_name
- ],
- env
- ).run()
- # connect
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "connect", bank_connection_name
- ],
- env
- ).run()
- # Import bank account
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "download-bank-accounts",
- bank_connection_name
- ],
- env
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "import-bank-account",
- "--offered-account-id",
- bank_account_name_sandbox,
- "--nexus-bank-account-id",
- bank_account_name_nexus,
- bank_connection_name
- ],
- env
- ).run()
- # Set background tasks.
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "accounts",
- "task-schedule", bank_account_name_nexus,
- "--task-type", "submit",
- "--task-name", "submit-payments-each-second",
- "--task-cronspec", "* * *"
- ],
- env
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "accounts",
- "task-schedule", bank_account_name_nexus,
- "--task-type", "fetch",
- "--task-name", "fetch-reports-each-second",
- "--task-cronspec", "* * *",
- "--task-param-level", "report",
- "--task-param-range-type", "latest"
- ],
- env
- ).run()
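- # The three-field cronspec "* * *" appears to fire every second,
- # matching the "each-second" task names above.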
-
- def get_sandbox_account_info(
- sandbox_url,
- bank_account_label,
- password,
- ):
- customer_env = os.environ.copy()
- customer_env["LIBEUFIN_SANDBOX_USERNAME"] = bank_account_label
- customer_env["LIBEUFIN_SANDBOX_PASSWORD"] = password
- demobank_url = urljoin_nodrop(sandbox_url, "/demobanks/default")
- r = Command([
- f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
- "--sandbox-url", demobank_url,
- "demobank", "info",
- "--bank-account", bank_account_label],
- env = customer_env,
- capture_stdout=True
- ).run()
- return json.loads(r)
-
- def prepare_sandbox_account(
- sandbox_url,
- ebics_host_id,
- ebics_partner_id,
- ebics_user_id,
- person_name,
- # This value is BOTH a username
- # and a bank account label.
- bank_account_name,
- password,
- is_public=False
- ):
- demobank_url = urljoin_nodrop(sandbox_url, "/demobanks/default")
- user_env = os.environ.copy()
- user_env["LIBEUFIN_SANDBOX_USERNAME"] = bank_account_name
- user_env["LIBEUFIN_SANDBOX_PASSWORD"] = password
- register_cmd = [
- f"{TALER_PREFIX}/bin/libeufin-cli",
- "sandbox", "--sandbox-url", demobank_url,
- "demobank", "register"
- ]
- if is_public:
- register_cmd.append("--public")
- Command(register_cmd, env = user_env).run()
- admin_env = os.environ.copy()
- admin_env["LIBEUFIN_SANDBOX_USERNAME"] = SANDBOX_ADMIN_USERNAME
- admin_env["LIBEUFIN_SANDBOX_PASSWORD"] = SANDBOX_ADMIN_PASSWORD
- Command([
- f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
- "--sandbox-url", demobank_url,
- "demobank", "new-ebicssubscriber",
- "--host-id", ebics_host_id,
- "--partner-id", ebics_partner_id,
- "--user-id", ebics_user_id,
- "--bank-account", bank_account_name
- ],
- env = admin_env
- ).run()
-
-
- WIRE_METHOD = "iban"
- # euFin URLs
- SANDBOX_URL = "https://bank.demo.taler.net/"
- NEXUS_URL = "https://nexus.demo.taler.net/"
-
- # Filesystem's paths
- CFG_OUTDIR = TALER_ROOT_DIR / "config"
- TALER_RUNTIME_DIR = TALER_ROOT_DIR / "runtime"
- TALER_DATA_DIR = TALER_ROOT_DIR / "data"
- TALER_UNIT_FILES_DIR = systemd_user_dir = Path.home() / ".config" / "systemd" / "user"
-
- def create_tip_reserve():
- payto = Command([
- f"{TALER_PREFIX}/bin/taler-merchant-setup-reserve",
- "--amount", f"{CURRENCY}:20",
- "--exchange-url", "https://exchange.demo.taler.net/",
- "--merchant-url", "https://backend.demo.taler.net/instances/survey/",
- "--apikey", f"Bearer {FRONTENDS_API_TOKEN}",
- "--wire-method", WIRE_METHOD],
- capture_stdout=True
- ).run()
-
- Command([
- f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox", "--sandbox-url",
- SANDBOX_URL + "/demobanks/default/", "demobank",
- "new-transaction", "--bank-account", "sandbox-account-survey",
- "--payto-with-subject", payto, "--amount", "20"],
- env = get_sandbox_cli_env(
- username = "sandbox-account-survey",
- password = ALL_INSTANCES_BANK_PASSWORD
- )).run()
-
- def get_random_iban():
- cc_no_check = 131400 # is "DE00"
- bban = "".join(random.choices("0123456789", k=4))
- check_digits = 98 - (int(f"{bban}{cc_no_check}") % 97)
- return "DE" + (f"0{check_digits}"[-2:]) + bban
-
- # IBANs
-
- IBAN_MERCHANT_DEFAULT = get_random_iban()
- IBAN_MERCHANT_DEMOSHOP = get_random_iban()
-
- # Instances
- INSTANCES = [
- dict(name="GNUnet", isPublic=True),
- dict(name="Taler", isPublic=True),
- dict(name="Tor", isPublic=True),
- dict(name="survey"),
- dict(name="blog"),
- ]
-
- # Credentials / API keys
- EXCHANGE_NEXUS_USERNAME = "exchange-nexus-user"
- EXCHANGE_NEXUS_PASSWORD = "exchange-nexus-password"
- FRONTENDS_API_TOKEN = "secret-token:secret"
- TALER_MERCHANT_TOKEN = "secret-token:secret"
- ALL_INSTANCES_BANK_PASSWORD = "secret"
- EXCHANGE_BANK_ACCOUNT_SANDBOX = "sandbox-account-exchange"
- EXCHANGE_BANK_ACCOUNT_PASSWORD = "secret"
-
- # EBICS
- EBICS_HOST_ID = "ebicsDeployedHost"
- EXCHANGE_EBICS_USER_ID = "exchangeEbicsUserId"
- EXCHANGE_EBICS_PARTNER_ID = "exchangeEbicsPartnerId"
- EBICS_URL = "https://bank.demo.taler.net/ebicsweb"
-
- # euFin
- EXCHANGE_BANK_ACCOUNT_NEXUS = "exchange-imported-account-nexus"
- EXCHANGE_BANK_CONNECTION = "exchange-ebics-connection"
- NEXUS_DB_FILE = TALER_ROOT_DIR / "nexus.sqlite3"
- SANDBOX_DB_FILE = TALER_ROOT_DIR / "sandbox.sqlite3"
- EXCHANGE_FACADE_NAME = "exchange-taler-facade"
- SANDBOX_ADMIN_USERNAME = "admin"
- SANDBOX_ADMIN_PASSWORD = "secret"
-
- class Command:
- def __init__(
- self, cmd, env=os.environ, log_dir=LOG_DIR,
- custom_name=None, capture_stdout=False
- ):
- if len(cmd) == 0:
- fail("Command to execute was given empty.")
- self.name = custom_name if custom_name else basename(cmd[0])
- self.cmd = cmd
- self.capture_stdout = capture_stdout
- self.log_dir = log_dir
- self.env = env
-
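- # run() executes the command, waits for completion, and either fails
- # hard (pointing at <log_dir>/<name>.log) or returns the captured stdout.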
- def run(self):
- self.do()
- return_code = self.handle.wait()
- self.cleanup() # Mainly closes the log file.
- if return_code != 0:
- fail(f"Command {self.name} failed. Logs in {self.log_dir}")
- if self.capture_stdout:
- return self.handle.communicate()[0].decode("utf-8").rstrip()
-
- def get_log_filename(self):
- return self.log_file.name
-
- def cleanup(self):
- self.log_file.flush()
- self.log_file.close()
-
- def do(self):
- if not self.log_dir.is_dir():
- os.makedirs(self.log_dir)
- try:
- log_filename = self.log_dir / f"{self.name}.log"
- self.log_file = open(log_filename, "a+")
- except Exception as error:
- fail(f"Could not open log file: {log_filename}: {error}")
- try:
- self.handle = Popen(
- self.cmd, # list
- stdin=DEVNULL,
- stdout=self.log_file if not self.capture_stdout else PIPE,
- stderr=self.log_file,
- env=self.env
- )
- except Exception as error:
- fail(f"Could not execute: {' '.join(self.cmd)}: {error}")
-
- class ConfigFile:
- def __init__(self, filename):
- self.sections = OrderedDict()
- self.filename = filename
-
- def destroy(self):
- del self.sections
- self.sections = OrderedDict()
-
- def cfg_put(self, section_name, key, value):
- s = self.sections[section_name] = self.sections.get(section_name, OrderedDict())
- s[key] = value
-
- def cfg_write(self, outdir):
- if outdir:
- if not os.path.isdir(outdir):
- os.makedirs(outdir)
- fstream = open(os.path.join(outdir, self.filename), "w")
- else:
- fstream = sys.stdout
-
- for section_name, section in self.sections.items():
- fstream.write("[" + section_name + "]" + "\n")
- for key, value in section.items():
- fstream.write(key + " = " + value + "\n")
- fstream.write("\n")
- if outdir:
- fstream.close()
-
- def config_specify_master_pub(
- filename,
- currency,
- exchange_master_pub
- ):
- Command([
- f"{TALER_PREFIX}/bin/taler-config", "-F", "-c", filename,
- "-s", "exchange", "-o", "master_public_key",
- "-V", exchange_master_pub
- ]).run()
- Command([
- f"{TALER_PREFIX}/bin/taler-config", "-F", "-c", filename,
- "-s", f"merchant-exchange-{currency}",
- "-o", "master_key",
- "-V", exchange_master_pub
- ]).run()
-
- # When called, there is no exchange master pub yet.
- # taler-exchange-offline will produce the key _after_
- # taler.conf is generated. Only after that will we
- # specify the master key where it is missing, namely
- # in the merchant backend and exchange HTTP daemon sections.
-
- def config_main(
- filename,
- outdir,
- unix_sockets_dir,
- currency,
- wire_method,
- exchange_wire_gateway_username,
- exchange_wire_gateway_password,
- frontend_api_key,
- taler_runtime_dir,
- postgres_db_name
- ):
- def coin(
- obj,
- currency,
- name,
- value,
- d_withdraw="3 years",
- d_spend="5 years",
- d_legal="10 years",
- f_withdraw="0.01",
- f_deposit="0.01",
- f_refresh="0.01",
- f_refund="0.01",
- rsa_keysize="2048",
- ):
- sec = "coin_" + currency + "_" + name + "_age_restricted"
- obj.cfg_put(sec, "cipher", "RSA")
- obj.cfg_put(sec, "value", currency + ":" + value)
- obj.cfg_put(sec, "duration_withdraw", d_withdraw)
- obj.cfg_put(sec, "duration_spend", d_spend)
- obj.cfg_put(sec, "duration_legal", d_legal)
- obj.cfg_put(sec, "fee_withdraw", currency + ":" + f_withdraw)
- obj.cfg_put(sec, "fee_refresh", currency + ":" + f_refresh)
- obj.cfg_put(sec, "fee_refund", currency + ":" + f_refund)
- obj.cfg_put(sec, "fee_deposit", currency + ":" + f_deposit)
- obj.cfg_put(sec, "rsa_keysize", rsa_keysize)
- obj.cfg_put(sec, "age_restricted", "YES")
-
- obj = ConfigFile("taler.conf")
- obj.cfg_put("paths", "TALER_DATA_HOME", str(TALER_DATA_DIR))
- if not taler_runtime_dir.is_dir():
- os.makedirs(taler_runtime_dir)
- obj.cfg_put("paths", "TALER_RUNTIME_DIR", str(taler_runtime_dir))
- obj.cfg_put("taler", "CURRENCY", currency)
- obj.cfg_put("taler", "CURRENCY_ROUND_UNIT", f"{currency}:0.01")
-
- obj.cfg_put("kyc-provider-example-persona", "COST", "42");
- obj.cfg_put("kyc-provider-example-persona", "LOGIC", "persona");
- obj.cfg_put("kyc-provider-example-persona", "USER_TYPE", "INDIVIDUAL");
- obj.cfg_put("kyc-provider-example-persona", "PROVIDED_CHECKS", "DUMMY");
- obj.cfg_put("kyc-provider-example-persona", "PERSONA_VALIDITY", "forever");
- obj.cfg_put("kyc-provider-example-persona", "PERSONA_SUBDOMAIN", "taler");
- obj.cfg_put("kyc-provider-example-persona", "PERSONA_AUTH_TOKEN", "persona_sandbox_b1c70e49-b333-4f3c-b356-f0ed05029241");
- obj.cfg_put("kyc-provider-example-persona", "PERSONA_TEMPLATE_ID", "itmpl_Uj6X5J3GPT9kbuAZTLg7AUMx");
- obj.cfg_put("kyc-provider-example-persona", "KYC_POST_URL", "https://demo.taler.net/");
-
- obj.cfg_put("kyc-legitimization-balance-high", "OPERATION_TYPE", "BALANCE");
- obj.cfg_put("kyc-legitimization-balance-high", "REQUIRED_CHECKS", "DUMMY");
- obj.cfg_put("kyc-legitimization-balance-high", "THRESHOLD", f"{currency}:30");
- obj.cfg_put("kyc-legitimization-deposit-any", "OPERATION_TYPE", "DEPOSIT");
- obj.cfg_put("kyc-legitimization-deposit-any", "REQUIRED_CHECKS", "DUMMY");
- obj.cfg_put("kyc-legitimization-deposit-any", "THRESHOLD", f"{currency}:15");
- obj.cfg_put("kyc-legitimization-deposit-any", "TIMEFRAME", "1d");
- obj.cfg_put("kyc-legitimization-withdraw", "OPERATION_TYPE", "WITHDRAW");
- obj.cfg_put("kyc-legitimization-withdraw", "REQUIRED_CHECKS", "DUMMY");
- obj.cfg_put("kyc-legitimization-withdraw", "THRESHOLD", f"{currency}:15");
- obj.cfg_put("kyc-legitimization-withdraw", "TIMEFRAME", "1d");
- obj.cfg_put("kyc-legitimization-merge", "OPERATION_TYPE", "MERGE");
- obj.cfg_put("kyc-legitimization-merge", "REQUIRED_CHECKS", "DUMMY");
- obj.cfg_put("kyc-legitimization-merge", "THRESHOLD", f"{currency}:15");
- obj.cfg_put("kyc-legitimization-merge", "TIMEFRAME", "1d");
- obj.cfg_put("exchange-extension-age_restriction", "ENABLED", "YES");
- obj.cfg_put("exchange-extension-age_restriction", "AGE_GROUPS", "8:10:12:14:16:18");
-
-
- obj.cfg_put("donations", "serve", "http")
- obj.cfg_put("donations", "http_serve", "unix")
- obj.cfg_put("donations", "http_unixpath", str(unix_sockets_dir / "donations.http"))
- obj.cfg_put("donations", "http_unixpath_mode", "660")
-
- obj.cfg_put("landing", "serve", "http")
- obj.cfg_put("landing", "http_serve", "unix")
- obj.cfg_put("landing", "http_unixpath", str(unix_sockets_dir / "landing.http"))
- obj.cfg_put("landing", "http_unixpath_mode", "660")
-
- obj.cfg_put("blog", "serve", "http")
- obj.cfg_put("blog", "http_serve", "unix")
- obj.cfg_put("blog", "http_unixpath", str(unix_sockets_dir / "blog.http"))
- obj.cfg_put("blog", "http_unixpath_mode", "660")
-
- obj.cfg_put("survey", "serve", "http")
- obj.cfg_put("survey", "http_serve", "unix")
- obj.cfg_put("survey", "http_unixpath", str(unix_sockets_dir / "survey.http"))
- obj.cfg_put("survey", "http_unixpath_mode", "660")
- obj.cfg_put("survey", "bank_password", "x")
-
- obj.cfg_put("merchant", "serve", "unix")
- obj.cfg_put("merchant", "unixpath", str(unix_sockets_dir / "merchant.http"))
- obj.cfg_put("merchant", "wire_transfer_delay", "0 s")
- obj.cfg_put("merchant", "default_max_wire_fee", currency + ":" + "0.01")
- obj.cfg_put("merchant", "default_max_deposit_fee", currency + ":" + "0.05")
- obj.cfg_put("merchantdb-postgres", "config", f"postgres:///{postgres_db_name}")
-
- obj.cfg_put("frontends", "backend", "https://backend.demo.taler.net/")
- obj.cfg_put(
- "merchant-exchange-{}".format(currency),
- "exchange_base_url", "https://exchange.demo.taler.net/",
- )
- obj.cfg_put(
- "merchant-exchange-{}".format(currency),
- "currency", currency
- )
- obj.cfg_put("auditor", "serve", "unix")
- # FIXME: both below used?
- obj.cfg_put("auditor", "base_url", "https://auditor.demo.taler.net/")
- obj.cfg_put("auditor", "auditor_url", "https://auditor.demo.taler.net/")
- obj.cfg_put("auditor", "unixpath", str(unix_sockets_dir / "auditor.http"))
- obj.cfg_put("auditor", "tiny_amount", currency + ":0.01")
-
- obj.cfg_put(
- "taler-exchange-secmod-eddsa",
- "unixpath",
- str(unix_sockets_dir / "exchange-secmod-eddsa.http")
- )
- obj.cfg_put(
- "taler-exchange-secmod-cs",
- "unixpath",
- str(unix_sockets_dir / "exchange-secmod-cs.http")
- )
- obj.cfg_put("taler-exchange-secmod-cs", "sm_priv_key",
- "${TALER_DATA_HOME}/taler-exchange-secmod-cs/secmod-private-key"
- )
- obj.cfg_put(
- "taler-exchange-secmod-rsa",
- "unixpath",
- str(unix_sockets_dir / "exchange-secmod-rsa.http")
- )
- obj.cfg_put("taler-exchange-secmod-rsa", "sm_priv_key",
- "${TALER_DATA_HOME}/taler-exchange-secmod-rsa/secmod-private-key"
- )
- obj.cfg_put("exchange", "base_url", "https://exchange.demo.taler.net/")
- obj.cfg_put("exchange", "serve", "unix")
- obj.cfg_put("exchange", "unixpath", str(unix_sockets_dir / "exchange.http"))
- obj.cfg_put("exchange", "terms_etag", "tos")
- obj.cfg_put("exchange", "terms_dir", f"{TALER_PREFIX}/share/taler-exchange/tos")
- obj.cfg_put("exchange", "privacy_etag", "0")
- obj.cfg_put("exchange", "privacy_dir", f"{TALER_PREFIX}/share/taler-exchange/pp")
- obj.cfg_put("exchangedb-postgres", "config", f"postgres:///{postgres_db_name}")
- obj.cfg_put("auditordb-postgres", "db_conn_str", f"postgres:///{postgres_db_name}")
- obj.cfg_put("auditordb-postgres", "config", f"postgres:///{postgres_db_name}")
- obj.cfg_put("exchange-account-1", "enable_debit", "yes")
- obj.cfg_put("exchange-account-1", "enable_credit", "yes")
- obj.cfg_put("merchant-account-merchant",
- "wire_response",
- "${TALER_DATA_HOME}/merchant/wire/merchant.json",
- )
- obj.cfg_put("merchant-account-merchant", "wire_file_mode", "770")
- obj.cfg_put("frontends", "backend_apikey", frontend_api_key)
- coin(obj, currency, "ct_10", "0.10")
- coin(obj, currency, "1", "1")
- coin(obj, currency, "2", "2")
- coin(obj, currency, "5", "5")
- coin(obj, currency, "10", "10")
- coin(obj, currency, "1000", "1000")
- obj.cfg_write(outdir)
- return obj
-
- def config_sync(
- filename, outdir,
- unix_sockets_dir,
- currency, api_key,
- postgres_db_name
- ):
- obj = ConfigFile(filename)
- obj.cfg_put("taler", "currency", currency)
- obj.cfg_put("sync", "serve", "unix")
- obj.cfg_put("sync", "unixpath", str(unix_sockets_dir / "sync.http"))
- obj.cfg_put("sync", "api_key", f"Bearer {api_key}")
- obj.cfg_put("sync", "annual_fee", f"{currency}:0.1")
- obj.cfg_put("sync", "fulfillment_url", "taler://fulfillment-success/")
- obj.cfg_put(
- "sync",
- "payment_backend_url",
- "https://backend.demo.taler.net/instances/Taler/"
- )
- obj.cfg_put("syncdb-postgres", "config", f"postgres:///{postgres_db_name}")
- obj.cfg_write(outdir)
-
- def unit_file_content(description, cmd, env=None):
- executable_name = cmd.split(" ")[0].split("/")[-1]
- content = (
- "[Unit]\n"
- f"Description={description}\n"
- "[Service]\n"
- f"ExecStart={cmd}\n"
- f"StandardOutput=append:{LOG_DIR / executable_name}.log\n"
- f"StandardError=append:{LOG_DIR / executable_name}.log\n"
- )
- if env:
- content += f"\nEnvironmentFile={env}\n"
- return content
-
- if UNIX_SOCKETS_DIR.is_dir():
- for left_socket in os.listdir(UNIX_SOCKETS_DIR):
- s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- socket_file = str(UNIX_SOCKETS_DIR / left_socket)
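- # connect_ex() returns 0 only if something is still listening on this socket.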
- if s.connect_ex(socket_file.encode("utf-8")) == 0:
- fail(f"A service is unexpectedly running and bound to {socket_file}!")
-
- print_nn("Remove stale data and config...")
- if TALER_DATA_DIR.exists():
- shutil.rmtree(TALER_DATA_DIR)
- if TALER_RUNTIME_DIR.exists():
- shutil.rmtree(TALER_RUNTIME_DIR)
- if CFG_OUTDIR.exists():
- shutil.rmtree(CFG_OUTDIR)
- print(" OK")
-
- print_nn("Generate preliminary taler.conf...")
- mc = config_main(
- "taler.conf",
- outdir=CFG_OUTDIR,
- unix_sockets_dir=UNIX_SOCKETS_DIR,
- currency=CURRENCY,
- wire_method=WIRE_METHOD,
- exchange_wire_gateway_username=EXCHANGE_NEXUS_USERNAME,
- exchange_wire_gateway_password=EXCHANGE_NEXUS_PASSWORD,
- frontend_api_key=FRONTENDS_API_TOKEN,
- taler_runtime_dir=TALER_RUNTIME_DIR,
- postgres_db_name=postgres_db_name
- )
- print(" OK")
-
- print_nn("Installing SystemD unit files...")
- if not systemd_user_dir.exists():
- systemd_user_dir.mkdir(parents=True, exist_ok=True)
-
- if not TALER_UNIT_FILES_DIR.exists():
- TALER_UNIT_FILES_DIR.mkdir(parents=True, exist_ok=True)
-
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-httpd.service", "w") as exchange_unit:
- exchange_unit.write(unit_file_content(
- description = "Taler Exchange HTTP daemon",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-httpd -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-wirewatch.service", "w") as exchange_wirewatch_unit:
- exchange_wirewatch_unit.write(unit_file_content(
- description = "Taler Exchange Wirewatch",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-wirewatch -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-aggregator.service", "w") as exchange_aggregator_unit:
- exchange_aggregator_unit.write(unit_file_content(
- description = "Taler Exchange Aggregator",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-aggregator --kyc-off -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-transfer.service", "w") as exchange_transfer_unit:
- exchange_transfer_unit.write(unit_file_content(
- description = "Taler Exchange Transfer",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-transfer -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-cs.service", "w") as exchange_cs_unit:
- exchange_cs_unit.write(unit_file_content(
- description = "Taler Exchange CS security module",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-cs -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
- ))
-
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-rsa.service", "w") as exchange_rsa_unit:
- exchange_rsa_unit.write(unit_file_content(
- description = "Taler Exchange RSA security module",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-rsa -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-eddsa.service", "w") as exchange_eddsa_unit:
- exchange_eddsa_unit.write(unit_file_content(
- description = "Taler Exchange EDDSA security module",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-eddsa -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-merchant-backend.service", "w") as merchant_unit:
- merchant_unit.write(unit_file_content(
- description = "Taler Merchant backend",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-httpd -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-sync.service", "w") as sync_unit:
- sync_unit.write(unit_file_content(
- description = "Taler Sync",
- cmd = f"{TALER_PREFIX}/bin/sync-httpd -L DEBUG -c {CFG_OUTDIR / 'sync.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-merchant-backend-token.service", "w") as merchant_token_unit:
- merchant_token_unit.write(unit_file_content(
- description = "Taler Merchant backend with auth token to allow default instance creation.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-httpd -a {TALER_MERCHANT_TOKEN} -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- # Custom Postgres connection.
- if os.environ.get("PGPORT"):
- with open(TALER_UNIT_FILES_DIR / "taler-local-postgres.env", "w") as postgres_env:
- postgres_env.write(f"PGPORT={os.environ.get('PGPORT')}")
-
- # FIXME/REMINDER: libEufin needs to enable writes for the
- # domain socket's group, otherwise Nginx won't be able to
- # proxy_pass requests.
-
- # euFin unit files.
- with open(TALER_UNIT_FILES_DIR / "taler-local-sandbox.service", "w") as sandbox_unit:
- sandbox_unit.write(unit_file_content(
- description = "euFin Sandbox",
- cmd = f"{TALER_PREFIX}/bin/libeufin-sandbox serve", # takes port 5000
- # cmd = f"{TALER_PREFIX}/bin/libeufin-sandbox serve --with-unix-socket {UNIX_SOCKETS_DIR / 'bank.http'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-sandbox.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-nexus.service", "w") as nexus_unit:
- nexus_unit.write(unit_file_content(
- description = "euFin Nexus",
- cmd = f"{TALER_PREFIX}/bin/libeufin-nexus serve", # takes port 5001
- # cmd = f"{TALER_PREFIX}/bin/libeufin-nexus serve --with-unix-socket {UNIX_SOCKETS_DIR / 'nexus.http'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-nexus.env"
- ))
- # euFin env files.
- with open(TALER_UNIT_FILES_DIR / "taler-local-sandbox.env", "w") as sandbox_env:
- sandbox_env.write(f"LIBEUFIN_SANDBOX_DB_CONNECTION=jdbc:sqlite:{SANDBOX_DB_FILE}\n")
- sandbox_env.write(f"LIBEUFIN_SANDBOX_ADMIN_PASSWORD={SANDBOX_ADMIN_PASSWORD}\n")
- # The following populates the bank UI navigation bar.
- sandbox_env.write(f"TALER_ENV_URL_INTRO=https://demo.taler.net/\n")
- sandbox_env.write(f"TALER_ENV_URL_BANK=https://bank.demo.taler.net/\n")
- sandbox_env.write(f"TALER_ENV_URL_MERCHANT_BLOG=https://shop.demo.taler.net/\n")
- sandbox_env.write(f"TALER_ENV_URL_MERCHANT_DONATIONS=https://donations.demo.taler.net/\n")
- sandbox_env.write(f"TALER_ENV_URL_MERCHANT_SURVEY=https://survey.demo.taler.net/\n")
-
- with open(TALER_UNIT_FILES_DIR / "taler-local-nexus.env", "w") as nexus_env:
- nexus_env.write(f"LIBEUFIN_NEXUS_DB_CONNECTION=jdbc:sqlite:{NEXUS_DB_FILE}\n")
- # FIXME: demo sites ignore the '-c' flag now. They access ~/.config/taler.conf
- # which is a symlink to ~/config/taler.conf
- with open(TALER_UNIT_FILES_DIR / "taler-local-donations.service", "w") as donations_unit:
- donations_unit.write(unit_file_content(
- description = "Donation Website that accepts Taler payments.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos donations",
- # cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos donations -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-blog.service", "w") as blog_unit:
- blog_unit.write(unit_file_content(
- description = "Blog that accepts Taler payments.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos blog",
- # cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos blog -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-survey.service", "w") as survey_unit:
- survey_unit.write(unit_file_content(
- description = "Survey Website awarding tips via Taler.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos survey",
- # cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos survey -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-landing.service", "w") as landing_unit:
- landing_unit.write(unit_file_content(
- description = "Landing Website of Taler demo.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos landing",
- # cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos landing -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-frontends.env", "w") as frontends_env:
- # To populate the navigation bar:
- frontends_env.write((
- f"PYTHONUSERBASE={TALER_PREFIX}\n"
- f"PATH={os.environ.get('PATH')}\n"
- f"TALER_CONFIG_FILE={CFG_OUTDIR / 'taler.conf'}\n"
- f"TALER_ENV_URL_INTRO=https://demo.taler.net/\n"
- f"TALER_ENV_URL_BANK=https://bank.demo.taler.net/\n"
- f"TALER_ENV_URL_MERCHANT_BLOG=https://shop.demo.taler.net/\n"
- f"TALER_ENV_URL_MERCHANT_DONATIONS=https://donations.demo.taler.net/\n"
- f"TALER_ENV_URL_MERCHANT_SURVEY=https://survey.demo.taler.net/\n"
- ))
- print(" OK")
- print_nn("Reload SystemD...")
- Command(["systemctl", "--user", "daemon-reload"]).run()
- atexit.register(lambda: subprocess.run(
- ["systemctl", "--user", "stop", "taler-local-*.service"],
- check=True
- )
- )
- print(" OK")
- print_nn("Generate exchange's master key...")
- EXCHANGE_MASTER_PUB = Command(
- [
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "setup"
- ],
- capture_stdout=True
- ).run()
- print(" OK")
- print_nn("Specify exchange master pub in taler.conf...")
- config_specify_master_pub(
- CFG_OUTDIR / "taler.conf",
- CURRENCY,
- EXCHANGE_MASTER_PUB
- )
- print(" OK")
- print_nn("Generating sync.conf...")
- config_sync(
- "sync.conf",
- outdir=CFG_OUTDIR,
- unix_sockets_dir=UNIX_SOCKETS_DIR,
- currency=CURRENCY,
- api_key=FRONTENDS_API_TOKEN,
- postgres_db_name=postgres_db_name
- )
- print(" OK")
- print_nn("Reset and init exchange DB..")
- Command([
- f"{TALER_PREFIX}/bin/taler-exchange-dbinit",
- "-c", CFG_OUTDIR / "taler.conf",
- "--reset"]
- ).run()
- print(" OK")
-
- print_nn("Launching the exchange RSA helper...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-rsa.service"])
- print(" OK")
- print_nn("Launching the exchange EDDSA helper...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-eddsa.service"])
- print(" OK")
- print_nn("Launching the exchange CS helper...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-cs.service"])
- print(" OK")
- print_nn("Launching the exchange...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-httpd.service"])
- if not is_serving("https://exchange.demo.taler.net/"):
- fail(f"Exchange did not start correctly.")
- print(" OK")
- print_nn("exchange-offline: signing extensions...")
- Command([
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "extensions", "sign", "upload"
- ]).run()
- print(" OK")
- print_nn("exchange-offline: signing key material...")
- Command([
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "download", "sign", "upload"
- ]).run()
- print(" OK")
- # Set up wire fees for next 5 years
- NOW = datetime.now()
- YEAR = NOW.year
- print_nn("Setting wire fees for the next 5 years...")
- for year in range(YEAR, YEAR+5):
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "wire-fee",
- str(year),
- WIRE_METHOD,
- CURRENCY + ":0.01",
- CURRENCY + ":0.01",
- CURRENCY + ":0.01",
- "upload"
- ],
- custom_name="set-wire-fee"
- ).run()
- print(" OK")
- print_nn("Setting global fees for the next 5 years...")
- for year in range(YEAR, YEAR+5):
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "global-fee",
- str(year),
- CURRENCY + ":0.00",
- CURRENCY + ":0.00",
- CURRENCY + ":0.00",
- CURRENCY + ":0.00",
- "3000s",
- "30000s",
- "300000s",
- "0",
- "upload"
- ],
- custom_name="set-wire-fee"
- ).run()
- print(" OK")
- print_nn("Reset and init auditor DB..")
- Command([
- f"{TALER_PREFIX}/bin/taler-auditor-dbinit",
- "-c", CFG_OUTDIR / "taler.conf",
- "--reset"]
- ).run()
- print(" OK")
- print_nn("Add this exchange to the auditor...")
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-auditor-exchange",
- "-c", CFG_OUTDIR / "taler.conf",
- "-m", EXCHANGE_MASTER_PUB,
- "-u", "https://exchange.demo.taler.net/"
- ],
- ).run()
- print(" OK")
- ## Step 4: Set up euFin
- print_nn("Resetting euFin databases...")
- try:
- remove(SANDBOX_DB_FILE)
- remove(NEXUS_DB_FILE)
- except OSError as error:
- if error.errno != errno.ENOENT:
- raise error
- print(" OK")
- # Make the 'default' demobank at Sandbox (with signup bonus).
- Command([
- f"{TALER_PREFIX}/bin/libeufin-sandbox",
- "config", "--currency", CURRENCY, "--with-signup-bonus", "default"],
- env={
- "PATH": os.environ["PATH"],
- "LIBEUFIN_SANDBOX_DB_CONNECTION": f"jdbc:sqlite:{SANDBOX_DB_FILE}"
- }).run()
- print_nn("Launching Sandbox...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-sandbox.service"])
- time.sleep(3)
- print(" OK")
- print_nn("Make Sandbox EBICS host...")
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
- "--sandbox-url", SANDBOX_URL,
- "ebicshost", "create",
- "--host-id", EBICS_HOST_ID,
- ],
- env=get_sandbox_cli_env(
- SANDBOX_ADMIN_USERNAME,
- SANDBOX_ADMIN_PASSWORD,
- ),
- custom_name="sandbox-create-ebicshost",
- ).run()
- print(" OK")
-
- print_nn("Create Exchange account at Sandbox...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id=EXCHANGE_EBICS_PARTNER_ID,
- ebics_user_id=EXCHANGE_EBICS_USER_ID,
- person_name="Exchange Owner",
- bank_account_name=EXCHANGE_BANK_ACCOUNT_SANDBOX,
- password=EXCHANGE_BANK_ACCOUNT_PASSWORD
- )
- print(" OK")
- print_nn("Getting exchange payto-URI from the bank...")
- exchange_bank_account_info = get_sandbox_account_info(
- SANDBOX_URL,
- EXCHANGE_BANK_ACCOUNT_SANDBOX,
- EXCHANGE_BANK_ACCOUNT_PASSWORD
- )
- EXCHANGE_PAYTO = exchange_bank_account_info["paytoUri"]
- print(" OK")
- print_nn("Specify own payto-URI to exchange's configuration..")
- Command([
- f"{TALER_PREFIX}/bin/taler-config", "-F", "-c", CFG_OUTDIR / 'taler.conf',
- "-s", "exchange-account-1", "-o", "payto_uri", "-V",
- EXCHANGE_PAYTO
- ]).run()
- print(" OK")
- print_nn(f"exchange-offline: enabling {EXCHANGE_PAYTO}...")
- Command([
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "enable-account", EXCHANGE_PAYTO, "upload"
- ]).run()
- print(" OK")
-
- # Give each instance a Sandbox account (note: 'default'
- # won't have one, as it should typically only manage other
- # instances).
- for instance in INSTANCES:
- instance_id = instance["name"]
- print_nn(f"Create account of {instance_id} at Sandbox...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id="unusedMerchantEbicsPartnerId",
- ebics_user_id=f"unused{instance_id}EbicsUserId",
- person_name=f"Shop Owner of {instance_id}",
- bank_account_name=f"sandbox-account-{instance_id.lower()}",
- password=ALL_INSTANCES_BANK_PASSWORD,
- is_public=instance.get("isPublic")
- )
- print(" OK")
- print_nn("Create Customer account at Sandbox...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id="unusedCustomerEbicsPartnerId",
- ebics_user_id="unusedCustomerEbicsUserId",
- person_name="Customer Person",
- bank_account_name=CUSTOMER_BANK_ACCOUNT,
- password=CUSTOMER_BANK_PASSWORD
- )
- print(" OK")
- print_nn("Make Nexus superuser ...")
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-nexus", "superuser",
- EXCHANGE_NEXUS_USERNAME,
- "--password", EXCHANGE_NEXUS_PASSWORD
- ],
- env=get_nexus_server_env(
- NEXUS_DB_FILE,
- NEXUS_URL
- ),
- custom_name="nexus-superuser",
- ).run()
- print(" OK")
-
- print_nn("Launching Nexus...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-nexus.service"])
- if not is_serving("https://nexus.demo.taler.net/"):
- fail(f"Nexus did not start correctly.")
- print(" OK")
- print_nn("Create Exchange account at Nexus...")
- prepare_nexus_account(
- ebics_url=EBICS_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id=EXCHANGE_EBICS_PARTNER_ID,
- ebics_user_id=EXCHANGE_EBICS_USER_ID,
- bank_connection_name=EXCHANGE_BANK_CONNECTION,
- bank_account_name_sandbox=EXCHANGE_BANK_ACCOUNT_SANDBOX,
- bank_account_name_nexus=EXCHANGE_BANK_ACCOUNT_NEXUS,
- env=get_nexus_cli_env(
- EXCHANGE_NEXUS_USERNAME,
- EXCHANGE_NEXUS_PASSWORD,
- NEXUS_URL
- )
- )
- print(" OK")
-
- print_nn("Create Taler facade ...")
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "facades",
- "new-taler-wire-gateway-facade",
- "--currency", CURRENCY,
- "--facade-name", EXCHANGE_FACADE_NAME,
- EXCHANGE_BANK_CONNECTION,
- EXCHANGE_BANK_ACCOUNT_NEXUS
- ],
- env=get_nexus_cli_env(
- EXCHANGE_NEXUS_USERNAME,
- EXCHANGE_NEXUS_PASSWORD,
- NEXUS_URL
- ),
- custom_name="create-taler-facade",
- ).run()
- print(" OK")
- try:
- response = requests.get(
- NEXUS_URL + "/facades",
- auth=requests.auth.HTTPBasicAuth(
- EXCHANGE_NEXUS_USERNAME,
- EXCHANGE_NEXUS_PASSWORD
- )
- )
- response.raise_for_status()
- except Exception as error:
- fail(error)
- FACADE_URL = response.json().get("facades")[0].get("baseUrl")
- print_nn("Set suggested exchange at Sandbox...")
- Command([
- f"{TALER_PREFIX}/bin/libeufin-sandbox",
- "default-exchange",
- "https://exchange.demo.taler.net/",
- EXCHANGE_PAYTO],
- env={
- "PATH": os.environ["PATH"],
- "LIBEUFIN_SANDBOX_DB_CONNECTION": f"jdbc:sqlite:{SANDBOX_DB_FILE}"
- }).run()
- print(" OK")
-
- # Point the exchange to the facade.
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-config",
- "-F",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
- "-o" "wire_gateway_auth_method",
- "-V", "basic"
- ],
- custom_name="specify-wire-gateway-auth-method",
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-config",
- "-F",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
- "-o" "wire_gateway_url",
- "-V", FACADE_URL
- ],
- custom_name="specify-facade-url",
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-config",
- "-F",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
- "-o" "username",
- "-V", EXCHANGE_NEXUS_USERNAME
- ],
- custom_name="specify-username-for-facade",
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-config",
- "-F",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
- "-o" "password",
- "-V", EXCHANGE_NEXUS_PASSWORD
- ],
- custom_name="specify-password-for-facade",
- ).run()
-
- ## Step 6: Set up merchant
-
- print_nn("Reset and init merchant database...")
- Command([
- f"{TALER_PREFIX}/bin/taler-merchant-dbinit",
- "-c", CFG_OUTDIR / "taler.conf",
- "--reset"
- ]).run()
- print(" OK")
-
- def ensure_instance(
- currency,
- instance_id,
- backend_url,
- wire_method,
- auth_token
- ):
- auth_header = {"Authorization": f"Bearer {auth_token}"}
- resp = requests.get(
- urljoin_nodrop(backend_url, f"management/instances/{instance_id}"),
- headers = auth_header
- )
- bankaccount_info = get_sandbox_account_info(
- SANDBOX_URL,
- f"sandbox-account-{instance_id.lower()}",
- ALL_INSTANCES_BANK_PASSWORD
- )
- req = dict(
- id=instance_id,
- name=f"Name of '{instance_id}'",
- payto_uris=[bankaccount_info["paytoUri"]],
- address=dict(),
- jurisdiction=dict(),
- default_max_wire_fee=f"{currency}:1",
- default_wire_fee_amortization=3,
- default_max_deposit_fee=f"{currency}:1",
- default_wire_transfer_delay=dict(d_us=0),
- default_pay_delay=dict(d_us=24*60*60*1000000),
- auth=dict(method="token", token=auth_token),
- )
- http_method = requests.post
- endpoint = "management/instances"
-
- # Instance exists, patching it.
- if resp.status_code == 200:
- print(f"Patching instance '{instance_id}'")
- http_method = requests.patch
- endpoint = f"management/instances/{instance_id}"
-
- resp = http_method(
- urljoin_nodrop(backend_url, endpoint),
- json=req,
- headers = auth_header
- )
- if resp.status_code < 200 or resp.status_code >= 300:
- print(f"Backend responds: {resp.status_code}/{resp.text}")
- fail(f"Could not create (or patch) instance '{instance_id}'")
-
- print_nn(f"Start merchant (with TALER_MERCHANT_TOKEN into the env)...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend-token.service"], check=True)
- if not is_serving("https://backend.demo.taler.net/"):
- fail(f"Merchant backend (with auth token) did not start correctly.")
- print(" OK")
- print_nn("Give default instance a bank account...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id="unusedMerchantEbicsPartnerId",
- ebics_user_id=f"unusedDefaultInstanceEbicsUserId",
- person_name=f"Shop Owner of default instance",
- bank_account_name="sandbox-account-default",
- password=ALL_INSTANCES_BANK_PASSWORD
- )
- print(" OK")
- ensure_instance(
- currency=CURRENCY,
- instance_id="default",
- backend_url = "https://backend.demo.taler.net/",
- wire_method = WIRE_METHOD,
- auth_token=FRONTENDS_API_TOKEN
- )
- print_nn("Stopping the merchant with TALER_MERCHANT_TOKEN into the env...")
- subprocess.run(["systemctl", "--user", "stop", "taler-local-merchant-backend-token.service"], check=True)
- print(" OK")
- print_nn("Restarting the merchant WITHOUT the auth-token in the env...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend.service"], check=True)
- if not is_serving("https://backend.demo.taler.net/"):
- fail(f"Merchant backend (without auth token) did not start correctly.")
- print(" OK")
- for instance in INSTANCES:
- instance_id = instance["name"]
- print_nn(f"Creating the {instance_id} instance...")
- ensure_instance(
- currency=CURRENCY,
- instance_id=instance_id,
- backend_url = "https://backend.demo.taler.net/",
- wire_method = WIRE_METHOD,
- auth_token=FRONTENDS_API_TOKEN
- )
- print(" OK")
- print_nn("Creating tip reserve...")
- create_tip_reserve()
- print(" OK")
-    # 1 second to let Nexus read the payment from
-    # Sandbox, and 1 second to let the Exchange Wirewatch
-    # read the payment from Nexus.
- print_nn("Sleep 2 seconds to let the tip reserve settle...")
- time.sleep(2)
- print(" OK")
- # Configure Sync.
-    print_nn("Reset and init Sync DB...")
- Command([
- f"{TALER_PREFIX}/bin/sync-dbinit",
- "-c", CFG_OUTDIR / "sync.conf",
- "--reset"]
- ).run()
- print(" OK")
-    print_nn("Stopping any running service...")
- subprocess.run(["systemctl", "--user", "stop", "taler-local-*.service"], check=True)
- print(" OK")
-
-@cli.command()
-def launch():
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-rsa.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-eddsa.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-cs.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-httpd.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-wirewatch.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-aggregator.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-transfer.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-sync.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-sandbox.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-nexus.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-donations.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-blog.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-survey.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-landing.service"], check=True)
-
-@cli.command()
-def stop():
- subprocess.run(["systemctl", "--user", "stop", "taler-local-*.service"], check=True)
-
-if __name__ == "__main__":
- cli()
diff --git a/bin/taler-local b/bin/taler-local
deleted file mode 100755
index d33c06d..0000000
--- a/bin/taler-local
+++ /dev/null
@@ -1,1840 +0,0 @@
-#!/usr/bin/env python3
-
-# This file is part of GNU Taler.
-#
-# GNU Taler is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# GNU Taler is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with GNU Taler. If not, see <https://www.gnu.org/licenses/>.
-
-import socket
-import shutil
-import atexit
-import click
-import types
-import os
-import sys
-import os.path
-import subprocess
-import time
-import random
-import json
-from os import listdir
-from os.path import isdir, join, basename
-from pathlib import Path
-from typing import List
-from sys import exit
-from urllib.parse import urljoin
-from os import remove
-import requests
-from collections import OrderedDict
-import errno
-from pathlib import Path
-from subprocess import Popen, DEVNULL, PIPE
-from datetime import datetime
-
-
-TALER_ROOT_DIR = Path.home() / ".taler"
-TALER_PREFIX = Path.home() / ".local"
-
-# Print No Newline.
-def print_nn(msg):
- print(msg, end="")
- sys.stdout.flush()
-
-class Repo:
- def __init__(self, name, url, deps, builder, version="master"):
- self.name = name
- self.url = url
- self.deps = deps
- self.builder = builder
- self.version = version
-
-@click.group()
-def cli():
- pass
-
-# Parses the command-line-given and comma-separated repos list
-# into a list of names.
-def split_repos_list(repos):
- return [repo for repo in repos.split(",") if repo != ""]
-
-# fetch the remote. No timestamp deletion here
-def update_checkout(r: Repo, p: Path):
- """Clean the repository's working directory and
-    update it to match the latest version of the upstream branch
- that we are tracking."""
- subprocess.run(["git", "-C", str(p), "clean", "-fdx"], check=True) # remove unversioned files.
-
- # Equivalent to "git pull". Does nothing if in detached HEAD
- # but pulls new code into the local copy otherwise.
- subprocess.run(["git", "-C", str(p), "fetch"], check=True)
- subprocess.run(["git", "-C", str(p), "reset"], check=True)
-
- # Makes the last step "--hard", namely removes files not
- # belonging to the current version.
- res = subprocess.run(
- [
- "git",
- "-C",
- str(p),
- "rev-parse",
- "--abbrev-ref",
- "--symbolic-full-name",
- "@{u}",
- ],
- stderr=subprocess.DEVNULL,
- stdout=subprocess.PIPE,
- encoding="utf-8",
- )
- if res.returncode != 0:
- ref = "HEAD"
- else:
- ref = res.stdout.strip("\n ")
- print(f"resetting {r.name} to ref {ref}")
- subprocess.run(["git", "-C", str(p), "reset", "--hard", ref], check=True)
-
-
-def default_configure(*extra):
- extra_list = list(extra)
- subprocess.run(["./configure", f"--prefix={TALER_PREFIX}"] + extra_list, check=True)
-
-def pyconfigure(*extra):
- """For python programs, --prefix doesn't work."""
- subprocess.run(["./configure"] + list(extra), check=True)
-
-def build_libeufin(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_libmicrohttpd(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure("--disable-doc")
- subprocess.run(["make"], check=True)
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_gnunet(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- "--disable-documentation",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_exchange(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_wallet(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_twister(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-exchange={pfx}",
- f"--with-gnunet={pfx}",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_merchant(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-exchange={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_sync(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-exchange={pfx}",
- f"--with-merchant={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-
-def build_demos(r, p):
- update_checkout(r, p)
- pfx = Path.home() / ".local"
- pyconfigure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_backoffice(r, p):
- update_checkout(r, p)
-    subprocess.run(["./bootstrap"], check=True)
-    subprocess.run(["./configure"], check=True)
-    subprocess.run(["make", "build-single"], check=True)
- (p / "taler-buildstamp").touch()
-
-repos = {
- "libmicrohttpd": Repo(
- "libmicrohttpd",
- "git://git.gnunet.org/libmicrohttpd.git",
- [],
- build_libmicrohttpd,
- ),
- "gnunet": Repo(
- "gnunet",
- "git://git.gnunet.org/gnunet.git",
- ["libmicrohttpd"],
- build_gnunet
- ),
- "exchange": Repo(
- "exchange",
- "git://git.taler.net/exchange",
- ["gnunet", "libmicrohttpd"],
- build_exchange,
- ),
- "merchant": Repo(
- "merchant",
- "git://git.taler.net/merchant",
- ["exchange","libmicrohttpd","gnunet"],
- build_merchant,
- ),
- "sync": Repo(
- "sync",
- "git://git.taler.net/sync",
- ["exchange",
- "merchant",
- "gnunet",
- "libmicrohttpd"],
- build_sync,
- ),
- "wallet-core": Repo(
- "wallet-core",
- "git://git.taler.net/wallet-core",
- [],
- build_wallet,
- ),
- "libeufin": Repo(
- "libeufin",
- "git://git.taler.net/libeufin.git",
- [],
- build_libeufin,
- ),
- "taler-merchant-demos": Repo(
- "taler-merchant-demos",
- "git://git.taler.net/taler-merchant-demos",
- [],
- build_demos,
- ),
- "twister": Repo(
- "twister",
- "git://git.taler.net/twister",
- ["gnunet", "libmicrohttpd"],
- build_twister,
- ),
-}
-
-def get_repos_names() -> List[str]:
- r_dir = TALER_ROOT_DIR / "sources"
- if not r_dir.is_dir():
- print(f"'{r_dir}' not found. Did bootstrap run?")
- return []
- return [el for el in listdir(r_dir) if isdir(join(r_dir, el)) and repos.get(el)]
-
-# Get 'Repo' objects (globally defined),
-# using their names as index.
-def load_repos(reposNames) -> List[Repo]:
- ret = []
- for repo in repos.keys():
- if repo in reposNames:
- ret.append(repos[repo])
- return ret
-
-# Return the list of repos (equipped with their version)
-# to install.
-def load_repos_with_envcfg(envcfg_path) -> List[Repo]:
- envcfg_path = Path(envcfg_path)
- if not os.path.isfile(envcfg_path):
- print(f"{envcfg_path} is not a file")
- sys.exit(1)
- cfgtext = envcfg_path.read_text()
- cfg = types.ModuleType("taler_deployment_cfg")
- try:
- exec(cfgtext, cfg.__dict__)
- except SyntaxError:
- print(f"{envcfg_path} is not Python.")
- exit(1)
- ret = []
- for repo in repos.keys():
- try:
- envcfg_entry = getattr(cfg, "tag_" + repo.replace("-", "_"))
- except AttributeError:
-            # The 'env' file doesn't have this repo, continue looping.
- continue
- repos[repo].version = envcfg_entry
- ret.append(repos[repo])
- return ret
-
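-# For illustration only (not a file shipped by this script): an envcfg.py
-# pinning versions could contain assignments such as
-#
-#   tag_exchange = "master"
-#   tag_merchant = "master"
-#   tag_wallet_core = "some-release-tag"
-#
-# Note how a dash in the repo name becomes an underscore in the 'tag_*'
-# attribute; repos without such an attribute are simply skipped.
-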
-# Flag as stale the projects set on 'master' that
-# aren't in line with upstream. Detached head projects
-# aren't affected.
-def update_repos(repos: List[Repo], force) -> None:
- for r in repos:
- r_dir = TALER_ROOT_DIR / "sources" / r.name
- subprocess.run(["git", "-C", str(r_dir), "fetch"], check=True)
- res = subprocess.run(
- ["git", "-C", str(r_dir), "status", "-sb"],
- check=True,
- stdout=subprocess.PIPE,
- encoding="utf-8",
- )
- if "behind" in res.stdout or force:
- print(f"{r.name} will be compiled")
- s = r_dir / "taler-buildstamp"
- if s.exists():
- s.unlink()
-
-# Projects without a build timestamp are considered stale,
-# and so is any project with a dependency that _got_ marked as stale.
-def get_stale_repos(repos: List[Repo]) -> List[Repo]:
- timestamps = {}
- stale = []
- for r in repos:
- r_dir = TALER_ROOT_DIR / "sources" / r.name
- s = r_dir / "taler-buildstamp"
- if not s.exists():
- timestamps[r.name] = time.time()
- stale.append(r)
- continue
- ts = timestamps[r.name] = s.stat().st_mtime
- for dep in r.deps:
-            # When 'dep' is not found, it has been
-            # excluded from the compilation.
-            if timestamps.get(dep, 0) > ts:
- stale.append(r)
- break
- return stale
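-# For example, if 'gnunet' was just flagged (its taler-buildstamp removed),
-# it gets a fresh time.time() timestamp here, which is newer than the
-# exchange's old buildstamp, so 'exchange' ends up in the stale list too.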
-
-@cli.command()
-@click.option(
- "--without-repos", metavar="WITHOUT REPOS",
-    help="WITHOUT REPOS is an unspaced and comma-separated list \
-of the repositories to _exclude_ from compilation",
- default="")
-@click.option(
- "--only-repos", metavar="ONLY REPOS",
-    help="ONLY REPOS is an unspaced and comma-separated exclusive list \
-of the repositories to include in the compilation",
- default="")
-@click.option(
- "--dry/--no-dry", default=False,
-    help="Only fetch changes, without actually building."
-)
-@click.option(
- "--with-envcfg", metavar="PATH",
- help="python file pinning each codebase version.",
-)
-# Normally, we don't rebuild dependent projects when one of
-# their dependencies changed. This lets us check whether
-# non-breaking changes really are so; this option overrides
-# that policy by having all the codebases compiled.
-@click.option(
- "--force/--no-force", default=False,
- help="build all the projects.",
-)
-def build(without_repos, only_repos, dry, with_envcfg, force) -> None:
- """Build the deployment from source."""
- if only_repos != "" and without_repos != "":
- print("Either use --only-repos or --without-repos")
- exit(1)
- repos_names = get_repos_names()
- if only_repos != "":
- repos_names = list(filter(
- lambda x: x in split_repos_list(only_repos),
- repos_names
- ))
- if without_repos != "":
- repos_names = list(filter(
- lambda x: x not in split_repos_list(without_repos),
- repos_names
- ))
- if with_envcfg:
- target_repos = load_repos_with_envcfg(with_envcfg)
- else:
- target_repos = load_repos(repos_names)
- # enforce version here.
- sources = TALER_ROOT_DIR / "sources"
- for r in target_repos:
- subprocess.run(
- ["git", "-C", str(sources / r.name),
- "checkout", "-q", "-f",
- r.version, "--"], check=True
- )
- update_repos(target_repos, force)
- stale = get_stale_repos(target_repos)
- print(f"found stale repos: {[r.name for r in stale]}")
- for r in stale:
- # Inform, if a dependency is not being built:
- diff = set(r.deps) - set(repos_names)
- if len(diff) > 0:
- print(f"Info: those dependencies are not being built: {diff}")
- p = TALER_ROOT_DIR / "sources" / r.name
- os.chdir(str(p))
- if dry:
- print("dry running")
- continue
- r.builder(r, p)
-
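-# Illustrative invocations (paths and repo choices are just examples):
-#
-#   taler-local build --only-repos exchange,merchant
-#   taler-local build --with-envcfg ~/envcfg.py --dry
-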
-# Only git-clone the codebases. The 'build' step
-# will run all the update logic. At this point, an
-# 'env' file - as well as the --repos option - will
-# only express which codebases are to be cloned.
-@cli.command()
-@click.option(
- "--repos", "-r",
- metavar="REPOS",
-    help="REPOS is an unspaced and comma-separated list of the repositories to clone.",
- default="libmicrohttpd,gnunet,exchange,merchant,wallet-core,taler-merchant-demos,sync,libeufin",
- show_default=True,
-)
-@click.option(
- "--without-repos",
- metavar="REPOS",
-    help="REPOS is an unspaced and comma-separated list of the repositories NOT to clone."
-)
-@click.option(
- "--list-repos/--no-list-repos", default=False,
- help="Lists the repositories that were bootstrapped.",
-)
-@click.option(
- "--with-envcfg", metavar="PATH",
- help="python file pinning each codebase version.",
-)
-@click.option(
- "--dry/--no-dry", default=False,
- help="Print steps, without downloading any repository.",
-)
-def bootstrap(list_repos, repos, with_envcfg, dry, without_repos) -> None:
- """Clone all the specified repositories."""
-    # Only say _which_ repos were bootstrapped. No further action.
- if list_repos:
- for repo in get_repos_names():
- print(repo)
- return
-
- # Download the repositories.
- def clone_repos(repos: List[Repo]):
- if len(repos) == 0:
-            print("No repositories to check out. Were the names spelled correctly?")
- return
- sources = TALER_ROOT_DIR / "sources"
- for r in repos:
- print(f"Bootstrapping '{r.name}', at version '{r.version}'")
- if dry:
- print("dry running")
- continue
- r_dir = sources / r.name
- if not r_dir.exists():
- r_dir.mkdir(parents=True, exist_ok=True)
- subprocess.run(
- ["git", "-C", str(sources),
- "clone", r.url], check=True
- )
- subprocess.run(
- ["git", "-C", str(r_dir),
- "checkout", "-q", "-f",
- r.version, "--"], check=True
- )
-
- # Get list of to-be-cloned repos from the 'env' file.
- if with_envcfg:
- # 'with_envcfg' is a path to a "envcfg.py" file.
- preparedRepos = load_repos_with_envcfg(with_envcfg)
- # Get list of to-be-cloned repos from the command line
- # (or its default)
- else:
- # 'repos' is here "repo1,repo2,.."
- reposList = split_repos_list(repos)
- # 'reposList' is here ["repo1", "repo2", ...]
- preparedRepos = load_repos(reposList)
- if without_repos:
- for exclude_repo in split_repos_list(without_repos):
- preparedRepos = [el for el in preparedRepos if el.name != exclude_repo]
- clone_repos(preparedRepos)
-
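-# Illustrative invocations (repo choices are just examples):
-#
-#   taler-local bootstrap --repos libmicrohttpd,gnunet,exchange
-#   taler-local bootstrap --without-repos wallet-core
-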
-# Globals shared across multiple sub-commands:
-# needed to configure and launch the reverse proxy.
-REV_PROXY_HOSTNAME = "localhost"
-REV_PROXY_PORT = "8080"
-REV_PROXY_NETLOC = REV_PROXY_HOSTNAME + ":" + REV_PROXY_PORT
-REV_PROXY_PROTO = "http"
-REV_PROXY_URL = f"{REV_PROXY_PROTO}://{REV_PROXY_NETLOC}"
-UNIX_SOCKETS_DIR = TALER_ROOT_DIR / "sockets"
-LOG_DIR = TALER_ROOT_DIR / "logs"
-# needed to create the customer's bank account and
-# to let them subsequently withdraw via the Access API.
-CUSTOMER_BANK_ACCOUNT = "sandbox-account-customer"
-CUSTOMER_BANK_PASSWORD = "secret"
-# needed along preparation and later to withdraw via
-# the Access API.
-CURRENCY = "CHF"
-
-@cli.command()
-@click.option(
- "--x-forwarded-host", metavar="HOST",
- help="Instruct Nginx to set HOST as the X-Forwarded-Host.",
- default=REV_PROXY_NETLOC
-)
-@click.option(
- "--x-forwarded-proto", metavar="PROTO",
- help="Instruct Nginx to set PROTO as the X-Forwarded-Proto.",
- default="http"
-)
-@click.option(
- "--postgres-db-name", metavar="DBNAME",
- help="Set postgres database name for all the services.",
- default="taler"
-)
-def prepare(x_forwarded_host, x_forwarded_proto, postgres_db_name):
- """Generate configuration, run-time blobs, instances, euFin accounts."""
- def is_serving(check_url, tries=10):
- for i in range(tries):
- try:
- print_nn(".")
- # Raises if the service is not reachable.
- response = requests.get(
- check_url,
- timeout=1
- )
- # The reverse proxy may return 500 if the
- # end service is not ready, therefore this
- # case should be tolerated.
- response.raise_for_status()
- except:
- time.sleep(0.5)
- if i == tries - 1:
- return False
- continue
- break
- return True
-
- def fail(reason=None):
- if reason:
- print("ERROR:", reason)
- exit(1)
-
- def kill(proc):
- proc.terminate()
- proc.wait()
-
- def get_nexus_cli_env(
- username,
- password,
- nexus_url
- ):
- env = os.environ.copy()
- env["LIBEUFIN_NEXUS_USERNAME"] = username
- env["LIBEUFIN_NEXUS_PASSWORD"] = password
- env["LIBEUFIN_NEXUS_URL"] = nexus_url
- return env
-
- def get_sandbox_cli_env(
- username, password
- ):
- env = os.environ.copy()
- env["LIBEUFIN_SANDBOX_USERNAME"] = username
- env["LIBEUFIN_SANDBOX_PASSWORD"] = password
- return env
-
- # Will be extended to include a SANDBOX_ADMIN_TOKEN
- # that will obsolete the 'superuser' flag of ordinary
- # user accounts. Likewise, the client side will be
- # modified to use such token.
- def get_sandbox_server_env(db_file, base_url, admin_password):
- env = os.environ.copy()
- env["LIBEUFIN_SANDBOX_DB_CONNECTION"] = f"jdbc:sqlite:{db_file}"
- env["LIBEUFIN_SANDBOX_BASE_URL"] = base_url
- env["LIBEUFIN_SANDBOX_ADMIN_PASSWORD"] = admin_password
- return env
-
- def get_nexus_server_env(db_file, base_url):
- env = os.environ.copy()
- env["LIBEUFIN_NEXUS_DB_CONNECTION"] = f"jdbc:sqlite:{db_file}"
- env["LIBEUFIN_NEXUS_BASE_URL"] = base_url
- return env
-
- def urljoin_nodrop(a, b):
-        a = a + "/" # ensure a trailing slash, so urljoin keeps the last path segment of 'a'.
-        b = "/".join([x for x in b.split("/") if x != ""]) # drop empty path segments (leading slashes) from 'b'.
- return urljoin(a, b)
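-    # For instance (illustrative host name): urljoin("http://h/nexus", "/facades")
-    # would yield "http://h/facades", dropping the mount point, whereas
-    # urljoin_nodrop("http://h/nexus", "/facades") yields "http://h/nexus/facades".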
-
- def prepare_nexus_account(
- ebics_url,
- ebics_host_id,
- ebics_partner_id,
- ebics_user_id,
- bank_connection_name,
- bank_account_name_sandbox,
- bank_account_name_nexus,
- env
- ):
- # make connection
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "new-ebics-connection",
- "--ebics-url", ebics_url,
- "--host-id", ebics_host_id,
- "--partner-id", ebics_partner_id,
- "--ebics-user-id", ebics_user_id,
- bank_connection_name
- ],
- env
- ).run()
- # connect
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "connect", bank_connection_name
- ],
- env
- ).run()
- # Import bank account
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "download-bank-accounts",
- bank_connection_name
- ],
- env
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
- "import-bank-account",
- "--offered-account-id",
- bank_account_name_sandbox,
- "--nexus-bank-account-id",
- bank_account_name_nexus,
- bank_connection_name
- ],
- env
- ).run()
- # Set background tasks.
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "accounts",
- "task-schedule", bank_account_name_nexus,
- "--task-type", "submit",
- "--task-name", "submit-payments-each-second",
- "--task-cronspec", "* * *"
- ],
- env
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "accounts",
- "task-schedule", bank_account_name_nexus,
- "--task-type", "fetch",
- "--task-name", "fetch-reports-each-second",
- "--task-cronspec", "* * *",
- "--task-param-level", "report",
- "--task-param-range-type", "latest"
- ],
- env
- ).run()
-
- def get_sandbox_account_info(
- sandbox_url,
- bank_account_label,
- password,
- ):
- customer_env = os.environ.copy()
- customer_env["LIBEUFIN_SANDBOX_USERNAME"] = bank_account_label
- customer_env["LIBEUFIN_SANDBOX_PASSWORD"] = password
- demobank_url = urljoin_nodrop(sandbox_url, "/demobanks/default")
- r = Command([
- f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
- "--sandbox-url", demobank_url,
- "demobank", "info",
- "--bank-account", bank_account_label],
- env = customer_env,
- capture_stdout=True
- ).run()
- return json.loads(r)
-
- def prepare_sandbox_account(
- sandbox_url,
- ebics_host_id,
- ebics_partner_id,
- ebics_user_id,
- person_name,
- # This value is BOTH a username
- # and a bank account label.
- bank_account_name,
- password,
- is_public=False
- ):
- demobank_url = urljoin_nodrop(sandbox_url, "/demobanks/default")
- user_env = os.environ.copy()
- user_env["LIBEUFIN_SANDBOX_USERNAME"] = bank_account_name
- user_env["LIBEUFIN_SANDBOX_PASSWORD"] = password
- register_cmd = [
- f"{TALER_PREFIX}/bin/libeufin-cli",
- "sandbox", "--sandbox-url", demobank_url,
- "demobank", "register"
- ]
- if is_public:
- register_cmd.append("--public")
- Command(register_cmd, env = user_env).run()
- admin_env = os.environ.copy()
- admin_env["LIBEUFIN_SANDBOX_USERNAME"] = SANDBOX_ADMIN_USERNAME
- admin_env["LIBEUFIN_SANDBOX_PASSWORD"] = SANDBOX_ADMIN_PASSWORD
- Command([
- f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
- "--sandbox-url", demobank_url,
- "demobank", "new-ebicssubscriber",
- "--host-id", ebics_host_id,
- "--partner-id", ebics_partner_id,
- "--user-id", ebics_user_id,
- "--bank-account", bank_account_name
- ],
- env = admin_env
- ).run()
-
-
- WIRE_METHOD = "iban"
- # euFin URLs
- SANDBOX_URL = REV_PROXY_URL + "/sandbox"
- NEXUS_URL = REV_PROXY_URL + "/nexus"
-
- # Filesystem's paths
- CFG_OUTDIR = TALER_ROOT_DIR / "config"
- TALER_RUNTIME_DIR = TALER_ROOT_DIR / "runtime"
- TALER_DATA_DIR = TALER_ROOT_DIR / "data"
- TALER_UNIT_FILES_DIR = systemd_user_dir = Path.home() / ".config" / "systemd" / "user"
-
- def get_link(path = ""):
- return x_forwarded_proto + "://" + x_forwarded_host + path
-
- def create_tip_reserve():
- payto = Command([
- f"{TALER_PREFIX}/bin/taler-merchant-setup-reserve",
- "--amount", f"{CURRENCY}:20",
- "--exchange-url", get_link("/exchange/"),
- "--merchant-url", get_link("/merchant-backend/instances/survey/"),
- "--apikey", f"Bearer {FRONTENDS_API_TOKEN}",
- "--wire-method", WIRE_METHOD],
- capture_stdout=True
- ).run()
-
- Command([
- f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox", "--sandbox-url",
- SANDBOX_URL + "/demobanks/default/", "demobank",
- "new-transaction", "--bank-account", "sandbox-account-survey",
- "--payto-with-subject", payto, "--amount", "20"],
- env = get_sandbox_cli_env(
- username = "sandbox-account-survey",
- password = ALL_INSTANCES_BANK_PASSWORD
- )).run()
-
- def get_random_iban():
-        cc_no_check = 131400 # "DE00" with letters mapped to digits (D=13, E=14).
- bban = "".join(random.choices("0123456789", k=4))
- check_digits = 98 - (int(f"{bban}{cc_no_check}") % 97)
- return "DE" + (f"0{check_digits}"[-2:]) + bban
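-    # Worked example (illustrative): with bban = "1234" the check digits are
-    # 98 - (1234131400 % 97) = 98 - 12 = 86, giving the toy IBAN "DE861234".
-    # The 4-digit BBAN is deliberately short; these are only Sandbox test IBANs.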
-
- # IBANs
-
- IBAN_MERCHANT_DEFAULT = get_random_iban()
- IBAN_MERCHANT_DEMOSHOP = get_random_iban()
-
- # Instances
- INSTANCES = [
- dict(name="GNUnet", isPublic=True),
- dict(name="Taler", isPublic=True),
- dict(name="Tor", isPublic=True),
- dict(name="survey"),
- dict(name="blog"),
- ]
-
- # Credentials / API keys
- EXCHANGE_NEXUS_USERNAME = "exchange-nexus-user"
- EXCHANGE_NEXUS_PASSWORD = "exchange-nexus-password"
- FRONTENDS_API_TOKEN = "secret-token:secret"
- TALER_MERCHANT_TOKEN = "secret-token:secret"
- ALL_INSTANCES_BANK_PASSWORD = "secret"
- EXCHANGE_BANK_ACCOUNT_SANDBOX = "sandbox-account-exchange"
- EXCHANGE_BANK_ACCOUNT_PASSWORD = "secret"
-
- # EBICS
- EBICS_HOST_ID = "ebicsDeployedHost"
- EXCHANGE_EBICS_USER_ID = "exchangeEbicsUserId"
- EXCHANGE_EBICS_PARTNER_ID = "exchangeEbicsPartnerId"
- EBICS_URL = REV_PROXY_URL + "/sandbox/ebicsweb"
-
- # euFin
- EXCHANGE_BANK_ACCOUNT_NEXUS = "exchange-imported-account-nexus"
- EXCHANGE_BANK_CONNECTION = "exchange-ebics-connection"
- NEXUS_DB_FILE = "/tmp/nexus.sqlite"
- SANDBOX_DB_FILE = "/tmp/sandbox.sqlite"
- EXCHANGE_FACADE_NAME = "exchange-taler-facade"
- SANDBOX_ADMIN_USERNAME = "admin"
- SANDBOX_ADMIN_PASSWORD = "secret"
-
- class Command:
- def __init__(
- self, cmd, env=os.environ, log_dir=LOG_DIR,
- custom_name=None, capture_stdout=False
- ):
- if len(cmd) == 0:
-                fail("Refusing to execute an empty command.")
- self.name = custom_name if custom_name else basename(cmd[0])
- self.cmd = cmd
- self.capture_stdout = capture_stdout
- self.log_dir = log_dir
- self.env = env
-
- def run(self):
- self.do()
- return_code = self.handle.wait()
- self.cleanup() # Mainly closes the log file.
- if return_code != 0:
- fail(f"Command {self.name} failed. Logs in {self.log_dir}")
- if self.capture_stdout:
- return self.handle.communicate()[0].decode("utf-8").rstrip()
-
- def get_log_filename(self):
- return self.log_file.name
-
- def cleanup(self):
- self.log_file.flush()
- self.log_file.close()
-
- def do(self):
- if not self.log_dir.is_dir():
- os.makedirs(self.log_dir)
- try:
- log_filename = self.log_dir / f"{self.name}.log"
- self.log_file = open(log_filename, "a+")
- except Exception as error:
- fail(f"Could not open log file: {log_filename}: {error}")
- try:
- self.handle = Popen(
- self.cmd, # list
- stdin=DEVNULL,
- stdout=self.log_file if not self.capture_stdout else PIPE,
- stderr=self.log_file,
- env=self.env
- )
- except Exception as error:
- fail(f"Could not execute: {' '.join(self.cmd)}: {error}")
-
- class ConfigFile:
- def __init__(self, filename):
- self.sections = OrderedDict()
- self.filename = filename
-
- def destroy(self):
- del self.sections
- self.sections = OrderedDict()
-
- def cfg_put(self, section_name, key, value):
- s = self.sections[section_name] = self.sections.get(section_name, OrderedDict())
- s[key] = value
-
- def cfg_write(self, outdir):
- if outdir:
- if not os.path.isdir(outdir):
- os.makedirs(outdir)
- fstream = open(os.path.join(outdir, self.filename), "w")
- else:
-                fstream = sys.stdout
-
- for section_name, section in self.sections.items():
- fstream.write("[" + section_name + "]" + "\n")
- for key, value in section.items():
- fstream.write(key + " = " + value + "\n")
- fstream.write("\n")
- fstream.close()
-
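-    # For illustration: cfg_put("taler", "CURRENCY", "CHF") followed by
-    # cfg_write(outdir) emits an INI-style block of the form
-    #
-    #   [taler]
-    #   CURRENCY = CHF
-    #
-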
- def config_specify_master_pub(
- filename,
- currency,
- exchange_master_pub
- ):
- Command([
- f"{TALER_PREFIX}/bin/taler-config", "-c", filename,
- "-s", "exchange", "-o", "master_public_key",
- "-V", exchange_master_pub
- ]).run()
- Command([
- f"{TALER_PREFIX}/bin/taler-config", "-c", filename,
- "-s", f"merchant-exchange-{currency}",
- "-o", "master_key",
- "-V", exchange_master_pub
- ]).run()
-
- # When called, there is no exchange master pub yet.
-    # taler-exchange-offline will produce the key _after_
- # taler.conf is generated. Only after that, we'll
- # specify the master key where it is missing; namely
- # in the merchant backend and exchange HTTP daemon sections.
-
- def config_main(
- filename,
- outdir,
- unix_sockets_dir,
- currency,
- rev_proxy_url,
- wire_method,
- exchange_wire_gateway_username,
- exchange_wire_gateway_password,
- frontend_api_key,
- taler_runtime_dir,
- postgres_db_name
- ):
- def coin(
- obj,
- currency,
- name,
- value,
- d_withdraw="3 years",
- d_spend="5 years",
- d_legal="10 years",
- f_withdraw="0.01",
- f_deposit="0.01",
- f_refresh="0.01",
- f_refund="0.01",
- rsa_keysize="2048",
- ):
- sec = "coin_" + currency + "_" + name
- obj.cfg_put(sec, "cipher", "RSA")
- obj.cfg_put(sec, "value", currency + ":" + value)
- obj.cfg_put(sec, "duration_withdraw", d_withdraw)
- obj.cfg_put(sec, "duration_spend", d_spend)
- obj.cfg_put(sec, "duration_legal", d_legal)
- obj.cfg_put(sec, "fee_withdraw", currency + ":" + f_withdraw)
- obj.cfg_put(sec, "fee_refresh", currency + ":" + f_refresh)
- obj.cfg_put(sec, "fee_refund", currency + ":" + f_refund)
- obj.cfg_put(sec, "fee_deposit", currency + ":" + f_deposit)
- obj.cfg_put(sec, "rsa_keysize", rsa_keysize)
-
- obj = ConfigFile("taler.conf")
- obj.cfg_put("paths", "TALER_DATA_HOME", str(TALER_DATA_DIR))
- if not taler_runtime_dir.is_dir():
- os.makedirs(taler_runtime_dir)
- obj.cfg_put("paths", "TALER_RUNTIME_DIR", str(taler_runtime_dir))
- obj.cfg_put("taler", "CURRENCY", currency)
- obj.cfg_put("taler", "CURRENCY_ROUND_UNIT", f"{currency}:0.01")
-
- obj.cfg_put("bank", "serve", "uwsgi")
- obj.cfg_put("bank", "uwsgi_serve", "unix")
- obj.cfg_put("bank", "uwsgi_unixpath", str(unix_sockets_dir / "bank.sock"))
- obj.cfg_put("bank", "uwsgi_unixpath_mode", "660")
- obj.cfg_put("bank", "database", "taler")
- obj.cfg_put("bank", "max_debt", "%s:500.0" % currency)
- obj.cfg_put("bank", "max_debt_bank", "%s:1000000000.0" % currency)
- obj.cfg_put("bank", "allow_registrations", "YES")
- obj.cfg_put("bank", "base_url", rev_proxy_url + "/bank/")
- obj.cfg_put("bank", "database", f"postgres:///{postgres_db_name}")
- obj.cfg_put("bank", "suggested_exchange", rev_proxy_url + "/exchange/")
-
- obj.cfg_put("donations", "serve", "http")
- obj.cfg_put("donations", "http_serve", "unix")
- obj.cfg_put("donations", "http_unixpath", str(unix_sockets_dir / "donations.sock"))
- obj.cfg_put("donations", "http_unixpath_mode", "660")
-
- obj.cfg_put("landing", "serve", "http")
- obj.cfg_put("landing", "http_serve", "unix")
- obj.cfg_put("landing", "http_unixpath", str(unix_sockets_dir / "landing.sock"))
- obj.cfg_put("landing", "http_unixpath_mode", "660")
-
- obj.cfg_put("blog", "serve", "http")
- obj.cfg_put("blog", "http_serve", "unix")
- obj.cfg_put("blog", "http_unixpath", str(unix_sockets_dir / "blog.sock"))
- obj.cfg_put("blog", "http_unixpath_mode", "660")
-
- obj.cfg_put("survey", "serve", "http")
- obj.cfg_put("survey", "http_serve", "unix")
- obj.cfg_put("survey", "http_unixpath", str(unix_sockets_dir / "survey.sock"))
- obj.cfg_put("survey", "http_unixpath_mode", "660")
- obj.cfg_put("survey", "bank_password", "x")
-
- obj.cfg_put("merchant", "serve", "unix")
- obj.cfg_put("merchant", "unixpath", str(unix_sockets_dir / "merchant-backend.sock"))
- obj.cfg_put("merchant", "wire_transfer_delay", "0 s")
- obj.cfg_put("merchant", "default_max_wire_fee", currency + ":" + "0.01")
- obj.cfg_put("merchant", "default_max_deposit_fee", currency + ":" + "0.05")
- obj.cfg_put("merchantdb-postgres", "config", f"postgres:///{postgres_db_name}")
-
- obj.cfg_put("frontends", "backend", rev_proxy_url + "/merchant-backend/")
- obj.cfg_put(
- "merchant-exchange-{}".format(currency),
- "exchange_base_url", rev_proxy_url + "/exchange/",
- )
- obj.cfg_put(
- "merchant-exchange-{}".format(currency),
- "currency", currency
- )
- obj.cfg_put("auditor", "serve", "unix")
- # FIXME: both below used?
- obj.cfg_put("auditor", "base_url", rev_proxy_url + "/auditor")
- obj.cfg_put("auditor", "auditor_url", rev_proxy_url + "/auditor")
- obj.cfg_put("auditor", "unixpath", str(unix_sockets_dir / "auditor.sock"))
- obj.cfg_put("auditor", "tiny_amount", currency + ":0.01")
-
- obj.cfg_put(
- "taler-exchange-secmod-eddsa",
- "unixpath",
- str(unix_sockets_dir / "exchange-secmod-eddsa.sock")
- )
- obj.cfg_put(
- "taler-exchange-secmod-cs",
- "unixpath",
- str(unix_sockets_dir / "exchange-secmod-cs.sock")
- )
- obj.cfg_put("taler-exchange-secmod-cs", "sm_priv_key",
- "${TALER_DATA_HOME}/taler-exchange-secmod-cs/secmod-private-key"
- )
- obj.cfg_put(
- "taler-exchange-secmod-rsa",
- "unixpath",
- str(unix_sockets_dir / "exchange-secmod-rsa.sock")
- )
- obj.cfg_put("taler-exchange-secmod-rsa", "sm_priv_key",
- "${TALER_DATA_HOME}/taler-exchange-secmod-rsa/secmod-private-key"
- )
- obj.cfg_put("exchange", "base_url", rev_proxy_url + "/exchange/")
- obj.cfg_put("exchange", "serve", "unix")
- obj.cfg_put("exchange", "unixpath", str(unix_sockets_dir / "exchange.sock"))
- obj.cfg_put("exchange", "terms_etag", "0")
- obj.cfg_put("exchange", "terms_dir", "$HOME/.local/share/taler-exchange/tos")
- obj.cfg_put("exchange", "privacy_etag", "0")
- obj.cfg_put("exchange", "privacy_dir", "$HOME/.local/share/taler-exchange/pp")
- obj.cfg_put("exchangedb-postgres", "config", f"postgres:///{postgres_db_name}")
- obj.cfg_put("auditordb-postgres", "db_conn_str", f"postgres:///{postgres_db_name}")
- obj.cfg_put("auditordb-postgres", "config", f"postgres:///{postgres_db_name}")
- obj.cfg_put("exchange-account-1", "enable_debit", "yes")
- obj.cfg_put("exchange-account-1", "enable_credit", "yes")
- obj.cfg_put("merchant-account-merchant",
- "wire_response",
- "${TALER_DATA_HOME}/merchant/wire/merchant.json",
- )
- obj.cfg_put("merchant-account-merchant", "wire_file_mode", "770")
-        # The demo shops prepend the "Bearer " part. NOTE: should
-        # this be changed to match 'sync', since it also expects the
-        # "Bearer " part?
- obj.cfg_put("frontends", "backend_apikey", frontend_api_key)
- coin(obj, currency, "ct_10", "0.10")
- coin(obj, currency, "1", "1")
- coin(obj, currency, "2", "2")
- coin(obj, currency, "5", "5")
- coin(obj, currency, "10", "10")
- coin(obj, currency, "1000", "1000")
- obj.cfg_write(outdir)
- return obj
-
- def config_sync(
- filename, outdir,
- unix_sockets_dir,
- currency, api_key,
- rev_proxy_url,
- postgres_db_name
- ):
- obj = ConfigFile(filename)
- obj.cfg_put("taler", "currency", currency)
- obj.cfg_put("sync", "serve", "unix")
- obj.cfg_put("sync", "unixpath", str(unix_sockets_dir / "sync.sock"))
- obj.cfg_put("sync", "api_key", f"Bearer {api_key}")
- obj.cfg_put("sync", "annual_fee", f"{currency}:0.1")
- obj.cfg_put("sync", "fulfillment_url", "taler://fulfillment-success/")
- obj.cfg_put("sync", "payment_backend_url", rev_proxy_url + "/merchant-backend/instances/Taler/")
- obj.cfg_put("syncdb-postgres", "config", f"postgres:///{postgres_db_name}")
- obj.cfg_write(outdir)
-
- def unit_file_content(description, cmd, env=None):
- executable_name = cmd.split(" ")[0].split("/")[-1]
- content = (
- "[Unit]\n"
- f"Description={description}\n"
- "[Service]\n"
- f"ExecStart={cmd}\n"
- f"StandardOutput=append:{LOG_DIR / executable_name}.log\n"
- f"StandardError=append:{LOG_DIR / executable_name}.log"
- )
- if env:
- content += f"\nEnvironmentFile={env}"
- return content
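-    # For illustration (paths abbreviated): unit_file_content("Taler Sync",
-    # ".../bin/sync-httpd -L DEBUG -c .../sync.conf") renders roughly as
-    #
-    #   [Unit]
-    #   Description=Taler Sync
-    #   [Service]
-    #   ExecStart=.../bin/sync-httpd -L DEBUG -c .../sync.conf
-    #   StandardOutput=append:<LOG_DIR>/sync-httpd.log
-    #   StandardError=append:<LOG_DIR>/sync-httpd.log
-    #
-    # plus an EnvironmentFile= line when 'env' is given.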
-
-
- print_nn("Ensure no service is running...")
- if is_serving(REV_PROXY_URL + "/", tries=3):
- fail("Reverse proxy is unexpectedly running!")
- if UNIX_SOCKETS_DIR.is_dir():
- for left_socket in os.listdir(UNIX_SOCKETS_DIR):
- s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- socket_file = str(UNIX_SOCKETS_DIR / left_socket)
- if s.connect_ex(socket_file.encode("utf-8")) == 0:
- fail(f"A service is unexpectedly running and bound to {socket_file}!")
- print(" OK")
-
- print_nn("Remove stale data and config...")
- if TALER_DATA_DIR.exists():
- shutil.rmtree(TALER_DATA_DIR)
- if TALER_RUNTIME_DIR.exists():
- shutil.rmtree(TALER_RUNTIME_DIR)
- if CFG_OUTDIR.exists():
- shutil.rmtree(CFG_OUTDIR)
- print(" OK")
-
- print_nn("Generate preliminary taler.conf...")
- mc = config_main(
- "taler.conf",
- outdir=CFG_OUTDIR,
- unix_sockets_dir=UNIX_SOCKETS_DIR,
- currency=CURRENCY,
- rev_proxy_url=get_link(), # Gets X-Forwarded-* compatible base URL.
- wire_method=WIRE_METHOD,
- exchange_wire_gateway_username=EXCHANGE_NEXUS_USERNAME,
- exchange_wire_gateway_password=EXCHANGE_NEXUS_PASSWORD,
- frontend_api_key=FRONTENDS_API_TOKEN,
- taler_runtime_dir=TALER_RUNTIME_DIR,
- postgres_db_name=postgres_db_name
- )
- print(" OK")
-
- print_nn("Installing SystemD unit files...")
- if not systemd_user_dir.exists():
- systemd_user_dir.mkdir(parents=True, exist_ok=True)
-
- if not TALER_UNIT_FILES_DIR.exists():
- TALER_UNIT_FILES_DIR.mkdir(parents=True, exist_ok=True)
-
- # Internal redirect of X-Forwarded-Host's port
- # to the port Nginx binds to. Allows clients
- # connecting from within a container to still
- # reach services at X-Forwarded-Host.
- try:
- x_forwarded_port = x_forwarded_host.split(":")[1]
- except IndexError:
- x_forwarded_port = None
-
- need_redirect = (x_forwarded_port) and (x_forwarded_port != REV_PROXY_PORT)
- with open(TALER_UNIT_FILES_DIR / "taler-local-port-redirect.service", "w") as port_redirect_unit:
- port_redirect_unit.write(unit_file_content(
- description = "Port redirect allowing configuration at X-Forwarded-Host",
- cmd = f"socat TCP4-LISTEN:{x_forwarded_port},fork TCP4:{REV_PROXY_NETLOC}" if need_redirect else "true",
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-httpd.service", "w") as exchange_unit:
- exchange_unit.write(unit_file_content(
- description = "Taler Exchange HTTP daemon",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-httpd -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-wirewatch.service", "w") as exchange_wirewatch_unit:
- exchange_wirewatch_unit.write(unit_file_content(
- description = "Taler Exchange Wirewatch",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-wirewatch -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-aggregator.service", "w") as exchange_aggregator_unit:
- exchange_aggregator_unit.write(unit_file_content(
- description = "Taler Exchange Aggregator",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-aggregator --kyc-off -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-transfer.service", "w") as exchange_transfer_unit:
- exchange_transfer_unit.write(unit_file_content(
- description = "Taler Exchange Transfer",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-transfer -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-cs.service", "w") as exchange_cs_unit:
- exchange_cs_unit.write(unit_file_content(
- description = "Taler Exchange CS security module",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-cs -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
- ))
-
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-rsa.service", "w") as exchange_rsa_unit:
- exchange_rsa_unit.write(unit_file_content(
- description = "Taler Exchange RSA security module",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-rsa -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-eddsa.service", "w") as exchange_eddsa_unit:
- exchange_eddsa_unit.write(unit_file_content(
- description = "Taler Exchange EDDSA security module",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-eddsa -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-merchant-backend.service", "w") as merchant_unit:
- merchant_unit.write(unit_file_content(
- description = "Taler Merchant backend",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-httpd -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-sync.service", "w") as sync_unit:
- sync_unit.write(unit_file_content(
- description = "Taler Sync",
- cmd = f"{TALER_PREFIX}/bin/sync-httpd -L DEBUG -c {CFG_OUTDIR / 'sync.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-merchant-backend-token.service", "w") as merchant_token_unit:
- merchant_token_unit.write(unit_file_content(
- description = "Taler Merchant backend with auth token to allow default instance creation.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-httpd -a {TALER_MERCHANT_TOKEN} -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- # Custom Postgres connection.
- if os.environ.get("PGPORT"):
- with open(TALER_UNIT_FILES_DIR / "taler-local-postgres.env", "w") as postgres_env:
- postgres_env.write(f"PGPORT={os.environ.get('PGPORT')}")
-
- # euFin unit files.
- with open(TALER_UNIT_FILES_DIR / "taler-local-sandbox.service", "w") as sandbox_unit:
- sandbox_unit.write(unit_file_content(
- description = "euFin Sandbox",
- cmd = f"{TALER_PREFIX}/bin/libeufin-sandbox serve --with-unix-socket {UNIX_SOCKETS_DIR / 'sandbox.sock'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-sandbox.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-nexus.service", "w") as nexus_unit:
- nexus_unit.write(unit_file_content(
- description = "euFin Nexus",
- cmd = f"{TALER_PREFIX}/bin/libeufin-nexus serve --with-unix-socket {UNIX_SOCKETS_DIR / 'nexus.sock'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-nexus.env"
- ))
- # euFin env files.
- with open(TALER_UNIT_FILES_DIR / "taler-local-sandbox.env", "w") as sandbox_env:
- sandbox_env.write(f"LIBEUFIN_SANDBOX_DB_CONNECTION=jdbc:sqlite:{SANDBOX_DB_FILE}\n")
- sandbox_env.write(f"LIBEUFIN_SANDBOX_ADMIN_PASSWORD={SANDBOX_ADMIN_PASSWORD}\n")
- sandbox_env.write(f"TALER_ENV_URL_INTRO={get_link('/landing/')}\n")
- sandbox_env.write(f"TALER_ENV_URL_BANK={get_link('/sandbox/demobanks/default')}\n")
- sandbox_env.write(f"TALER_ENV_URL_MERCHANT_BLOG={get_link('/blog/')}\n")
- sandbox_env.write(f"TALER_ENV_URL_MERCHANT_DONATIONS={get_link('/donations/')}\n")
- sandbox_env.write(f"TALER_ENV_URL_MERCHANT_SURVEY={get_link('/survey/')}\n")
-
- with open(TALER_UNIT_FILES_DIR / "taler-local-nexus.env", "w") as nexus_env:
- nexus_env.write(f"LIBEUFIN_NEXUS_DB_CONNECTION=jdbc:sqlite:{NEXUS_DB_FILE}\n")
- nexus_env.write((
- f"TALER_ENV_URL_INTRO={get_link('/landing/')}\n"
- f"TALER_ENV_URL_BANK={get_link('/sandbox/demobanks/default')}\n"
- f"TALER_ENV_URL_MERCHANT_BLOG={get_link('/blog/')}\n"
- f"TALER_ENV_URL_MERCHANT_DONATIONS={get_link('/donations/')}\n"
- f"TALER_ENV_URL_MERCHANT_SURVEY={get_link('/survey/')}\n"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-donations.service", "w") as donations_unit:
- donations_unit.write(unit_file_content(
- description = "Donation Website that accepts Taler payments.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos donations -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-blog.service", "w") as blog_unit:
- blog_unit.write(unit_file_content(
- description = "Blog that accepts Taler payments.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos blog -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-survey.service", "w") as survey_unit:
- survey_unit.write(unit_file_content(
- description = "Survey Website awarding tips via Taler.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos survey -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-landing.service", "w") as landing_unit:
- landing_unit.write(unit_file_content(
- description = "Landing Website of Taler demo.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos landing -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-frontends.env", "w") as frontends_env:
- frontends_env.write((
- f"PATH={os.environ.get('PATH')}\n"
- f"TALER_CONFIG_FILE={CFG_OUTDIR / 'taler.conf'}\n"
- f"TALER_ENV_URL_INTRO={get_link('/landing/')}\n"
- f"TALER_ENV_URL_BANK={get_link('/sandbox/demobanks/default/')}\n"
- f"TALER_ENV_URL_MERCHANT_BLOG={get_link('/blog/')}\n"
- f"TALER_ENV_URL_MERCHANT_DONATIONS={get_link('/donations/')}\n"
- f"TALER_ENV_URL_MERCHANT_SURVEY={get_link('/survey/')}\n"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-nginx.service", "w") as nginx_unit:
- nginx_unit.write(unit_file_content(
- description = "Nginx: reverse proxy for taler-local.",
- cmd = f"nginx -c {CFG_OUTDIR / 'nginx.conf'}",
- ))
- print(" OK")
- print_nn("Reload SystemD...")
- Command(["systemctl", "--user", "daemon-reload"]).run()
- atexit.register(lambda: subprocess.run(
- ["systemctl", "--user", "stop", "taler-local-*.service"],
- check=True
- )
- )
- print(" OK")
- print_nn("Generate exchange's master key...")
- EXCHANGE_MASTER_PUB = Command(
- [
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "setup"
- ],
- capture_stdout=True
- ).run()
- print(" OK")
- print_nn("Specify exchange master pub in taler.conf...")
- config_specify_master_pub(
- CFG_OUTDIR / "taler.conf",
- CURRENCY,
- EXCHANGE_MASTER_PUB
- )
- print(" OK")
- print_nn("Generating sync.conf...")
- config_sync(
- "sync.conf",
- outdir=CFG_OUTDIR,
- unix_sockets_dir=UNIX_SOCKETS_DIR,
- currency=CURRENCY,
- api_key=FRONTENDS_API_TOKEN,
- rev_proxy_url=get_link(),
- postgres_db_name=postgres_db_name
- )
- print(" OK")
-    print_nn("Reset and init exchange DB...")
- Command([
- f"{TALER_PREFIX}/bin/taler-exchange-dbinit",
- "-c", CFG_OUTDIR / "taler.conf",
- "--reset"]
- ).run()
- print(" OK")
-
- print_nn("Launching X-Forwarded-Host port redirect...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-port-redirect.service"], check=True)
- time.sleep(1)
- print(" OK")
- print_nn("Launching the reverse proxy...")
- with open(CFG_OUTDIR / "nginx.conf", "w") as nginx_conf:
- nginx_conf.write((
- f"error_log {LOG_DIR / 'nginx.log'};\n"
- f"pid {TALER_ROOT_DIR / 'nginx.pid'};\n"
- "daemon off;\n"
- "events {}\n"
- "http {\n"
- f"access_log {LOG_DIR / 'nginx.log'};\n"
- "server {\n"
- f"listen {REV_PROXY_PORT};\n"
- f"listen [::]:{REV_PROXY_PORT};\n"
- "location / {\n"
- "return 200 'Hello, I am Nginx - proxying taler-local\n';\n"
- "}\n"
- "location ~* ^/(?<component>[a-z\-]+)(/(?<taler_uri>.*))? {\n"
- "proxy_redirect off;\n"
- "proxy_set_header X-Forwarded-Prefix /$component;\n"
- f"proxy_set_header X-Forwarded-Host {x_forwarded_host};\n"
- f"proxy_set_header X-Forwarded-Proto {x_forwarded_proto};\n"
- f"client_body_temp_path /tmp/taler-local-nginx;\n"
- f"proxy_pass http://unix:{UNIX_SOCKETS_DIR}/$component.sock:/$taler_uri?$args;\n"
- "}\n"
- "}\n"
- "}\n"
- ))
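-    # With this location rule, a request to e.g. "/exchange/keys" on the proxy
-    # goes to http://unix:<UNIX_SOCKETS_DIR>/exchange.sock:/keys, and
-    # "/merchant-backend/config" ends up on merchant-backend.sock - i.e. the
-    # first path segment selects the component's Unix socket.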
- subprocess.run(["systemctl", "--user", "start", "taler-local-nginx.service"], check=True)
- if not is_serving(REV_PROXY_URL + "/"):
- fail(f"Reverse proxy did not start correctly")
- # Do check.
- print(" OK")
- print_nn("Launching the exchange RSA helper...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-rsa.service"])
- print(" OK")
- print_nn("Launching the exchange EDDSA helper...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-eddsa.service"])
- print(" OK")
- print_nn("Launching the exchange CS helper...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-cs.service"])
- print(" OK")
- print_nn("Launching the exchange...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-httpd.service"])
- if not is_serving(REV_PROXY_URL + "/exchange/"):
- fail(f"Exchange did not start correctly.")
- print(" OK")
- print_nn("exchange-offline: signing key material...")
- Command([
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "download", "sign", "upload"
- ]).run()
- print(" OK")
- # Set up wire fees for next 5 years
- NOW = datetime.now()
- YEAR = NOW.year
- print_nn("Setting wire fees for the next 5 years...")
- for year in range(YEAR, YEAR+5):
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "wire-fee",
- str(year),
- WIRE_METHOD,
- CURRENCY + ":0.01",
- CURRENCY + ":0.01",
- CURRENCY + ":0.01",
- "upload"
- ],
- custom_name="set-wire-fee"
- ).run()
- print(" OK")
-    print_nn("Reset and init auditor DB...")
- Command([
- f"{TALER_PREFIX}/bin/taler-auditor-dbinit",
- "-c", CFG_OUTDIR / "taler.conf",
- "--reset"]
- ).run()
- print(" OK")
- print_nn("Add this exchange to the auditor...")
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-auditor-exchange",
- "-c", CFG_OUTDIR / "taler.conf",
- "-m", EXCHANGE_MASTER_PUB,
- "-u", REV_PROXY_URL + "/exchange/"
- ],
- ).run()
- print(" OK")
- ## Step 4: Set up euFin
- print_nn("Resetting euFin databases...")
- try:
- remove(SANDBOX_DB_FILE)
- remove(NEXUS_DB_FILE)
- except OSError as error:
- if error.errno != errno.ENOENT:
- raise error
- print(" OK")
-    # Make the 'default' demobank at Sandbox (with the signup bonus enabled).
- Command([
- f"{TALER_PREFIX}/bin/libeufin-sandbox",
- "config", "--currency", CURRENCY, "--with-signup-bonus", "default"],
- env={
- "PATH": os.environ["PATH"],
- "LIBEUFIN_SANDBOX_DB_CONNECTION": f"jdbc:sqlite:{SANDBOX_DB_FILE}"
- }).run()
-    # This step transparently creates a default demobank.
- print_nn("Launching Sandbox...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-sandbox.service"])
- if not is_serving(SANDBOX_URL):
- fail(f"Sandbox did not start correctly.")
- print(" OK")
- print_nn("Make Sandbox EBICS host...")
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
- "--sandbox-url", SANDBOX_URL,
- "ebicshost", "create",
- "--host-id", EBICS_HOST_ID,
- ],
- env=get_sandbox_cli_env(
- SANDBOX_ADMIN_USERNAME,
- SANDBOX_ADMIN_PASSWORD,
- ),
- custom_name="sandbox-create-ebicshost",
- ).run()
- print(" OK")
-
- print_nn("Create Exchange account at Sandbox...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id=EXCHANGE_EBICS_PARTNER_ID,
- ebics_user_id=EXCHANGE_EBICS_USER_ID,
- person_name="Exchange Owner",
- bank_account_name=EXCHANGE_BANK_ACCOUNT_SANDBOX,
- password=EXCHANGE_BANK_ACCOUNT_PASSWORD
- )
- print(" OK")
- print_nn("Getting exchange payto-URI from the bank...")
- exchange_bank_account_info = get_sandbox_account_info(
- SANDBOX_URL,
- EXCHANGE_BANK_ACCOUNT_SANDBOX,
- EXCHANGE_BANK_ACCOUNT_PASSWORD
- )
- EXCHANGE_PAYTO = exchange_bank_account_info["paytoUri"]
- print(" OK")
-    print_nn("Specify own payto-URI in the exchange's configuration...")
- Command([
- f"{TALER_PREFIX}/bin/taler-config", "-c", CFG_OUTDIR / 'taler.conf',
- "-s", "exchange-account-1", "-o", "payto_uri", "-V",
- EXCHANGE_PAYTO
- ]).run()
- print(" OK")
- print_nn(f"exchange-offline: enabling {EXCHANGE_PAYTO}...")
- Command([
- f"{TALER_PREFIX}/bin/taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "enable-account", EXCHANGE_PAYTO, "upload"
- ]).run()
- print(" OK")
-
- # Give each instance a Sandbox account (note: 'default'
- # won't have one, as it should typically only manage other
- # instances).
- for instance in INSTANCES:
- instance_id = instance["name"]
- print_nn(f"Create account of {instance_id} at Sandbox...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id="unusedMerchantEbicsPartnerId",
- ebics_user_id=f"unused{instance_id}EbicsUserId",
- person_name=f"Shop Owner of {instance_id}",
- bank_account_name=f"sandbox-account-{instance_id.lower()}",
- password=ALL_INSTANCES_BANK_PASSWORD,
- is_public=instance.get("isPublic")
- )
- print(" OK")
- print_nn("Create Customer account at Sandbox...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id="unusedCustomerEbicsPartnerId",
- ebics_user_id="unusedCustomerEbicsUserId",
- person_name="Customer Person",
- bank_account_name=CUSTOMER_BANK_ACCOUNT,
- password=CUSTOMER_BANK_PASSWORD
- )
- print(" OK")
- print_nn("Make Nexus superuser ...")
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-nexus", "superuser",
- EXCHANGE_NEXUS_USERNAME,
- "--password", EXCHANGE_NEXUS_PASSWORD
- ],
- env=get_nexus_server_env(
- NEXUS_DB_FILE,
- NEXUS_URL
- ),
- custom_name="nexus-superuser",
- ).run()
- print(" OK")
-
- print_nn("Launching Nexus...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-nexus.service"])
- if not is_serving(NEXUS_URL):
- fail(f"Nexus did not start correctly")
- print(" OK")
-
- print_nn("Create Exchange account at Nexus...")
- prepare_nexus_account(
- ebics_url=EBICS_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id=EXCHANGE_EBICS_PARTNER_ID,
- ebics_user_id=EXCHANGE_EBICS_USER_ID,
- bank_connection_name=EXCHANGE_BANK_CONNECTION,
- bank_account_name_sandbox=EXCHANGE_BANK_ACCOUNT_SANDBOX,
- bank_account_name_nexus=EXCHANGE_BANK_ACCOUNT_NEXUS,
- env=get_nexus_cli_env(
- EXCHANGE_NEXUS_USERNAME,
- EXCHANGE_NEXUS_PASSWORD,
- NEXUS_URL
- )
- )
- print(" OK")
-
- print_nn("Create Taler facade ...")
- Command(
- [
- f"{TALER_PREFIX}/bin/libeufin-cli", "facades",
- "new-taler-wire-gateway-facade",
- "--currency", CURRENCY,
- "--facade-name", EXCHANGE_FACADE_NAME,
- EXCHANGE_BANK_CONNECTION,
- EXCHANGE_BANK_ACCOUNT_NEXUS
- ],
- env=get_nexus_cli_env(
- EXCHANGE_NEXUS_USERNAME,
- EXCHANGE_NEXUS_PASSWORD,
- NEXUS_URL
- ),
- custom_name="create-taler-facade",
- ).run()
- print(" OK")
- try:
- response = requests.get(
- NEXUS_URL + "/facades",
- auth=requests.auth.HTTPBasicAuth(
- EXCHANGE_NEXUS_USERNAME,
- EXCHANGE_NEXUS_PASSWORD
- )
- )
- response.raise_for_status()
- except Exception as error:
- fail(error)
- FACADE_URL = response.json().get("facades")[0].get("baseUrl")
- print_nn("Set suggested exchange at Sandbox...")
- Command([
- f"{TALER_PREFIX}/bin/libeufin-sandbox",
- "default-exchange",
- get_link('/exchange/'),
- EXCHANGE_PAYTO],
- env={
- "PATH": os.environ["PATH"],
- "LIBEUFIN_SANDBOX_DB_CONNECTION": f"jdbc:sqlite:{SANDBOX_DB_FILE}"
- }).run()
- print(" OK")
-
- # Point the exchange to the facade.
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-config",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
-            "-o", "wire_gateway_auth_method",
- "-V", "basic"
- ],
- custom_name="specify-wire-gateway-auth-method",
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-config",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
-            "-o", "wire_gateway_url",
- "-V", FACADE_URL
- ],
- custom_name="specify-facade-url",
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-config",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
-            "-o", "username",
- "-V", EXCHANGE_NEXUS_USERNAME
- ],
- custom_name="specify-username-for-facade",
- ).run()
- Command(
- [
- f"{TALER_PREFIX}/bin/taler-config",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
-            "-o", "password",
- "-V", EXCHANGE_NEXUS_PASSWORD
- ],
- custom_name="specify-password-for-facade",
- ).run()
-
- ## Step 6: Set up merchant
-
- print_nn("Reset and init merchant database...")
- Command([
- f"{TALER_PREFIX}/bin/taler-merchant-dbinit",
- "-c", CFG_OUTDIR / "taler.conf",
- "--reset"
- ]).run()
- print(" OK")
-
- def ensure_instance(
- currency,
- instance_id,
- backend_url,
- bank_hostname,
- wire_method,
- auth_token
- ):
- auth_header = {"Authorization": f"Bearer {auth_token}"}
- resp = requests.get(
- urljoin_nodrop(backend_url, f"management/instances/{instance_id}"),
- headers = auth_header
- )
- bankaccount_info = get_sandbox_account_info(
- SANDBOX_URL,
- f"sandbox-account-{instance_id.lower()}",
- ALL_INSTANCES_BANK_PASSWORD
- )
- req = dict(
- id=instance_id,
- name=f"Name of '{instance_id}'",
- payto_uris=[bankaccount_info["paytoUri"]],
- address=dict(),
- jurisdiction=dict(),
- default_max_wire_fee=f"{currency}:1",
- default_wire_fee_amortization=3,
- default_max_deposit_fee=f"{currency}:1",
- default_wire_transfer_delay=dict(d_us=0),
- default_pay_delay=dict(d_us=24*60*60*1000000),
- auth=dict(method="token", token=auth_token),
- )
- http_method = requests.post
- endpoint = "management/instances"
-
- # Instance exists, patching it.
- if resp.status_code == 200:
- print(f"Patching instance '{instance_id}'")
- http_method = requests.patch
- endpoint = f"management/instances/{instance_id}"
-
- resp = http_method(
- urljoin_nodrop(backend_url, endpoint),
- json=req,
- headers = auth_header
- )
- if resp.status_code < 200 or resp.status_code >= 300:
- print(f"Backend responds: {resp.status_code}/{resp.text}")
- fail(f"Could not create (or patch) instance '{instance_id}'")
-
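- # The merchant backend is first started from the unit that exports
- # TALER_MERCHANT_TOKEN, so the management API accepts this bootstrap
- # token while the instances are created; afterwards the backend is
- # restarted without the token in its environment.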
- print_nn(f"Start merchant (with TALER_MERCHANT_TOKEN into the env)...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend-token.service"], check=True)
- if not is_serving(REV_PROXY_URL + "/merchant-backend/config"):
- fail("Merchant backend did not start correctly.")
- print(" OK")
- print_nn("Give default instance a bank account...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id="unusedMerchantEbicsPartnerId",
- ebics_user_id=f"unusedDefaultInstanceEbicsUserId",
- person_name=f"Shop Owner of default instance",
- bank_account_name="sandbox-account-default",
- password=ALL_INSTANCES_BANK_PASSWORD
- )
- print(" OK")
- ensure_instance(
- currency=CURRENCY,
- instance_id="default",
- backend_url = REV_PROXY_URL + "/merchant-backend",
- bank_hostname = REV_PROXY_NETLOC + "/sandbox",
- wire_method = WIRE_METHOD,
- auth_token=FRONTENDS_API_TOKEN
- )
- print_nn("Stopping the merchant with TALER_MERCHANT_TOKEN into the env...")
- subprocess.run(["systemctl", "--user", "stop", "taler-local-merchant-backend-token.service"], check=True)
- print(" OK")
- print_nn("Restarting the merchant WITHOUT the auth-token in the env...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend.service"], check=True)
- if not is_serving(REV_PROXY_URL + "/merchant-backend/config"):
- fail("Merchant backend did not restart correctly.")
- print(" OK")
-
- for instance in INSTANCES:
- instance_id = instance["name"]
- print_nn(f"Creating the {instance_id} instance...")
- ensure_instance(
- currency=CURRENCY,
- instance_id=instance_id,
- backend_url = REV_PROXY_URL + "/merchant-backend",
- bank_hostname = REV_PROXY_NETLOC + "/sandbox",
- wire_method = WIRE_METHOD,
- auth_token=FRONTENDS_API_TOKEN
- )
- print(" OK")
- print_nn("Creating tip reserve...")
- create_tip_reserve()
- print(" OK")
- # 1 second to let Nexus read the payment from Sandbox,
- # plus 1 second to let the exchange wirewatch pick it
- # up from Nexus.
- print_nn("Sleep 2 seconds to let the tip reserve settle...")
- time.sleep(2)
- print(" OK")
-
- # Configure Sync.
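- # Sync (the wallet backup service) only needs a fresh database here;
- # its HTTP service is started by the launch command.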
- print_nn("Reset and init Sync DB..")
- Command([
- f"{TALER_PREFIX}/bin/sync-dbinit",
- "-c", CFG_OUTDIR / "sync.conf",
- "--reset"]
- ).run()
- print(" OK")
- subprocess.run(
- ["systemctl", "--user", "stop", "taler-local-*.service"],
- check=True
- )
-
-@cli.command()
-def launch():
- subprocess.run(["systemctl", "--user", "start", "taler-local-port-redirect.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-nginx.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-rsa.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-eddsa.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-cs.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-httpd.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-wirewatch.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-aggregator.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-transfer.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-sync.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-sandbox.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-nexus.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-donations.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-blog.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-survey.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-landing.service"], check=True)
-
- print((
- "\n"
- "Taler launched!\n\n"
- f"Serving {REV_PROXY_URL + '/$service'}\n\n"
- "Services:\n"
- " - landing\n"
- " - exchange\n"
- " - merchant-backend\n"
- " - sandbox\n"
- " - nexus\n"
- " - blog\n"
- " - survey\n"
- " - donations\n"
- ))
-
-@cli.command()
-def stop():
- subprocess.run(["systemctl", "--user", "stop", "taler-local-*.service"], check=True)
-
-if __name__ == "__main__":
- cli()