Diffstat (limited to 'bin')
-rwxr-xr-x  bin/WIP/taler-local                          1683
-rwxr-xr-x  bin/taler-deployment                          838
-rwxr-xr-x  bin/taler-deployment-arm                       11
-rwxr-xr-x  bin/taler-deployment-auditor                   23
-rwxr-xr-x  bin/taler-deployment-auth-token                36
-rwxr-xr-x  bin/taler-deployment-config-generate          280
-rwxr-xr-x  bin/taler-deployment-config-generate-sepa     281
-rwxr-xr-x  bin/taler-deployment-config-instances         237
-rwxr-xr-x  bin/taler-deployment-config-instances-iban    163
-rwxr-xr-x  bin/taler-deployment-config-tips               24
-rwxr-xr-x  bin/taler-deployment-dbstart                   24
-rwxr-xr-x  bin/taler-deployment-prepare                  273
-rwxr-xr-x  bin/taler-deployment-prepare-with-eufin       418
-rwxr-xr-x  bin/taler-deployment-restart                   19
-rwxr-xr-x  bin/taler-deployment-restart-with-eufin        19
-rwxr-xr-x  bin/taler-deployment-start                     43
-rwxr-xr-x  bin/taler-deployment-start-with-eufin          44
-rwxr-xr-x  bin/taler-deployment-stop                      12
-rwxr-xr-x  bin/taler-log-adapter                          66
-rw-r--r--  bin/taler_urls.py                              56
20 files changed, 0 insertions, 4550 deletions
diff --git a/bin/WIP/taler-local b/bin/WIP/taler-local
deleted file mode 100755
index e38a58d..0000000
--- a/bin/WIP/taler-local
+++ /dev/null
@@ -1,1683 +0,0 @@
-#!/usr/bin/env python3
-
-# This file is part of GNU Taler.
-#
-# GNU Taler is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# GNU Taler is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with GNU Taler. If not, see <https://www.gnu.org/licenses/>.
-
-import signal
-import socket
-import shutil
-import atexit
-import click
-import types
-import os
-import sys
-import os.path
-import subprocess
-import time
-import random
-import logging
-import json
-from os import listdir
-from os.path import isdir, join
-from pathlib import Path
-from dataclasses import dataclass
-from typing import List, Callable
-from shutil import copy
-from multiprocessing import Process
-from string import ascii_letters, ascii_uppercase
-from sys import exit
-from urllib.parse import urljoin, quote
-from os import remove
-import requests
-from collections import OrderedDict
-import errno
-from pathlib import Path
-from subprocess import Popen, DEVNULL, PIPE
-from datetime import datetime
-from requests_unixsocket import Session
-from flask import Flask, request, Response
-from werkzeug.datastructures import Headers
-from werkzeug.exceptions import HTTPException
-
-
-TALER_ROOT_DIR = Path.home() / ".taler"
-TALER_PREFIX = Path.home() / ".local"
-
-# Print No Newline.
-def print_nn(msg):
- print(msg, end="")
- sys.stdout.flush()
-
-@dataclass
-class Repo:
- name: str
- url: str
- deps: List[str]
- builder: Callable[["Repo", Path], None]
-
-@click.group()
-def cli():
- pass
-
-def split_repos_list(repos):
- return [repo for repo in repos.split(",") if repo != ""]
-
-def update_checkout(r: Repo, p: Path):
- """Clean the repository's working directory and
-    update it to match the latest version of the upstream branch
- that we are tracking."""
- subprocess.run(["git", "-C", str(p), "clean", "-fdx"], check=True)
- subprocess.run(["git", "-C", str(p), "fetch"], check=True)
- subprocess.run(["git", "-C", str(p), "reset"], check=True)
- res = subprocess.run(
- [
- "git",
- "-C",
- str(p),
- "rev-parse",
- "--abbrev-ref",
- "--symbolic-full-name",
- "@{u}",
- ],
- stderr=subprocess.DEVNULL,
- stdout=subprocess.PIPE,
- encoding="utf-8",
- )
- if res.returncode != 0:
- ref = "HEAD"
- else:
- ref = res.stdout.strip("\n ")
- print(f"resetting {r.name} to ref {ref}")
- subprocess.run(["git", "-C", str(p), "reset", "--hard", ref], check=True)
-
-
-def default_configure(*extra):
- extra_list = list(extra)
- subprocess.run(["./configure", f"--prefix={TALER_PREFIX}"] + extra_list, check=True)
-
-def pyconfigure(*extra):
- """For python programs, --prefix doesn't work."""
- subprocess.run(["./configure"] + list(extra), check=True)
-
-def build_libeufin(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_libmicrohttpd(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- # Debian gnutls packages are too old ...
- default_configure("--with-gnutls=/usr/local")
- subprocess.run(["make"], check=True)
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_gnunet(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- "--disable-documentation",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_exchange(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-gnunet={pfx}",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_wallet(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_twister(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-exchange={pfx}",
- f"--with-gnunet={pfx}",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_merchant(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-exchange={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_sync(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-exchange={pfx}",
- f"--with-merchant={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_anastasis(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / ".local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-exchange={pfx}",
- f"--with-merchant={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_demos(r, p):
- update_checkout(r, p)
- pfx = Path.home() / ".local"
- pyconfigure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_backoffice(r, p):
- update_checkout(r, p)
-    subprocess.run(["./bootstrap"], check=True)
-    subprocess.run(["./configure"], check=True)
-    subprocess.run(["make", "build-single"], check=True)
- (p / "taler-buildstamp").touch()
-
-repos = {
- "libmicrohttpd": Repo(
- "libmicrohttpd",
- "git://git.gnunet.org/libmicrohttpd.git",
- [],
- build_libmicrohttpd,
- ),
- "gnunet": Repo(
- "gnunet",
- "git://git.gnunet.org/gnunet.git",
- ["libmicrohttpd"],
- build_gnunet
- ),
- "exchange": Repo(
- "exchange",
- "git://git.taler.net/exchange",
- ["gnunet", "libmicrohttpd"],
- build_exchange,
- ),
- "merchant": Repo(
- "merchant",
- "git://git.taler.net/merchant",
- ["exchange","libmicrohttpd","gnunet"],
- build_merchant,
- ),
- "sync": Repo(
- "sync",
- "git://git.taler.net/sync",
- ["exchange",
- "merchant",
- "gnunet",
- "libmicrohttpd"],
- build_sync,
- ),
- "anastasis": Repo(
- "anastasis",
- "git://git.taler.net/anastasis",
- ["exchange",
- "merchant",
- "libmicrohttpd",
- "gnunet"],
- build_anastasis,
- ),
- "wallet-core": Repo(
- "wallet-core",
- "git://git.taler.net/wallet-core",
- [],
- build_wallet,
- ),
- "libeufin": Repo(
- "libeufin",
- "git://git.taler.net/libeufin.git",
- [],
- build_libeufin,
- ),
- "taler-merchant-demos": Repo(
- "taler-merchant-demos",
- "git://git.taler.net/taler-merchant-demos",
- [],
- build_demos,
- ),
- "twister": Repo(
- "twister",
- "git://git.taler.net/twister",
- ["gnunet", "libmicrohttpd"],
- build_twister,
- ),
-}
-
-def get_repos_names() -> List[str]:
- r_dir = TALER_ROOT_DIR / "sources"
- return [el for el in listdir(r_dir) if isdir(join(r_dir, el)) and repos.get(el)]
-
-# Get the installed repositories from the sources directory.
-def load_repos(reposNames) -> List[Repo]:
- return [repos.get(r) for r in reposNames if repos.get(r)]
-
-def update_repos(repos: List[Repo]) -> None:
- for r in repos:
- r_dir = TALER_ROOT_DIR / "sources" / r.name
- subprocess.run(["git", "-C", str(r_dir), "fetch"], check=True)
- res = subprocess.run(
- ["git", "-C", str(r_dir), "status", "-sb"],
- check=True,
- stdout=subprocess.PIPE,
- encoding="utf-8",
- )
- if "behind" in res.stdout:
- print(f"new commits in {r}")
- s = r_dir / "taler-buildstamp"
- if s.exists():
- s.unlink()
-
-def get_stale_repos(repos: List[Repo]) -> List[Repo]:
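-    # A repo is considered stale when it has no buildstamp, or when any of
-    # its dependencies carries a newer buildstamp than the repo's own.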
- timestamps = {}
- stale = []
- for r in repos:
- r_dir = TALER_ROOT_DIR / "sources" / r.name
- s = r_dir / "taler-buildstamp"
- if not s.exists():
- timestamps[r.name] = time.time()
- stale.append(r)
- continue
- ts = timestamps[r.name] = s.stat().st_mtime
- for dep in r.deps:
-            # When 'dep' is not found, it has been
-            # excluded from the compilation.
-            if timestamps.get(dep, 0) > ts:
- stale.append(r)
- break
- return stale
-
-@cli.command()
-@click.option(
- "--without-repos", metavar="WITHOUT REPOS",
-    help="WITHOUT REPOS is an unspaced, comma-separated list \
-of the repositories to _exclude_ from compilation",
- default="")
-@click.option(
- "--only-repos", metavar="ONLY REPOS",
-    help="ONLY REPOS is an unspaced, comma-separated, exclusive list \
-of the repositories to include in the compilation",
- default="")
-def build(without_repos, only_repos) -> None:
-
- """Build the deployment from source."""
-
- if only_repos != "" and without_repos != "":
- print("Either use --only-repos or --without-repos")
- exit(1)
- repos_names = get_repos_names()
- if only_repos != "":
- repos_names = list(filter(
- lambda x: x in split_repos_list(only_repos),
- repos_names
- ))
- if without_repos != "":
- repos_names = list(filter(
- lambda x: x not in split_repos_list(without_repos),
- repos_names
- ))
-
- # Reorder the list of repositories so that the
-    # most fundamental dependencies appear left-most.
- repos_keys = repos.keys()
- sorted_repos = sorted(
- set(repos_keys).intersection(repos_names),
- key=lambda x: list(repos_keys).index(x)
- )
- target_repos = load_repos(sorted_repos) # Get Repo objects
- update_repos(target_repos)
- stale = get_stale_repos(target_repos)
- print(f"found stale repos: {[r.name for r in stale]}")
- for r in stale:
- # Warn, if a dependency is not being built:
- diff = set(r.deps) - set(repos_names)
- if len(diff) > 0:
- print(f"WARNING: those dependencies are not being built: {diff}")
- p = TALER_ROOT_DIR / "sources" / r.name
- os.chdir(str(p))
- r.builder(r, p)
-
-@cli.command()
-@click.option(
- "--repos", "-r",
- metavar="REPOS",
-    help="REPOS is an unspaced, comma-separated list of the repositories to clone.",
- default="libmicrohttpd,gnunet,exchange,merchant,wallet-core,taler-merchant-demos,sync,anastasis,libeufin",
- show_default=True,
-)
-@click.option(
- "--list-repos/--no-list-repos", default=False,
- help="Lists the repositories that were bootstrapped.",
-)
-def bootstrap(list_repos, repos) -> None:
-
- """Clone all the specified repositories."""
-
- if list_repos:
- for repo in get_repos_names():
- print(repo)
- return
-
- # Download the repository.
- def checkout_repos(repos: List[Repo]):
- if len(repos) == 0:
-            print("No repositories can be checked out. Are the names spelled correctly?")
- return
- sources = TALER_ROOT_DIR / "sources"
- for r in repos:
- r_dir = sources / r.name
- if not r_dir.exists():
- r_dir.mkdir(parents=True, exist_ok=True)
- subprocess.run(["git", "-C", str(sources), "clone", r.url], check=True)
-
- reposList = split_repos_list(repos)
- checkout_repos(load_repos(reposList))
-
-# Globals shared across multiple sub-commands:
-# needed to configure and launch the reverse proxy.
-REV_PROXY_HOSTNAME = "localhost"
-REV_PROXY_PORT = "8080"
-REV_PROXY_NETLOC = REV_PROXY_HOSTNAME + ":" + REV_PROXY_PORT
-REV_PROXY_PROTO = "http"
-REV_PROXY_URL = f"{REV_PROXY_PROTO}://{REV_PROXY_NETLOC}"
-UNIX_SOCKETS_DIR = TALER_ROOT_DIR / "sockets"
-LOG_DIR = TALER_ROOT_DIR / "logs"
-# needed to create the customer's bank account and
-# to let them subsequently withdraw via the Access API.
-CUSTOMER_BANK_ACCOUNT = "sandbox-account-customer"
-CUSTOMER_BANK_PASSWORD = "secret"
-# needed along preparation and later to withdraw via
-# the Access API.
-CURRENCY = "EUR"
-
-@cli.command()
-def prepare():
-
- """Generate configuration, run-time blobs, instances, euFin accounts."""
-
- def is_serving(check_url, tries=10):
- for i in range(tries):
- try:
- print_nn(".")
- # Raises if the service is not reachable.
- response = requests.get(
- check_url,
- timeout=1
- )
-                # The reverse proxy may return 500 while the backing
-                # service is still starting; raise_for_status() turns
-                # that into an exception so the loop retries below.
- response.raise_for_status()
- except:
- time.sleep(0.5)
- if i == tries - 1:
- return False
- continue
- break
- return True
-
-
-
- def fail(reason=None):
- if reason:
- print("ERROR:", reason)
- print(f"Logs in {LOG_DIR}")
- exit(1)
-
- def kill(proc):
- proc.terminate()
- proc.wait()
-
- def get_nexus_cli_env(
- username,
- password,
- nexus_url
- ):
- env = os.environ.copy()
- env["LIBEUFIN_NEXUS_USERNAME"] = username
- env["LIBEUFIN_NEXUS_PASSWORD"] = password
- env["LIBEUFIN_NEXUS_URL"] = nexus_url
- return env
-
- def get_sandbox_cli_env(
- username, password
- ):
- env = os.environ.copy()
- env["LIBEUFIN_SANDBOX_USERNAME"] = username
- env["LIBEUFIN_SANDBOX_PASSWORD"] = password
- return env
-
- # Will be extended to include a SANDBOX_ADMIN_TOKEN
- # that will obsolete the 'superuser' flag of ordinary
- # user accounts. Likewise, the client side will be
- # modified to use such token.
- def get_sandbox_server_env(db_file, base_url, admin_password):
- env = os.environ.copy()
- env["LIBEUFIN_SANDBOX_DB_CONNECTION"] = f"jdbc:sqlite:{db_file}"
- env["LIBEUFIN_SANDBOX_BASE_URL"] = base_url
- env["LIBEUFIN_SANDBOX_ADMIN_PASSWORD"] = admin_password
- return env
-
- def get_nexus_server_env(db_file, base_url):
- env = os.environ.copy()
- env["LIBEUFIN_NEXUS_DB_CONNECTION"] = f"jdbc:sqlite:{db_file}"
- env["LIBEUFIN_NEXUS_BASE_URL"] = base_url
- return env
-
- def urljoin_nodrop(a, b):
- a = a + "/" # urljoin will drop extra trailing slashes.
- b = "/".join([x for x in b.split("/") if x != ""]) # remove leading slashes.
- return urljoin(a, b)
-
- def prepare_nexus_account(
- ebics_url,
- ebics_host_id,
- ebics_partner_id,
- ebics_user_id,
- bank_connection_name,
- bank_account_name_sandbox,
- bank_account_name_nexus,
- env
- ):
- # make connection
- Command(
- [
- "libeufin-cli", "connections",
- "new-ebics-connection",
- "--ebics-url", ebics_url,
- "--host-id", ebics_host_id,
- "--partner-id", ebics_partner_id,
- "--ebics-user-id", ebics_user_id,
- bank_connection_name
- ],
- env
- ).run()
- # connect
- Command(
- [
- "libeufin-cli", "connections",
- "connect", bank_connection_name
- ],
- env
- ).run()
- # Import bank account
- Command(
- [
- "libeufin-cli", "connections",
- "download-bank-accounts",
- bank_connection_name
- ],
- env
- ).run()
- Command(
- [
- "libeufin-cli", "connections",
- "import-bank-account",
- "--offered-account-id",
- bank_account_name_sandbox,
- "--nexus-bank-account-id",
- bank_account_name_nexus,
- bank_connection_name
- ],
- env
- ).run()
- # Set background tasks.
- Command(
- [
- "libeufin-cli", "accounts",
- "task-schedule", bank_account_name_nexus,
- "--task-type", "submit",
- "--task-name", "submit-payments-each-second",
- "--task-cronspec", "* * *"
- ],
- env
- ).run()
- Command(
- [
- "libeufin-cli", "accounts",
- "task-schedule", bank_account_name_nexus,
- "--task-type", "fetch",
- "--task-name", "fetch-reports-each-second",
- "--task-cronspec", "* * *",
- "--task-param-level", "report",
- "--task-param-range-type", "latest"
- ],
- env
- ).run()
-
- def get_sandbox_account_info(
- sandbox_url,
- bank_account_label,
- password,
- ):
- customer_env = os.environ.copy()
- customer_env["LIBEUFIN_SANDBOX_USERNAME"] = bank_account_label
- customer_env["LIBEUFIN_SANDBOX_PASSWORD"] = password
- demobank_url = urljoin_nodrop(sandbox_url, "/demobanks/default")
- r = Command([
- "libeufin-cli", "sandbox",
- "--sandbox-url", demobank_url,
- "demobank", "info",
- "--bank-account", bank_account_label],
- env = customer_env,
- capture_stdout=True
- ).run()
- return json.loads(r)
-
- def prepare_sandbox_account(
- sandbox_url,
- ebics_host_id,
- ebics_partner_id,
- ebics_user_id,
- person_name,
- # This value is BOTH a username
- # and a bank account label.
- bank_account_name,
- password
- ):
- demobank_url = urljoin_nodrop(sandbox_url, "/demobanks/default")
- user_env = os.environ.copy()
- user_env["LIBEUFIN_SANDBOX_USERNAME"] = bank_account_name
- user_env["LIBEUFIN_SANDBOX_PASSWORD"] = password
- Command(
- [
- "libeufin-cli", "sandbox",
- "--sandbox-url", demobank_url,
- "demobank", "register"
- ],
- env = user_env
- ).run()
- admin_env = os.environ.copy()
- admin_env["LIBEUFIN_SANDBOX_USERNAME"] = SANDBOX_ADMIN_USERNAME
- admin_env["LIBEUFIN_SANDBOX_PASSWORD"] = SANDBOX_ADMIN_PASSWORD
- Command([
- "libeufin-cli", "sandbox",
- "--sandbox-url", demobank_url,
- "demobank", "new-ebicssubscriber",
- "--host-id", ebics_host_id,
- "--partner-id", ebics_partner_id,
- "--user-id", ebics_user_id,
- "--bank-account", bank_account_name
- ],
- env = admin_env
- ).run()
-
-
- WIRE_METHOD = "iban"
- # euFin URLs
- SANDBOX_URL = REV_PROXY_URL + "/sandbox"
- NEXUS_URL = REV_PROXY_URL + "/nexus"
-
- # Filesystem's paths
- CFG_OUTDIR = TALER_ROOT_DIR / "config"
- TALER_RUNTIME_DIR = TALER_ROOT_DIR / "runtime"
- TALER_DATA_DIR = TALER_ROOT_DIR / "data"
- TALER_UNIT_FILES_DIR = systemd_user_dir = Path.home() / ".config" / "systemd" / "user"
-
- def get_random_iban():
- cc_no_check = 131400 # is "DE00"
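-        # IBAN check digits (ISO 13616): append the country code rendered as
-        # digits plus "00" ("DE00" -> "131400") to the BBAN, take that number
-        # modulo 97, and subtract the remainder from 98.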
- bban = "".join(random.choices("0123456789", k=4))
- check_digits = 98 - (int(f"{bban}{cc_no_check}") % 97)
- return "DE" + (f"0{check_digits}"[-2:]) + bban
-
- # IBANs
- IBAN_EXCHANGE = get_random_iban()
- IBAN_CUSTOMER = get_random_iban()
- IBAN_MERCHANT_DEFAULT = get_random_iban()
- IBAN_MERCHANT_DEMOSHOP = get_random_iban()
-
- # Instances
- INSTANCES = {
- "GNUnet": IBAN_MERCHANT_DEMOSHOP,
- "Taler": IBAN_MERCHANT_DEMOSHOP,
- "Tor": IBAN_MERCHANT_DEMOSHOP,
- "survey": IBAN_MERCHANT_DEMOSHOP,
- "blog": IBAN_MERCHANT_DEMOSHOP
- }
-
- # Credentials / API keys
- EXCHANGE_NEXUS_USERNAME = "exchange-nexus-user"
- EXCHANGE_NEXUS_PASSWORD = "exchange-nexus-password"
- FRONTENDS_API_TOKEN = "secret-token:secret"
- TALER_MERCHANT_TOKEN = "secret-token:secret"
- ALL_INSTANCES_BANK_PASSWORD = "secret"
- EXCHANGE_BANK_ACCOUNT_SANDBOX = "sandbox-account-exchange"
- EXCHANGE_BANK_ACCOUNT_PASSWORD = "secret"
-
- # EBICS
- EBICS_HOST_ID = "ebicsDeployedHost"
- EXCHANGE_EBICS_USER_ID = "exchangeEbicsUserId"
- EXCHANGE_EBICS_PARTNER_ID = "exchangeEbicsPartnerId"
- EBICS_URL = REV_PROXY_URL + "/sandbox/ebicsweb"
-
- # euFin
- EXCHANGE_BANK_ACCOUNT_NEXUS = "exchange-imported-account-nexus"
- EXCHANGE_BANK_CONNECTION = "exchange-ebics-connection"
- NEXUS_DB_FILE = "/tmp/nexus.sqlite"
- SANDBOX_DB_FILE = "/tmp/sandbox.sqlite"
- EXCHANGE_FACADE_NAME = "exchange-taler-facade"
- SANDBOX_ADMIN_USERNAME = "admin"
- SANDBOX_ADMIN_PASSWORD = "secret"
-
- class Command:
- def __init__(
- self, cmd, env=os.environ, log_dir=LOG_DIR,
- custom_name=None, capture_stdout=False
- ):
- if len(cmd) == 0:
- fail("Command to execute was given empty.")
- self.name = custom_name if custom_name else cmd[0]
- self.cmd = cmd
- self.capture_stdout = capture_stdout
- self.log_dir = log_dir
- self.env = env
-
- def run(self):
- self.do()
- return_code = self.handle.wait()
- if return_code != 0:
- fail(f"Command {self.name} failed. Logs in {self.get_log_filename()}")
- self.cleanup()
- if self.capture_stdout:
- return self.handle.communicate()[0].decode("utf-8").rstrip()
-
- def get_log_filename(self):
- return self.log_file.name
-
- def cleanup(self):
- if not self.log_file.closed:
- self.log_file.flush()
- self.log_file.close()
-
- def do(self):
- if not self.log_dir.is_dir():
- os.makedirs(self.log_dir)
- try:
- log_filename = self.log_dir / f"{self.name}.log"
- self.log_file = open(log_filename, "a+")
- except Exception as error:
- fail(f"Could not open log file: {log_filename}: {error}")
- try:
- self.handle = Popen(
- self.cmd, # list
- stdin=DEVNULL,
- stdout=self.log_file if not self.capture_stdout else PIPE,
- stderr=self.log_file,
- env=self.env
- )
- except Exception as error:
- fail(f"Could not execute: {' '.join(self.cmd)}: {error}")
-
- class ConfigFile:
- def __init__(self, filename):
- self.sections = OrderedDict()
- self.filename = filename
-
- def destroy(self):
- del self.sections
- self.sections = OrderedDict()
-
- def cfg_put(self, section_name, key, value):
- s = self.sections[section_name] = self.sections.get(section_name, OrderedDict())
- s[key] = value
-
- def cfg_write(self, outdir):
- if outdir:
- if not os.path.isdir(outdir):
- os.makedirs(outdir)
- fstream = open(os.path.join(outdir, self.filename), "w")
-            else:
-                fstream = sys.stdout
-
- for section_name, section in self.sections.items():
- fstream.write("[" + section_name + "]" + "\n")
- for key, value in section.items():
- fstream.write(key + " = " + value + "\n")
- fstream.write("\n")
- fstream.close()
-
- def config_specify_master_pub(
- filename,
- currency,
- exchange_master_pub
- ):
- Command([
- "taler-config", "-c", filename,
- "-s", "exchange", "-o", "master_public_key",
- "-V", exchange_master_pub
- ]).run()
- Command([
- "taler-config", "-c", filename,
- "-s", f"merchant-exchange-{currency}",
- "-o", "master_key",
- "-V", exchange_master_pub
- ]).run()
-
- # When called, there is no exchange master pub yet.
-    # taler-exchange-offline will produce the key _after_
- # taler.conf is generated. Only after that, we'll
- # specify the master key where it is missing; namely
- # in the merchant backend and exchange HTTP daemon sections.
-
- def config_main(
- filename,
- outdir,
- unix_sockets_dir,
- currency,
- rev_proxy_url,
- wire_method,
- merchant_wire_address,
- exchange_wire_gateway_username,
- exchange_wire_gateway_password,
- frontend_api_key,
- taler_runtime_dir
- ):
- def coin(
- obj,
- currency,
- name,
- value,
- d_withdraw="3 years",
- d_spend="5 years",
- d_legal="10 years",
- f_withdraw="0.01",
- f_deposit="0.01",
- f_refresh="0.01",
- f_refund="0.01",
- rsa_keysize="2048",
- ):
- sec = "coin_" + currency + "_" + name
- obj.cfg_put(sec, "value", currency + ":" + value)
- obj.cfg_put(sec, "duration_withdraw", d_withdraw)
- obj.cfg_put(sec, "duration_spend", d_spend)
- obj.cfg_put(sec, "duration_legal", d_legal)
- obj.cfg_put(sec, "fee_withdraw", currency + ":" + f_withdraw)
- obj.cfg_put(sec, "fee_refresh", currency + ":" + f_refresh)
- obj.cfg_put(sec, "fee_refund", currency + ":" + f_refund)
- obj.cfg_put(sec, "fee_deposit", currency + ":" + f_deposit)
- obj.cfg_put(sec, "rsa_keysize", rsa_keysize)
-
- obj = ConfigFile("taler.conf")
- obj.cfg_put("paths", "TALER_DATA_HOME", str(TALER_DATA_DIR))
- if not taler_runtime_dir.is_dir():
- os.makedirs(taler_runtime_dir)
- obj.cfg_put("paths", "TALER_RUNTIME_DIR", str(taler_runtime_dir))
- obj.cfg_put("taler", "CURRENCY", currency)
- obj.cfg_put("taler", "CURRENCY_ROUND_UNIT", f"{currency}:0.01")
-
- obj.cfg_put("bank", "serve", "uwsgi")
- obj.cfg_put("bank", "uwsgi_serve", "unix")
- obj.cfg_put("bank", "uwsgi_unixpath", str(unix_sockets_dir / "bank.sock"))
- obj.cfg_put("bank", "uwsgi_unixpath_mode", "660")
- obj.cfg_put("bank", "database", "taler")
- obj.cfg_put("bank", "max_debt", "%s:500.0" % currency)
- obj.cfg_put("bank", "max_debt_bank", "%s:1000000000.0" % currency)
- obj.cfg_put("bank", "allow_registrations", "YES")
- obj.cfg_put("bank", "base_url", rev_proxy_url + "/bank/")
- obj.cfg_put("bank", "database", "postgres:///taler")
- obj.cfg_put("bank", "suggested_exchange", rev_proxy_url + "/exchange/")
-
- obj.cfg_put("donations", "serve", "http")
- obj.cfg_put("donations", "http_serve", "unix")
- obj.cfg_put("donations", "http_unixpath", str(unix_sockets_dir / "donations.sock"))
- obj.cfg_put("donations", "http_unixpath_mode", "660")
-
- obj.cfg_put("landing", "serve", "http")
- obj.cfg_put("landing", "http_serve", "unix")
- obj.cfg_put("landing", "http_unixpath", str(unix_sockets_dir / "landing.sock"))
- obj.cfg_put("landing", "http_unixpath_mode", "660")
-
- obj.cfg_put("blog", "serve", "http")
- obj.cfg_put("blog", "http_serve", "unix")
- obj.cfg_put("blog", "http_unixpath", str(unix_sockets_dir / "blog.sock"))
- obj.cfg_put("blog", "http_unixpath_mode", "660")
-
- obj.cfg_put("survey", "serve", "http")
- obj.cfg_put("survey", "http_serve", "unix")
- obj.cfg_put("survey", "http_unixpath", str(unix_sockets_dir / "survey.sock"))
- obj.cfg_put("survey", "http_unixpath_mode", "660")
- obj.cfg_put("survey", "bank_password", "x")
-
- obj.cfg_put("merchant", "serve", "unix")
- obj.cfg_put("merchant", "unixpath", str(unix_sockets_dir / "merchant-backend.sock"))
- obj.cfg_put("merchant", "wire_transfer_delay", "0 s")
- obj.cfg_put("merchant", "default_max_wire_fee", currency + ":" + "0.01")
- obj.cfg_put("merchant", "default_max_deposit_fee", currency + ":" + "0.05")
- obj.cfg_put("merchantdb-postgres", "config", "postgres:///taler")
-
- obj.cfg_put("frontends", "backend", rev_proxy_url + "/merchant-backend/")
- obj.cfg_put(
- "merchant-exchange-{}".format(currency),
- "exchange_base_url", rev_proxy_url + "/exchange/",
- )
- obj.cfg_put(
- "merchant-exchange-{}".format(currency),
- "currency", currency
- )
- obj.cfg_put("auditor", "serve", "unix")
- # FIXME: both below used?
- obj.cfg_put("auditor", "base_url", rev_proxy_url + "/auditor")
- obj.cfg_put("auditor", "auditor_url", rev_proxy_url + "/auditor")
- obj.cfg_put("auditor", "unixpath", str(unix_sockets_dir / "auditor.sock"))
- obj.cfg_put("auditor", "tiny_amount", currency + ":0.01")
-
- obj.cfg_put(
- "taler-exchange-secmod-eddsa",
- "unixpath",
- str(unix_sockets_dir / "exchange-secmod-eddsa.sock")
- )
- obj.cfg_put(
- "taler-exchange-secmod-rsa",
- "unixpath",
- str(unix_sockets_dir / "exchange-secmod-rsa.sock")
- )
- obj.cfg_put("taler-exchange-secmod-rsa", "sm_priv_key",
- "${TALER_DATA_HOME}/taler-exchange-secmod-rsa/secmod-private-key"
- )
- obj.cfg_put("exchange", "base_url", rev_proxy_url + "/exchange/")
- obj.cfg_put("exchange", "serve", "unix")
- obj.cfg_put("exchange", "unixpath", str(unix_sockets_dir / "exchange.sock"))
- obj.cfg_put("exchange", "terms_etag", "0")
- obj.cfg_put("exchange", "terms_dir", "$HOME/.local/share/taler-exchange/tos")
- obj.cfg_put("exchange", "privacy_etag", "0")
- obj.cfg_put("exchange", "privacy_dir", "$HOME/.local/share/taler-exchange/pp")
-
- obj.cfg_put("exchangedb-postgres", "db_conn_str", "postgres:///taler")
- obj.cfg_put("exchangedb-postgres", "config", "postgres:///taler")
- obj.cfg_put("auditordb-postgres", "db_conn_str", "postgres:///taler")
- obj.cfg_put("auditordb-postgres", "config", "postgres:///taler")
- obj.cfg_put("exchange-account-1", "enable_debit", "yes")
- obj.cfg_put("exchange-account-1", "enable_credit", "yes")
- obj.cfg_put("merchant-account-merchant", "payto_uri",
- f"payto://{wire_method}/{rev_proxy_url + '/sandbox'}/{merchant_wire_address}"
- )
- obj.cfg_put("merchant-account-merchant",
- "wire_response",
- "${TALER_DATA_HOME}/merchant/wire/merchant.json",
- )
- obj.cfg_put("merchant-account-merchant", "wire_file_mode", "770")
-
- obj.cfg_put("frontends", "backend_apikey", f"{frontend_api_key}")
- coin(obj, currency, "ct_10", "0.10")
- coin(obj, currency, "1", "1")
- coin(obj, currency, "2", "2")
- coin(obj, currency, "5", "5")
- coin(obj, currency, "10", "10")
- coin(obj, currency, "1000", "1000")
- obj.cfg_write(outdir)
- return obj
-
- def config_sync(filename, outdir, unix_sockets_dir, currency, api_key, rev_proxy_url):
- obj = ConfigFile(filename)
- obj.cfg_put("taler", "currency", currency)
- obj.cfg_put("sync", "serve", "unix")
- obj.cfg_put("sync", "unixpath", str(unix_sockets_dir / "sync.sock"))
-        obj.cfg_put("sync", "apikey", f"Bearer {api_key}")
- obj.cfg_put("sync", "annual_fee", f"{currency}:0.1")
- obj.cfg_put("sync", "fulfillment_url", "taler://fulfillment-success/")
-        obj.cfg_put("sync", "payment_backend_url", rev_proxy_url + "/merchant-backend/instances/Taler/")
- obj.cfg_put("syncdb-postgres", "config", f"postgres:///taler")
- obj.cfg_write(outdir)
-
- def config_anastasis(filename, outdir, unix_sockets_dir, currency, rev_proxy_url, api_key):
- obj = ConfigFile(filename)
- obj.cfg_put("taler", "currency", currency)
- obj.cfg_put("anastasis", "serve", "unix")
- obj.cfg_put("anastasis", "business_name", f"GNU Taler Demo Anastasis Provider")
- obj.cfg_put("anastasis", "unixpath", str(unix_sockets_dir / "anastasis.sock"))
- obj.cfg_put("anastasis", "annual_fee", f"{currency}:0")
- obj.cfg_put("anastasis", "question_cost", f"{currency}:0")
- obj.cfg_put("anastasis", "insurance", f"{currency}:0")
- obj.cfg_put("anastasis", "truth_upload_fee", f"{currency}:0")
- obj.cfg_put("anastasis", "fulfillment_url", "taler://fulfillment-success/")
- obj.cfg_put("anastasis", "server_salt", "kreb3ia9dmj43gfa")
- obj.cfg_put("stasis-postgres", "config", f"postgres:///taler")
- obj.cfg_put("anastasis-merchant-backend",
- "payment_backend_url",
-            rev_proxy_url + "/merchant-backend/instances/anastasis/"
- )
-        obj.cfg_put("anastasis-merchant-backend", "api_key", f"Bearer {api_key}")
- obj.cfg_put("authorization-question", "cost", f"{currency}:0")
- obj.cfg_put("authorization-question", "enabled", "yes")
- obj.cfg_write(outdir)
-
- def unit_file_content(description, cmd, env=None):
- executable_name = cmd.split(" ")[0].split("/")[-1]
- content = (
- "[Unit]\n"
- f"Description={description}\n"
- "[Service]\n"
- f"ExecStart={cmd}\n"
- f"StandardOutput=append:{LOG_DIR / executable_name}.log\n"
- f"StandardError=append:{LOG_DIR / executable_name}.log"
- )
- if env:
- content += f"\nEnvironmentFile={env}"
- return content
-
-
- print_nn("Ensure no service is running...")
- if is_serving(REV_PROXY_URL + "/", tries=3):
- fail("Reverse proxy is unexpectedly running!")
- if UNIX_SOCKETS_DIR.is_dir():
- for left_socket in os.listdir(UNIX_SOCKETS_DIR):
- s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- socket_file = str(UNIX_SOCKETS_DIR / left_socket)
- if s.connect_ex(socket_file.encode("utf-8")) == 0:
- fail(f"A service is unexpectedly running and bound to {socket_file}!")
- print(" OK")
-
- print_nn("Remove stale data and config...")
- if TALER_DATA_DIR.exists():
- shutil.rmtree(TALER_DATA_DIR)
- if TALER_RUNTIME_DIR.exists():
- shutil.rmtree(TALER_RUNTIME_DIR)
- if CFG_OUTDIR.exists():
- shutil.rmtree(CFG_OUTDIR)
- print(" OK")
-
- print_nn("Generate preliminary taler.conf...")
- mc = config_main(
- "taler.conf",
- outdir=CFG_OUTDIR,
- unix_sockets_dir=UNIX_SOCKETS_DIR,
- currency=CURRENCY,
- rev_proxy_url=REV_PROXY_URL,
- wire_method=WIRE_METHOD,
- merchant_wire_address=IBAN_MERCHANT_DEFAULT,
- exchange_wire_gateway_username=EXCHANGE_NEXUS_USERNAME,
- exchange_wire_gateway_password=EXCHANGE_NEXUS_PASSWORD,
- frontend_api_key=FRONTENDS_API_TOKEN,
- taler_runtime_dir=TALER_RUNTIME_DIR
- )
- print(" OK")
-
- print_nn("Installing SystemD unit files...")
- if not systemd_user_dir.exists():
- systemd_user_dir.mkdir(parents=True, exist_ok=True)
-
- if not TALER_UNIT_FILES_DIR.exists():
- TALER_UNIT_FILES_DIR.mkdir(parents=True, exist_ok=True)
-
- # Exchange HTTPD unit file.
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-httpd.service", "w") as exchange_unit:
- exchange_unit.write(unit_file_content(
- description = "Taler Exchange HTTP daemon",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-httpd -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-wirewatch.service", "w") as exchange_wirewatch_unit:
- exchange_wirewatch_unit.write(unit_file_content(
- description = "Taler Exchange Wirewatch",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-wirewatch -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-aggregator.service", "w") as exchange_aggregator_unit:
- exchange_aggregator_unit.write(unit_file_content(
- description = "Taler Exchange Aggregator",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-aggregator -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-rsa.service", "w") as exchange_rsa_unit:
- exchange_rsa_unit.write(unit_file_content(
- description = "Taler Exchange RSA security module",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-rsa -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-eddsa.service", "w") as exchange_eddsa_unit:
- exchange_eddsa_unit.write(unit_file_content(
- description = "Taler Exchange EDDSA security module",
- cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-eddsa -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-merchant-backend.service", "w") as merchant_unit:
- merchant_unit.write(unit_file_content(
- description = "Taler Merchant backend",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-httpd -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-merchant-backend-token.service", "w") as merchant_token_unit:
- merchant_token_unit.write(unit_file_content(
- description = "Taler Merchant backend with auth token to allow default instance creation.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-httpd -a {TALER_MERCHANT_TOKEN} -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
- ))
- # Custom Postgres connection.
- if os.environ.get("PGPORT"):
- with open(TALER_UNIT_FILES_DIR / "taler-local-postgres.env", "w") as postgres_env:
- postgres_env.write(f"PGPORT={os.environ.get('PGPORT')}")
-
- # euFin unit files.
- with open(TALER_UNIT_FILES_DIR / "taler-local-sandbox.service", "w") as sandbox_unit:
- sandbox_unit.write(unit_file_content(
- description = "euFin Sandbox",
- cmd = f"{TALER_PREFIX}/bin/libeufin-sandbox serve --with-unix-socket {UNIX_SOCKETS_DIR / 'sandbox.sock'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-sandbox.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-nexus.service", "w") as nexus_unit:
- nexus_unit.write(unit_file_content(
- description = "euFin Nexus",
- cmd = f"{TALER_PREFIX}/bin/libeufin-nexus serve --with-unix-socket {UNIX_SOCKETS_DIR / 'nexus.sock'}",
- env = TALER_UNIT_FILES_DIR / "taler-local-nexus.env"
- ))
- # euFin env files.
- with open(TALER_UNIT_FILES_DIR / "taler-local-sandbox.env", "w") as sandbox_env:
- sandbox_env.write(f"LIBEUFIN_SANDBOX_DB_CONNECTION=jdbc:sqlite:{SANDBOX_DB_FILE}\n")
- sandbox_env.write(f"LIBEUFIN_SANDBOX_ADMIN_PASSWORD={SANDBOX_ADMIN_PASSWORD}")
- with open(TALER_UNIT_FILES_DIR / "taler-local-nexus.env", "w") as nexus_env:
- nexus_env.write(f"LIBEUFIN_NEXUS_DB_CONNECTION=jdbc:sqlite:{NEXUS_DB_FILE}\n")
-
- with open(TALER_UNIT_FILES_DIR / "taler-local-donations.service", "w") as donations_unit:
- donations_unit.write(unit_file_content(
- description = "Donation Website that accepts Taler payments.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos donations -c {CFG_OUTDIR / 'taler.conf'}",
-            env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-blog.service", "w") as blog_unit:
- blog_unit.write(unit_file_content(
- description = "Blog that accepts Taler payments.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos blog -c {CFG_OUTDIR / 'taler.conf'}",
-            env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-survey.service", "w") as survey_unit:
- survey_unit.write(unit_file_content(
- description = "Survey Website awarding tips via Taler.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos survey -c {CFG_OUTDIR / 'taler.conf'}",
-            env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-landing.service", "w") as landing_unit:
- landing_unit.write(unit_file_content(
- description = "Landing Website of Taler demo.",
- cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos landing -c {CFG_OUTDIR / 'taler.conf'}",
-            env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-frontends.env", "w") as frontends_env:
- frontends_env.write((
- f"PATH={os.environ.get('PATH')}\n"
- f"TALER_CONFIG_FILE={CFG_OUTDIR / 'taler.conf'}\n"
- f"TALER_ENV_URL_INTRO={REV_PROXY_URL + '/landing/'}\n"
- f"TALER_ENV_URL_BANK={SANDBOX_URL + '/'}\n"
- f"TALER_ENV_URL_MERCHANT_BLOG={REV_PROXY_URL + '/blog/'}\n"
- f"TALER_ENV_URL_MERCHANT_DONATIONS={REV_PROXY_URL + '/donations/'}\n"
- f"TALER_ENV_URL_MERCHANT_SURVEY={REV_PROXY_URL + '/survey/'}\n"
- ))
- with open(TALER_UNIT_FILES_DIR / "taler-local-nginx.service", "w") as nginx_unit:
- nginx_unit.write(unit_file_content(
- description = "Nginx: reverse proxy for taler-local.",
- cmd = f"nginx -c {CFG_OUTDIR / 'nginx.conf'}",
- ))
- print(" OK")
- print_nn("Reload SystemD...")
- Command(["systemctl", "--user", "daemon-reload"]).run()
- atexit.register(lambda: subprocess.run(
- ["systemctl", "--user", "stop", "taler-local-*.service"],
- check=True
- )
- )
- print(" OK")
- print_nn("Generate exchange's master key...")
- EXCHANGE_MASTER_PUB = Command(
- [
- "taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "setup"
- ],
- capture_stdout=True
- ).run()
- print(" OK")
- print_nn("Specify exchange master pub in taler.conf...")
- config_specify_master_pub(
- CFG_OUTDIR / "taler.conf",
- CURRENCY,
- EXCHANGE_MASTER_PUB
- )
- print(" OK")
- print_nn("Generating sync.conf...")
- config_sync(
- "sync.conf",
- outdir=CFG_OUTDIR,
- unix_sockets_dir=UNIX_SOCKETS_DIR,
- currency=CURRENCY,
- api_key=FRONTENDS_API_TOKEN,
- rev_proxy_url=REV_PROXY_URL
- )
- print(" OK")
- print_nn("Generating anastasis.conf...")
- config_anastasis(
- "anastasis.conf",
- outdir=CFG_OUTDIR,
- unix_sockets_dir=UNIX_SOCKETS_DIR,
- currency=CURRENCY,
- rev_proxy_url=REV_PROXY_URL,
- api_key=FRONTENDS_API_TOKEN
- )
- print(" OK")
- print_nn("Reset and init exchange DB..")
- Command([
- "taler-exchange-dbinit",
- "-c", CFG_OUTDIR / "taler.conf",
- "--reset"]
- ).run()
- print(" OK")
-
- print_nn("Launching the reverse proxy...")
- with open(CFG_OUTDIR / "nginx.conf", "w") as nginx_conf:
- nginx_conf.write((
- f"error_log {LOG_DIR / 'nginx.log'};\n"
- f"pid {TALER_ROOT_DIR / 'nginx.pid'};\n"
- "daemon off;\n"
- "events {}\n"
- "http {\n"
- f"access_log {LOG_DIR / 'nginx.log'};\n"
- "server {\n"
- f"listen {REV_PROXY_PORT};\n"
- "location / {\n"
- "return 200 'Hello, I am Nginx - proxying taler-local';\n"
- "}\n"
- "location ~* ^/(?<component>[a-z\-]+)(/(?<taler_uri>.*))? {\n"
-            f"proxy_pass http://unix:{UNIX_SOCKETS_DIR}/$component.sock:/$taler_uri?$args;\n"
- "proxy_redirect off;\n"
- "proxy_set_header X-Forwarded-Prefix /$component;\n"
- f"proxy_set_header X-Forwarded-Host {REV_PROXY_NETLOC};\n"
- f"proxy_set_header X-Forwarded-Proto {REV_PROXY_PROTO};\n"
- "}\n"
- "}\n"
- "}\n"
- ))
- subprocess.run(["systemctl", "--user", "start", "taler-local-nginx.service"], check=True)
- if not is_serving(REV_PROXY_URL + "/"):
- fail(f"Reverse proxy did not start correctly")
- # Do check.
- print(" OK")
- print_nn("Launching the exchange RSA helper...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-rsa.service"])
- print(" OK")
- print_nn("Launching the exchange EDDSA helper...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-eddsa.service"])
- print(" OK")
- print_nn("Launching the exchange...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-httpd.service"])
- if not is_serving(REV_PROXY_URL + "/exchange/"):
- fail(f"Exchange did not start correctly.")
- print(" OK")
- print_nn("exchange-offline: signing key material...")
- Command([
- "taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "download", "sign", "upload"
- ]).run()
- print(" OK")
- # Set up wire fees for next 5 years
- NOW = datetime.now()
- YEAR = NOW.year
- print_nn("Setting wire fees for the next 5 years...")
- for year in range(YEAR, YEAR+5):
- Command(
- [
- "taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "wire-fee",
- str(year),
- WIRE_METHOD,
- CURRENCY + ":0.01",
- CURRENCY + ":0.01",
- "upload"
- ],
- custom_name="set-wire-fee"
- ).run()
- print(" OK")
- print_nn("Reset and init auditor DB..")
- Command([
- "taler-auditor-dbinit",
- "-c", CFG_OUTDIR / "taler.conf",
- "--reset"]
- ).run()
- print(" OK")
- print_nn("Add this exchange to the auditor...")
- Command(
- [
- "taler-auditor-exchange",
- "-c", CFG_OUTDIR / "taler.conf",
- "-m", EXCHANGE_MASTER_PUB,
- "-u", REV_PROXY_URL + "/exchange/"
- ],
- ).run()
- print(" OK")
- ## Step 4: Set up euFin
- print_nn("Resetting euFin databases...")
- try:
- remove(SANDBOX_DB_FILE)
- remove(NEXUS_DB_FILE)
- except OSError as error:
- if error.errno != errno.ENOENT:
- raise error
- print(" OK")
-
-    # This step transparently creates a default demobank.
- print_nn("Launching Sandbox...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-sandbox.service"])
- if not is_serving(SANDBOX_URL):
- fail(f"Sandbox did not start correctly.")
- print(" OK")
- print_nn("Make Sandbox EBICS host...")
- Command(
- [
- "libeufin-cli", "sandbox",
- "--sandbox-url", SANDBOX_URL,
- "ebicshost", "create",
- "--host-id", EBICS_HOST_ID,
- ],
- env=get_sandbox_cli_env(
- SANDBOX_ADMIN_USERNAME,
- SANDBOX_ADMIN_PASSWORD,
- ),
- custom_name="sandbox-create-ebicshost",
- ).run()
- print(" OK")
-
- print_nn("Create Exchange account at Sandbox...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id=EXCHANGE_EBICS_PARTNER_ID,
- ebics_user_id=EXCHANGE_EBICS_USER_ID,
- person_name="Exchange Owner",
- bank_account_name=EXCHANGE_BANK_ACCOUNT_SANDBOX,
- password=EXCHANGE_BANK_ACCOUNT_PASSWORD
- )
- print(" OK")
- print_nn("Getting exchange payto-URI from the bank.")
- exchange_bank_account_info = get_sandbox_account_info(
- SANDBOX_URL,
- EXCHANGE_BANK_ACCOUNT_SANDBOX,
- EXCHANGE_BANK_ACCOUNT_PASSWORD
- )
- EXCHANGE_PAYTO = exchange_bank_account_info["paytoUri"]
- print(" OK")
- print_nn("Specify own payto-URI to exchange's configuration..")
- Command([
- "taler-config", "-c", CFG_OUTDIR / 'taler.conf',
- "-s", "exchange-account-1", "-o", "payto_uri", "-V",
- EXCHANGE_PAYTO
- ]).run()
- print(" OK")
- print_nn(f"exchange-offline: enabling {EXCHANGE_PAYTO}...")
- Command([
- "taler-exchange-offline",
- "-c", CFG_OUTDIR / "taler.conf",
- "enable-account", EXCHANGE_PAYTO, "upload"
- ]).run()
- print(" OK")
-
-    # Give each instance a Sandbox account (note: 'default'
-    # won't get one here, as it should typically only manage
-    # other instances).
- for instance_id, iban in INSTANCES.items():
- print_nn(f"Create account of {instance_id} at Sandbox...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id="unusedMerchantEbicsPartnerId",
- ebics_user_id=f"unused{instance_id}EbicsUserId",
- person_name=f"Shop Owner of {instance_id}",
- bank_account_name=f"sandbox-account-{instance_id.lower()}",
- password=ALL_INSTANCES_BANK_PASSWORD
- )
- print(" OK")
- print_nn("Create Customer account at Sandbox...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id="unusedCustomerEbicsPartnerId",
- ebics_user_id="unusedCustomerEbicsUserId",
- person_name="Customer Person",
- bank_account_name=CUSTOMER_BANK_ACCOUNT,
- password=CUSTOMER_BANK_PASSWORD
- )
- print(" OK")
- print_nn("Make Nexus superuser ...")
- Command(
- [
- "libeufin-nexus", "superuser",
- EXCHANGE_NEXUS_USERNAME,
- "--password", EXCHANGE_NEXUS_PASSWORD
- ],
- env=get_nexus_server_env(
- NEXUS_DB_FILE,
- NEXUS_URL
- ),
- custom_name="nexus-superuser",
- ).run()
- print(" OK")
-
- print_nn("Launching Nexus...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-nexus.service"])
- if not is_serving(NEXUS_URL):
- fail(f"Nexus did not start correctly")
- print(" OK")
-
- print_nn("Create Exchange account at Nexus...")
- prepare_nexus_account(
- ebics_url=EBICS_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id=EXCHANGE_EBICS_PARTNER_ID,
- ebics_user_id=EXCHANGE_EBICS_USER_ID,
- bank_connection_name=EXCHANGE_BANK_CONNECTION,
- bank_account_name_sandbox=EXCHANGE_BANK_ACCOUNT_SANDBOX,
- bank_account_name_nexus=EXCHANGE_BANK_ACCOUNT_NEXUS,
- env=get_nexus_cli_env(
- EXCHANGE_NEXUS_USERNAME,
- EXCHANGE_NEXUS_PASSWORD,
- NEXUS_URL
- )
- )
- print(" OK")
-
- print_nn("Create Taler facade ...")
- Command(
- [
- "libeufin-cli", "facades",
- "new-taler-wire-gateway-facade",
- "--currency", CURRENCY,
- "--facade-name", EXCHANGE_FACADE_NAME,
- EXCHANGE_BANK_CONNECTION,
- EXCHANGE_BANK_ACCOUNT_NEXUS
- ],
- env=get_nexus_cli_env(
- EXCHANGE_NEXUS_USERNAME,
- EXCHANGE_NEXUS_PASSWORD,
- NEXUS_URL
- ),
- custom_name="create-taler-facade",
- ).run()
- print(" OK")
- try:
- response = requests.get(
- NEXUS_URL + "/facades",
- auth=requests.auth.HTTPBasicAuth(
- EXCHANGE_NEXUS_USERNAME,
- EXCHANGE_NEXUS_PASSWORD
- )
- )
- response.raise_for_status()
- except Exception as error:
- fail(error)
- FACADE_URL=response.json().get("facades")[0].get("baseUrl")
-
- print_nn("Set suggested exchange at Sandbox...")
- Command([
- "libeufin-sandbox",
- "default-exchange",
- REV_PROXY_URL + "/exchange/",
- EXCHANGE_PAYTO],
- env={
- "PATH": os.environ["PATH"],
- "LIBEUFIN_SANDBOX_DB_CONNECTION": f"jdbc:sqlite:{SANDBOX_DB_FILE}"
- }).run()
- print(" OK")
-
- # Point the exchange to the facade.
- Command(
- [
- "taler-config",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
-            "-o", "wire_gateway_auth_method",
- "-V", "basic"
- ],
- custom_name="specify-wire-gateway-auth-method",
- ).run()
- Command(
- [
- "taler-config",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
-            "-o", "wire_gateway_url",
- "-V", FACADE_URL
- ],
- custom_name="specify-facade-url",
- ).run()
- Command(
- [
- "taler-config",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
-            "-o", "username",
- "-V", EXCHANGE_NEXUS_USERNAME
- ],
- custom_name="specify-username-for-facade",
- ).run()
- Command(
- [
- "taler-config",
- "-c", CFG_OUTDIR / "taler.conf",
- "-s", "exchange-accountcredentials-1",
-            "-o", "password",
- "-V", EXCHANGE_NEXUS_PASSWORD
- ],
- custom_name="specify-password-for-facade",
- ).run()
-
- ## Step 6: Set up merchant
-
- print_nn("Reset and init merchant database...")
- Command([
- "taler-merchant-dbinit",
- "-c", CFG_OUTDIR / "taler.conf",
- "--reset"
- ]).run()
- print(" OK")
-
- def ensure_instance(
- currency, instance_id,
- backend_url, bank_hostname,
- wire_method, merchant_wire_address,
- auth_token
- ):
- auth_header = {"Authorization": f"Bearer {auth_token}"}
- resp = requests.get(
- urljoin_nodrop(backend_url, f"management/instances/{instance_id}"),
- headers = auth_header
- )
- bankaccount_info = get_sandbox_account_info(
- SANDBOX_URL,
- f"sandbox-account-{instance_id.lower()}",
- ALL_INSTANCES_BANK_PASSWORD
- )
- req = dict(
- id=instance_id,
- name=f"Name of '{instance_id}'",
- payto_uris=[bankaccount_info["paytoUri"]],
- address=dict(),
- jurisdiction=dict(),
- default_max_wire_fee=f"{currency}:1",
- default_wire_fee_amortization=3,
- default_max_deposit_fee=f"{currency}:1",
- default_wire_transfer_delay=dict(d_ms="forever"),
- default_pay_delay=dict(d_ms="forever"),
- auth=dict(method="token", token=auth_token),
- )
- http_method = requests.post
- endpoint = "management/instances"
-
- # Instance exists, patching it.
- if resp.status_code == 200:
- print(f"Patching instance '{instance_id}'")
- http_method = requests.patch
- endpoint = f"management/instances/{instance_id}"
-
- resp = http_method(
- urljoin_nodrop(backend_url, endpoint),
- json=req,
- headers = auth_header
- )
- if resp.status_code < 200 or resp.status_code >= 300:
- print(f"Backend responds: {resp.status_code}/{resp.text}")
- fail(f"Could not create (or patch) instance '{instance_id}'")
-
-    print_nn("Starting merchant (with TALER_MERCHANT_TOKEN in the env)...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend-token.service"], check=True)
- if not is_serving(REV_PROXY_URL + "/merchant-backend/config"):
- fail(
- f"Merchant backend did not start correctly.",
- )
- print(" OK")
- print_nn("Give default instance a bank account...")
- prepare_sandbox_account(
- sandbox_url=SANDBOX_URL,
- ebics_host_id=EBICS_HOST_ID,
- ebics_partner_id="unusedMerchantEbicsPartnerId",
- ebics_user_id=f"unusedDefaultInstanceEbicsUserId",
- person_name=f"Shop Owner of default instance",
- bank_account_name="sandbox-account-default",
- password=ALL_INSTANCES_BANK_PASSWORD
- )
- print(" OK")
- ensure_instance(
- currency=CURRENCY,
- instance_id="default",
- backend_url = REV_PROXY_URL + "/merchant-backend",
- bank_hostname = REV_PROXY_NETLOC + "/sandbox",
- wire_method = "iban",
- merchant_wire_address = IBAN_MERCHANT_DEFAULT,
- auth_token=FRONTENDS_API_TOKEN
- )
-
- print_nn("Restarting the merchant WITHOUT the auth-token in the env...")
- subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend.service"], check=True)
- if not is_serving(REV_PROXY_URL + "/merchant-backend/config"):
- # check_running logs errors already.
-        fail("Merchant backend did not restart correctly.")
- print(" OK")
-
- for instance_id, iban in INSTANCES.items():
- print_nn(f"Creating the {instance_id} instance...")
- ensure_instance(
- currency=CURRENCY,
- instance_id=instance_id,
- backend_url = REV_PROXY_URL + "/merchant-backend",
- bank_hostname = REV_PROXY_NETLOC + "/sandbox",
- wire_method = "iban",
- merchant_wire_address = iban,
- auth_token=FRONTENDS_API_TOKEN
- )
- print(" OK")
-
-@cli.command()
-def launch():
- subprocess.run(["systemctl", "--user", "start", "taler-local-nginx.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-rsa.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-secmod-eddsa.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-httpd.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-wirewatch.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-exchange-aggregator.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-merchant-backend.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-sandbox.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-nexus.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-donations.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-blog.service"], check=True)
- subprocess.run(["systemctl", "--user", "start", "taler-local-survey.service"], check=True)
-
-@cli.command()
-def stop():
- subprocess.run(["systemctl", "--user", "stop", "taler-local-*.service"], check=True)
-
-@cli.command()
-def withdraw():
- print_nn("Create withdrawal operation...")
- resp = requests.post(REV_PROXY_URL +
- f"/sandbox/demobanks/default/access-api/accounts/{CUSTOMER_BANK_ACCOUNT}/withdrawals",
- json = dict(amount=CURRENCY + ":5"),
- auth = requests.auth.HTTPBasicAuth(CUSTOMER_BANK_ACCOUNT, CUSTOMER_BANK_PASSWORD)
- )
- try:
- resp.raise_for_status()
- except Exception as error:
- print("Could not create withdrawal")
- print(error)
- exit(1)
- withdrawal_id = resp.json()["withdrawal_id"]
- withdraw_uri = resp.json()["taler_withdraw_uri"]
- print(" OK")
- print("Let wallet specify the reserve public key at the bank...")
- # Let wallet give the reserve public key to the bank.
- subprocess.run(["taler-wallet-cli", "handle-uri", withdraw_uri], check=True)
- # Let the user confirm the withdrawal operation and
- # get the bank wire the funds.
- print_nn("Confirm withdrawal operation at the bank...")
- resp = requests.post(REV_PROXY_URL +
- f"/sandbox/demobanks/default/access-api/accounts/{CUSTOMER_BANK_ACCOUNT}/withdrawals/{withdrawal_id}/confirm",
- auth = requests.auth.HTTPBasicAuth(CUSTOMER_BANK_ACCOUNT, CUSTOMER_BANK_PASSWORD)
- )
- try:
- resp.raise_for_status()
- except Exception as error:
-        print("Could not confirm withdrawal")
- print(error)
- exit(1)
- print(" OK")
- print("Let wallet complete all pending operations")
- subprocess.run(["taler-wallet-cli", "handle-uri", withdraw_uri], check=True)
- subprocess.run(["taler-wallet-cli", "run-until-done"], check=True)
-
-if __name__ == "__main__":
- cli()
diff --git a/bin/taler-deployment b/bin/taler-deployment
deleted file mode 100755
index 33d7573..0000000
--- a/bin/taler-deployment
+++ /dev/null
@@ -1,838 +0,0 @@
-#!/usr/bin/env python3
-
-# This file is part of GNU Taler.
-#
-# GNU Taler is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# GNU Taler is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with GNU Taler. If not, see <https://www.gnu.org/licenses/>.
-
-import click
-import types
-import os
-import sys
-import os.path
-import subprocess
-import time
-import random
-from pathlib import Path
-from dataclasses import dataclass
-from typing import List, Callable
-from shutil import copy
-from taler_urls import get_urls
-from string import ascii_letters, ascii_uppercase
-
-activate_template = """\
-#!/bin/bash
-
-# Generated by taler-deployment-bootstrap
-
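-# Export the deployment PATH only if $HOME/deployment/bin is not already part of $PATH.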
-if ! echo $PATH | tr ":" '\\n' | grep "$HOME/deployment/bin" > /dev/null
-then
- export PATH="{curr_path}"
-fi
-export PYTHONUSERBASE=$HOME/local
-export TALER_BOOTSTRAP_TIMESTAMP={timestamp}
-export TALER_CONFIG_CURRENCY={currency}
-export TALER_ENV_NAME={envname}
-export TALER_ENV_URL_INTRO="{landing}"
-export TALER_ENV_URL_BANK="{bank}"
-export TALER_ENV_URL_MERCHANT_BLOG="{blog}"
-export TALER_ENV_URL_MERCHANT_DONATIONS="{donations}"
-export TALER_ENV_URL_MERCHANT_SURVEY="{survey}"
-export TALER_ENV_URL_AUDITOR="{auditor}"
-export TALER_ENV_URL_BACKOFFICE="{backoffice}"
-export TALER_ENV_URL_SYNC="{sync}"
-export TALER_ENV_MERCHANT_BACKEND="{merchant_backend}"
-export TALER_COVERAGE={coverage}
-export TALER_ENV_FRONTENDS_APITOKEN="$(cat ~/merchant_auth_token)"
-export LIBEUFIN_ENV_SANDBOX_ADMIN_PASSWORD="$(cat ~/libeufin_admin_password)"
-export LIBEUFIN_NEXUS_DB_CONNECTION="jdbc:sqlite:$HOME/nexus.sqlite"
-export LIBEUFIN_SANDBOX_DB_CONNECTION="jdbc:sqlite:$HOME/sandbox.sqlite"
-export LIBEUFIN_SANDBOX_HOSTNAME=bank.{envname}.taler.net/eufin/sandbox
-export LIBEUFIN_SANDBOX_CURRENCY={currency}
-"""
-
-@dataclass
-class Repo:
- name: str
- url: str
- deps: List[str]
- builder: Callable[["Repo", Path], None]
-
-
-class EnvInfo:
- def __init__(self, name, repos, cfg):
- self.name = name
- self.repos = []
- for r in repos:
- tag = getattr(cfg, "tag_" + r.name.replace("-", "_"))
- # This check skips all the components that are
- # expected to be already installed; typically via
- # a distribution package manager.
- if not tag:
- continue
- self.repos.append(r)
-
-@click.group()
-def cli():
- pass
-
-
-# map from environment name to currency
-currmap = {
- "test": "TESTKUDOS",
- "docs-builder": "TESTKUDOS",
- "coverage": "TESTKUDOS",
- "integrationtest": "TESTKUDOS",
- "demo": "KUDOS",
- "int": "INT",
- "euro": "EUR",
- "chf": "CHF",
- "auditor-reporter-test": "TESTKUDOS",
- "auditor-reporter-demo": "KUDOS",
- "local": "LOCALKUDOS",
- "tanker": "SEK"
-}
-
-def generate_apitoken():
- return "secret-token:" + ''.join(random.choices(ascii_letters + ascii_uppercase, k=10))
-
-def generate_password():
- return ''.join(random.choices(ascii_letters + ascii_uppercase, k=10))
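-
-# Illustrative outputs (10 random letters each; values below are examples only):
-#   generate_apitoken() -> "secret-token:QxTKwPZbNm"
-#   generate_password() -> "aZLkXuRpQw"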
-
-
-def update_checkout(r: Repo, p: Path):
- """Clean the repository's working directory and
-    update it to match the latest version of the upstream branch
- that we are tracking."""
- subprocess.run(["git", "-C", str(p), "clean", "-fdx"], check=True)
- subprocess.run(["git", "-C", str(p), "fetch"], check=True)
- subprocess.run(["git", "-C", str(p), "reset"], check=True)
- res = subprocess.run(
- [
- "git",
- "-C",
- str(p),
- "rev-parse",
- "--abbrev-ref",
- "--symbolic-full-name",
- "@{u}",
- ],
- stderr=subprocess.DEVNULL,
- stdout=subprocess.PIPE,
- encoding="utf-8",
- )
- if res.returncode != 0:
- ref = "HEAD"
- else:
- ref = res.stdout.strip("\n ")
- print(f"resetting {r.name} to ref {ref}")
- subprocess.run(["git", "-C", str(p), "reset", "--hard", ref], check=True)
-
-
-def default_configure(*extra):
- pfx = Path.home() / "local"
- extra_list = list(extra)
- if int(os.environ.get("TALER_COVERAGE")):
- extra_list.append("--enable-coverage")
- subprocess.run(["./configure", f"--prefix={pfx}"] + extra_list, check=True)
-
-def default_configure_nc(*extra):
- """Variant of default_configure() that does NEVER add --enable-coverage"""
- pfx = Path.home() / "local"
- extra_list = list(extra)
- subprocess.run(["./configure", f"--prefix={pfx}"] + extra_list, check=True)
-
-
-def pyconfigure(*extra):
- """For python programs, --prefix doesn't work."""
- subprocess.run(["./configure"] + list(extra), check=True)
-
-
-def build_libeufin(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure_nc()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_libmicrohttpd(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- # Debian gnutls packages are too old ...
- default_configure("--with-gnutls=/usr/local")
- subprocess.run(["make"], check=True)
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_gnunet(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / "local"
- default_configure(
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- "--disable-documentation",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_exchange(r: Repo, p: Path):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / "local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-gnunet={pfx}",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_wallet(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- default_configure_nc()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_twister(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / "local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-exchange={pfx}",
- f"--with-gnunet={pfx}",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_merchant(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / "local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-exchange={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_sync(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / "local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-exchange={pfx}",
- f"--with-merchant={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_anastasis(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / "local"
- default_configure(
- "CFLAGS=-ggdb -O0",
- "--enable-logging=verbose",
- f"--with-microhttpd={pfx}",
- f"--with-exchange={pfx}",
- f"--with-merchant={pfx}",
- f"--with-gnunet={pfx}",
- "--disable-doc",
- )
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_bank(r, p):
- update_checkout(r, p)
- subprocess.run(["pip3", "install", "poetry"], check=True)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / "local"
- pyconfigure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def build_demos(r, p):
- update_checkout(r, p)
- pfx = Path.home() / "local"
- pyconfigure()
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-def build_backoffice(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"])
- subprocess.run(["./configure"])
- subprocess.run(["make", "build-single"])
- (p / "taler-buildstamp").touch()
-
-def build_docs(r, p):
- update_checkout(r, p)
- subprocess.run(["./bootstrap"], check=True)
- pfx = Path.home() / "local"
- subprocess.run(["make", "install"], check=True)
- (p / "taler-buildstamp").touch()
-
-
-def get_repos(envname):
- """Get a list of repos (topologically sorted) that should be build for the
- given environment"""
- print(f"Loading return repositories for {envname}.", file=sys.stderr)
- if envname in ("demochecker",):
- return []
- if envname in ("docs-builder",):
- return [
- Repo(
- "libmicrohttpd",
- "git://git.gnunet.org/libmicrohttpd.git",
- [],
- build_libmicrohttpd,
- ),
- Repo(
- "docs",
- "git://git.taler.net/docs",
- [],
- build_docs,
- ),
- Repo(
- "wallet-core",
- "git://git.taler.net/wallet-core",
- [],
- build_wallet,
- ),
- Repo(
- "exchange",
- "git://git.taler.net/exchange",
- ["gnunet", "libmicrohttpd"],
- build_exchange,
- ),
- Repo(
- "merchant",
- "git://git.taler.net/merchant",
- ["exchange","libmicrohttpd","gnunet"],
- build_merchant,
- ),
- Repo(
- "sync",
- "git://git.taler.net/sync",
- ["exchange", "merchant","gnunet","libmicrohttpd"],
- build_sync,
- ),
- Repo(
- "anastasis",
- "git://git.taler.net/anastasis",
- ["exchange", "merchant","libmicrohttpd","gnunet"],
- build_anastasis,
- ),
- ]
- if envname in ("int", "coverage", "integrationtest",):
- return [
- Repo(
- "libeufin",
- "git://git.taler.net/libeufin.git",
- [],
- build_libeufin,
- ),
- Repo(
- "libmicrohttpd",
- "git://git.gnunet.org/libmicrohttpd.git",
- [],
- build_libmicrohttpd,
- ),
- Repo(
- "gnunet",
- "git://git.gnunet.org/gnunet.git",
- ["libmicrohttpd"],
- build_gnunet),
- Repo(
- "bank",
- "git://git.taler.net/bank",
- [],
- build_bank
- ),
- Repo(
- "wallet-core",
- "git://git.taler.net/wallet-core",
- [],
- build_wallet,
- ),
- Repo(
- "exchange",
- "git://git.taler.net/exchange",
- ["gnunet", "libmicrohttpd", "wallet-core"],
- build_exchange,
- ),
- Repo(
- "merchant",
- "git://git.taler.net/merchant",
- ["exchange", "libmicrohttpd", "gnunet"],
- build_merchant,
- ),
- Repo(
- "sync",
- "git://git.taler.net/sync",
- ["exchange", "merchant", "gnunet", "libmicrohttpd"],
- build_sync,
- ),
- Repo(
- "anastasis",
- "git://git.taler.net/anastasis",
- ["exchange", "merchant", "gnunet", "libmicrohttpd"],
- build_anastasis,
- ),
- ]
-
- # Note: these are currently not in use!
- if envname in ("euro", "chf"):
- return [
- Repo(
- "libmicrohttpd",
- "git://git.gnunet.org/libmicrohttpd.git",
- [],
- build_libmicrohttpd,
- ),
- Repo(
- "bank",
- "git://git.taler.net/bank",
- [],
- build_bank,
- ),
- Repo(
- "gnunet",
- "git://git.gnunet.org/gnunet.git",
- ["libmicrohttpd"],
- build_gnunet,
- ),
- Repo(
- "exchange",
- "git://git.taler.net/exchange",
- ["gnunet", "libmicrohttpd"],
- build_exchange,
- ),
- Repo(
- "merchant",
- "git://git.taler.net/merchant",
- ["exchange", "libmicrohttpd"],
- build_merchant,
- ),
- Repo(
- "taler-merchant-demos",
- "git://git.taler.net/taler-merchant-demos",
- [],
- build_demos,
- ),
- ]
- if envname in ("tanker", "local", "demo", "int", "test", "auditor-reporter-test", "auditor-reporter-demo"):
- return [
- Repo(
- "wallet-core",
- "git://git.taler.net/wallet-core",
- [],
- build_wallet,
- ),
- Repo(
- "bank",
- "git://git.taler.net/bank",
- [],
- build_bank,
- ),
- Repo(
- "libmicrohttpd",
- "git://git.gnunet.org/libmicrohttpd.git",
- [],
- build_libmicrohttpd,
- ),
- Repo(
- "gnunet",
- "git://git.gnunet.org/gnunet.git",
- ["libmicrohttpd"],
- build_gnunet,
- ),
- Repo(
- "twister",
- "git://git.taler.net/twister",
- ["gnunet", "libmicrohttpd"],
- build_twister,
- ),
- Repo(
- "exchange",
- "git://git.taler.net/exchange",
- ["gnunet", "libmicrohttpd", "twister", "wallet-core"],
- build_exchange,
- ),
- Repo(
- "merchant",
- "git://git.taler.net/merchant",
- ["exchange", "libmicrohttpd", "gnunet"],
- build_merchant,
- ),
- Repo(
- "sync",
- "git://git.taler.net/sync",
- ["exchange", "merchant", "gnunet", "libmicrohttpd"],
- build_sync,
- ),
- Repo(
- "anastasis",
- "git://git.taler.net/anastasis",
- ["gnunet", "libmicrohttpd", "exchange", "merchant"],
-            build_anastasis,
- ),
- Repo(
- "taler-merchant-demos",
- "git://git.taler.net/taler-merchant-demos",
- [],
- build_demos,
- ),
- ]
- raise Exception(f"no repos defined for envname {envname}")
-
-
-def ensure_activated():
- """Make sure that the environment variables have been
- loaded correctly via the ~/activate script"""
- ts = os.environ.get("TALER_BOOTSTRAP_TIMESTAMP")
- if ts is None:
- print("Please do 'source ~/activate' first.", file=sys.stderr)
- sys.exit(1)
- out = subprocess.check_output(
- ["bash", "-c", "source ~/activate; echo $TALER_BOOTSTRAP_TIMESTAMP"],
- encoding="utf-8",
- )
- out = out.strip(" \n")
- if out != ts:
- print(
- f"Please do 'source ~/activate'. Current ts={ts}, new ts={out}",
- file=sys.stderr,
- )
- sys.exit(1)
-
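-# Typical usage from a shell:
-#   $ source ~/activate
-#   $ taler-deployment build
-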
-
-def update_repos(repos: List[Repo]) -> None:
- for r in repos:
- r_dir = Path.home() / "sources" / r.name
- subprocess.run(["git", "-C", str(r_dir), "fetch"], check=True)
- res = subprocess.run(
- ["git", "-C", str(r_dir), "status", "-sb"],
- check=True,
- stdout=subprocess.PIPE,
- encoding="utf-8",
- )
- if "behind" in res.stdout:
- print(f"new commits in {r}")
- s = r_dir / "taler-buildstamp"
- if s.exists():
- s.unlink()
-
-
-def get_stale_repos(repos: List[Repo]) -> List[Repo]:
- timestamps = {}
- stale = []
- for r in repos:
- r_dir = Path.home() / "sources" / r.name
- s = r_dir / "taler-buildstamp"
- if not s.exists():
- timestamps[r.name] = time.time()
- stale.append(r)
- continue
- ts = timestamps[r.name] = s.stat().st_mtime
- for dep in r.deps:
- if timestamps[dep] > ts:
- stale.append(r)
- break
- return stale
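-
-# Example: if gnunet's taler-buildstamp is newer than exchange's (gnunet was
-# rebuilt more recently), exchange is reported as stale because it depends
-# on gnunet.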
-
-
-allowed_envs = (
- "test",
- "int",
- "demo",
- "auditor-reporter-test",
- "auditor-reporter-demo",
- "docs-builder",
- "euro",
- "chf",
- "coverage",
- "integrationtest",
- "local",
- "tanker"
-)
-
-def load_apitoken():
- apitoken_path = Path.home() / "envcfg.py"
- if not os.path.isfile(apitoken_path):
- return None
- with open(apitoken_path, "r") as f:
- return f.readline()
-
-def load_envcfg():
- cfg = types.ModuleType("taler_deployment_cfg")
- envcfg_path = Path.home() / "envcfg.py"
- if not os.path.isfile(envcfg_path):
- return None
- print(f"Loading configuration from {envcfg_path}.", file=sys.stderr)
- cfgtext = envcfg_path.read_text()
- exec(cfgtext, cfg.__dict__)
- return cfg
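-
-# Illustrative ~/envcfg.py (attribute names as read by this script; the
-# values below are examples only):
-#
-#   env = "demo"
-#   tag_exchange = "master"
-#   tag_merchant = "master"
-#   tag_libmicrohttpd = ""  # empty tag: component comes from distro packages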
-
-
-def get_env_info(cfg):
- envname = getattr(cfg, "env")
- if envname not in allowed_envs:
- print(f"env '{envname}' not supported")
- sys.exit(1)
- repos = get_repos(envname)
- return EnvInfo(envname, repos, cfg)
-
-
-@cli.command()
-def build() -> None:
- """Build the deployment from source."""
- ensure_activated()
- cfg = load_envcfg()
- if not cfg:
- print("Please create ~/envcfg.py (template in deployment.git can help)")
-        sys.exit(1)
- env_info = get_env_info(cfg)
- update_repos(env_info.repos)
- stale = get_stale_repos(env_info.repos)
- print(f"found stale repos: {stale}")
- for r in stale:
- p = Path.home() / "sources" / r.name
- os.chdir(str(p))
- r.builder(r, p)
-
-
-@cli.command()
-@click.argument("color", metavar="COLOR", type=click.Choice(["blue", "green"]))
-def switch_demo(color) -> None:
- """Switch deployment color of demo."""
- if os.environ["USER"] != "demo":
- print("Command should be executed as the demo user only.")
- sys.exit(1)
- active_home = Path.home() / "active-home"
- try:
- active_home.unlink()
-    except FileNotFoundError:
- pass
- active_home.symlink_to(f"/home/demo-{color}")
-
-
-# repos does not contain distro-installed components
-def checkout_repos(cfg, repos):
- """Check out repos to the version specified in envcfg.py"""
- home = Path.home()
- sources = home / "sources"
- for r in repos:
- r_dir = home / "sources" / r.name
- if not r_dir.exists():
- r_dir.mkdir(parents=True, exist_ok=True)
- subprocess.run(["git", "-C", str(sources), "clone", r.url], check=True)
- subprocess.run(["git", "-C", str(r_dir), "fetch"], check=True)
- tag = getattr(cfg, "tag_" + r.name.replace("-", "_"))
- subprocess.run(
- ["git", "-C", str(r_dir), "checkout", "-q", "-f", tag, "--"], check=True,
- )
-
-
-@cli.command()
-def sync_repos() -> None:
- """Sync repos with the envcfg.py file."""
- home = Path.home()
- cfg = load_envcfg()
- if not cfg:
- print("Please create ~/envcfg.py (template in deployment.git can help)")
-        sys.exit(1)
- env_info = get_env_info(cfg)
- repos = env_info.repos
- checkout_repos(cfg, repos)
- for r in repos:
- r_dir = home / "sources" / r.name
- subprocess.run(["git", "-C", str(r_dir), "clean", "-fdx"], check=True)
-
-@cli.command()
-def bootstrap() -> None:
- """Bootstrap a GNU Taler deployment."""
- home = Path.home()
- cfg = load_envcfg()
- if not cfg:
- print("Please create ~/envcfg.py (template in deployment.git can help)")
-        sys.exit(1)
- env_info = get_env_info(cfg)
- repos = env_info.repos
- envname = env_info.name
- checkout_repos(cfg,repos)
-
- # Generate $PATH variable that will be set in the activate script.
- local_path = str(Path.home() / "local" / "bin")
- deployment_path = str(Path.home() / "deployment" / "bin")
- path_list = os.environ["PATH"].split(":")
- if local_path not in path_list:
- path_list.insert(0, local_path)
- if deployment_path not in path_list:
- path_list.insert(0, deployment_path)
-
- token_file = Path.home() / "merchant_auth_token"
- if not token_file.is_file():
- with token_file.open("w") as f:
- f.write(generate_apitoken())
- print(f"Token file '{token_file}' created.")
-
- sandbox_admin_password_file = Path.home() / "libeufin_admin_password"
- if not sandbox_admin_password_file.is_file():
- with sandbox_admin_password_file.open("w") as f:
- f.write(generate_password())
- print(f"Libeufin Sandbox admin password file '{sandbox_admin_password_file}' created.")
-
- with (home / "activate").open("w") as f:
- f.write(
- activate_template.format(
- envname=envname,
- timestamp=str(time.time()),
- currency=currmap[envname],
- curr_path=":".join(path_list),
- coverage=1 if envname == "coverage" else 0,
- **get_urls(envname)
- )
- )
- if envname != "local":
- (home / "sockets").mkdir(parents=True, exist_ok=True)
-
- if envname in ("test", "int", "local"):
- (home / "taler-data").mkdir(parents=True, exist_ok=True)
- if envname == "demo":
- setup_service("config-tips.timer")
- create_bb_worker(
- "tips-checker.service", "tips-checker-dir",
- "tips-checker-worker", "tips-checker-pass"
- )
-
- if not (home / "taler-data").exists():
- (home / "taler-data").symlink_to("/home/demo/shared-data")
-
- if envname == "integrationtest":
- create_bb_worker("buildbot-worker-wallet.service", "worker", "wallet-worker", "wallet-pass")
-
- if envname == "test":
- create_bb_worker("buildbot-worker-taler.service", "bb-worker", "test-worker", "test-pass")
- setup_service("config-tips.timer")
-
- elif envname in ("auditor-reporter-test", "auditor-reporter-demo"):
- create_bb_worker("buildbot-worker-auditor.service", "worker", "auditor-worker", "auditor-pass")
- elif envname == "demo-checker":
- create_bb_worker("buildbot-worker-taler-healthcheck.service", "bb-worker", "demo-worker", "demo-pass")
- elif envname == "coverage":
- create_bb_worker("buildbot-worker-lcov.service", "worker", "lcov-worker", "lcov-pass")
-
- www_path = Path.home() / "www"
- www_path.mkdir(exist_ok=True)
- if not os.path.islink(www_path / "merchant"):
- os.symlink(
- Path.home() / "sources" / "merchant" / "coverage_report",
- www_path / "merchant",
- )
- if not os.path.islink(www_path / "exchange"):
- os.symlink(
- Path.home() / "sources" / "exchange" / "coverage_report",
- www_path / "exchange",
- )
- if not os.path.islink(www_path / "sync"):
- os.symlink(
- Path.home() / "sources" / "sync" / "coverage_report",
- www_path / "sync",
- )
-
- print("Bootstrap finished.")
- print("Please source the ~/activate file before proceeding.")
-
-
-def create_bb_worker(systemd_unit, dirname, workername, workerpw):
- home = Path.home()
- bb_dir = home / dirname
- if bb_dir.exists():
- return
- subprocess.run(
- [
- "buildbot-worker",
- "create-worker",
- "--umask=0o22",
- str(bb_dir),
- "localhost:9989",
- workername,
- workerpw,
- ],
- check=True,
- )
-    setup_service(systemd_unit)
-
-def setup_service(systemd_unit):
- sc_path = Path.home() / ".config" / "systemd" / "user"
-    sc_path.mkdir(exist_ok=True, parents=True)
- sc_unit = Path.home() / "deployment" / "systemd-services" / systemd_unit
- copy(sc_unit, sc_path)
-
-    # If a timer was just installed, the related service
- # file needs to be installed now.
- split_filename = systemd_unit.split(".")
- if "timer" == split_filename[-1]:
- copy(Path.home() / "deployment" / "systemd-services" / f"{split_filename[0]}.service", sc_path)
-
- subprocess.run(
- [
- "systemctl",
- "--user",
- "daemon-reload",
- ],
- check=True,
- )
- subprocess.run(
- [
- "systemctl",
- "--user",
- "enable",
- systemd_unit
- ],
- check=True,
- )
- subprocess.run(
- [
- "systemctl",
- "--user",
- "start",
- systemd_unit
- ],
- check=True,
- )
-
-if __name__ == "__main__":
- cli()
diff --git a/bin/taler-deployment-arm b/bin/taler-deployment-arm
deleted file mode 100755
index e1c2112..0000000
--- a/bin/taler-deployment-arm
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-set -eu
-
-export GNUNET_FORCE_LOG="util;;;;WARNING/taler;;;;DEBUG/twister;;;;DEBUG"
-export GNUNET_BASE_CONFIG=$HOME/deployment/taler-arm
-export PATH="$HOME/local/bin":$PATH
-
-ulimit -c $((100 * 1024))
-mkdir -p $HOME/logs
-exec gnunet-arm -c $HOME/deployment/gnunet.conf "$@"
diff --git a/bin/taler-deployment-auditor b/bin/taler-deployment-auditor
deleted file mode 100755
index f90c7fb..0000000
--- a/bin/taler-deployment-auditor
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/bash
-
-# serve landing page via uwsgi
-
-base=$HOME
-
-export PATH="$base/deployment":$PATH
-
-ulimit -c $((100 * 1024))
-
-mkdir -p $HOME/sockets
-
-
-# redirect / to index.html,
-# serve static files from $HOME/auditor
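-# (e.g. a request for /2020/index.html is served from
-# $HOME/auditor/2020/index.html if that file exists, otherwise 404)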
-exec uwsgi \
- --mimefile /etc/mime.types \
- --socket $HOME/sockets/auditor.uwsgi \
- --chmod-socket=660 \
- --route "^/?$ redirect:index.html" \
- --route "^/(.+) addvar:FILE=$HOME/auditor/\$1" \
- --route-if "exists:\${FILE} static:\${FILE}" \
- --route "^/(.+) break:404 not found"
diff --git a/bin/taler-deployment-auth-token b/bin/taler-deployment-auth-token
deleted file mode 100755
index 03c0620..0000000
--- a/bin/taler-deployment-auth-token
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python3
-
-# This file is part of GNU Taler.
-#
-# GNU Taler is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# GNU Taler is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with GNU Taler. If not, see <https://www.gnu.org/licenses/>.
-
-import random
-import os
-import sys
-from pathlib import Path
-from string import ascii_letters, ascii_uppercase
-
-TOKEN_FILE = Path.home() / "merchant_auth_token"
-
-def generate_apitoken():
- return "secret-token:" + ''.join(random.choices(ascii_letters + ascii_uppercase, k=10))
-
-if TOKEN_FILE.is_file():
- print("~/merchant_auth_token exists already. Not overwriting it!")
- sys.exit(0)
-
-with TOKEN_FILE.open("w") as f:
- f.write(generate_apitoken())
-
-print(f"Token file '{TOKEN_FILE}' created")
diff --git a/bin/taler-deployment-config-generate b/bin/taler-deployment-config-generate
deleted file mode 100755
index f3a52ca..0000000
--- a/bin/taler-deployment-config-generate
+++ /dev/null
@@ -1,280 +0,0 @@
-#!/usr/bin/env python3
-import click
-import sys
-from collections import OrderedDict
-import json
-import os
-import urllib.parse
-import stat
-from taler_urls import get_urls, get_port
-
-
-class ConfigFile:
- def __init__(self, envname, currency, exchange_pub, filename):
- self.sections = OrderedDict()
- self.envname = envname
- self.filename = filename
- self.currency = currency
- self.exchange_pub = exchange_pub
-
- def destroy(self):
- del self.sections
- self.sections = OrderedDict()
-
- def cfg_put(self, section_name, key, value):
- s = self.sections[section_name] = self.sections.get(section_name, OrderedDict())
- s[key] = value
-
- def cfg_write(self, outdir):
-
- if outdir:
- fstream = open(os.path.join(outdir, self.filename), "w")
- else:
-            fstream = sys.stdout
-
- for section_name, section in self.sections.items():
- fstream.write("[" + section_name + "]" + "\n")
- for key, value in section.items():
- fstream.write(key + " = " + value + "\n")
- fstream.write("\n")
-        if outdir:
-            fstream.close()
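-
-    # Illustrative output of cfg_write() for the [taler] section with the
-    # default KUDOS currency:
-    #
-    #   [taler]
-    #   CURRENCY = KUDOS
-    #   CURRENCY_ROUND_UNIT = KUDOS:0.01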
-
-
-def coin(
- obj,
- name,
- value,
- d_withdraw="3 years",
- d_spend="5 years",
- d_legal="10 years",
- f_withdraw="0.01",
- f_deposit="0.01",
- f_refresh="0.01",
- f_refund="0.01",
- rsa_keysize="2048",
-):
- sec = "coin_" + obj.currency + "_" + name
- obj.cfg_put(sec, "value", obj.currency + ":" + value)
- obj.cfg_put(sec, "duration_withdraw", d_withdraw)
- obj.cfg_put(sec, "duration_spend", d_spend)
- obj.cfg_put(sec, "duration_legal", d_legal)
- obj.cfg_put(sec, "fee_withdraw", obj.currency + ":" + f_withdraw)
- obj.cfg_put(sec, "fee_refresh", obj.currency + ":" + f_refresh)
- obj.cfg_put(sec, "fee_refund", obj.currency + ":" + f_refund)
- obj.cfg_put(sec, "fee_deposit", obj.currency + ":" + f_deposit)
- obj.cfg_put(sec, "rsa_keysize", rsa_keysize)
-
-
-def config(obj):
- urls = get_urls(obj.envname)
- obj.cfg_put("paths", "TALER_DATA_HOME", "${HOME}/taler-data")
- obj.cfg_put("paths", "TALER_RUNTIME_DIR", "${HOME}/taler-runtime")
- obj.cfg_put("taler", "CURRENCY", obj.currency)
- obj.cfg_put("taler", "CURRENCY_ROUND_UNIT", f"{obj.currency}:0.01")
- if obj.envname != "local":
- obj.cfg_put("bank", "serve", "uwsgi")
- obj.cfg_put("bank", "uwsgi_serve", "unix")
- obj.cfg_put("bank", "uwsgi_unixpath", "$HOME/sockets/bank.uwsgi")
- obj.cfg_put("bank", "uwsgi_unixpath_mode", "660")
- else:
- obj.cfg_put("bank", "serve", "http")
- obj.cfg_put("bank", "http_port", get_port(urls["bank"]))
-
- obj.cfg_put("bank", "max_debt", "%s:500.0" % obj.currency)
- obj.cfg_put("bank", "max_debt_bank", "%s:1000000000.0" % obj.currency)
- obj.cfg_put("bank", "allow_registrations", "YES")
- obj.cfg_put("bank", "base_url", urls["bank"])
- obj.cfg_put("bank", "database", "postgres:///taler{}".format(obj.envname))
- obj.cfg_put("bank", "suggested_exchange", urls["exchange"])
-
- obj.cfg_put("donations", "serve", "uwsgi")
- obj.cfg_put("donations", "uwsgi_serve", "unix")
- obj.cfg_put("donations", "uwsgi_unixpath", "$HOME/sockets/donations.uwsgi")
- obj.cfg_put("donations", "uwsgi_unixpath_mode", "660")
-
- obj.cfg_put("landing", "serve", "uwsgi")
- obj.cfg_put("landing", "uwsgi_serve", "unix")
- obj.cfg_put("landing", "uwsgi_unixpath", "$HOME/sockets/landing.uwsgi")
- obj.cfg_put("landing", "uwsgi_unixpath_mode", "660")
-
- obj.cfg_put("blog", "serve", "uwsgi")
- obj.cfg_put("blog", "uwsgi_serve", "unix")
- obj.cfg_put("blog", "uwsgi_unixpath", "$HOME/sockets/shop.uwsgi")
- obj.cfg_put("blog", "uwsgi_unixpath_mode", "660")
-
- obj.cfg_put("survey", "serve", "uwsgi")
- obj.cfg_put("survey", "uwsgi_serve", "unix")
- obj.cfg_put("survey", "uwsgi_unixpath", "$HOME/sockets/survey.uwsgi")
- obj.cfg_put("survey", "uwsgi_unixpath_mode", "660")
- obj.cfg_put("survey", "bank_password", "x")
-
- obj.cfg_put("backoffice-all", "backend", urls["merchant_backend"])
-
- # Keep only one back-office service for all instances, for simplicity.
- obj.cfg_put("backoffice-all", "uwsgi_serve", "unix")
- obj.cfg_put("backoffice-all", "uwsgi_unixpath_mode", "660")
- obj.cfg_put("backoffice-all", "uwsgi_unixpath", "$HOME/sockets/backoffice.uwsgi")
- obj.cfg_put("backoffice-all", "instances", "FSF default Tor")
-
- if obj.envname != "local":
- obj.cfg_put("merchant", "serve", "unix")
- obj.cfg_put("merchant", "unixpath", "$HOME/sockets/merchant.http")
- else:
- obj.cfg_put("merchant", "serve", "tcp")
- obj.cfg_put("merchant", "port", get_port(urls["merchant_backend"]))
-
- obj.cfg_put("merchant", "wire_transfer_delay", "0 s")
- obj.cfg_put("merchant", "default_max_wire_fee", obj.currency + ":" + "0.01")
- obj.cfg_put("merchant", "default_max_deposit_fee", obj.currency + ":" + "0.05")
- obj.cfg_put(
- "merchantdb-postgres", "config", "postgres:///taler{}".format(obj.envname)
- )
-
- obj.cfg_put("frontends", "backend", urls["merchant_backend"])
-
- obj.cfg_put(
- "merchant-exchange-{}".format(obj.currency), "master_key", obj.exchange_pub
- )
- obj.cfg_put("merchant-exchange-{}".format(obj.currency), "currency", obj.currency)
-
- obj.cfg_put(
- "merchant-exchange-{}".format(obj.currency),
- "exchange_base_url",
- urls["exchange"],
- )
-
- obj.cfg_put("auditor", "serve", "unix")
- obj.cfg_put("auditor", "base_url", urls["auditor"])
- obj.cfg_put("auditor", "auditor_url", urls["auditor"])
- obj.cfg_put("auditor", "unixpath", "$HOME/sockets/auditor.http")
- obj.cfg_put("auditor", "tiny_amount", obj.currency + ":0.01")
-
- obj.cfg_put("taler-exchange-secmod-eddsa", "unixpath", "$HOME/sockets/taler-exchange-secmod-eddsa.sock")
- obj.cfg_put("taler-exchange-secmod-rsa", "unixpath", "$HOME/sockets/taler-exchange-secmod-rsa.sock")
- obj.cfg_put("taler-exchange-secmod-rsa", "sm_priv_key", "${TALER_DATA_HOME}/taler-exchange-secmod-rsa/secmod-private-key")
-
- obj.cfg_put("exchange", "base_url", urls["exchange"])
-
- if obj.envname != "local":
- obj.cfg_put("exchange", "serve", "unix")
- obj.cfg_put("exchange", "unixpath", "$HOME/sockets/exchange.http")
- else:
- obj.cfg_put("exchange", "serve", "tcp")
- obj.cfg_put("exchange", "port", get_port(urls["exchange"]))
-
- obj.cfg_put("exchange", "master_public_key", obj.exchange_pub)
- obj.cfg_put("exchange", "terms_etag", "0")
- obj.cfg_put("exchange", "terms_dir", "$HOME/local/share/taler-exchange/tos")
- obj.cfg_put("exchange", "privacy_etag", "0")
- obj.cfg_put("exchange", "privacy_dir", "$HOME/local/share/taler-exchange/pp")
-
-
- obj.cfg_put(
- "exchangedb-postgres", "db_conn_str", "postgres:///taler{}".format(obj.envname)
- )
- obj.cfg_put(
- "exchangedb-postgres", "config", "postgres:///taler{}".format(obj.envname)
- )
- obj.cfg_put(
- "auditordb-postgres", "db_conn_str", "postgres:///taler{}".format(obj.envname)
- )
- obj.cfg_put(
- "auditordb-postgres", "config", "postgres:///taler{}".format(obj.envname)
- )
-
- bank_acct_url = "{}taler-wire-gateway/Exchange/".format(urls["bank"])
-
- obj.cfg_put(
- "exchange-account-1", "payto_uri", "{}Exchange".format(urls["talerbank_payto"])
- )
- obj.cfg_put("exchange-account-1", "enable_debit", "yes")
- obj.cfg_put("exchange-account-1", "enable_credit", "yes")
- obj.cfg_put("exchange-accountcredentials-1", "wire_gateway_auth_method", "basic")
- obj.cfg_put("exchange-accountcredentials-1", "wire_gateway_url", bank_acct_url)
- obj.cfg_put("exchange-accountcredentials-1", "username", "Exchange")
- obj.cfg_put("exchange-accountcredentials-1", "password", "x")
-
- obj.cfg_put(
- "merchant-account-merchant",
- "payto_uri",
- "{}Tutorial".format(urls["talerbank_payto"]),
- )
- obj.cfg_put(
- "merchant-account-merchant",
- "wire_response",
- "${TALER_DATA_HOME}/merchant/wire/merchant.json",
- )
- obj.cfg_put("merchant-account-merchant", "wire_file_mode", "770")
-
- # The following block should be obsoleted by the new API to configure instances.
- merchant_instance_names = ("default", "Tor", "GNUnet", "Taler", "FSF", "Tutorial")
- for mi in merchant_instance_names:
- obj.cfg_put("merchant-account-merchant", f"HONOR_{mi}", "YES")
- obj.cfg_put("merchant-account-merchant", f"ACTIVE_{mi}", "YES")
-
- coin(obj, "ct_10", "0.10")
- coin(obj, "1", "1")
- coin(obj, "2", "2")
- coin(obj, "5", "5")
- coin(obj, "10", "10")
- coin(obj, "1000", "1000")
-
-
-@click.command()
-@click.option("--currency", default="KUDOS")
-@click.option("--envname", default="demo")
-@click.option("--outdir", required=True)
-@click.option("--exchange-pub", required=True)
-# Expected to already contain the 'secret-token:' prefix.
-@click.option("--frontends-apitoken", required=True)
-def main(currency, envname, outdir, exchange_pub, frontends_apitoken):
-
- if envname not in ("tanker", "demo", "test", "int", "euro", "chf", "local"):
- print("envname (%s) not demo/test/int, aborting config generation" % envname)
- return
-
- config_files = []
-
- mc = ConfigFile(envname, currency, exchange_pub, "taler.conf")
- mc.cfg_put("frontends", "backend_apikey", f"{frontends_apitoken}")
- config(mc)
- config_files.append(mc)
-
- urls = get_urls(envname)
-
- sc = ConfigFile(envname, currency, exchange_pub, "sync.conf")
- sc.cfg_put("taler", "currency", currency)
- sc.cfg_put("sync", "serve", "unix")
- sc.cfg_put("sync", "unixpath", "$HOME/sockets/sync.http")
- sc.cfg_put("sync", "apikey", f"Bearer {frontends_apitoken}")
- sc.cfg_put("sync", "annual_fee", f"{currency}:0.1")
- sc.cfg_put("sync", "fulfillment_url", "taler://fulfillment-success/")
- sc.cfg_put("sync", "payment_backend_url", urls["merchant_backend"] + "instances/Taler/")
- sc.cfg_put("syncdb-postgres", "config", f"postgres:///taler{envname}")
- config_files.append(sc)
-
- ac = ConfigFile(envname, currency, exchange_pub, "anastasis.conf")
- ac.cfg_put("taler", "currency", currency)
- ac.cfg_put("anastasis", "serve", "unix")
- ac.cfg_put("anastasis", "business_name", f"GNU Taler Demo Anastasis Provider")
- ac.cfg_put("anastasis", "unixpath", "$HOME/sockets/anastasis.http")
- ac.cfg_put("anastasis", "annual_fee", f"{currency}:0")
- ac.cfg_put("anastasis", "question_cost", f"{currency}:0")
- ac.cfg_put("anastasis", "insurance", f"{currency}:0")
- ac.cfg_put("anastasis", "truth_upload_fee", f"{currency}:0")
- ac.cfg_put("anastasis", "fulfillment_url", "taler://fulfillment-success/")
- ac.cfg_put("anastasis", "server_salt", "kreb3ia9dmj43gfa")
- ac.cfg_put("stasis-postgres", "config", f"postgres:///taler{envname}")
- ac.cfg_put("anastasis-merchant-backend", "payment_backend_url", urls["merchant_backend"] + "instances/anastasis/")
- ac.cfg_put("anastasis-merchant-backend", "api_key", f"Bearer {frontends_apitoken}")
- ac.cfg_put("authorization-question", "cost", f"{currency}:0")
- ac.cfg_put("authorization-question", "enabled", "yes")
- config_files.append(ac)
-
- assert 0 < len(config_files)
- for obj in config_files:
- obj.cfg_write(outdir)
-
-
-if __name__ == "__main__":
- main()
diff --git a/bin/taler-deployment-config-generate-sepa b/bin/taler-deployment-config-generate-sepa
deleted file mode 100755
index 959c224..0000000
--- a/bin/taler-deployment-config-generate-sepa
+++ /dev/null
@@ -1,281 +0,0 @@
-#!/usr/bin/env python3
-import click
-import sys
-from collections import OrderedDict
-import json
-import os
-import urllib.parse
-import stat
-from taler_urls import get_urls, get_port
-
-
-class ConfigFile:
- def __init__(self, envname, currency, exchange_pub, filename):
- self.sections = OrderedDict()
- self.envname = envname
- self.filename = filename
- self.currency = currency
- self.exchange_pub = exchange_pub
-
- def destroy(self):
- del self.sections
- self.sections = OrderedDict()
-
- def cfg_put(self, section_name, key, value):
- s = self.sections[section_name] = self.sections.get(section_name, OrderedDict())
- s[key] = value
-
- def cfg_write(self, outdir):
-
- if outdir:
- fstream = open(os.path.join(outdir, self.filename), "w")
- else:
-            fstream = sys.stdout
-
- for section_name, section in self.sections.items():
- fstream.write("[" + section_name + "]" + "\n")
- for key, value in section.items():
- fstream.write(key + " = " + value + "\n")
- fstream.write("\n")
-        if outdir:
-            fstream.close()
-
-
-def coin(
- obj,
- name,
- value,
- d_withdraw="3 years",
- d_spend="5 years",
- d_legal="10 years",
- f_withdraw="0.01",
- f_deposit="0.01",
- f_refresh="0.01",
- f_refund="0.01",
- rsa_keysize="2048",
-):
- sec = "coin_" + obj.currency + "_" + name
- obj.cfg_put(sec, "value", obj.currency + ":" + value)
- obj.cfg_put(sec, "duration_withdraw", d_withdraw)
- obj.cfg_put(sec, "duration_spend", d_spend)
- obj.cfg_put(sec, "duration_legal", d_legal)
- obj.cfg_put(sec, "fee_withdraw", obj.currency + ":" + f_withdraw)
- obj.cfg_put(sec, "fee_refresh", obj.currency + ":" + f_refresh)
- obj.cfg_put(sec, "fee_refund", obj.currency + ":" + f_refund)
- obj.cfg_put(sec, "fee_deposit", obj.currency + ":" + f_deposit)
- obj.cfg_put(sec, "rsa_keysize", rsa_keysize)
-
-
-def config(obj):
- urls = get_urls(obj.envname)
- obj.cfg_put("paths", "TALER_DATA_HOME", "${HOME}/taler-data")
- obj.cfg_put("paths", "TALER_RUNTIME_DIR", "${HOME}/taler-runtime")
- obj.cfg_put("taler", "CURRENCY", obj.currency)
- obj.cfg_put("taler", "CURRENCY_ROUND_UNIT", f"{obj.currency}:0.01")
- if obj.envname != "local":
- obj.cfg_put("bank", "serve", "uwsgi")
- obj.cfg_put("bank", "uwsgi_serve", "unix")
- obj.cfg_put("bank", "uwsgi_unixpath", "$HOME/sockets/bank.uwsgi")
- obj.cfg_put("bank", "uwsgi_unixpath_mode", "660")
- else:
- obj.cfg_put("bank", "serve", "http")
- obj.cfg_put("bank", "http_port", get_port(urls["bank"]))
-
- obj.cfg_put("bank", "database", "taler" + obj.envname)
- obj.cfg_put("bank", "max_debt", "%s:500.0" % obj.currency)
- obj.cfg_put("bank", "max_debt_bank", "%s:1000000000.0" % obj.currency)
- obj.cfg_put("bank", "allow_registrations", "YES")
- obj.cfg_put("bank", "base_url", urls["bank"])
- obj.cfg_put("bank", "database", "postgres:///taler{}".format(obj.envname))
- obj.cfg_put("bank", "suggested_exchange", urls["exchange"])
-
- obj.cfg_put("bank-admin", "uwsgi_serve", "unix")
- obj.cfg_put("bank-admin", "uwsgi_unixpath", "$HOME/sockets/bank-admin.uwsgi")
- obj.cfg_put("bank-admin", "uwsgi_unixpath_mode", "660")
-
- obj.cfg_put("donations", "serve", "uwsgi")
- obj.cfg_put("donations", "uwsgi_serve", "unix")
- obj.cfg_put("donations", "uwsgi_unixpath", "$HOME/sockets/donations.uwsgi")
- obj.cfg_put("donations", "uwsgi_unixpath_mode", "660")
-
- obj.cfg_put("landing", "serve", "uwsgi")
- obj.cfg_put("landing", "uwsgi_serve", "unix")
- obj.cfg_put("landing", "uwsgi_unixpath", "$HOME/sockets/landing.uwsgi")
- obj.cfg_put("landing", "uwsgi_unixpath_mode", "660")
-
- obj.cfg_put("blog", "serve", "uwsgi")
- obj.cfg_put("blog", "uwsgi_serve", "unix")
- obj.cfg_put("blog", "uwsgi_unixpath", "$HOME/sockets/shop.uwsgi")
- obj.cfg_put("blog", "uwsgi_unixpath_mode", "660")
-
- obj.cfg_put("survey", "serve", "uwsgi")
- obj.cfg_put("survey", "uwsgi_serve", "unix")
- obj.cfg_put("survey", "uwsgi_unixpath", "$HOME/sockets/survey.uwsgi")
- obj.cfg_put("survey", "uwsgi_unixpath_mode", "660")
- obj.cfg_put("survey", "bank_password", "x")
-
- obj.cfg_put("backoffice-all", "backend", urls["merchant_backend"])
-
- # Keep only one back-office service for all instances, for simplicity.
- obj.cfg_put("backoffice-all", "uwsgi_serve", "unix")
- obj.cfg_put("backoffice-all", "uwsgi_unixpath_mode", "660")
- obj.cfg_put("backoffice-all", "uwsgi_unixpath", "$HOME/sockets/backoffice.uwsgi")
- obj.cfg_put("backoffice-all", "instances", "FSF default Tor")
-
- if obj.envname != "local":
- obj.cfg_put("merchant", "serve", "unix")
- obj.cfg_put("merchant", "unixpath", "$HOME/sockets/merchant.http")
- else:
- obj.cfg_put("merchant", "serve", "tcp")
- obj.cfg_put("merchant", "port", get_port(urls["merchant_backend"]))
-
- obj.cfg_put("merchant", "wire_transfer_delay", "0 s")
- obj.cfg_put("merchant", "default_max_wire_fee", obj.currency + ":" + "0.01")
- obj.cfg_put("merchant", "default_max_deposit_fee", obj.currency + ":" + "0.05")
- obj.cfg_put(
- "merchantdb-postgres", "config", "postgres:///taler{}".format(obj.envname)
- )
-
- obj.cfg_put("frontends", "backend", urls["merchant_backend"])
-
- obj.cfg_put(
- "merchant-exchange-{}".format(obj.currency), "master_key", obj.exchange_pub
- )
- obj.cfg_put("merchant-exchange-{}".format(obj.currency), "currency", obj.currency)
-
- obj.cfg_put(
- "merchant-exchange-{}".format(obj.currency),
- "exchange_base_url",
- urls["exchange"],
- )
-
- obj.cfg_put("auditor", "serve", "unix")
- obj.cfg_put("auditor", "base_url", urls["auditor"])
- obj.cfg_put("auditor", "auditor_url", urls["auditor"])
- obj.cfg_put("auditor", "unixpath", "$HOME/sockets/auditor.http")
- obj.cfg_put("auditor", "tiny_amount", obj.currency + ":0.01")
-
- obj.cfg_put("taler-exchange-secmod-eddsa", "unixpath", "$HOME/sockets/taler-exchange-secmod-eddsa.sock")
- obj.cfg_put("taler-exchange-secmod-rsa", "unixpath", "$HOME/sockets/taler-exchange-secmod-rsa.sock")
- obj.cfg_put("taler-exchange-secmod-rsa", "sm_priv_key", "${TALER_DATA_HOME}/taler-exchange-secmod-rsa/secmod-private-key")
-
- obj.cfg_put("exchange", "base_url", urls["exchange"])
-
- if obj.envname != "local":
- obj.cfg_put("exchange", "serve", "unix")
- obj.cfg_put("exchange", "unixpath", "$HOME/sockets/exchange.http")
- else:
- obj.cfg_put("exchange", "serve", "tcp")
- obj.cfg_put("exchange", "port", get_port(urls["exchange"]))
-
- obj.cfg_put("exchange", "master_public_key", obj.exchange_pub)
- obj.cfg_put("exchange", "terms_etag", "0")
- obj.cfg_put("exchange", "terms_dir", "$HOME/local/share/taler-exchange/tos")
- obj.cfg_put("exchange", "privacy_etag", "0")
- obj.cfg_put("exchange", "privacy_dir", "$HOME/local/share/taler-exchange/pp")
-
-
- obj.cfg_put(
- "exchangedb-postgres", "db_conn_str", "postgres:///taler{}".format(obj.envname)
- )
- obj.cfg_put(
- "exchangedb-postgres", "config", "postgres:///taler{}".format(obj.envname)
- )
- obj.cfg_put(
- "auditordb-postgres", "db_conn_str", "postgres:///taler{}".format(obj.envname)
- )
- obj.cfg_put(
- "auditordb-postgres", "config", "postgres:///taler{}".format(obj.envname)
- )
-
- bank_acct_url = "{}taler-wire-gateway/Exchange/".format(urls["bank"])
-
- obj.cfg_put("exchange-account-1", "payto_uri",
- "{}/EX00000000000000000000".format(urls["sepa_payto"])
- )
- obj.cfg_put("exchange-account-1", "enable_debit", "yes")
- obj.cfg_put("exchange-account-1", "enable_credit", "yes")
- obj.cfg_put("exchange-accountcredentials-1", "wire_gateway_auth_method", "basic")
- obj.cfg_put("exchange-accountcredentials-1", "wire_gateway_url", bank_acct_url)
- obj.cfg_put("exchange-accountcredentials-1", "username", "Exchange")
- obj.cfg_put("exchange-accountcredentials-1", "password", "x")
-
- obj.cfg_put("merchant-account-merchant", "payto_uri",
- "{}/ME00000000000000000000".format(urls["sepa_payto"]),
- )
- obj.cfg_put("merchant-account-merchant", "wire_response",
- "${TALER_DATA_HOME}/merchant/wire/merchant.json",
- )
- obj.cfg_put("merchant-account-merchant", "wire_file_mode", "770")
-
- # The following block should be obsoleted by the new API to configure instances.
- merchant_instance_names = ("default", "Tor", "GNUnet", "Taler", "FSF", "Tutorial")
- for mi in merchant_instance_names:
- obj.cfg_put("merchant-account-merchant", f"HONOR_{mi}", "YES")
- obj.cfg_put("merchant-account-merchant", f"ACTIVE_{mi}", "YES")
-
- coin(obj, "ct_10", "0.10")
- coin(obj, "1", "1")
- coin(obj, "2", "2")
- coin(obj, "5", "5")
- coin(obj, "10", "10")
- coin(obj, "1000", "1000")
-
-
-@click.command()
-@click.option("--currency", default="KUDOS")
-@click.option("--envname", default="demo")
-@click.option("--outdir", required=True)
-@click.option("--exchange-pub", required=True)
-# Expected to already contain the 'secret-token:' prefix.
-@click.option("--frontends-apitoken", required=True)
-def main(currency, envname, outdir, exchange_pub, frontends_apitoken):
-
- if envname not in ("tanker", "demo", "test", "int", "euro", "chf", "local"):
- print("envname (%s) not demo/test/int, aborting config generation" % envname)
- return
-
- config_files = []
-
- mc = ConfigFile(envname, currency, exchange_pub, "taler.conf")
- mc.cfg_put("frontends", "backend_apikey", f"{frontends_apitoken}")
- config(mc)
- config_files.append(mc)
-
- urls = get_urls(envname)
-
- sc = ConfigFile(envname, currency, exchange_pub, "sync.conf")
- sc.cfg_put("taler", "currency", currency)
- sc.cfg_put("sync", "serve", "unix")
- sc.cfg_put("sync", "unixpath", "$HOME/sockets/sync.http")
- sc.cfg_put("sync", "apikey", f"Bearer {frontends_apitoken}")
- sc.cfg_put("sync", "annual_fee", f"{currency}:0.1")
- sc.cfg_put("sync", "fulfillment_url", "taler://fulfillment-success/")
- sc.cfg_put("sync", "payment_backend_url", urls["merchant_backend"] + "instances/Taler/")
- sc.cfg_put("syncdb-postgres", "config", f"postgres:///taler{envname}")
- config_files.append(sc)
-
- ac = ConfigFile(envname, currency, exchange_pub, "anastasis.conf")
- ac.cfg_put("taler", "currency", currency)
- ac.cfg_put("anastasis", "serve", "unix")
- ac.cfg_put("anastasis", "business_name", f"GNU Taler Demo Anastasis Provider")
- ac.cfg_put("anastasis", "unixpath", "$HOME/sockets/anastasis.http")
- ac.cfg_put("anastasis", "annual_fee", f"{currency}:0")
- ac.cfg_put("anastasis", "question_cost", f"{currency}:0")
- ac.cfg_put("anastasis", "insurance", f"{currency}:0")
- ac.cfg_put("anastasis", "truth_upload_fee", f"{currency}:0")
- ac.cfg_put("anastasis", "fulfillment_url", "taler://fulfillment-success/")
- ac.cfg_put("anastasis", "server_salt", "kreb3ia9dmj43gfa")
- ac.cfg_put("stasis-postgres", "config", f"postgres:///taler{envname}")
- ac.cfg_put("anastasis-merchant-backend", "payment_backend_url", urls["merchant_backend"] + "instances/anastasis/")
- ac.cfg_put("anastasis-merchant-backend", "api_key", f"Bearer {frontends_apitoken}")
- ac.cfg_put("authorization-question", "cost", f"{currency}:0")
- ac.cfg_put("authorization-question", "enabled", "yes")
- config_files.append(ac)
-
- assert 0 < len(config_files)
- for obj in config_files:
- obj.cfg_write(outdir)
-
-
-if __name__ == "__main__":
- main()
diff --git a/bin/taler-deployment-config-instances b/bin/taler-deployment-config-instances
deleted file mode 100755
index 9895737..0000000
--- a/bin/taler-deployment-config-instances
+++ /dev/null
@@ -1,237 +0,0 @@
-#!/usr/bin/env python3
-
-"""
-This script makes sure that the merchant backend instances used by the
-test/demo environment are created.
-
-We assume that the merchant backend is running, and that the "~/activate"
-file has been sourced to provide the right environment variables.
-"""
-
-import requests
-from os import environ, system
-from sys import exit
-from urllib.parse import urljoin
-from subprocess import Popen
-from time import sleep
-import psutil
-from getpass import getuser
-
-def expect_env(name):
- val = environ.get(name)
- if not val:
- print(f"{name} not defined. Please source the ~/activate file.")
- exit(1)
- return val
-
-def wait_merchant_up():
- # Check it started correctly and it is ready to serve requests.
- checks = 10
- url = urljoin(MERCHANT_BACKEND_BASE_URL, "/config")
- print("Check URL: {}".format(url))
- while checks > 0:
-
- try:
- resp = requests.get(url, timeout=5)
- except Exception:
- print("Merchant unreachable")
- sleep(1)
- checks -= 1
- continue
-
- if resp.status_code != 200:
- sleep(1)
- checks -= 1
- continue
-
- # Ready.
- print("Merchant is up and running")
- return True
-
- if checks == 0:
- print("Merchant is not correctly serving requests.")
- return False
-
-
-MERCHANT_BACKEND_BASE_URL = expect_env("TALER_ENV_MERCHANT_BACKEND")
-TALER_ENV_NAME = expect_env("TALER_ENV_NAME")
-TALER_CONFIG_CURRENCY = expect_env("TALER_CONFIG_CURRENCY")
-TALER_ENV_FRONTENDS_APITOKEN = expect_env("TALER_ENV_FRONTENDS_APITOKEN")
-authorization_header = {"Authorization": f"Bearer {TALER_ENV_FRONTENDS_APITOKEN}"}
-
-def ensure_instance(instance_id, name, payto_uris, auth):
-
- resp = requests.get(
- urljoin(MERCHANT_BACKEND_BASE_URL, f"management/instances/{instance_id}"),
- headers = authorization_header
- )
-
- # Instance exists, we PATCH the auth just in case it changed.
- if resp.status_code == 200:
- if instance_id != "Tutorial":
- print(f"Patching (auth of) instance '{instance_id}'")
- patch_resp = requests.post(
- urljoin(MERCHANT_BACKEND_BASE_URL,
- f"management/instances/{instance_id}/auth"),
- json=auth,
- headers = authorization_header
- )
- if patch_resp.status_code < 200 or patch_resp.status_code >= 300:
- print(f"Failed to update auth of '{instance_id}', backend responds: {patch_resp.status_code}/{patch_resp.text}")
- exit(1)
- return
-
- print(f"Instance '{instance_id}' not found, trying to create it.")
- req = dict(
- id=instance_id,
- name=name,
- payto_uris=payto_uris,
- address=dict(),
- jurisdiction=dict(),
- default_max_wire_fee=f"{TALER_CONFIG_CURRENCY}:1",
- default_wire_fee_amortization=3,
- default_max_deposit_fee=f"{TALER_CONFIG_CURRENCY}:1",
- default_wire_transfer_delay=dict(d_ms="forever"),
- default_pay_delay=dict(d_ms="forever"),
- auth=auth,
- )
- create_resp = requests.post(
- urljoin(MERCHANT_BACKEND_BASE_URL, "management/instances"),
- json=req,
- headers = authorization_header
- )
- if create_resp.status_code < 200 or create_resp.status_code >= 300:
- print(f"Could not create instance '{instance_id}', backend responds: {create_resp.status_code}/{create_resp.text}")
- exit(1)
-
-
-def is_merchant_running():
- for proc in psutil.process_iter():
- if proc.name() == "taler-merchant-httpd" and proc.username() == getuser():
- return True
- return False
-
-
-def ensure_default_instance():
-    # Assumes the merchant is managed by ARM.
- merchant_was_running = is_merchant_running()
- if merchant_was_running:
- print("Found running merchant, assuming is managed by ARM. Terminating it")
- system("taler-deployment-arm -k taler-merchant")
-
- checks = 10
- while checks > 0:
- if is_merchant_running():
- sleep(1)
- checks -= 1
- continue
- break
-
- if checks == 0:
- print("Could not stop the running merchant.")
- exit(1)
-
- print("Successfully terminating the merchant.")
- # ARM is _not_ running the merchant at this point.
- env_with_token = environ.copy()
- env_with_token["TALER_MERCHANT_TOKEN"] = TALER_ENV_FRONTENDS_APITOKEN
-
- print("Starting the merchant outside ARM, passing the token into the environment.")
- # Start the merchant natively.
- merchant = Popen(["taler-merchant-httpd"], env=env_with_token)
-
- if not wait_merchant_up():
- merchant.terminate()
- merchant.wait()
- exit(1)
-
- print("Merchant started successfully, creating the default instance now.")
- ensure_instance(
- "default",
- "default",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/Taler"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN)
- )
- # Native process can be terminated now.
- merchant.terminate()
- merchant.wait()
- print("Merchant terminated, restarting it via ARM now.")
-
- # Restarting the official ARM merchant.
- if merchant_was_running:
- system("taler-deployment-arm -i taler-merchant")
-
-ensure_default_instance()
-
-# Need to wait here since the merchant was last restarted via ARM
-# in the previous step.
-if not wait_merchant_up():
- system("taler-deployment-arm -k taler-merchant")
- exit(1)
-
-ensure_instance(
- "blog",
- name="Blog",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/blog"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN),
-)
-
-ensure_instance(
- "donations",
- name="Donations",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/donations"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN),
-)
-
-ensure_instance(
- "survey",
- name="Survey",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/survey"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN),
-)
-
-ensure_instance(
- "pos",
- name="PoS",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/pos"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN),
-)
-
-ensure_instance(
- "GNUnet",
- name="GNUnet",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/GNUnet"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN),
-)
-
-# This instance relates to both the donation receiver and the sync service.
-ensure_instance(
- "Taler",
- name="Taler",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/Taler"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN),
-)
-
-ensure_instance(
- "Tor",
- name="Tor",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/Tor"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN),
-)
-
-ensure_instance(
- "anastasis",
- name="Tor",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/anastasis"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN),
-)
-
-# Note: this instance has a fixed secret-token, so as to allow anyone to easily
-# run their tutorial.
-ensure_instance(
- "Tutorial",
- name="Tutorial",
- payto_uris=[f"payto://x-taler-bank/bank.{TALER_ENV_NAME}.taler.net/Tutorial"],
- auth=dict(method="token", token="secret-token:sandbox")
-)
-
diff --git a/bin/taler-deployment-config-instances-iban b/bin/taler-deployment-config-instances-iban
deleted file mode 100755
index 2a5daef..0000000
--- a/bin/taler-deployment-config-instances-iban
+++ /dev/null
@@ -1,163 +0,0 @@
-#!/usr/bin/env python3
-
-"""
-This script makes sure that the merchant backend instances used by the
-test/demo environment are created.
-
-We assume that the merchant backend is running, and that the "~/activate"
-file has been sourced to provide the right environment variables.
-"""
-
-import requests
-from os import environ, system
-from sys import exit
-from urllib.parse import urljoin
-from subprocess import Popen
-from time import sleep
-import psutil
-from getpass import getuser
-
-ibans = dict(
- default = "ME00000000000000000000",
- # Must match the IBAN given in the prepare script, called IBAN_MERCHANT.
- blog = "ME00000000000000000001",
-)
-
-def expect_env(name):
- val = environ.get(name)
- if not val:
- print(f"{name} not defined. Please source the ~/activate file.")
- exit(1)
- return val
-
-def wait_merchant_up():
- # Check it started correctly and it is ready to serve requests.
- checks = 10
- url = urljoin(MERCHANT_BACKEND_BASE_URL, "/config")
- while checks > 0:
- try:
- resp = requests.get(url, timeout=5)
- except Exception:
- print("Merchant unreachable")
- sleep(1)
- checks -= 1
- continue
-
- if resp.status_code != 200:
- sleep(1)
- checks -= 1
- continue
-
- # Ready.
- return True
-
- print("Merchant is not correctly serving requests.")
- return False
-
-MERCHANT_BACKEND_BASE_URL = expect_env("TALER_ENV_MERCHANT_BACKEND")
-TALER_ENV_NAME = expect_env("TALER_ENV_NAME")
-TALER_CONFIG_CURRENCY = expect_env("TALER_CONFIG_CURRENCY")
-TALER_ENV_FRONTENDS_APITOKEN = expect_env("TALER_ENV_FRONTENDS_APITOKEN")
-authorization_header = {"Authorization": f"Bearer {TALER_ENV_FRONTENDS_APITOKEN}"}
-
-
-def ensure_instance(instance_id, name, payto_uris, auth):
- resp = requests.get(
- urljoin(MERCHANT_BACKEND_BASE_URL, f"management/instances/{instance_id}"),
- headers = authorization_header
- )
- req = dict(
- id=instance_id,
- name=name,
- payto_uris=payto_uris,
- address=dict(),
- jurisdiction=dict(),
- default_max_wire_fee=f"{TALER_CONFIG_CURRENCY}:1",
- default_wire_fee_amortization=3,
- default_max_deposit_fee=f"{TALER_CONFIG_CURRENCY}:1",
- default_wire_transfer_delay=dict(d_ms="forever"),
- default_pay_delay=dict(d_ms="forever"),
- auth=auth,
- )
- http_method = requests.post
- endpoint = "management/instances"
- # Instance exists, patching it.
- if resp.status_code == 200:
- if instance_id != "Tutorial":
- print(f"Patching instance '{instance_id}'")
- http_method = requests.patch
- endpoint = f"management/instances/{instance_id}"
- resp = http_method(
- urljoin(MERCHANT_BACKEND_BASE_URL, endpoint),
- json=req,
- headers = authorization_header
- )
- if resp.status_code < 200 or resp.status_code >= 300:
- print(f"Could not create (or patch) instance '{instance_id}', backend responds: {resp.status_code}/{resp.text}")
- exit(1)
-
-def is_merchant_running():
- for proc in psutil.process_iter():
- if proc.name() == "taler-merchant-httpd" and proc.username() == getuser():
- return True
- return False
-
-
-def ensure_default_instance():
-    # Assumes the merchant is managed by ARM.
- merchant_was_running = is_merchant_running()
- if merchant_was_running:
- print("Found running merchant, assuming is managed by ARM. Terminating it")
- system("taler-deployment-arm -k taler-merchant")
-
- checks = 10
- while checks > 0:
- if is_merchant_running():
- sleep(1)
- checks -= 1
- continue
- break
-
- if checks == 0:
- print("Could not stop the running merchant.")
- exit(1)
- # ARM is _not_ running the merchant at this point.
- env_with_token = environ.copy()
- env_with_token["TALER_MERCHANT_TOKEN"] = TALER_ENV_FRONTENDS_APITOKEN
-
- print("Starting the merchant outside ARM to pass the token into the environment.")
- # Start the merchant natively.
- merchant = Popen(["taler-merchant-httpd"], env=env_with_token)
-
- if not wait_merchant_up():
- merchant.terminate()
- merchant.wait()
- exit(1)
-
- print("Merchant started successfully, creating the default instance now.")
- ensure_instance(
- "default",
- "default",
- payto_uris=[f"payto://sepa/bank.{TALER_ENV_NAME}.taler.net/{ibans.get('default')}"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN)
- )
- # Native process can be terminated now.
- merchant.terminate()
- merchant.wait()
-
-ensure_default_instance()
-print("Restarting merchant _with_ ARM, to create other non-default instances.")
-system("taler-deployment-arm -s")
-system("taler-deployment-arm -i taler-merchant")
-wait_merchant_up()
-
-ensure_instance(
- "blog",
- name="Blog",
- payto_uris=[f"payto://sepa/bank.{TALER_ENV_NAME}.taler.net/{ibans.get('blog')}"],
- auth=dict(method="token", token=TALER_ENV_FRONTENDS_APITOKEN),
-)
-print("Stopping the ARM merchant")
-system("taler-deployment-arm -k taler-merchant")
-# NOTE: ARM itself will be stopped by the main prepare script.
-# Stopping here will result in indefinite wait at the caller.
diff --git a/bin/taler-deployment-config-tips b/bin/taler-deployment-config-tips
deleted file mode 100755
index 7baff4a..0000000
--- a/bin/taler-deployment-config-tips
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/bash
-
-set -eu
-
-function join_no_double_slash {
- echo "$1$2" | sed -s 's/\([^:]\)\/\+/\1\//g'
-}
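-
-# e.g. join_no_double_slash "https://example.com/" "/instances/survey/"
-# prints "https://example.com/instances/survey/"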
-
-BANK_URL=$(taler-config -s bank -o base_url)
-MERCHANT_URL=$(taler-config -s frontends -o backend)
-CURRENCY=$(taler-config -s taler -o currency)
-EXCHANGE_URL=$(taler-config -s exchange -o base_url)
-WIRE_METHOD="x-taler-bank"
-APIKEY=$(taler-config -s frontends -o backend_apikey)
-
-PAYTO_WITH_SUBJECT=$(taler-merchant-setup-reserve \
- --amount="${CURRENCY}:50" \
- --exchange-url=${EXCHANGE_URL} \
- --merchant-url=$(join_no_double_slash ${MERCHANT_URL} "/instances/survey/") \
- --wire-method=${WIRE_METHOD} \
- --apikey="Bearer ${APIKEY}")
-echo "Merchant generated the following tip-reserve: $PAYTO_WITH_SUBJECT"
-taler-bank-manage django wire_transfer_payto Survey x ${PAYTO_WITH_SUBJECT} "${CURRENCY}:50"
-echo Paid for tip reserve.
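Note: the join_no_double_slash helper above collapses duplicate slashes at the
path boundary while leaving the "//" after the URL scheme intact.  For example
(hypothetical values):

    $ join_no_double_slash "https://backend.demo.taler.net/" "/instances/survey/"
    https://backend.demo.taler.net/instances/survey/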
diff --git a/bin/taler-deployment-dbstart b/bin/taler-deployment-dbstart
deleted file mode 100755
index 2b740ee..0000000
--- a/bin/taler-deployment-dbstart
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env bash
-
-# Start the local database used for Taler if necessary (because we're a
-# standalone environment) and possible.
-
-set -eu
-
-base=$HOME
-
-export PATH="$base/deployment":$PATH
-
-ulimit -c $((100 * 1024))
-
-cd $HOME
-
-if [[ ! -e ~/local/bin/gnunet-arm ]]; then
- echo "not starting database, since gnunet-arm is not installed"
- exit
-fi
-
-if [ "${TALER_CONFIG_STANDALONE:-0}" = 1 ]; then
- taler-deployment-arm -s
- taler-deployment-arm -i taler-postgres-standalone
-fi
diff --git a/bin/taler-deployment-prepare b/bin/taler-deployment-prepare
deleted file mode 100755
index 438e3be..0000000
--- a/bin/taler-deployment-prepare
+++ /dev/null
@@ -1,273 +0,0 @@
-#!/bin/bash
-
-# Prepare a deployment for execution:
-# * generate the configuration and setup database
-# * put keys in the right place
-# * set bank password for the exchange
-# * sign the exchange's wire response
-# * run some sanity checks (FIXME: not done yet!)
-
-set -eu
-
-source "$HOME/activate"
-
-# $1 = {yes,no} indicates WITH_DB_RESET. Defaults to no.
-# Helps avoid color Y destroying the DB while color X is in
-# production.
-WITH_DB_RESET=${1-no}
-
-if [[ -z ${TALER_ENV_NAME+x} ]]; then
- echo "TALER_ENV_NAME not set"
- exit 1
-fi
-
-if [[ -z ${TALER_CONFIG_CURRENCY+x} ]]; then
- echo "TALER_CONFIG_CURRENCY not set"
- exit 1
-fi
-
-# The script stops whatever it started along the way.  This function
-# helps guard against processes that were accidentally left running.
-function stop_running() {
- taler-deployment-stop
- for n in `jobs -p`
- do
- kill $n 2> /dev/null || true
- done
- wait
-}
-
-trap "stop_running" EXIT
-
-function generate_config() {
- EXCHANGE_PUB=$(gnunet-ecc -p "$HOME/deployment/private-keys/${TALER_ENV_NAME}-exchange-master.priv")
-
- mkdir -p "$HOME/.config"
-
- taler-deployment-config-generate \
- --exchange-pub "$EXCHANGE_PUB" \
- --currency "$TALER_CONFIG_CURRENCY" \
- --outdir "$HOME/.config" \
- --envname "$TALER_ENV_NAME" \
- --frontends-apitoken "$TALER_ENV_FRONTENDS_APITOKEN"
-}
-
-##
-## Step 1: Generate config
-##
-
-case $TALER_ENV_NAME in
- tanker|demo|test|int|local)
- generate_config
- ;;
- *)
- echo "Not generating config for env $TALER_ENV_NAME"
- ;;
-esac
-
-##
-## Step 1b: initialize database
-##
-
-if test $WITH_DB_RESET = yes; then
- taler-exchange-dbinit --reset
-else
- taler-exchange-dbinit
-fi
-
-##
-## Step 2: Copy key material and update denom keys
-##
-
-# For demo, make sure the link to shared data between demo-blue and demo-green is
-# set up properly.
-case $TALER_ENV_NAME in
- demo)
- echo "linking taler-data"
- ln -sfT ~demo/shared-data ~/taler-data
- # Check if we won't mess up permissions later
- if [[ ! -g ~/taler-data ]]; then
- echo "the shared-data directory should have the set-group-id bit set"
- exit 1
- fi
- ;;
-esac
-
-case $TALER_ENV_NAME in
- demo|test|int|local)
- EXCHANGE_PUB=$(gnunet-ecc -p "$HOME/deployment/private-keys/${TALER_ENV_NAME}-exchange-master.priv")
- EXCHANGE_PRIV_FILE=$(taler-config -f -s exchange-offline -o master_priv_file)
- if [[ -e "$EXCHANGE_PRIV_FILE" ]]; then
- EXCHANGE_PUB2=$(gnunet-ecc -p "$EXCHANGE_PRIV_FILE")
- if [[ "$EXCHANGE_PUB" != "$EXCHANGE_PUB2" ]]; then
- echo "Warning: Different exchange private key already exists, not copying"
- fi
- else
- mkdir -p "$(dirname "$EXCHANGE_PRIV_FILE")"
- cp "$HOME/deployment/private-keys/${TALER_ENV_NAME}-exchange-master.priv" "$EXCHANGE_PRIV_FILE"
- fi
- ;;
- *)
- echo "Not copying key material for env $TALER_ENV_NAME"
- ;;
-esac
-
-EXCHANGE_MASTER_PUB=$(taler-config -s exchange -o master_public_key)
-taler-auditor-exchange \
- -m "$EXCHANGE_MASTER_PUB" \
- -u "$(taler-config -s exchange -o base_url)" || true
-
-# Make configuration accessible to auditor
-chmod 750 "$HOME/.config"
-
-
-##
-## Step 3: Set up the exchange key material
-##
-
-taler-deployment-arm -s
-
-# Quickly start+shutdown exchange httpd and crypto SM helpers
-taler-deployment-arm -i taler-exchange
-taler-deployment-arm -i taler-exchange-secmod-rsa
-taler-deployment-arm -i taler-exchange-secmod-eddsa
-
-sleep 2 # FIXME: poll keys?
-if ! taler-deployment-arm -I | grep "^taler-exchange" | grep "status=started" > /dev/null; then
- echo "Exchange didn't start, cannot set up keys"
- exit 1
-fi
-if ! taler-deployment-arm -I | grep "^taler-exchange-secmod-rsa" | grep "status=started" > /dev/null; then
- echo "Exchange (RSA module) didn't start, cannot set up keys."
- exit 1
-fi
-
-if ! taler-deployment-arm -I | grep "^taler-exchange-secmod-eddsa" | grep "status=started" > /dev/null; then
- echo "Exchange (EDDSA module) didn't start, cannot set up keys."
- exit 1
-fi
-
-taler-exchange-offline download sign upload
-
-payto_uri=$(taler-config -s exchange-account-1 -o payto_uri)
-taler-exchange-offline enable-account "$payto_uri" upload
-
-# Set up wire fees for next 5 years
-year=$(date +%Y)
-curr=$TALER_CONFIG_CURRENCY
-for y in $(seq $year $((year + 5))); do
- taler-exchange-offline wire-fee $y x-taler-bank "$curr:0.01" "$curr:0.01" upload
-done
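-# (For reference, each loop iteration expands to one signing command, e.g.
-# with an illustrative year and currency:
-#   taler-exchange-offline wire-fee 2021 x-taler-bank "KUDOS:0.01" "KUDOS:0.01" upload
-# where the two amounts are, presumably, the wire fee and the closing fee.)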
-
-taler-deployment-arm -k taler-exchange
-taler-deployment-arm -k taler-exchange-secmod-rsa
-taler-deployment-arm -k taler-exchange-secmod-eddsa
-
-# Give time to store to disk.
-sleep 5
-
-##
-## Step 4: Set up the bank
-##
-
-# Delete existing data from bank.
-if test $WITH_DB_RESET = yes; then
- echo "yes" | taler-bank-manage django flush
-fi
-
-case $TALER_ENV_NAME in
- demo|test|int|local|tanker)
- taler-bank-manage django provide_accounts
- taler-bank-manage django changepassword_unsafe Exchange x
- taler-bank-manage django changepassword_unsafe Survey x
- ;;
- *)
- echo "Not setting unsafe Exchange bank account password for env $TALER_ENV_NAME"
- ;;
-esac
-
-
-##
-## Step 5: Adjust some permissions
-##
-
-case $TALER_ENV_NAME in
- demo|test|int)
- # Make sure the web server can read ~/local
- chmod og+rx ~/local
-
- # Make sure that shared files created by this user
- # are group writable and readable.
- find ~/taler-data/ -user "$USER" -exec chmod g+rw {} \;
- ;;
- *)
- ;;
-esac
-
-##
-## Step 6: Set up merchant
-##
-
-if test $WITH_DB_RESET = yes; then
- taler-merchant-dbinit --reset
-else
- taler-merchant-dbinit
-fi
-
-# Making sure ARM is not running yet.
-taler-deployment-arm -e
-
-# Need the following services to config instances and tip reserve:
-taler-deployment-arm -s
-taler-deployment-arm -i taler-merchant
-taler-deployment-arm -i taler-demobank
-
-taler-deployment-arm -i taler-exchange
-taler-deployment-arm -i taler-exchange-secmod-rsa
-taler-deployment-arm -i taler-exchange-secmod-eddsa
-sleep 5
-
-if ! taler-deployment-arm -I | grep "^taler-merchant" | grep "status=started" > /dev/null; then
- echo "Merchant didn't start, cannot configure instances / create tip reserve."
- exit 1
-fi
-
-if ! taler-deployment-arm -I | grep "^taler-demobank" | grep "status=started" > /dev/null; then
- echo "Bank didn't start, cannot create tip reserve."
- exit 1
-fi
-
-if ! taler-deployment-arm -I | grep "^taler-exchange" | grep "status=started" > /dev/null; then
- echo "Exchange didn't start, cannot create tip reserve."
- exit 1
-fi
-
-if ! taler-deployment-arm -I | grep "^taler-exchange-secmod-rsa" | grep "status=started" > /dev/null; then
- echo "Exchange (RSA module) didn't start, cannot create tip reserve."
- exit 1
-fi
-
-if ! taler-deployment-arm -I | grep "^taler-exchange-secmod-eddsa" | grep "status=started" > /dev/null; then
- echo "Exchange (EDDSA module) didn't start, cannot create tip reserve."
- exit 1
-fi
-
-echo "Configuring instances"
-taler-deployment-config-instances
-
-echo "Creating tip reserve"
-taler-deployment-config-tips
-
-taler-deployment-arm -k taler-merchant
-taler-deployment-arm -k taler-demobank
-taler-deployment-arm -k taler-exchange
-taler-deployment-arm -k taler-exchange-secmod-rsa
-taler-deployment-arm -k taler-exchange-secmod-eddsa
-taler-deployment-arm -e
-
-##
-## Step 7: Set up anastasis
-##
-
-anastasis-dbinit
diff --git a/bin/taler-deployment-prepare-with-eufin b/bin/taler-deployment-prepare-with-eufin
deleted file mode 100755
index f3313b6..0000000
--- a/bin/taler-deployment-prepare-with-eufin
+++ /dev/null
@@ -1,418 +0,0 @@
-#!/bin/bash
-
-# Required environment variables: TALER_ENV_NAME, TALER_CONFIG_CURRENCY,
-# TALER_ENV_FRONTENDS_APITOKEN, LIBEUFIN_ENV_SANDBOX_ADMIN_PASSWORD.
-
-set -eu
-
-source "$HOME/activate"
-
-# $1 = resetDb wipes and re-initializes the databases; any other value
-# (the default) keeps them.  Helps avoid color Y destroying the DB while
-# color X is in production.
-WITH_DB_RESET=${1-no}
-
-if [[ -z ${TALER_ENV_NAME+x} ]]; then
- echo "TALER_ENV_NAME not set"
- exit 1
-fi
-
-if [[ -z ${TALER_CONFIG_CURRENCY+x} ]]; then
- echo "TALER_CONFIG_CURRENCY not set"
- exit 1
-fi
-
-# The script stops whatever it started along the way.  This function
-# helps guard against processes that were accidentally left running.
-function stop_running() {
- taler-deployment-stop
- for n in `jobs -p`
- do
- kill $n 2> /dev/null || true
- done
- wait
-}
-
-trap "stop_running" EXIT
-
-
-export IBAN_EXCHANGE="EX00000000000000000000"
-function generate_config() {
- EXCHANGE_PUB=$(gnunet-ecc -p "$HOME/deployment/private-keys/${TALER_ENV_NAME}-exchange-master.priv")
-
- mkdir -p "$HOME/.config"
-
- taler-deployment-config-generate \
- --exchange-pub "$EXCHANGE_PUB" \
- --currency "$TALER_CONFIG_CURRENCY" \
- --outdir "$HOME/.config" \
- --envname "$TALER_ENV_NAME" \
- --frontends-apitoken "$TALER_ENV_FRONTENDS_APITOKEN"
-
-taler-config -s exchange-account-1 \
- -o PAYTO_URI \
- -V "payto://sepa/bank.${TALER_ENV_NAME}.taler.net/eufin/sandbox/$IBAN_EXCHANGE"
-}
-
-##
-## Step 1: Generate config
-##
-
-echo -n "Generating configuration.."
-case $TALER_ENV_NAME in
- tanker|demo|test|int|local)
- generate_config
- ;;
- *)
- echo "Not generating config for env $TALER_ENV_NAME"
- ;;
-esac
-echo " OK"
-##
-## Step 1b: initialize database
-##
-if test $WITH_DB_RESET = resetDb; then
- echo -n "Reset and init exchange DB.."
- taler-exchange-dbinit --reset
- echo " OK"
-fi
-##
-## Step 2: Copy key material and update denom keys
-##
-
-# For demo, make sure the link to shared data between demo-blue and demo-green is
-# set up properly.
-case $TALER_ENV_NAME in
- demo)
- echo -n "Syminking demo's taler-data/ to the color's home directory.."
- ln -sfT ~demo/shared-data ~/taler-data
- # Check if we won't mess up permissions later
- if [[ ! -g ~/taler-data ]]; then
- echo "the shared-data directory should have the set-group-id bit set"
- exit 1
- fi
- echo " OK"
- ;;
-esac
-
-echo -n "Trying to copy the exchange private key from deployment.git.."
-case $TALER_ENV_NAME in
- demo|test|int|local)
- EXCHANGE_PUB=$(gnunet-ecc -p "$HOME/deployment/private-keys/${TALER_ENV_NAME}-exchange-master.priv")
- EXCHANGE_PRIV_FILE=$(taler-config -f -s exchange-offline -o master_priv_file)
- if [[ -e "$EXCHANGE_PRIV_FILE" ]]; then
- EXCHANGE_PUB2=$(gnunet-ecc -p "$EXCHANGE_PRIV_FILE")
- if [[ "$EXCHANGE_PUB" != "$EXCHANGE_PUB2" ]]; then
- echo "Warning: Different exchange private key already exists, not copying"
- fi
- else
- mkdir -p "$(dirname "$EXCHANGE_PRIV_FILE")"
- cp "$HOME/deployment/private-keys/${TALER_ENV_NAME}-exchange-master.priv" "$EXCHANGE_PRIV_FILE"
- fi
- ;;
- *)
- echo "Not copying key material for env $TALER_ENV_NAME"
- ;;
-esac
-echo " OK"
-
-echo -n "Add this exchange to the auditor..."
-EXCHANGE_MASTER_PUB=$(taler-config -s exchange -o master_public_key)
-taler-auditor-exchange \
- -m "$EXCHANGE_MASTER_PUB" \
- -u "$(taler-config -s exchange -o base_url)" || true
-# Make configuration accessible to auditor
-chmod 750 "$HOME/.config"
-echo " OK"
-
-##
-## Step 3: Set up the exchange key material
-##
-
-echo -n "Setup exchange's key material.."
-taler-deployment-arm -s
-
-# Quickly start+shutdown exchange httpd and crypto SM helpers
-taler-deployment-arm -i taler-exchange
-taler-deployment-arm -i taler-exchange-secmod-rsa
-taler-deployment-arm -i taler-exchange-secmod-eddsa
-
-sleep 2 # FIXME: poll keys?
-if ! taler-deployment-arm -I | grep "^taler-exchange" | grep "status=started" > /dev/null; then
- echo "Exchange didn't start, cannot set up keys"
- exit 1
-fi
-if ! taler-deployment-arm -I | grep "^taler-exchange-secmod-rsa" | grep "status=started" > /dev/null; then
- echo "Exchange (RSA module) didn't start, cannot set up keys."
- exit 1
-fi
-
-if ! taler-deployment-arm -I | grep "^taler-exchange-secmod-eddsa" | grep "status=started" > /dev/null; then
- echo "Exchange (EDDSA module) didn't start, cannot set up keys."
- exit 1
-fi
-
-taler-exchange-offline download sign upload
-
-payto_uri=$(taler-config -s exchange-account-1 -o payto_uri)
-taler-exchange-offline enable-account "$payto_uri" upload
-
-# Set up wire fees for next 5 years
-year=$(date +%Y)
-curr=$TALER_CONFIG_CURRENCY
-for y in $(seq $year $((year + 5))); do
- taler-exchange-offline wire-fee $y sepa "$curr:0.01" "$curr:0.01" upload
-done
-
-taler-deployment-arm -k taler-exchange
-taler-deployment-arm -k taler-exchange-secmod-rsa
-taler-deployment-arm -k taler-exchange-secmod-eddsa
-echo " OK"
-# Give time to store to disk.
-sleep 5
-
-##
-## Step 4: Set up euFin
-##
-
-if test $WITH_DB_RESET = resetDb; then
- echo -n "Resetting euFin databases.."
-    # NOTE/FIXME: these values _could_ be extracted from
-    # the environment, which already contains the DB
-    # connection strings.
- rm ~/nexus.sqlite
- rm ~/sandbox.sqlite
- echo " OK"
-fi
-export LIBEUFIN_SANDBOX_USERNAME="admin"
-export LIBEUFIN_SANDBOX_PASSWORD=${LIBEUFIN_ENV_SANDBOX_ADMIN_PASSWORD}
-# $1 = ebics user id, $2 = ebics partner, $3 = bank connection name
-# $4 = bank account name local to Nexus, $5 = bank account name as known
-# by Sandbox
-function prepare_nexus_account() {
- echo -n "Making bank connection $3 ..."
- libeufin-cli connections new-ebics-connection \
- --ebics-url="${SANDBOX_URL}ebicsweb" \
- --host-id=$EBICS_HOST \
- --partner-id=$2 \
- --ebics-user-id=$1 \
- $3 > /dev/null
- echo " OK"
- echo -n "Connecting $3 ..."
- libeufin-cli connections connect $3 > /dev/null
- echo " OK"
- echo -n "Importing Sandbox bank account ($5) to Nexus ($4) ..."
- libeufin-cli connections download-bank-accounts $3 > /dev/null
- libeufin-cli connections import-bank-account \
- --offered-account-id=$5 --nexus-bank-account-id=$4 $3 > /dev/null
- echo " OK"
-    # Schedule the background tasks that submit payments and fetch this account's history.
- echo -n "Setting background payment initiator.."
- libeufin-cli accounts task-schedule $4 \
- --task-type="submit" \
- --task-name='submit-payments-every-second' \
- --task-cronspec='* * *'
- echo " OK"
- echo -n "Setting background history fetch.."
- libeufin-cli accounts task-schedule $4 \
- --task-type="fetch" \
- --task-name='fetch-reports-every-second' \
- --task-cronspec='* * *' \
- --task-param-level=report \
- --task-param-range-type=latest
- echo " OK"
-}
-
-# $1=ebics username, $2=ebics partner name,
-# $3=person name, $4=sandbox bank account name, $5=iban
-function prepare_sandbox_account() {
- echo -n "Activating ebics subscriber $1 at the sandbox ..."
- libeufin-cli \
- sandbox --sandbox-url=$SANDBOX_URL \
- ebicssubscriber create \
- --host-id=$EBICS_HOST \
- --partner-id=$2 \
- --user-id=$1
- echo " OK"
- echo -n "Giving a bank account ($4) to $1 ..."
- libeufin-cli \
- sandbox --sandbox-url=$SANDBOX_URL \
- ebicsbankaccount create \
- --iban=$5 \
- --bic="BCMAESM1XXX"\
- --person-name="$3" \
- --account-name=$4 \
- --ebics-user-id=$1 \
- --ebics-host-id=$EBICS_HOST \
- --ebics-partner-id=$2 \
- --currency=$TALER_CONFIG_CURRENCY
- echo " OK"
-}
-
-NEXUS_URL="http://localhost:5222/"
-SANDBOX_URL="http://localhost:5111/"
-
-echo -n "Making Sandbox superuser..."
-libeufin-sandbox superuser admin --password=${LIBEUFIN_ENV_SANDBOX_ADMIN_PASSWORD}
-echo " OK"
-
-echo -n "Lunching Sandbox..."
-taler-deployment-arm -i libeufin-sandbox
-
-if ! curl -s --retry 5 --retry-connrefused $SANDBOX_URL > /dev/null; then
- echo "Could not launch Sandbox"
- stop_running
- exit 1
-fi
-echo " OK"
-
-echo -n "Launching Nexus..."
-taler-deployment-arm -i libeufin-nexus
-if ! curl -s --retry 5 --retry-connrefused $NEXUS_URL > /dev/null; then
- echo "Could not launch Nexus"
- stop_running
- exit 1
-fi
-echo " OK"
-
-EBICS_HOST="ebicsDeployedHost"
-
-echo -n "Make Sandbox EBICS host..."
-libeufin-cli \
- sandbox --sandbox-url=$SANDBOX_URL \
- ebicshost create \
- --host-id=$EBICS_HOST
-echo " OK"
-
-export IBAN_MERCHANT="ME00000000000000000001"
-export IBAN_CUSTOMER="WA00000000000000000000"
-
-# Note: the EBICS schema doesn't allow dashed names.
-prepare_sandbox_account \
- ebicsuserExchange \
- ebicspartnerExchange \
- "Person Exchange" \
- sandbox-account-exchange \
- $IBAN_EXCHANGE
-prepare_sandbox_account \
- ebicsuserMerchant \
- ebicspartnerMerchant \
- "Person Merchant" \
- sandbox-account-merchant \
- $IBAN_MERCHANT
-prepare_sandbox_account \
- ebicsuserCustomer \
- ebicspartnerCustomer \
- "Person Customer" \
- sandbox-account-customer \
- $IBAN_CUSTOMER
-
-# Only the exchange needs Nexus.
-EXCHANGE_NEXUS_USERNAME=exchange-nexus-user
-EXCHANGE_NEXUS_PASSWORD=exchange-nexus-password
-echo -n "Make Nexus superuser ..."
-libeufin-nexus superuser $EXCHANGE_NEXUS_USERNAME --password=$EXCHANGE_NEXUS_PASSWORD
-echo " OK"
-export LIBEUFIN_NEXUS_URL=$NEXUS_URL
-export LIBEUFIN_NEXUS_USERNAME=$EXCHANGE_NEXUS_USERNAME
-export LIBEUFIN_NEXUS_PASSWORD=$EXCHANGE_NEXUS_PASSWORD
-
-# FIXME: the command below is likely not needed.  Please remove it,
-# run the test, and commit+push if everything still works!
-prepare_nexus_account \
- ebicsuserExchange \
- ebicspartnerExchange \
- bankconnection-exchange \
- nexus-bankaccount-exchange \
- sandbox-account-exchange
-
-echo -n "Create Taler facade ..."
-libeufin-cli facades new-taler-wire-gateway-facade \
- --currency=$TALER_CONFIG_CURRENCY \
- --facade-name=facade-exchange \
- bankconnection-exchange nexus-bankaccount-exchange
-echo " OK"
-FACADE_URL=$(libeufin-cli facades list | jq .facades[0].baseUrl | tr -d \")
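-# (The jq path above assumes a JSON response shaped roughly like
-#   {"facades": [{"baseUrl": "http://localhost:5222/facades/..."}]}
-# only the first facade's baseUrl is used; the exact URL is illustrative.)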
-
-taler-deployment-arm -k libeufin-nexus
-taler-deployment-arm -k libeufin-sandbox
-
-# Point the exchange to the facade.
-taler-config -s exchange-accountcredentials-1 \
- -o WIRE_GATEWAY_URL \
- -V "${FACADE_URL}"
-
-taler-config -s exchange-accountcredentials-1 \
- -o USERNAME \
- -V "${EXCHANGE_NEXUS_USERNAME}"
-
-taler-config -s exchange-accountcredentials-1 \
- -o PASSWORD \
- -V "${EXCHANGE_NEXUS_PASSWORD}"
-
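-# (The three taler-config calls above amount to the following taler.conf
-# section; the facade URL shown is illustrative:
-#
-#   [exchange-accountcredentials-1]
-#   WIRE_GATEWAY_URL = http://localhost:5222/facades/facade-exchange/taler-wire-gateway/
-#   USERNAME = exchange-nexus-user
-#   PASSWORD = exchange-nexus-password
-# )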
-
-##
-## Step 5: Adjust some permissions
-##
-
-case $TALER_ENV_NAME in
- demo|test|int)
- # Make sure the web server can read ~/local
- chmod og+rx ~/local
-
- # Make sure that shared files created by this user
- # are group writable and readable.
- find ~/taler-data/ -user "$USER" -exec chmod g+rw {} \;
- ;;
- *)
- ;;
-esac
-
-##
-## Step 6: Set up merchant
-##
-
-if test $WITH_DB_RESET = resetDb; then
- echo -n "Reset and init merchant database.."
- taler-merchant-dbinit --reset
- echo " OK"
-fi
-
-# Making sure ARM is not running yet.
-taler-deployment-arm -e
-
-# Need the following services to config instances and tip reserve:
-taler-deployment-arm -s
-
-taler-deployment-arm -i taler-exchange
-taler-deployment-arm -i taler-exchange-secmod-rsa
-taler-deployment-arm -i taler-exchange-secmod-eddsa
-sleep 5
-
-if ! taler-deployment-arm -I | grep "^taler-exchange" | grep "status=started" > /dev/null; then
- echo "Exchange didn't start, cannot create tip reserve."
- exit 1
-fi
-
-if ! taler-deployment-arm -I | grep "^taler-exchange-secmod-rsa" | grep "status=started" > /dev/null; then
- echo "Exchange (RSA module) didn't start, cannot create tip reserve."
- exit 1
-fi
-
-if ! taler-deployment-arm -I | grep "^taler-exchange-secmod-eddsa" | grep "status=started" > /dev/null; then
- echo "Exchange (EDDSA module) didn't start, cannot create tip reserve."
- exit 1
-fi
-
-echo "Configuring instances"
-taler-deployment-config-instances-iban
-echo "Stopping all the services"
-# The following three commands could be dropped, since the final
-# `taler-deployment-arm -e` already stops every running service.
-taler-deployment-arm -k taler-exchange
-taler-deployment-arm -k taler-exchange-secmod-rsa
-taler-deployment-arm -k taler-exchange-secmod-eddsa
-taler-deployment-arm -e
diff --git a/bin/taler-deployment-restart b/bin/taler-deployment-restart
deleted file mode 100755
index 88eed4e..0000000
--- a/bin/taler-deployment-restart
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/bash
-
-set -eu
-
-base=$HOME
-
-export PATH="$base/deployment/bin":$PATH
-
-# might fail if invoked from another script with ulimit
-ulimit -c $((100 * 1024)) &>/dev/null || true
-
-cd $HOME
-
-if taler-deployment-arm -T 300ms -I &>/dev/null; then
- # looks like deployment is running, stop it
- taler-deployment-arm -e -T 10s &>/dev/null
-fi
-
-exec taler-deployment-start
diff --git a/bin/taler-deployment-restart-with-eufin b/bin/taler-deployment-restart-with-eufin
deleted file mode 100755
index 0f945bd..0000000
--- a/bin/taler-deployment-restart-with-eufin
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/bash
-
-set -eu
-
-base=$HOME
-
-export PATH="$base/deployment/bin":$PATH
-
-# might fail if invoked from another script with ulimit
-ulimit -c $((100 * 1024)) &>/dev/null || true
-
-cd $HOME
-
-if taler-deployment-arm -T 300ms -I &>/dev/null; then
- # looks like deployment is running, stop it
- taler-deployment-arm -e -T 10s &>/dev/null
-fi
-
-exec taler-deployment-start-with-eufin
diff --git a/bin/taler-deployment-start b/bin/taler-deployment-start
deleted file mode 100755
index 271a7e9..0000000
--- a/bin/taler-deployment-start
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/bin/bash
-
-set -eu
-
-export PATH="$HOME/deployment":$PATH
-
-# might fail if invoked from another script with ulimit
-ulimit -c $((100 * 1024)) &>/dev/null || true
-
-cd $HOME
-
-taler_config_file=$HOME/.config/taler.conf
-
-if [[ ! -e "$taler_config_file" ]]; then
- echo "taler config file ($taler_config_file) missing"
- exit 1
-fi
-
-taler-deployment-arm -s
-
-taler-deployment-arm -i taler-exchange
-taler-deployment-arm -i taler-exchange-secmod-eddsa
-taler-deployment-arm -i taler-exchange-secmod-rsa
-taler-deployment-arm -i taler-auditor
-taler-deployment-arm -i taler-merchant
-taler-deployment-arm -i taler-demobank
-taler-deployment-arm -i taler-donations
-taler-deployment-arm -i taler-blog
-taler-deployment-arm -i taler-landing
-taler-deployment-arm -i taler-survey
-taler-deployment-arm -i taler-aggregator
-taler-deployment-arm -i taler-exchange-wirewatch
-taler-deployment-arm -i taler-sync
-taler-deployment-arm -i taler-transfer
-taler-deployment-arm -i anastasis
-
-if taler-config -s twister -o taler_deploy &>/dev/null; then
- taler-deployment-arm -i taler-twister
- taler-deployment-arm -i taler-twister-exchange
- taler-deployment-arm -i taler-twister-bank
-fi
-
-exit 0
diff --git a/bin/taler-deployment-start-with-eufin b/bin/taler-deployment-start-with-eufin
deleted file mode 100755
index ea97734..0000000
--- a/bin/taler-deployment-start-with-eufin
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/bin/bash
-
-set -eu
-
-export PATH="$HOME/deployment":$PATH
-
-# might fail if invoked from another script with ulimit
-ulimit -c $((100 * 1024)) &>/dev/null || true
-
-cd $HOME
-
-taler_config_file=$HOME/.config/taler.conf
-
-if [[ ! -e "$taler_config_file" ]]; then
- echo "taler config file ($taler_config_file) missing"
- exit 1
-fi
-
-taler-deployment-arm -s
-
-taler-deployment-arm -i taler-exchange
-taler-deployment-arm -i taler-exchange-secmod-eddsa
-taler-deployment-arm -i taler-exchange-secmod-rsa
-taler-deployment-arm -i taler-auditor
-taler-deployment-arm -i taler-merchant
-taler-deployment-arm -i libeufin-sandbox
-taler-deployment-arm -i libeufin-nexus
-taler-deployment-arm -i taler-donations
-taler-deployment-arm -i taler-blog
-taler-deployment-arm -i taler-landing
-taler-deployment-arm -i taler-survey
-taler-deployment-arm -i taler-aggregator
-taler-deployment-arm -i taler-exchange-wirewatch
-taler-deployment-arm -i taler-sync
-taler-deployment-arm -i taler-transfer
-taler-deployment-arm -i anastasis
-
-if taler-config -s twister -o taler_deploy &>/dev/null; then
- taler-deployment-arm -i taler-twister
- taler-deployment-arm -i taler-twister-exchange
- taler-deployment-arm -i taler-twister-bank
-fi
-
-exit 0
diff --git a/bin/taler-deployment-stop b/bin/taler-deployment-stop
deleted file mode 100755
index e08ee71..0000000
--- a/bin/taler-deployment-stop
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-
-base=$HOME
-
-export PATH="$base/deployment":$PATH
-
-# might fail if invoked from another script
-ulimit -c $((100 * 1024)) &>/dev/null
-
-cd $HOME
-
-taler-deployment-arm -e -T 10s &>/dev/null
diff --git a/bin/taler-log-adapter b/bin/taler-log-adapter
deleted file mode 100755
index 07321fa..0000000
--- a/bin/taler-log-adapter
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python3
-# This file is part of GNU TALER.
-# Copyright (C) 2018 INRIA
-#
-# TALER is free software; you can redistribute it and/or modify it under the
-# terms of the GNU Lesser General Public License as published by the Free Software
-# Foundation; either version 2.1, or (at your option) any later version.
-#
-# TALER is distributed in the hope that it will be useful, but WITHOUT ANY
-# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
-# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License along with
-# GNU TALER; see the file COPYING. If not, see <http://www.gnu.org/licenses/>
-#
-# @author Florian Dold
-
-"""
-Wrapper for programs that log to stderr. Redirects logs to a file specified by
-a path with strfmt-style placeholders in it.
-"""
-
-from subprocess import Popen, PIPE
-import sys
-import os
-import os.path
-import signal
-import time
-
-def handler(signum, frame):
- if p:
- os.kill(p.pid, signum)
- else:
- sys.exit(-1)
-
-def touchp(path):
- dir = os.path.dirname(path)
- if dir:
- os.makedirs(dir, exist_ok=True)
-
-if len(sys.argv) < 3:
- print("Usage: {} logfile prog_and_args...".format(sys.argv[0]), file=sys.stderr)
- sys.exit(-1)
-
-p = None
-catchable_sigs = set(signal.Signals) - {signal.SIGKILL, signal.SIGSTOP}
-for sig in catchable_sigs:
- signal.signal(sig, handler)
-p = Popen(sys.argv[2:], stderr=PIPE, shell=False)
-
-log = sys.argv[1]
-last_name = None
-
-while p.poll() is None:
- full_name = time.strftime(log)
- if full_name != last_name:
- touchp(full_name)
- last_name = full_name
- last_read = p.stderr.readline()
-    # p.stderr is a byte stream, so readline() returns b"" at EOF.
-    if not last_read:
-        break
- with open(full_name, "ab") as f:
- f.write(last_read)
-
-status = p.wait()
-sys.exit(status)
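Note: the wrapper above takes an strftime-style log path followed by the
program to run; a hypothetical invocation (path and program are placeholders)
would be:

    taler-log-adapter "$HOME/logs/exchange-%Y-%m-%d.log" taler-exchange-httpd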
diff --git a/bin/taler_urls.py b/bin/taler_urls.py
deleted file mode 100644
index 8c81e38..0000000
--- a/bin/taler_urls.py
+++ /dev/null
@@ -1,56 +0,0 @@
-from urllib.parse import urlparse
-
-taler_urls = dict(
- online = dict(
- donations = "https://donations.{}.taler.net/",
- blog = "https://shop.{}.taler.net/",
- bank = "https://bank.{}.taler.net/",
- backoffice = "https://backoffice.{}.taler.net/",
- exchange = "https://exchange.{}.taler.net/",
- merchant_backend = "https://backend.{}.taler.net/",
- landing = "https://{}.taler.net/",
- survey = "https://survey.{}.taler.net/",
- auditor = "https://auditor.{}.taler.net/",
- sync = "https://sync.{}.taler.net/",
- talerbank_payto = "payto://x-taler-bank/bank.{}.taler.net/"
- ),
- offline = dict(
- donations = "http://localhost:5880/",
- blog = "http://localhost:5881/",
- bank = "http://localhost:5882/",
- backoffice = "http://localhost:5883/",
- exchange = "http://localhost:5884/",
- merchant_backend = "http://localhost:5885/",
- landing = "http://localhost:5886/",
- survey = "http://localhost:5887/",
- auditor = "http://localhost:5888/",
- sync = "http://localhost:5889/",
- talerbank_payto = "payto://x-taler-bank/localhost:5882/"
- )
-)
-
-def get_urls(envname):
- if envname == "tanker":
- return dict(
- donations = "https://donations.grumla.se/",
- blog = "https://shop.grumla.se/",
- auditor = "#",
- bank = "https://bank.grumla.se/",
- backoffice = "https://backoffice.grumla.se/",
- exchange = "https://exchange.grumla.se/",
- merchant_backend = "https://merchant-backend.grumla.se/",
- landing = "https://grumla.se/",
- survey = "https://survey.grumla.se/",
- sync = "https://sync.grumla.se/",
- talerbank_payto = "payto://x-taler-bank/bank.grumla.se/"
- )
- if envname == "local":
- return taler_urls["offline"]
- return dict((k, v.format(envname))
- for k, v in taler_urls["online"].items()
- )
-
-def get_port(localhost_url):
- parsed = urlparse(localhost_url)
- assert(parsed.port)
- return str(parsed.port)
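Note: get_urls() fills the environment name into each online URL, while the
"local" environment maps to the localhost ports above; for example
(illustrative):

    get_urls("demo")["exchange"]          # -> "https://exchange.demo.taler.net/"
    get_port(get_urls("local")["bank"])   # -> "5882"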