#!/usr/bin/env python3
# This file is part of GNU Taler.
#
# GNU Taler is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GNU Taler is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Taler. If not, see <https://www.gnu.org/licenses/>.
import signal
import socket
import shutil
import atexit
import click
import types
import os
import sys
import os.path
import subprocess
import time
import random
import logging
from os import listdir
from os.path import isdir, join
from pathlib import Path
from dataclasses import dataclass
from typing import List, Callable
from shutil import copy
from multiprocessing import Process
from string import ascii_letters, ascii_uppercase
from sys import exit
from urllib.parse import urljoin, quote
from os import remove
import requests
from collections import OrderedDict
import errno
from pathlib import Path
from subprocess import Popen, DEVNULL, PIPE
from datetime import datetime
from requests_unixsocket import Session
from flask import Flask, request, Response
from werkzeug.datastructures import Headers
from werkzeug.exceptions import HTTPException
# Root of all deployment state (sources, config, data, logs, sockets).
TALER_ROOT_DIR = Path.home() / ".taler"
def print_nn(msg):
    """Print *msg* with no trailing newline, flushing immediately.

    Used for progress lines like "doing X..." that are completed by a
    later ' OK' print.
    """
    sys.stdout.write(str(msg))
    sys.stdout.flush()
@dataclass
class Repo:
    """A buildable git repository of the Taler deployment."""
    name: str  # directory name under ~/.taler/sources
    url: str  # git clone URL
    deps: List[str]  # names of repos that must be built before this one
    builder: Callable[["Repo", Path], None]  # build routine for this repo
# Top-level click group; subcommands attach via @cli.command().
# (Deliberately no docstring: click would show it as CLI help text.)
@click.group()
def cli():
    pass
def split_repos_list(repos):
    """Split a comma-separated repository list, dropping empty entries."""
    names = repos.split(",")
    return [name for name in names if name != ""]
def update_checkout(r: Repo, p: Path):
    """Clean the repository's working directory and
    update it to match the latest version of the upstream branch
    that we are tracking."""
    # Drop untracked files and local modifications.
    subprocess.run(["git", "-C", str(p), "clean", "-fdx"], check=True)
    subprocess.run(["git", "-C", str(p), "fetch"], check=True)
    subprocess.run(["git", "-C", str(p), "reset"], check=True)
    # Resolve the upstream tracking ref (e.g. "origin/master").
    res = subprocess.run(
        [
            "git",
            "-C",
            str(p),
            "rev-parse",
            "--abbrev-ref",
            "--symbolic-full-name",
            "@{u}",
        ],
        stderr=subprocess.DEVNULL,
        stdout=subprocess.PIPE,
        encoding="utf-8",
    )
    if res.returncode != 0:
        # No upstream branch configured: stay on the current commit.
        ref = "HEAD"
    else:
        ref = res.stdout.strip("\n ")
    print(f"resetting {r.name} to ref {ref}")
    subprocess.run(["git", "-C", str(p), "reset", "--hard", ref], check=True)
def default_configure(*extra):
    """Run ./configure with --prefix=$HOME/.local plus any extra flags."""
    prefix = Path.home() / ".local"
    cmd = ["./configure", f"--prefix={prefix}", *extra]
    subprocess.run(cmd, check=True)
def pyconfigure(*extra):
    """For python programs, --prefix doesn't work."""
    cmd = ["./configure", *extra]
    subprocess.run(cmd, check=True)
def build_libeufin(r: Repo, p: Path):
    """Update, build and install libeufin; touch the buildstamp on success."""
    update_checkout(r, p)
    subprocess.run(["./bootstrap"], check=True)
    default_configure()
    subprocess.run(["make", "install"], check=True)
    # Marks the build as completed; get_stale_repos() compares its mtime.
    (p / "taler-buildstamp").touch()
def build_libmicrohttpd(r: Repo, p: Path):
    """Update, build and install libmicrohttpd."""
    update_checkout(r, p)
    subprocess.run(["./bootstrap"], check=True)
    # Debian gnutls packages are too old ...
    default_configure("--with-gnutls=/usr/local")
    subprocess.run(["make"], check=True)
    subprocess.run(["make", "install"], check=True)
    (p / "taler-buildstamp").touch()
def build_gnunet(r: Repo, p: Path):
    """Update, build and install GNUnet against the local libmicrohttpd."""
    update_checkout(r, p)
    subprocess.run(["./bootstrap"], check=True)
    pfx = Path.home() / ".local"
    default_configure(
        "--enable-logging=verbose",
        f"--with-microhttpd={pfx}",
        "--disable-documentation",
    )
    subprocess.run(["make", "install"], check=True)
    (p / "taler-buildstamp").touch()
def build_exchange(r: Repo, p: Path):
    """Update, build and install the exchange (debug build, no optimization)."""
    update_checkout(r, p)
    subprocess.run(["./bootstrap"], check=True)
    pfx = Path.home() / ".local"
    default_configure(
        "CFLAGS=-ggdb -O0",
        "--enable-logging=verbose",
        f"--with-microhttpd={pfx}",
        f"--with-gnunet={pfx}",
    )
    subprocess.run(["make", "install"], check=True)
    (p / "taler-buildstamp").touch()
def build_wallet(r, p):
    """Update, build and install wallet-core."""
    update_checkout(r, p)
    subprocess.run(["./bootstrap"], check=True)
    default_configure()
    subprocess.run(["make", "install"], check=True)
    (p / "taler-buildstamp").touch()
def build_twister(r, p):
    """Update, build and install the twister (fault-injection proxy)."""
    update_checkout(r, p)
    subprocess.run(["./bootstrap"], check=True)
    pfx = Path.home() / ".local"
    default_configure(
        "CFLAGS=-ggdb -O0",
        "--enable-logging=verbose",
        f"--with-exchange={pfx}",
        f"--with-gnunet={pfx}",
    )
    subprocess.run(["make", "install"], check=True)
    (p / "taler-buildstamp").touch()
def build_merchant(r, p):
    """Update, build and install the merchant backend."""
    update_checkout(r, p)
    subprocess.run(["./bootstrap"], check=True)
    pfx = Path.home() / ".local"
    default_configure(
        "CFLAGS=-ggdb -O0",
        "--enable-logging=verbose",
        f"--with-microhttpd={pfx}",
        f"--with-exchange={pfx}",
        f"--with-gnunet={pfx}",
        "--disable-doc",
    )
    subprocess.run(["make", "install"], check=True)
    (p / "taler-buildstamp").touch()
def build_sync(r, p):
    """Update, build and install the sync (backup) service."""
    update_checkout(r, p)
    subprocess.run(["./bootstrap"], check=True)
    pfx = Path.home() / ".local"
    default_configure(
        "CFLAGS=-ggdb -O0",
        "--enable-logging=verbose",
        f"--with-microhttpd={pfx}",
        f"--with-exchange={pfx}",
        f"--with-merchant={pfx}",
        f"--with-gnunet={pfx}",
        "--disable-doc",
    )
    subprocess.run(["make", "install"], check=True)
    (p / "taler-buildstamp").touch()
def build_anastasis(r, p):
    """Update, build and install the anastasis (key escrow) service."""
    update_checkout(r, p)
    subprocess.run(["./bootstrap"], check=True)
    pfx = Path.home() / ".local"
    default_configure(
        "CFLAGS=-ggdb -O0",
        "--enable-logging=verbose",
        f"--with-microhttpd={pfx}",
        f"--with-exchange={pfx}",
        f"--with-merchant={pfx}",
        f"--with-gnunet={pfx}",
        "--disable-doc",
    )
    subprocess.run(["make", "install"], check=True)
    (p / "taler-buildstamp").touch()
def build_demos(r, p):
    """Update, build and install taler-merchant-demos.

    Uses pyconfigure() (no --prefix) because the demos are a Python
    project.  The unused 'pfx' local from the original was removed.
    """
    update_checkout(r, p)
    pyconfigure()
    subprocess.run(["make", "install"], check=True)
    # Marks the build as completed; get_stale_repos() compares its mtime.
    (p / "taler-buildstamp").touch()
def build_backoffice(r, p):
    """Update and build the back-office application.

    FIX: added check=True to every step, matching all other builders;
    previously a failing bootstrap/configure/make was silently ignored
    and the buildstamp was written anyway.
    """
    update_checkout(r, p)
    subprocess.run(["./bootstrap"], check=True)
    subprocess.run(["./configure"], check=True)
    subprocess.run(["make", "build-single"], check=True)
    (p / "taler-buildstamp").touch()
# Registry of every repository this tool knows how to build, keyed by
# name.  Insertion order matters: dependencies come before dependents,
# and build() sorts the requested repos by this order.
repos = {
    "libmicrohttpd": Repo(
        "libmicrohttpd",
        "git://git.gnunet.org/libmicrohttpd.git",
        [],
        build_libmicrohttpd,
    ),
    "gnunet": Repo(
        "gnunet",
        "git://git.gnunet.org/gnunet.git",
        ["libmicrohttpd"],
        build_gnunet
    ),
    "exchange": Repo(
        "exchange",
        "git://git.taler.net/exchange",
        ["gnunet", "libmicrohttpd"],
        build_exchange,
    ),
    "merchant": Repo(
        "merchant",
        "git://git.taler.net/merchant",
        ["exchange","libmicrohttpd","gnunet"],
        build_merchant,
    ),
    "sync": Repo(
        "sync",
        "git://git.taler.net/sync",
        ["exchange",
         "merchant",
         "gnunet",
         "libmicrohttpd"],
        build_sync,
    ),
    "anastasis": Repo(
        "anastasis",
        "git://git.taler.net/anastasis",
        ["exchange",
         "merchant",
         "libmicrohttpd",
         "gnunet"],
        build_anastasis,
    ),
    "wallet-core": Repo(
        "wallet-core",
        "git://git.taler.net/wallet-core",
        [],
        build_wallet,
    ),
    "libeufin": Repo(
        "libeufin",
        "git://git.taler.net/libeufin.git",
        [],
        build_libeufin,
    ),
    "taler-merchant-demos": Repo(
        "taler-merchant-demos",
        "git://git.taler.net/taler-merchant-demos",
        [],
        build_demos,
    ),
    "twister": Repo(
        "twister",
        "git://git.taler.net/twister",
        ["gnunet", "libmicrohttpd"],
        build_twister,
    ),
}
def get_repos_names() -> List[str]:
    """List checked-out source directories that are known repositories."""
    sources_dir = TALER_ROOT_DIR / "sources"
    names = []
    for entry in listdir(sources_dir):
        if isdir(join(sources_dir, entry)) and repos.get(entry):
            names.append(entry)
    return names
# Map repository names to their Repo descriptors, skipping unknown names.
def load_repos(reposNames) -> List[Repo]:
    """Return the Repo objects for the given names, silently dropping
    names that are not in the 'repos' registry.

    (Original looked each name up twice via repos.get(); one membership
    test plus one indexed lookup is equivalent and clearer.)
    """
    return [repos[name] for name in reposNames if name in repos]
def update_repos(repos: List[Repo]) -> None:
    """Fetch each repository and invalidate its buildstamp when the
    upstream branch has commits we do not have yet.

    Removing the 'taler-buildstamp' file makes get_stale_repos() treat
    the repository as needing a rebuild.
    """
    for r in repos:
        r_dir = TALER_ROOT_DIR / "sources" / r.name
        subprocess.run(["git", "-C", str(r_dir), "fetch"], check=True)
        res = subprocess.run(
            ["git", "-C", str(r_dir), "status", "-sb"],
            check=True,
            stdout=subprocess.PIPE,
            encoding="utf-8",
        )
        # 'git status -sb' prints "behind N" when upstream is ahead.
        if "behind" in res.stdout:
            # FIX: print the repo's name, not the whole dataclass repr
            # (consistent with the message in update_checkout()).
            print(f"new commits in {r.name}")
            s = r_dir / "taler-buildstamp"
            if s.exists():
                s.unlink()
def get_stale_repos(repos: List[Repo]) -> List[Repo]:
    """Return the repos that need rebuilding.

    A repo is stale when it has no buildstamp at all, or when one of its
    dependencies carries a newer timestamp than its own buildstamp.

    FIX: the dependency check used timestamps.get("dep", 0) — the string
    literal "dep" instead of the loop variable — so staleness never
    propagated from a dependency to its dependents.
    """
    timestamps = {}
    stale = []
    for r in repos:
        r_dir = TALER_ROOT_DIR / "sources" / r.name
        s = r_dir / "taler-buildstamp"
        if not s.exists():
            # Never built: record "now" so dependents also rebuild.
            timestamps[r.name] = time.time()
            stale.append(r)
            continue
        ts = timestamps[r.name] = s.stat().st_mtime
        for dep in r.deps:
            # When 'dep' is not found, it has been
            # excluded from the compilation.
            if timestamps.get(dep, 0) > ts:
                stale.append(r)
                break
    return stale
@cli.command()
@click.option(
    "--without-repos", metavar="WITHOUT REPOS",
    help="WITHOUT REPOS is a unspaced and comma-separated list \
of the repositories to _exclude_ from compilation",
    default="")
@click.option(
    "--only-repos", metavar="ONLY REPOS",
    help="ONLY REPOS is a unspaced and comma-separated exclusive list \
of the repositories to include in the compilation",
    default="")
def build(without_repos, only_repos) -> None:
    """Build the deployment from source."""
    # The two filters are mutually exclusive.
    if only_repos != "" and without_repos != "":
        print("Either use --only-repos or --without-repos")
        exit(1)
    repos_names = get_repos_names()
    if only_repos != "":
        repos_names = list(filter(
            lambda x: x in split_repos_list(only_repos),
            repos_names
        ))
    if without_repos != "":
        repos_names = list(filter(
            lambda x: x not in split_repos_list(without_repos),
            repos_names
        ))
    # Reorder the list of repositories so that the
    # most fundamental dependencies appear left-most,
    # following the insertion order of the 'repos' registry.
    repos_keys = repos.keys()
    sorted_repos = sorted(
        set(repos_keys).intersection(repos_names),
        key=lambda x: list(repos_keys).index(x)
    )
    target_repos = load_repos(sorted_repos) # Get Repo objects
    update_repos(target_repos)
    stale = get_stale_repos(target_repos)
    print(f"found stale repos: {[r.name for r in stale]}")
    for r in stale:
        # Warn, if a dependency is not being built:
        diff = set(r.deps) - set(repos_names)
        if len(diff) > 0:
            print(f"WARNING: those dependencies are not being built: {diff}")
        p = TALER_ROOT_DIR / "sources" / r.name
        # Builders run ./configure & co., so work from the repo directory.
        os.chdir(str(p))
        r.builder(r, p)
@cli.command()
@click.option(
    "--repos", "-r",
    metavar="REPOS",
    help="REPOS is a unspaced and comma-separated list of the repositories to clone.",
    default="libmicrohttpd,gnunet,exchange,merchant,wallet-core,taler-merchant-demos,sync,anastasis,libeufin",
    show_default=True,
)
@click.option(
    "--list-repos/--no-list-repos", default=False,
    help="Lists the repositories that were bootstrapped.",
)
def bootstrap(list_repos, repos) -> None:
    """Clone all the specified repositories."""
    # NOTE: the 'repos' option (a comma-separated string) shadows the
    # module-level 'repos' registry within this function.
    if list_repos:
        # Only show what is already checked out, then quit.
        for repo in get_repos_names():
            print(repo)
        return
    # Download the repository.
    def checkout_repos(repos: List[Repo]):
        if len(repos) == 0:
            print("No repositories can be checked out. Spelled correctly?")
            return
        sources = TALER_ROOT_DIR / "sources"
        for r in repos:
            r_dir = sources / r.name
            # Clone only when the checkout does not exist yet.
            if not r_dir.exists():
                r_dir.mkdir(parents=True, exist_ok=True)
                subprocess.run(["git", "-C", str(sources), "clone", r.url], check=True)
    reposList = split_repos_list(repos)
    checkout_repos(load_repos(reposList))
class TalerReverseProxy(Flask):
    """Flask app reverse-proxying /<component>/... requests to the
    per-component Unix domain socket <unix_sockets_dir>/<component>.sock.

    FIXES over the previous version:
    * The URL rules had lost their Flask placeholders (they read "/" and
      "//"), so 'component' and 'path' were never captured; the
      '<component>' and '<path:path>' converters are restored.
    * proxy() assigned its request function only for GET/POST/PUT/DELETE
      although HEAD and OPTIONS were registered too, which raised
      UnboundLocalError (masked as a 500); dispatch now goes through
      Session.request(method, ...) uniformly.
    """

    def __init__(
        self,
        log_dir,
        unix_sockets_dir,
        proxy_proto,
        proxy_netloc
    ):
        super().__init__("taler-proxy")
        all_methods = [
            "GET", "POST", "HEAD",
            "DELETE", "OPTIONS", "PUT"
        ]
        self.log_dir = log_dir
        self.unix_sockets_dir = unix_sockets_dir
        self.proxy_proto = proxy_proto
        self.proxy_netloc = proxy_netloc
        self.add_url_rule("/", view_func=self.index)
        self.add_url_rule("/<component>", view_func=self.proxy, methods=all_methods)
        self.add_url_rule("/<component>/", view_func=self.proxy, methods=all_methods)
        self.add_url_rule("/<component>/<path:path>", view_func=self.proxy, methods=all_methods)

    def stop(self):
        # Terminate the background server process and reap it.
        self.proc.terminate()
        self.proc.join()

    def get_log_filename(self):
        return self.logger.root.handlers[0].baseFilename

    def start(self):
        """Run the proxy on localhost:8080 in a background process."""
        if not self.log_dir.is_dir():
            os.makedirs(self.log_dir)
        logging.basicConfig(filename=self.log_dir / "reverse-proxy.log", filemode="a")
        self.logger = logging.getLogger("werkzeug")
        self.logger.setLevel(logging.DEBUG)
        # Suppress werkzeug's "restarting with reloader" behavior.
        os.environ["WERKZEUG_RUN_MAIN"] = "true"
        self.proc = Process(
            target=self.run,
            kwargs=dict(debug=False, port=8080, host="localhost")
        )
        # This proxy does set Content-Length, as it often
        # reassembles chunked responses. Future versions
        # will echo back chunks as they arrive.
        self.proc.start()
        atexit.register(self.stop)

    def index(self):
        return "I'm the Taler reverse proxy."

    def iter_response(self, r):
        # Generator relaying the upstream body line by line (kept for
        # future chunked responses; unused by proxy() right now).
        for line in r.iter_lines():
            self.logger.debug("Responding chunk " + line.decode("utf-8"))
            yield line

    def proxy(self, component, path=""):
        """Forward the current request to <component>'s Unix socket and
        relay the upstream response back to the client."""
        s = Session()
        try:
            proxied_headers = {
                "X-Forwarded-Host": self.proxy_netloc,
                "X-Forwarded-Proto": self.proxy_proto,
                "X-Forwarded-Prefix": f"/{component}"
            }
            for k, v in request.headers.items():
                proxied_headers[k] = v
            socket_path = f"{self.unix_sockets_dir}/{component}.sock"
            self.logger.debug("Proxying to: " + socket_path + "/" + path)
            uri = f"http+unix://{quote(socket_path, safe='')}/{path}"
            if len(request.args) > 0:
                uri += f"?{request.query_string.decode()}"
            # Dispatch on the HTTP verb generically (handles HEAD and
            # OPTIONS too, which the old if-chain missed).
            resp = s.request(
                request.method,
                uri,
                headers=proxied_headers,
                data=request.get_data(),
            )
        except Exception as error:
            self.logger.error(error)
            return "Could not connect to upstream", 500
        self.logger.debug(f"Upstream responds: {resp.text}")
        headers = Headers()
        for k in resp.headers.keys():
            # This version does not send chunked responses, so
            # remove that header when it's found. Content-Length
            # will be added before sending the response.
            if k == "Transfer-Encoding" and resp.headers[k] == "chunked":
                continue
            # The content was already decompressed by this proxy, therefore
            # the following header would confuse the client.
            if k == "Content-Encoding" and (resp.headers[k] in ["deflate", "gzip"]):
                continue
            headers.set(k, resp.headers[k])
        return Response(
            response=resp.text,
            status=resp.status_code,
            headers=headers,
        )

    def get_app(self):
        return self
@cli.command()
def prepare():
"""Generate configuration, run-time blobs, instances, euFin accounts."""
def fail(reason=None, proxy_proc=None):
    # Abort the whole preparation: print the reason (and where the
    # reverse proxy logged, if given), then exit non-zero.
    if reason:
        print("ERROR: " + reason)
    if proxy_proc:
        print(f"Proxy logs in: {proxy_proc.get_log_filename()}")
    exit(1)
def kill(proc):
    # Terminate a child process and reap it.
    proc.terminate()
    proc.wait()
def get_nexus_cli_env(
    username,
    password,
    nexus_url
):
    # Environment for libeufin-cli invocations talking to Nexus.
    env = os.environ.copy()
    env["LIBEUFIN_NEXUS_USERNAME"] = username
    env["LIBEUFIN_NEXUS_PASSWORD"] = password
    env["LIBEUFIN_NEXUS_URL"] = nexus_url
    return env
def get_sandbox_cli_env(
    username, password
):
    # Environment for libeufin-cli invocations talking to the Sandbox.
    env = os.environ.copy()
    env["LIBEUFIN_SANDBOX_USERNAME"] = username
    env["LIBEUFIN_SANDBOX_PASSWORD"] = password
    return env
# Will be extended to include a SANDBOX_ADMIN_TOKEN
# that will obsolete the 'superuser' flag of ordinary
# user accounts. Likewise, the client side will be
# modified to use such token.
def get_sandbox_server_env(db_file, base_url, admin_password):
    # Environment for the libeufin-sandbox server process.
    env = os.environ.copy()
    env["LIBEUFIN_SANDBOX_DB_CONNECTION"] = f"jdbc:sqlite:{db_file}"
    env["LIBEUFIN_SANDBOX_BASE_URL"] = base_url
    env["LIBEUFIN_SANDBOX_ADMIN_PASSWORD"] = admin_password
    return env
def get_nexus_server_env(db_file, base_url):
    # Environment for the libeufin-nexus server process.
    env = os.environ.copy()
    env["LIBEUFIN_NEXUS_DB_CONNECTION"] = f"jdbc:sqlite:{db_file}"
    env["LIBEUFIN_NEXUS_BASE_URL"] = base_url
    return env
def urljoin_nodrop(a, b):
    """Join *b* onto *a* keeping a's existing path components.

    Plain urljoin() drops the base's last path segment unless the base
    ends with '/', and restarts from the host when b has a leading '/'.
    """
    base = a + "/"  # urljoin will drop extra trailing slashes.
    tail = "/".join(seg for seg in b.split("/") if seg != "")  # strip leading slashes
    return urljoin(base, tail)
def prepare_nexus_account(
    ebics_url,
    ebics_host_id,
    ebics_partner_id,
    ebics_user_id,
    bank_connection_name,
    bank_account_name_sandbox,
    bank_account_name_nexus,
    env
):
    """Create and connect an EBICS bank connection on Nexus, import the
    Sandbox bank account under a Nexus-local name, and schedule the
    background submit/fetch tasks.  'env' must carry the Nexus CLI
    credentials (see get_nexus_cli_env)."""
    # make connection
    Command(
        [
            "libeufin-cli", "connections",
            "new-ebics-connection",
            "--ebics-url", ebics_url,
            "--host-id", ebics_host_id,
            "--partner-id", ebics_partner_id,
            "--ebics-user-id", ebics_user_id,
            bank_connection_name
        ],
        env
    ).run()
    # connect
    Command(
        [
            "libeufin-cli", "connections",
            "connect", bank_connection_name
        ],
        env
    ).run()
    # Import bank account
    Command(
        [
            "libeufin-cli", "connections",
            "download-bank-accounts",
            bank_connection_name
        ],
        env
    ).run()
    Command(
        [
            "libeufin-cli", "connections",
            "import-bank-account",
            "--offered-account-id",
            bank_account_name_sandbox,
            "--nexus-bank-account-id",
            bank_account_name_nexus,
            bank_connection_name
        ],
        env
    ).run()
    # Set background tasks.
    Command(
        [
            "libeufin-cli", "accounts",
            "task-schedule", bank_account_name_nexus,
            "--task-type", "submit",
            "--task-name", "submit-payments-each-second",
            "--task-cronspec", "* * *"
        ],
        env
    ).run()
    Command(
        [
            "libeufin-cli", "accounts",
            "task-schedule", bank_account_name_nexus,
            "--task-type", "fetch",
            "--task-name", "fetch-reports-each-second",
            "--task-cronspec", "* * *",
            "--task-param-level", "report",
            "--task-param-range-type", "latest"
        ],
        env
    ).run()
def prepare_sandbox_account(
    currency,
    sandbox_url,
    ebics_host_id,
    ebics_partner_id,
    ebics_user_id,
    person_name,
    bank_account_name,
    bank_account_iban,
    env
):
    """Create an EBICS subscriber and its bank account on the Sandbox.
    'env' must carry the Sandbox CLI credentials (see get_sandbox_cli_env)."""
    Command(
        [
            "libeufin-cli", "sandbox",
            "--sandbox-url", sandbox_url,
            "ebicssubscriber", "create",
            "--host-id", ebics_host_id,
            "--partner-id", ebics_partner_id,
            "--user-id", ebics_user_id
        ],
        env
    ).run()
    Command(
        [
            "libeufin-cli", "sandbox",
            "--sandbox-url", sandbox_url,
            "ebicsbankaccount", "create",
            "--iban", bank_account_iban,
            "--bic", "ABCDEFGH",
            "--person-name", person_name,
            "--account-name", bank_account_name,
            "--ebics-user-id", ebics_user_id,
            "--ebics-host-id", ebics_host_id,
            "--ebics-partner-id", ebics_partner_id,
            "--currency", currency
        ],
        env
    ).run()
# Deployment-wide constants used by the steps below.
CURRENCY = "EUR"
WIRE_METHOD = "sepa"
# Filesystem's paths
CFG_OUTDIR = TALER_ROOT_DIR / "config"
UNIX_SOCKETS_DIR = TALER_ROOT_DIR / "sockets"
TALER_RUNTIME_DIR = TALER_ROOT_DIR / "runtime"
TALER_DATA_DIR = TALER_ROOT_DIR / "data"
LOG_DIR = TALER_ROOT_DIR / "logs"
# IBANs
IBAN_EXCHANGE = "EX00000000000000000000"
IBAN_CUSTOMER = "WA00000000000000000000"
IBAN_MERCHANT_DEFAULT = "ME00000000000000000000"
IBAN_MERCHANT_DEMOSHOP = "ME00000000000000000001"
# Instances
INSTANCES = {
    "demoshop": IBAN_MERCHANT_DEMOSHOP
}
# Credentials / API keys (demo-grade secrets; not for production use).
SANDBOX_ADMIN_USERNAME = "admin"
SANDBOX_ADMIN_PASSWORD = "secret"
EXCHANGE_NEXUS_USERNAME = "exchange-nexus-user"
EXCHANGE_NEXUS_PASSWORD = "exchange-nexus-password"
FRONTENDS_API_TOKEN = "secret-token:secret"
TALER_MERCHANT_TOKEN = "secret-token:secret"
# Network locations
REV_PROXY_NETLOC = "localhost:8080"
REV_PROXY_PROTO = "http"
# URLs.  Note: REV_PROXY_URL carries no trailing slash; joiners below
# must add their own separator.
REV_PROXY_URL = f"{REV_PROXY_PROTO}://{REV_PROXY_NETLOC}"
SANDBOX_URL = REV_PROXY_URL + "/sandbox"
NEXUS_URL = REV_PROXY_URL + "/nexus"
# EBICS
EBICS_HOST_ID = "ebicsDeployedHost"
EXCHANGE_EBICS_USER_ID = "exchangeEbicsUserId"
EXCHANGE_EBICS_PARTNER_ID = "exchangeEbicsPartnerId"
EBICS_URL = REV_PROXY_URL + "/sandbox/ebicsweb"
# euFin
NEXUS_DB_FILE = "/tmp/nexus.sqlite"
SANDBOX_DB_FILE = "/tmp/sandbox.sqlite"
EXCHANGE_BANK_ACCOUNT_NEXUS = "exchange-imported-account-nexus"
EXCHANGE_BANK_ACCOUNT_SANDBOX = "exchange-account-sandbox"
EXCHANGE_BANK_CONNECTION = "exchange-ebics-connection"
EXCHANGE_FACADE_NAME = "exchange-taler-facade"
class Command:
    """Subprocess wrapper that logs the child's output to
    <log_dir>/<name>.log and aborts the preparation on failure."""
    def __init__(
        self, cmd, env=os.environ, log_dir=LOG_DIR,
        custom_name=None, capture_stdout=False
    ):
        # cmd: argv list; env: child environment (read-only default);
        # custom_name overrides the log file basename (default: argv[0]).
        if len(cmd) == 0:
            fail("Could not find a command to execute")
        self.name = custom_name if custom_name else cmd[0]
        self.cmd = cmd
        self.env = env
        self.capture_stdout = capture_stdout
        self.log_dir = log_dir
    @staticmethod
    def is_serving(check_url, tries=10):
        # Poll check_url (printing a dot per attempt) until any HTTP
        # response with a success status arrives, or 'tries' attempts
        # have failed; returns True/False accordingly.
        for i in range(tries):
            try:
                print_nn(".")
                # Raises if the service is not reachable.
                response = requests.get(
                    check_url,
                    timeout=1
                )
                # The reverse proxy may return 500 if the
                # end service is not ready, therefore this
                # case should be tolerated.
                response.raise_for_status()
            except:
                time.sleep(0.5)
                if i == tries - 1:
                    return False
                continue
            break
        return True
    def run(self):
        # Run synchronously; abort the whole preparation on non-zero exit.
        # With capture_stdout, returns the child's (stripped) stdout.
        self.do()
        return_code = self.handle.wait()
        if return_code != 0:
            fail(f"Command {self.name} failed. Logs in {self.get_log_filename()}")
        self.cleanup()
        if self.capture_stdout:
            return self.handle.communicate()[0].decode("utf-8").rstrip()
    def launch(self):
        # Start in the background; caller stops it via stop().
        self.do()
        return self
    def stop(self):
        self.cleanup()
        self.handle.terminate()
        self.handle.wait()
    def get_log_filename(self):
        return self.log_file.name
    def cleanup(self):
        # Flush and close the log file (safe to call more than once).
        if not self.log_file.closed:
            self.log_file.flush()
            self.log_file.close()
    def do(self):
        # Spawn the child with stdout/stderr appended to the log file
        # (stdout goes to a pipe instead when capture_stdout is set).
        if not self.log_dir.is_dir():
            os.makedirs(self.log_dir)
        try:
            log_filename = self.log_dir / f"{self.name}.log"
            self.log_file = open(log_filename, "a+")
        except Exception as error:
            fail(f"Could not open log file: {log_filename}: {error}")
        try:
            self.handle = Popen(
                self.cmd, # list
                stdin=DEVNULL,
                stdout=self.log_file if not self.capture_stdout else PIPE,
                stderr=self.log_file,
                env=self.env
            )
        except Exception as error:
            fail(f"Could not execute: {' '.join(self.cmd)} (a.k.a. {self.name}): {error}")
        # Make sure the child does not outlive the deployer.
        atexit.register(self.stop)
class ConfigFile:
    """Minimal INI-style configuration writer preserving insertion order."""
    def __init__(self, filename):
        self.sections = OrderedDict()
        self.filename = filename
    def destroy(self):
        # Drop all accumulated sections.
        del self.sections
        self.sections = OrderedDict()
    def cfg_put(self, section_name, key, value):
        # Create the section on first use, then set key = value.
        s = self.sections[section_name] = self.sections.get(section_name, OrderedDict())
        s[key] = value
    def cfg_write(self, outdir):
        """Write the config to <outdir>/<filename>, creating outdir if
        needed; when outdir is falsy, write to stdout instead.

        FIX: the stdout branch called open(sys.stdout), which raises
        TypeError (open() expects a path); write to sys.stdout directly
        and do not close it.
        """
        if outdir:
            if not os.path.isdir(outdir):
                os.makedirs(outdir)
            fstream = open(os.path.join(outdir, self.filename), "w")
            opened = True
        else:
            fstream = sys.stdout
            opened = False
        for section_name, section in self.sections.items():
            fstream.write("[" + section_name + "]" + "\n")
            for key, value in section.items():
                fstream.write(key + " = " + value + "\n")
            fstream.write("\n")
        if opened:
            fstream.close()
def config_specify_master_pub(
    filename,
    currency,
    exchange_master_pub
):
    # Patch an already-written taler.conf with the exchange's master
    # public key (only known after taler-exchange-offline has run).
    Command([
        "taler-config", "-c", filename,
        "-s", "exchange", "-o", "master_public_key",
        "-V", exchange_master_pub
    ]).run()
    Command([
        "taler-config", "-c", filename,
        "-s", f"merchant-exchange-{currency}",
        "-o", "master_key",
        "-V", exchange_master_pub
    ]).run()
# When called, there is no exchange master pub yet.
# taler-exchange-offline will produce the key _after_
# taler.conf is generated. Only after that, we'll
# specify the master key where it is missing; namely
# in the merchant backend and exchange HTTP daemon sections.
def config_main(
    filename,
    outdir,
    unix_sockets_dir,
    currency,
    rev_proxy_url,
    wire_method,
    exchange_wire_address,
    merchant_wire_address,
    exchange_wire_gateway_username,
    exchange_wire_gateway_password,
    frontend_api_key,
    taler_runtime_dir
):
    """Generate the main taler.conf: paths, bank/frontends, merchant,
    exchange, auditor, accounts, and the coin denominations.  Writes
    the file into 'outdir' and returns the ConfigFile object."""
    # Emit one coin denomination section with the given value and fees.
    def coin(
        obj,
        currency,
        name,
        value,
        d_withdraw="3 years",
        d_spend="5 years",
        d_legal="10 years",
        f_withdraw="0.01",
        f_deposit="0.01",
        f_refresh="0.01",
        f_refund="0.01",
        rsa_keysize="2048",
    ):
        sec = "coin_" + currency + "_" + name
        obj.cfg_put(sec, "value", currency + ":" + value)
        obj.cfg_put(sec, "duration_withdraw", d_withdraw)
        obj.cfg_put(sec, "duration_spend", d_spend)
        obj.cfg_put(sec, "duration_legal", d_legal)
        obj.cfg_put(sec, "fee_withdraw", currency + ":" + f_withdraw)
        obj.cfg_put(sec, "fee_refresh", currency + ":" + f_refresh)
        obj.cfg_put(sec, "fee_refund", currency + ":" + f_refund)
        obj.cfg_put(sec, "fee_deposit", currency + ":" + f_deposit)
        obj.cfg_put(sec, "rsa_keysize", rsa_keysize)
    obj = ConfigFile("taler.conf")
    obj.cfg_put("paths", "TALER_DATA_HOME", str(TALER_DATA_DIR))
    if not taler_runtime_dir.is_dir():
        os.makedirs(taler_runtime_dir)
    obj.cfg_put("paths", "TALER_RUNTIME_DIR", str(taler_runtime_dir))
    obj.cfg_put("taler", "CURRENCY", currency)
    obj.cfg_put("taler", "CURRENCY_ROUND_UNIT", f"{currency}:0.01")
    obj.cfg_put("bank", "serve", "uwsgi")
    obj.cfg_put("bank", "uwsgi_serve", "unix")
    obj.cfg_put("bank", "uwsgi_unixpath", str(unix_sockets_dir / "bank.sock"))
    obj.cfg_put("bank", "uwsgi_unixpath_mode", "660")
    # NOTE(review): "database" is set twice in [bank]; the later
    # "postgres:///taler" value overwrites this one — confirm intent.
    obj.cfg_put("bank", "database", "taler")
    obj.cfg_put("bank", "max_debt", "%s:500.0" % currency)
    obj.cfg_put("bank", "max_debt_bank", "%s:1000000000.0" % currency)
    obj.cfg_put("bank", "allow_registrations", "YES")
    obj.cfg_put("bank", "base_url", rev_proxy_url + "/bank/")
    obj.cfg_put("bank", "database", "postgres:///taler")
    obj.cfg_put("bank", "suggested_exchange", rev_proxy_url + "/exchange/")
    obj.cfg_put("donations", "serve", "uwsgi")
    obj.cfg_put("donations", "uwsgi_serve", "unix")
    # NOTE(review): odd casing "DONATIONS.Sock" — the sibling sections
    # all use lowercase socket names; verify consumers expect this.
    obj.cfg_put("donations", "uwsgi_unixpath", str(unix_sockets_dir / "DONATIONS.Sock"))
    obj.cfg_put("donations", "uwsgi_unixpath_mode", "660")
    obj.cfg_put("landing", "serve", "uwsgi")
    obj.cfg_put("landing", "uwsgi_serve", "unix")
    obj.cfg_put("landing", "uwsgi_unixpath", str(unix_sockets_dir / "landing.sock"))
    obj.cfg_put("landing", "uwsgi_unixpath_mode", "660")
    obj.cfg_put("blog", "serve", "uwsgi")
    obj.cfg_put("blog", "uwsgi_serve", "unix")
    obj.cfg_put("blog", "uwsgi_unixpath", str(unix_sockets_dir / "blog.sock"))
    obj.cfg_put("blog", "uwsgi_unixpath_mode", "660")
    obj.cfg_put("survey", "serve", "uwsgi")
    obj.cfg_put("survey", "uwsgi_serve", "unix")
    obj.cfg_put("survey", "uwsgi_unixpath", str(unix_sockets_dir / "survey.sock"))
    obj.cfg_put("survey", "uwsgi_unixpath_mode", "660")
    obj.cfg_put("survey", "bank_password", "x")
    obj.cfg_put("merchant", "serve", "unix")
    obj.cfg_put("merchant", "unixpath", str(unix_sockets_dir / "merchant-backend.sock"))
    obj.cfg_put("merchant", "wire_transfer_delay", "0 s")
    obj.cfg_put("merchant", "default_max_wire_fee", currency + ":" + "0.01")
    obj.cfg_put("merchant", "default_max_deposit_fee", currency + ":" + "0.05")
    obj.cfg_put("merchantdb-postgres", "config", "postgres:///taler")
    obj.cfg_put("frontends", "backend", rev_proxy_url + "/merchant-backend/")
    obj.cfg_put(
        "merchant-exchange-{}".format(currency),
        "exchange_base_url", rev_proxy_url + "/exchange/",
    )
    obj.cfg_put(
        "merchant-exchange-{}".format(currency),
        "currency", currency
    )
    obj.cfg_put("auditor", "serve", "unix")
    # FIXME: both below used?
    obj.cfg_put("auditor", "base_url", rev_proxy_url + "/auditor")
    obj.cfg_put("auditor", "auditor_url", rev_proxy_url + "/auditor")
    obj.cfg_put("auditor", "unixpath", str(unix_sockets_dir / "auditor.sock"))
    obj.cfg_put("auditor", "tiny_amount", currency + ":0.01")
    obj.cfg_put(
        "taler-exchange-secmod-eddsa",
        "unixpath",
        str(unix_sockets_dir / "exchange-secmod-eddsa.sock")
    )
    obj.cfg_put(
        "taler-exchange-secmod-rsa",
        "unixpath",
        str(unix_sockets_dir / "exchange-secmod-rsa.sock")
    )
    obj.cfg_put("taler-exchange-secmod-rsa", "sm_priv_key",
        "${TALER_DATA_HOME}/taler-exchange-secmod-rsa/secmod-private-key"
    )
    obj.cfg_put("exchange", "base_url", rev_proxy_url + "/exchange/")
    obj.cfg_put("exchange", "serve", "unix")
    obj.cfg_put("exchange", "unixpath", str(unix_sockets_dir / "exchange.sock"))
    obj.cfg_put("exchange", "terms_etag", "0")
    obj.cfg_put("exchange", "terms_dir", "$HOME/.local/share/taler-exchange/tos")
    obj.cfg_put("exchange", "privacy_etag", "0")
    obj.cfg_put("exchange", "privacy_dir", "$HOME/.local/share/taler-exchange/pp")
    obj.cfg_put("exchangedb-postgres", "db_conn_str", "postgres:///taler")
    obj.cfg_put("exchangedb-postgres", "config", "postgres:///taler")
    obj.cfg_put("auditordb-postgres", "db_conn_str", "postgres:///taler")
    obj.cfg_put("auditordb-postgres", "config", "postgres:///taler")
    obj.cfg_put(
        "exchange-account-1",
        "payto_uri",
        f"payto://{wire_method}/{rev_proxy_url + '/bank'}/{exchange_wire_address}"
    )
    obj.cfg_put("exchange-account-1", "enable_debit", "yes")
    obj.cfg_put("exchange-account-1", "enable_credit", "yes")
    obj.cfg_put("merchant-account-merchant", "payto_uri",
        f"payto://{wire_method}/{rev_proxy_url + '/bank'}/{merchant_wire_address}"
    )
    obj.cfg_put("merchant-account-merchant",
        "wire_response",
        "${TALER_DATA_HOME}/merchant/wire/merchant.json",
    )
    obj.cfg_put("merchant-account-merchant", "wire_file_mode", "770")
    obj.cfg_put("frontends", "backend_apikey", f"{frontend_api_key}")
    # Denomination set: one 0.10 coin plus 1/2/5/10/1000 units.
    coin(obj, currency, "ct_10", "0.10")
    coin(obj, currency, "1", "1")
    coin(obj, currency, "2", "2")
    coin(obj, currency, "5", "5")
    coin(obj, currency, "10", "10")
    coin(obj, currency, "1000", "1000")
    obj.cfg_write(outdir)
    return obj
def config_sync(filename, outdir, unix_sockets_dir, currency, api_key, rev_proxy_url):
    """Generate the sync (backup) service configuration into 'outdir'.

    FIX: 'rev_proxy_url' carries no trailing slash (see REV_PROXY_URL and
    the "/bank/" joins in config_main), so the payment backend URL needs
    an explicit separator; previously it produced
    "http://localhost:8080merchant-backend/...".
    """
    obj = ConfigFile(filename)
    obj.cfg_put("taler", "currency", currency)
    obj.cfg_put("sync", "serve", "unix")
    obj.cfg_put("sync", "unixpath", str(unix_sockets_dir / "sync.sock"))
    obj.cfg_put("sync", "apikey", f"Bearer secret-token:{api_key}")
    obj.cfg_put("sync", "annual_fee", f"{currency}:0.1")
    obj.cfg_put("sync", "fulfillment_url", "taler://fulfillment-success/")
    obj.cfg_put("sync", "payment_backend_url", rev_proxy_url + "/merchant-backend/instances/Taler/")
    obj.cfg_put("syncdb-postgres", "config", f"postgres:///taler")
    obj.cfg_write(outdir)
def config_anastasis(filename, outdir, unix_sockets_dir, currency, rev_proxy_url, api_key):
    """Generate the configuration file for the Anastasis (key escrow) service.

    Args:
        filename: name of the config file to create (e.g. "anastasis.conf").
        outdir: directory where the rendered config gets written.
        unix_sockets_dir: Path to the directory holding the service Unix sockets.
        currency: the Taler currency (e.g. "EUR").
        rev_proxy_url: base URL of the reverse proxy; callers pass it
            without a trailing slash.
        api_key: secret token used to authenticate at the merchant backend.
    """
    obj = ConfigFile(filename)
    obj.cfg_put("taler", "currency", currency)
    obj.cfg_put("anastasis", "serve", "unix")
    # Plain literal (no placeholders): f-prefix removed.
    obj.cfg_put("anastasis", "business_name", "GNU Taler Demo Anastasis Provider")
    obj.cfg_put("anastasis", "unixpath", str(unix_sockets_dir / "anastasis.sock"))
    obj.cfg_put("anastasis", "annual_fee", f"{currency}:0")
    obj.cfg_put("anastasis", "question_cost", f"{currency}:0")
    obj.cfg_put("anastasis", "insurance", f"{currency}:0")
    obj.cfg_put("anastasis", "truth_upload_fee", f"{currency}:0")
    obj.cfg_put("anastasis", "fulfillment_url", "taler://fulfillment-success/")
    obj.cfg_put("anastasis", "server_salt", "kreb3ia9dmj43gfa")
    obj.cfg_put("stasis-postgres", "config", "postgres:///taler")
    # BUGFIX: plain concatenation used to yield "http://hostmerchant-backend/..."
    # because rev_proxy_url carries no trailing slash.
    obj.cfg_put(
        "anastasis-merchant-backend",
        "payment_backend_url",
        rev_proxy_url.rstrip("/") + "/merchant-backend/instances/anastasis/"
    )
    obj.cfg_put("anastasis-merchant-backend", "api_key", f"Bearer secret-token:{api_key}")
    obj.cfg_put("authorization-question", "cost", f"{currency}:0")
    obj.cfg_put("authorization-question", "enabled", "yes")
    obj.cfg_write(outdir)
print_nn("Ensure no service is running...")
if Command.is_serving(REV_PROXY_URL + "/", tries=3):
fail("Reverse proxy is unexpectedly running!")
if UNIX_SOCKETS_DIR.is_dir():
for left_socket in os.listdir(UNIX_SOCKETS_DIR):
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
socket_file = str(UNIX_SOCKETS_DIR / left_socket)
if s.connect_ex(socket_file.encode("utf-8")) == 0:
fail(f"A service is unexpectedly running and bound to {socket_file}!")
print(" OK")
print_nn("Remove stale data and config...")
if TALER_DATA_DIR.exists():
shutil.rmtree(TALER_DATA_DIR)
if TALER_RUNTIME_DIR.exists():
shutil.rmtree(TALER_RUNTIME_DIR)
if CFG_OUTDIR.exists():
shutil.rmtree(CFG_OUTDIR)
print(" OK")
print_nn("Generate preliminary taler.conf...")
mc = config_main(
"taler.conf",
outdir=CFG_OUTDIR,
unix_sockets_dir=UNIX_SOCKETS_DIR,
currency=CURRENCY,
rev_proxy_url=REV_PROXY_URL,
wire_method=WIRE_METHOD,
exchange_wire_address=IBAN_EXCHANGE,
merchant_wire_address=IBAN_MERCHANT_DEFAULT,
exchange_wire_gateway_username=EXCHANGE_NEXUS_USERNAME,
exchange_wire_gateway_password=EXCHANGE_NEXUS_PASSWORD,
frontend_api_key=FRONTENDS_API_TOKEN,
taler_runtime_dir=TALER_RUNTIME_DIR
)
print(" OK")
print_nn("Generate exchange's master key...")
EXCHANGE_MASTER_PUB = Command(
[
"taler-exchange-offline",
"-c", CFG_OUTDIR / "taler.conf",
"setup"
],
capture_stdout=True
).run()
print(" OK")
print_nn("Specify exchange master pub in taler.conf...")
config_specify_master_pub(
CFG_OUTDIR / "taler.conf",
CURRENCY,
EXCHANGE_MASTER_PUB
)
print(" OK")
print_nn("Generating sync.conf...")
config_sync(
"sync.conf",
outdir=CFG_OUTDIR,
unix_sockets_dir=UNIX_SOCKETS_DIR,
currency=CURRENCY,
api_key=FRONTENDS_API_TOKEN,
rev_proxy_url=REV_PROXY_URL
)
print(" OK")
print_nn("Generating anastasis.conf...")
config_anastasis(
"anastasis.conf",
outdir=CFG_OUTDIR,
unix_sockets_dir=UNIX_SOCKETS_DIR,
currency=CURRENCY,
rev_proxy_url=REV_PROXY_URL,
api_key=FRONTENDS_API_TOKEN
)
print(" OK")
print_nn("Reset and init exchange DB..")
Command([
"taler-exchange-dbinit",
"-c", CFG_OUTDIR / "taler.conf",
"--reset"]
).run()
print(" OK")
print_nn("Launching the reverse proxy...")
rev_proxy = TalerReverseProxy(
LOG_DIR,
UNIX_SOCKETS_DIR,
REV_PROXY_PROTO,
REV_PROXY_NETLOC
)
rev_proxy.start()
if not Command.is_serving(REV_PROXY_URL + "/"):
fail(f"Reverse proxy did not start correctly. \
Logs: {rev_proxy.get_log_filename()}"
)
# Do check.
print(" OK")
print_nn("Launching the exchange RSA helper...")
exchange_rsa_handle = Command([
"taler-exchange-secmod-rsa",
"-c", CFG_OUTDIR / "taler.conf"
]).launch()
print(" OK")
print_nn("Launching the exchange EDDSA helper...")
exchange_eddsa_handle = Command([
"taler-exchange-secmod-eddsa",
"-c", CFG_OUTDIR / "taler.conf"
]).launch()
print(" OK")
print_nn("Launching the exchange...")
exchange_handle = Command([
"taler-exchange-httpd",
"-c", CFG_OUTDIR / "taler.conf"
]).launch()
if not Command.is_serving(REV_PROXY_URL + "/exchange/"):
fail(
f"Exchange did not start correctly. Logs: {exchange_handle.get_log_filename()}",
rev_proxy
)
print(" OK")
print_nn("exchange-offline: signing key material...")
Command([
"taler-exchange-offline",
"-c", CFG_OUTDIR / "taler.conf",
"download", "sign", "upload"
]).run()
print(" OK")
PAYTO_URI=mc.sections["exchange-account-1"]["payto_uri"]
print_nn(f"exchange-offline: enabling {PAYTO_URI}...")
Command([
"taler-exchange-offline",
"-c", CFG_OUTDIR / "taler.conf",
"enable-account", PAYTO_URI, "upload"]
).run()
print(" OK")
# Set up wire fees for next 5 years
NOW = datetime.now()
YEAR = NOW.year
print_nn("Setting wire fees for the next 5 years...")
for year in range(YEAR, YEAR+5):
Command(
[
"taler-exchange-offline",
"-c", CFG_OUTDIR / "taler.conf",
"wire-fee",
str(year),
WIRE_METHOD,
CURRENCY + ":0.01",
CURRENCY + ":0.01",
"upload"
],
custom_name="set-wire-fee"
).run()
print(" OK")
print_nn("Stopping exchange HTTP daemon and crypto helpers...")
exchange_rsa_handle.stop()
exchange_eddsa_handle.stop()
exchange_handle.stop()
print(" OK")
print_nn("Add this exchange to the auditor...")
Command(
[
"taler-auditor-exchange",
"-c", CFG_OUTDIR / "taler.conf",
"-m", EXCHANGE_MASTER_PUB,
"-u", REV_PROXY_URL + "/exchange/"
],
).run()
print(" OK")
## Step 4: Set up euFin
print_nn("Resetting euFin databases...")
# Best-effort deletion: a missing DB file (ENOENT) is fine on a first run,
# anything else is a real error and must propagate.
try:
    remove(SANDBOX_DB_FILE)
    remove(NEXUS_DB_FILE)
except OSError as error:
    if error.errno != errno.ENOENT:
        raise error
print(" OK")
print_nn("Launching Sandbox...")
sandbox_handle = Command(
    [
        "libeufin-sandbox", "serve",
        "--with-unix-socket", UNIX_SOCKETS_DIR / "sandbox.sock",
    ],
    env=get_sandbox_server_env(
        SANDBOX_DB_FILE,
        SANDBOX_URL,
        SANDBOX_ADMIN_PASSWORD
    )
).launch()
if not Command.is_serving(SANDBOX_URL):
    fail(
        f"Sandbox did not start correctly. Logs: {sandbox_handle.get_log_filename()}",
        rev_proxy
    )
print(" OK")
print_nn("Make Sandbox EBICS host...")
Command(
    [
        "libeufin-cli", "sandbox",
        "--sandbox-url", SANDBOX_URL,
        "ebicshost", "create",
        "--host-id", EBICS_HOST_ID,
    ],
    env=get_sandbox_cli_env(
        SANDBOX_ADMIN_USERNAME,
        SANDBOX_ADMIN_PASSWORD,
    ),
    custom_name="sandbox-create-ebicshost",
).run()
print(" OK")
# The exchange gets a real EBICS subscriber; Nexus will connect as it later.
print_nn("Create Exchange account at Sandbox...")
prepare_sandbox_account(
    currency=CURRENCY,
    sandbox_url=SANDBOX_URL,
    ebics_host_id=EBICS_HOST_ID,
    ebics_partner_id=EXCHANGE_EBICS_PARTNER_ID,
    ebics_user_id=EXCHANGE_EBICS_USER_ID,
    person_name="Exchange Owner",
    bank_account_name=EXCHANGE_BANK_ACCOUNT_SANDBOX,
    bank_account_iban=IBAN_EXCHANGE,
    env=get_sandbox_cli_env(
        SANDBOX_ADMIN_USERNAME,
        SANDBOX_ADMIN_PASSWORD,
    )
)
print(" OK")
# Give each instance a Sandbox account (note: 'default')
# won't have one, as it should typically only manage other
# instances.
for instance_id, iban in INSTANCES.items():
    print_nn(f"Create account of {instance_id} at Sandbox...")
    # Merchant instances never speak EBICS themselves, hence the
    # placeholder "unused..." partner/user IDs.
    prepare_sandbox_account(
        currency=CURRENCY,
        sandbox_url=SANDBOX_URL,
        ebics_host_id=EBICS_HOST_ID,
        ebics_partner_id="unusedMerchantEbicsPartnerId",
        ebics_user_id=f"unused{instance_id}EbicsUserId",
        person_name=f"Shop Owner of {instance_id}",
        bank_account_name=f"sandbox-account-{instance_id}",
        bank_account_iban=iban,
        env=get_sandbox_cli_env(
            SANDBOX_ADMIN_USERNAME,
            SANDBOX_ADMIN_PASSWORD,
        )
    )
    print(" OK")
print_nn("Create Customer account at Sandbox...")
prepare_sandbox_account(
    currency=CURRENCY,
    sandbox_url=SANDBOX_URL,
    ebics_host_id=EBICS_HOST_ID,
    ebics_partner_id="unusedCustomerEbicsPartnerId",
    ebics_user_id="unusedCustomerEbicsUserId",
    person_name="Customer Person",
    bank_account_name="sandbox-account-customer",
    bank_account_iban=IBAN_CUSTOMER,
    env=get_sandbox_cli_env(
        SANDBOX_ADMIN_USERNAME,
        SANDBOX_ADMIN_PASSWORD,
    )
)
print(" OK")
print_nn("Make Nexus superuser ...")
Command(
[
"libeufin-nexus", "superuser",
EXCHANGE_NEXUS_USERNAME,
"--password", EXCHANGE_NEXUS_PASSWORD
],
env=get_nexus_server_env(
NEXUS_DB_FILE,
NEXUS_URL
),
custom_name="nexus-superuser",
).run()
print(" OK")
print_nn("Launching Nexus...")
nexus_handle = Command(
[
"libeufin-nexus", "serve",
"--with-unix-socket", UNIX_SOCKETS_DIR / "nexus.sock"
],
env=get_nexus_server_env(NEXUS_DB_FILE, NEXUS_URL)
).launch()
if not Command.is_serving(NEXUS_URL):
fail(
f"Nexus did not start correctly. Logs: {nexus_handle.get_log_filename()}",
rev_proxy
)
print(" OK")
print_nn("Create Exchange account at Nexus...")
prepare_nexus_account(
ebics_url=EBICS_URL,
ebics_host_id=EBICS_HOST_ID,
ebics_partner_id=EXCHANGE_EBICS_PARTNER_ID,
ebics_user_id=EXCHANGE_EBICS_USER_ID,
bank_connection_name=EXCHANGE_BANK_CONNECTION,
bank_account_name_sandbox=EXCHANGE_BANK_ACCOUNT_SANDBOX,
bank_account_name_nexus=EXCHANGE_BANK_ACCOUNT_NEXUS,
env=get_nexus_cli_env(
EXCHANGE_NEXUS_USERNAME,
EXCHANGE_NEXUS_PASSWORD,
NEXUS_URL
)
)
print(" OK")
print_nn("Create Taler facade ...")
Command(
[
"libeufin-cli", "facades",
"new-taler-wire-gateway-facade",
"--currency", CURRENCY,
"--facade-name", EXCHANGE_FACADE_NAME,
EXCHANGE_BANK_CONNECTION,
EXCHANGE_BANK_ACCOUNT_NEXUS
],
env=get_nexus_cli_env(
EXCHANGE_NEXUS_USERNAME,
EXCHANGE_NEXUS_PASSWORD,
NEXUS_URL
),
custom_name="create-taler-facade",
).run()
print(" OK")
try:
response = requests.get(
NEXUS_URL + "/facades",
auth=requests.auth.HTTPBasicAuth(
EXCHANGE_NEXUS_USERNAME,
EXCHANGE_NEXUS_PASSWORD
)
)
response.raise_for_status()
except Exception as error:
fail(error, rev_proxy)
FACADE_URL=response.json().get("facades")[0].get("baseUrl")
print_nn("Terminating Nexus...")
nexus_handle.stop()
print(" OK")
print_nn("Terminating Sandbox...")
sandbox_handle.stop()
print(" OK")
# Point the exchange to the facade.
# BUGFIX: these three invocations previously passed "-o" "OPTION_NAME"
# (adjacent string literals, a missing comma), which Python concatenated
# into the single argv entry "-oOPTION_NAME".  That only worked because
# getopt-style parsers accept an argument attached to a short option;
# pass the option and its value as separate argv entries to be explicit.
Command(
    [
        "taler-config", "-s",
        "-c", CFG_OUTDIR / "taler.conf",
        "exchange-account-credentials-1",
        "-o", "wire_gateway_url",
        "-V", FACADE_URL
    ],
    custom_name="specify-facade-url",
).run()
Command(
    [
        "taler-config", "-s",
        "-c", CFG_OUTDIR / "taler.conf",
        "exchange-account-credentials-1",
        "-o", "username",
        "-V", EXCHANGE_NEXUS_USERNAME
    ],
    custom_name="specify-username-for-facade",
).run()
Command(
    [
        "taler-config", "-s",
        "-c", CFG_OUTDIR / "taler.conf",
        "exchange-account-credentials-1",
        "-o", "password",
        "-V", EXCHANGE_NEXUS_PASSWORD
    ],
    custom_name="specify-password-for-facade",
).run()
## Step 6: Set up merchant
print_nn("Reset and init merchant database...")
# --reset drops any schema left over from a previous deployment.
Command([
    "taler-merchant-dbinit",
    "-c", CFG_OUTDIR / "taler.conf",
    "--reset"
]).run()
print(" OK")
def ensure_instance(
    currency, instance_id,
    backend_url, bank_hostname,
    wire_method, merchant_wire_address,
    auth_token
):
    """Create the merchant instance `instance_id` at `backend_url`, or
    patch it in place when the backend already knows it.  Aborts the
    whole bootstrap via fail() on any non-2xx answer."""
    headers = {"Authorization": f"Bearer {auth_token}"}
    payload = {
        "id": instance_id,
        "name": f"Name of '{instance_id}'",
        "payto_uris": [f"payto://{wire_method}/{bank_hostname}/{merchant_wire_address}"],
        "address": {},
        "jurisdiction": {},
        "default_max_wire_fee": f"{currency}:1",
        "default_wire_fee_amortization": 3,
        "default_max_deposit_fee": f"{currency}:1",
        "default_wire_transfer_delay": {"d_ms": "forever"},
        "default_pay_delay": {"d_ms": "forever"},
        "auth": {"method": "token", "token": auth_token},
    }
    # Probe whether the instance already exists.
    probe = requests.get(
        urljoin_nodrop(backend_url, f"management/instances/{instance_id}"),
        headers=headers
    )
    if probe.status_code == 200:
        # It does: patch the existing instance.
        print(f"Patching instance '{instance_id}'")
        send = requests.patch
        target = f"management/instances/{instance_id}"
    else:
        # It does not: create it fresh.
        send = requests.post
        target = "management/instances"
    outcome = send(
        urljoin_nodrop(backend_url, target),
        json=payload,
        headers=headers
    )
    if not 200 <= outcome.status_code < 300:
        print(f"Could not create (or patch) instance '{instance_id}'")
        print(f"Backend responds: {outcome.status_code}/{outcome.text}")
        fail(proxy_proc=rev_proxy)
print_nn(f"Start merchant (with TALER_MERCHANT_TOKEN into the env)...")
auth_env = os.environ.copy()
auth_env["TALER_MERCHANT_TOKEN"] = TALER_MERCHANT_TOKEN
merchant_handle = Command(
["taler-merchant-httpd", "-c", CFG_OUTDIR / "taler.conf"],
env=auth_env
).launch()
if not Command.is_serving(REV_PROXY_URL + "/merchant-backend/config"):
fail(
f"Merchant backend did not start correctly. Logs: {merchant_handle.get_log_filename()}",
rev_proxy
)
print(" OK")
ensure_instance(
currency=CURRENCY,
instance_id="default",
backend_url = REV_PROXY_URL + "/merchant-backend",
bank_hostname = REV_PROXY_NETLOC + "/sandbox",
wire_method = "sepa",
merchant_wire_address = IBAN_MERCHANT_DEFAULT,
auth_token=FRONTENDS_API_TOKEN
)
print_nn("Stopping the merchant...")
merchant_handle.stop()
print(" OK")
print_nn("Restarting the merchant WITHOUT the auth-token in the env...")
merchant_handle.launch()
if not Command.is_serving(REV_PROXY_URL + "/merchant-backend/config"):
# check_running logs errors already.
fail(
f"Merchant backend did not re start correctly. Logs: {merchant_handle.get_log_filename()}",
rev_proxy
)
print(" OK")
for instance_id, iban in INSTANCES.items():
print_nn(f"Creating the {instance_id} instance...")
ensure_instance(
currency=CURRENCY,
instance_id=instance_id,
backend_url = REV_PROXY_URL + "/merchant-backend",
bank_hostname = REV_PROXY_NETLOC + "/sandbox",
wire_method = "sepa",
merchant_wire_address = iban,
auth_token=FRONTENDS_API_TOKEN
)
print(" OK")
print_nn("Stopping the merchant backend...")
merchant_handle.stop()
print(" OK")
print_nn("Stopping the reverse proxy...")
rev_proxy.stop()
print(" OK")
if __name__ == "__main__":
cli()