
[taler-deployment] branch master updated: remove legacy files


From: gnunet
Subject: [taler-deployment] branch master updated: remove legacy files
Date: Mon, 23 Jan 2023 14:14:24 +0100

This is an automated email from the git hooks/post-receive script.

dold pushed a commit to branch master
in repository deployment.

The following commit(s) were added to refs/heads/master by this push:
     new 5482b6a  remove legacy files
5482b6a is described below

commit 5482b6aee3d2c4a9426db96c63fd4764b83dc1ee
Author: Florian Dold <florian@dold.me>
AuthorDate: Mon Jan 23 14:14:20 2023 +0100

    remove legacy files
---
 mypy/mypy.ini                        |    3 -
 selenium/launch_selenium_test        |   23 -
 typescript/README                    |    9 -
 typescript/config.ts                 |    2 -
 typescript/container/Dockerfile      |   51 -
 typescript/container/prepare.service |   10 -
 typescript/container/prepare.sh      |    7 -
 typescript/container/taler-config.sh |    7 -
 typescript/container/taler-local     | 1889 ----------------------------------
 9 files changed, 2001 deletions(-)

diff --git a/mypy/mypy.ini b/mypy/mypy.ini
deleted file mode 100644
index 924c128..0000000
--- a/mypy/mypy.ini
+++ /dev/null
@@ -1,3 +0,0 @@
-[mypy]
-ignore_missing_imports = True
-python_version = 3.5
diff --git a/selenium/launch_selenium_test b/selenium/launch_selenium_test
deleted file mode 100755
index 12e35ca..0000000
--- a/selenium/launch_selenium_test
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/bash
-
-ulimit -v 6000000
-
-set -eu
-
-# clean /tmp
-rm -fr /tmp/.org.chromium*
-
-cd $HOME/wallet-webex
-git clean -fdx
-
-git fetch
-# reset to updated upstream branch, but only if we're tracking a branch
-branch=$(git rev-parse --abbrev-ref --symbolic-full-name @{u} 2>/dev/null || echo HEAD)
-git reset --hard "$branch"
-
-git submodule update --remote
-
-./configure && make
-
-# call python3 selenium script
-python3 $HOME/wallet-webex/selenium/withdraw_buy.py --ext-unpacked=$HOME/wallet-webex
diff --git a/typescript/README b/typescript/README
deleted file mode 100644
index 996ec7a..0000000
--- a/typescript/README
+++ /dev/null
@@ -1,9 +0,0 @@
-Building and running the image.
-
-'cd' into 'container/' and run:
-$ podman build -t $tag .
-
-Run it, passing a configuration expression:
-$ podman run -it [-v /host/path/to/config-file:/config.ts] $tag
-
-Please, kill running container with 'podman kill'.
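
For context, the workflow described by the removed README amounts to the following shell session; the image tag and the host-side config path are the README's own placeholders rather than concrete values from the repository:

    $ cd typescript/container
    $ podman build -t $tag .
    $ podman run -it -v /host/path/to/config-file:/config.ts $tag
    $ podman kill <container>    # from another terminal, once finished
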
diff --git a/typescript/config.ts b/typescript/config.ts
deleted file mode 100644
index 6b5719a..0000000
--- a/typescript/config.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-var h = require("@gnu-taler/taler-config-lib");
-h()
diff --git a/typescript/container/Dockerfile b/typescript/container/Dockerfile
deleted file mode 100644
index 31a71ba..0000000
--- a/typescript/container/Dockerfile
+++ /dev/null
@@ -1,51 +0,0 @@
-FROM debian:testing
-RUN apt-get update
-RUN apt-get install -y  autoconf autopoint libtool texinfo \
-  libgcrypt-dev libidn11-dev zlib1g-dev libunistring-dev \
-  libjansson-dev python3-pip git recutils libsqlite3-dev \
-  libpq-dev postgresql libcurl4-openssl-dev libsodium-dev git \
-  libqrencode-dev zip jq nodejs npm openjdk-17-jre nginx procps curl
-RUN pip3 install qrcode click requests jinja2 poetry babel
-
-# NOTE: taler-local is a _copy_ of the official
-# deployment/bin/taler-local, needed due to problems
-# referencing files outside of the Dockerfile's directory.
-COPY taler-local . 
-
-# Use taler-local to build from sources for now.
-# This step will be optional, offering to install
-# from Debian packages.
-RUN python3 /taler-local bootstrap --without-repos wallet-core
-RUN python3 /taler-local build 
-# Setup the PNPM/TypeScript/Node environment.
-RUN npm install -g pnpm
-RUN pnpm config set global-bin-dir /usr/local/bin
-RUN pnpm install -g typescript
-RUN pnpm install --save-dev @types/node
-
-# Disable logins:
-RUN systemctl mask console-getty
-RUN systemctl mask systemd-logind
-
-# This unit file will start along the boot process.
-# It'll create the database, and finally call the config
-# interpreter.
-COPY prepare.service /etc/systemd/system
-RUN chmod 664 /etc/systemd/system/prepare.service
-RUN systemctl enable prepare
-
-# Install 'taler-config-lib'.
-RUN git clone git://git.taler.net/wallet-core
-RUN cd /wallet-core && ./bootstrap && ./configure && make config-lib
-
-# Can be moved up (next to its unit file); here to
-# avoid huge re-buildings.  prepare.sh creates the
-# database and finally calls the configuration interpreter
-# / generator: taler-config.js.
-COPY prepare.sh .
-
-# Compiles the TypeScript file passed in by the user, 
-# sets NODE_PATH, and finally triggers the configuration.
-COPY taler-config.sh .
-
-CMD ["/sbin/init"]
diff --git a/typescript/container/prepare.service b/typescript/container/prepare.service
deleted file mode 100644
index 560be16..0000000
--- a/typescript/container/prepare.service
+++ /dev/null
@@ -1,10 +0,0 @@
-[Unit]
-Description=PrepareDatabase
-After=postgresql.service
-
-[Service]
-ExecStart=/prepare.sh
-StandardOutput=journal+console
-
-[Install]
-WantedBy=multi-user.target
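
For context, the Dockerfile above enables this one-shot unit, so it runs at container boot (after postgresql.service) and logs to the journal and the console. From a shell inside the running container it can be inspected with ordinary systemd commands, e.g.:

    $ systemctl status prepare.service
    $ journalctl -u prepare.service
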
diff --git a/typescript/container/prepare.sh b/typescript/container/prepare.sh
deleted file mode 100755
index 7be811e..0000000
--- a/typescript/container/prepare.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-set -e
-
-su -c "createuser --superuser root && createdb taler" postgres
-
-/taler-config.sh
diff --git a/typescript/container/taler-config.sh b/typescript/container/taler-config.sh
deleted file mode 100755
index 74c199a..0000000
--- a/typescript/container/taler-config.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-set -eu
-
-tsc /config.ts
-export NODE_PATH=$(pnpm root -g)
-node /config.js
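
The remaining removal is the 1889-line taler-local helper below. For orientation, its click-based command line (as defined in the script itself) supports invocations along these lines; the repository list and database name are merely examples:

    $ python3 ./taler-local bootstrap --repos exchange,merchant,libeufin
    $ python3 ./taler-local build --dry
    $ python3 ./taler-local prepare --postgres-db-name taler
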
diff --git a/typescript/container/taler-local b/typescript/container/taler-local
deleted file mode 100755
index bd683eb..0000000
--- a/typescript/container/taler-local
+++ /dev/null
@@ -1,1889 +0,0 @@
-#!/usr/bin/env python3
-
-# This file is part of GNU Taler.
-#
-# GNU Taler is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# GNU Taler is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with GNU Taler.  If not, see <https://www.gnu.org/licenses/>.
-
-import qrcode
-import signal
-import socket
-import shutil
-import atexit
-import click
-import types
-import os
-import sys
-import os.path
-import subprocess
-import time
-import random
-import logging
-import json
-from os import listdir
-from os.path import isdir, join, basename
-from pathlib import Path
-from typing import List, Callable
-from shutil import copy
-from multiprocessing import Process
-from string import ascii_letters, ascii_uppercase
-from sys import exit
-from urllib.parse import urljoin, quote
-from os import remove
-import requests
-from collections import OrderedDict
-import errno
-from pathlib import Path
-from subprocess import Popen, DEVNULL, PIPE
-from datetime import datetime
-
-
-TALER_ROOT_DIR = Path.home() / ".taler"
-TALER_PREFIX = Path.home() / ".local"
-
-# Print No Newline.
-def print_nn(msg):
-    print(msg, end="")
-    sys.stdout.flush()
-
-class Repo:
-    def __init__(self, name, url, deps, builder, version="master"):
-        self.name = name
-        self.url = url
-        self.deps = deps
-        self.builder = builder
-        self.version = version
-
-@click.group()
-def cli():
-    pass
-
-# Parses the command-line-given and comma-separated repos list
-# into a list of names.
-def split_repos_list(repos):
-    return [repo for repo in repos.split(",") if repo != ""]
-
-# fetch the remote.  No timestamp deletion here
-def update_checkout(r: Repo, p: Path):
-    """Clean the repository's working directory and
-    update it to match the latest version of the upstream branch
-    that we are tracking."""
-    subprocess.run(["git", "-C", str(p), "clean", "-fdx"], check=True) # 
remove unversioned files.
-
-    # Equivalent to "git pull".  Does nothing if in detached HEAD
-    # but pulls new code into the local copy otherwise.
-    subprocess.run(["git", "-C", str(p), "fetch"], check=True)
-    subprocess.run(["git", "-C", str(p), "reset"], check=True)
-
-    # Makes the last step "--hard", namely removes files not
-    # belonging to the current version.
-    res = subprocess.run(
-        [
-            "git",
-            "-C",
-            str(p),
-            "rev-parse",
-            "--abbrev-ref",
-            "--symbolic-full-name",
-            "@{u}",
-        ],
-        stderr=subprocess.DEVNULL,
-        stdout=subprocess.PIPE,
-        encoding="utf-8",
-    )
-    if res.returncode != 0:
-        ref = "HEAD"
-    else:
-        ref = res.stdout.strip("\n ")
-    print(f"resetting {r.name} to ref {ref}")
-    subprocess.run(["git", "-C", str(p), "reset", "--hard", ref], check=True)
-
-
-def default_configure(*extra):
-    extra_list = list(extra)
-    subprocess.run(["./configure", f"--prefix={TALER_PREFIX}"] + extra_list, 
check=True)
-
-def pyconfigure(*extra):
-    """For python programs, --prefix doesn't work."""
-    subprocess.run(["./configure"] + list(extra), check=True)
-
-def build_libeufin(r: Repo, p: Path):
-    update_checkout(r, p)
-    subprocess.run(["./bootstrap"], check=True)
-    default_configure()
-    subprocess.run(["make", "install"], check=True)
-    (p / "taler-buildstamp").touch()
-
-def build_libmicrohttpd(r: Repo, p: Path):
-    update_checkout(r, p)
-    subprocess.run(["./bootstrap"], check=True)
-    default_configure("--disable-doc")
-    subprocess.run(["make"], check=True)
-    subprocess.run(["make", "install"], check=True)
-    (p / "taler-buildstamp").touch()
-
-def build_gnunet(r: Repo, p: Path):
-    update_checkout(r, p)
-    subprocess.run(["./bootstrap"], check=True)
-    pfx = Path.home() / ".local"
-    default_configure(
-        "--enable-logging=verbose",
-        f"--with-microhttpd={pfx}",
-        "--disable-documentation",
-    )
-    subprocess.run(["make", "install"], check=True)
-    (p / "taler-buildstamp").touch()
-
-def build_exchange(r: Repo, p: Path):
-    update_checkout(r, p)
-    subprocess.run(["./bootstrap"], check=True)
-    pfx = Path.home() / ".local"
-    default_configure(
-        "CFLAGS=-ggdb -O0",
-        "--enable-logging=verbose",
-        f"--with-microhttpd={pfx}",
-        f"--with-gnunet={pfx}",
-        "--disable-doc",
-    )
-    subprocess.run(["make", "install"], check=True)
-    (p / "taler-buildstamp").touch()
-
-def build_wallet(r, p):
-    update_checkout(r, p)
-    subprocess.run(["./bootstrap"], check=True)
-    default_configure()
-    subprocess.run(["make", "install"], check=True)
-    (p / "taler-buildstamp").touch()
-
-def build_twister(r, p):
-    update_checkout(r, p)
-    subprocess.run(["./bootstrap"], check=True)
-    pfx = Path.home() / ".local"
-    default_configure(
-        "CFLAGS=-ggdb -O0",
-        "--enable-logging=verbose",
-        f"--with-exchange={pfx}",
-        f"--with-gnunet={pfx}",
-    )
-    subprocess.run(["make", "install"], check=True)
-    (p / "taler-buildstamp").touch()
-
-
-def build_merchant(r, p):
-    update_checkout(r, p)
-    subprocess.run(["./bootstrap"], check=True)
-    pfx = Path.home() / ".local"
-    default_configure(
-        "CFLAGS=-ggdb -O0",
-        "--enable-logging=verbose",
-        f"--with-microhttpd={pfx}",
-        f"--with-exchange={pfx}",
-        f"--with-gnunet={pfx}",
-        "--disable-doc",
-    )
-    subprocess.run(["make", "install"], check=True)
-    (p / "taler-buildstamp").touch()
-
-def build_sync(r, p):
-    update_checkout(r, p)
-    subprocess.run(["./bootstrap"], check=True)
-    pfx = Path.home() / ".local"
-    default_configure(
-        "CFLAGS=-ggdb -O0",
-        "--enable-logging=verbose",
-        f"--with-microhttpd={pfx}",
-        f"--with-exchange={pfx}",
-        f"--with-merchant={pfx}",
-        f"--with-gnunet={pfx}",
-        "--disable-doc",
-    )
-    subprocess.run(["make", "install"], check=True)
-    (p / "taler-buildstamp").touch()
-
-
-
-def build_demos(r, p):
-    update_checkout(r, p)
-    pfx = Path.home() / ".local"
-    pyconfigure()
-    subprocess.run(["make", "install"], check=True)
-    (p / "taler-buildstamp").touch()
-
-def build_backoffice(r, p):
-    update_checkout(r, p)
-    subprocess.run(["./bootstrap"])
-    subprocess.run(["./configure"])
-    subprocess.run(["make", "build-single"])
-    (p / "taler-buildstamp").touch()
-
-repos = {
-    "libmicrohttpd": Repo(
-        "libmicrohttpd",
-        "git://git.gnunet.org/libmicrohttpd.git",
-        [],
-        build_libmicrohttpd,
-    ),
-    "gnunet": Repo(
-        "gnunet",
-        "git://git.gnunet.org/gnunet.git",
-        ["libmicrohttpd"],
-        build_gnunet
-    ),
-    "exchange": Repo(
-        "exchange",
-        "git://git.taler.net/exchange",
-        ["gnunet", "libmicrohttpd"],
-        build_exchange,
-    ),
-    "merchant": Repo(
-        "merchant",
-        "git://git.taler.net/merchant",
-        ["exchange","libmicrohttpd","gnunet"],
-        build_merchant,
-    ),
-    "sync": Repo(
-       "sync",
-       "git://git.taler.net/sync",
-       ["exchange",
-        "merchant",
-        "gnunet",
-        "libmicrohttpd"],
-       build_sync,
-   ),
-    "wallet-core": Repo(
-        "wallet-core",
-        "git://git.taler.net/wallet-core",
-        [],
-        build_wallet,
-    ),
-    "libeufin": Repo(
-        "libeufin",
-        "git://git.taler.net/libeufin.git",
-        [],
-        build_libeufin,
-    ),
-    "taler-merchant-demos": Repo(
-        "taler-merchant-demos",
-        "git://git.taler.net/taler-merchant-demos",
-        [],
-        build_demos,
-    ),
-    "twister": Repo(
-        "twister",
-        "git://git.taler.net/twister",
-        ["gnunet", "libmicrohttpd"],
-        build_twister,
-    ),
-}
-
-def get_repos_names() -> List[str]:
-    r_dir = TALER_ROOT_DIR / "sources"
-    if not r_dir.is_dir():
-        print(f"'{r_dir}' not found.  Did bootstrap run?")
-        return []
-    return [el for el in listdir(r_dir) if isdir(join(r_dir, el)) and repos.get(el)]
-
-# Get 'Repo' objects (globally defined),
-# using their names as index.
-def load_repos(reposNames) -> List[Repo]:
-    ret = []
-    for repo in repos.keys():
-        if repo in reposNames:
-            ret.append(repos[repo])
-    return ret
-
-# Return the list of repos (equipped with their version)
-# to install.
-def load_repos_with_envcfg(envcfg_path) -> List[Repo]:
-    envcfg_path = Path(envcfg_path)
-    if not os.path.isfile(envcfg_path):
-        print(f"{envcfg_path} is not a file")
-        sys.exit(1)
-    cfgtext = envcfg_path.read_text()
-    cfg = types.ModuleType("taler_deployment_cfg")
-    try:
-        exec(cfgtext, cfg.__dict__)
-    except SyntaxError:
-        print(f"{envcfg_path} is not Python.")
-        exit(1)
-    ret = []
-    for repo in repos.keys():
-        try:
-            envcfg_entry = getattr(cfg, "tag_" + repo.replace("-", "_"))
-        except AttributeError:
-            # 'env' files doesn't have this repo, continue looping.
-            continue
-        repos[repo].version = envcfg_entry
-        ret.append(repos[repo])
-    return ret
-
-# Flag as stale the projects set on 'master' that
-# aren't in line with upstream.  Detached head projects
-# aren't affected.
-def update_repos(repos: List[Repo], force) -> None:
-    for r in repos:
-        r_dir = TALER_ROOT_DIR / "sources" / r.name
-        subprocess.run(["git", "-C", str(r_dir), "fetch"], check=True)
-        res = subprocess.run(
-            ["git", "-C", str(r_dir), "status", "-sb"],
-            check=True,
-            stdout=subprocess.PIPE,
-            encoding="utf-8",
-        )
-        if "behind" in res.stdout or force:
-            print(f"{r.name} will be compiled")
-            s = r_dir / "taler-buildstamp"
-            if s.exists():
-                s.unlink()
-
-# projects without the build timestamp are considered stale,
-# even if one of their dependencies _got_ marked as stale.
-def get_stale_repos(repos: List[Repo]) -> List[Repo]:
-    timestamps = {}
-    stale = []
-    for r in repos:
-        r_dir = TALER_ROOT_DIR / "sources" / r.name
-        s = r_dir / "taler-buildstamp"
-        if not s.exists():
-            timestamps[r.name] = time.time()
-            stale.append(r)
-            continue
-        ts = timestamps[r.name] = s.stat().st_mtime
-        for dep in r.deps:
-            # When 'dep' in not found, it has been
-            # excluded from the compilation.
-            if timestamps.get("dep", 0) > ts:
-                stale.append(r)
-                break
-    return stale
-
-@cli.command()
-@click.option(
-    "--without-repos", metavar="WITHOUT REPOS",
-    help="WITHOUT REPOS is a unspaced and comma-separated list \
-of the repositories to _exclude_ from compilation",
-    default="")
-@click.option(
-    "--only-repos", metavar="ONLY REPOS",
-    help="ONLY REPOS is a unspaced and comma-separated exclusive list \
-of the repositories to include in the compilation",
-    default="")
-@click.option(
-    "--dry/--no-dry", default=False,
-    help="Only getting changes, without actual build."
-)
-@click.option(
-    "--with-envcfg", metavar="PATH",
-    help="python file pinning each codebase version.",
-)
-# Normally, we don't rebuild dependent projects when one
-# of their dependency changed.  This lets check whether non
-# breaking changes are really so; this option invalidates
-# this policy by letting all the codebases be compiled.
-@click.option(
-    "--force/--no-force", default=False,
-    help="build all the projects.",
-)
-def build(without_repos, only_repos, dry, with_envcfg, force) -> None:
-    """Build the deployment from source."""
-    if only_repos != "" and without_repos != "":
-        print("Either use --only-repos or --without-repos")
-        exit(1)
-    repos_names = get_repos_names()
-    if only_repos != "":
-        repos_names = list(filter(
-                lambda x: x in split_repos_list(only_repos),
-                repos_names
-        ))
-    if without_repos != "":
-        repos_names = list(filter(
-                lambda x: x not in split_repos_list(without_repos),
-                repos_names
-        ))
-    if with_envcfg:
-        target_repos = load_repos_with_envcfg(with_envcfg)
-    else:
-        target_repos = load_repos(repos_names)
-    # enforce version here.
-    sources = TALER_ROOT_DIR / "sources"
-    for r in target_repos:
-        subprocess.run(
-            ["git", "-C", str(sources / r.name),
-             "checkout", "-q", "-f",
-             r.version, "--"], check=True
-        )
-    update_repos(target_repos, force)
-    stale = get_stale_repos(target_repos)
-    print(f"found stale repos: {[r.name for r in stale]}")
-    for r in stale:
-        # Inform, if a dependency is not being built:
-        diff = set(r.deps) - set(repos_names)
-        if len(diff) > 0:
-            print(f"Info: those dependencies are not being built: {diff}")
-        p = TALER_ROOT_DIR / "sources" / r.name
-        os.chdir(str(p))
-        if dry:
-            print("dry running")
-            continue
-        r.builder(r, p)
-
-# Only git-clone the codebases.  The 'build' step
-# will run all the update logic.  At this point, a
-# 'env' file - as well as the --repos option - will
-# only express which codebases are to clone.
-@cli.command()
-@click.option(
-    "--repos", "-r",
-    metavar="REPOS",
-    help="REPOS is a unspaced and comma-separated list of the repositories to 
clone.",
-    
default="libmicrohttpd,gnunet,exchange,merchant,wallet-core,taler-merchant-demos,sync,libeufin",
-    show_default=True,
-)
-@click.option(
-    "--without-repos",
-    metavar="REPOS",
-    help="REPOS is a unspaced and comma-separated list of the repositories NOT 
to clone."
-)
-@click.option(
-    "--list-repos/--no-list-repos", default=False,
-    help="Lists the repositories that were bootstrapped.",
-)
-@click.option(
-    "--with-envcfg", metavar="PATH",
-    help="python file pinning each codebase version.",
-)
-@click.option(
-    "--dry/--no-dry", default=False,
-    help="Print steps, without downloading any repository.",
-)
-def bootstrap(list_repos, repos, with_envcfg, dry, without_repos) -> None:
-    """Clone all the specified repositories."""
-    # Only saying _which_ repo were installed.  No further action
-    if list_repos:
-        for repo in get_repos_names():
-            print(repo)
-        return
-
-    # Download the repositories.
-    def clone_repos(repos: List[Repo]):
-        if len(repos) == 0:
-            print("No repositories can be checked out.  Spelled correctly?")
-            return
-        sources = TALER_ROOT_DIR / "sources"
-        for r in repos:
-            print(f"Bootstrapping '{r.name}', at version '{r.version}'")
-            if dry:
-                print("dry running")
-                continue
-            r_dir = sources / r.name
-            if not r_dir.exists():
-                r_dir.mkdir(parents=True, exist_ok=True)
-                subprocess.run(
-                    ["git", "-C", str(sources),
-                     "clone", r.url], check=True
-                )
-                subprocess.run(
-                    ["git", "-C", str(r_dir),
-                     "checkout", "-q", "-f",
-                     r.version, "--"], check=True
-                )
-
-    # Get list of to-be-cloned repos from the 'env' file.
-    if with_envcfg:
-        # 'with_envcfg' is a path to a "envcfg.py" file.
-        preparedRepos = load_repos_with_envcfg(with_envcfg)
-    # Get list of to-be-cloned repos from the command line
-    # (or its default)
-    else:
-        # 'repos' is here "repo1,repo2,.."
-        reposList = split_repos_list(repos)
-        # 'reposList' is here ["repo1", "repo2", ...]
-        preparedRepos = load_repos(reposList)
-    if without_repos:
-        for exclude_repo in split_repos_list(without_repos):
-            preparedRepos = [el for el in preparedRepos if el.name != exclude_repo]
-    clone_repos(preparedRepos)
-
-# Globals shared across multiple sub-commands:
-# needed to configure and launch the reverse proxy.
-REV_PROXY_HOSTNAME = "localhost"
-REV_PROXY_PORT = "8080"
-REV_PROXY_NETLOC = REV_PROXY_HOSTNAME + ":" + REV_PROXY_PORT
-REV_PROXY_PROTO = "http"
-REV_PROXY_URL = f"{REV_PROXY_PROTO}://{REV_PROXY_NETLOC}"
-UNIX_SOCKETS_DIR = TALER_ROOT_DIR / "sockets"
-LOG_DIR = TALER_ROOT_DIR / "logs"
-# needed to create the customer's bank account and
-# to let them subsequently withdraw via the Access API.
-CUSTOMER_BANK_ACCOUNT = "sandbox-account-customer"
-CUSTOMER_BANK_PASSWORD = "secret"
-# needed along preparation and later to withdraw via
-# the Access API.
-CURRENCY = "EUR"
-
-@cli.command()
-@click.option(
-    "--x-forwarded-host", metavar="HOST",
-    help="Instruct Nginx to set HOST as the X-Forwarded-Host.",
-    default=REV_PROXY_NETLOC
-)
-@click.option(
-    "--x-forwarded-proto", metavar="PROTO",
-    help="Instruct Nginx to set PROTO as the X-Forwarded-Proto.",
-    default="http"
-)
-@click.option(
-    "--postgres-db-name", metavar="DBNAME",
-    help="Set postgres database name for all the services.",
-    default="taler"
-)
-def prepare(x_forwarded_host, x_forwarded_proto, postgres_db_name):
-    """Generate configuration, run-time blobs, instances, euFin accounts."""
-    def is_serving(check_url, tries=10):
-        for i in range(tries):
-            try:
-                print_nn(".")
-                # Raises if the service is not reachable.
-                response = requests.get(
-                    check_url,
-                    timeout=1
-                )
-                # The reverse proxy may return 500 if the
-                # end service is not ready, therefore this
-                # case should be tolerated.
-                response.raise_for_status()
-            except:
-                time.sleep(0.5)
-                if i == tries - 1:
-                    return False
-                continue
-            break
-        return True
-
-    def fail(reason=None):
-        if reason:
-            print("ERROR:", reason)
-        exit(1)
-
-    def kill(proc):
-        proc.terminate()
-        proc.wait()
-
-    def get_nexus_cli_env(
-        username,
-        password,
-        nexus_url
-    ):
-        env = os.environ.copy()
-        env["LIBEUFIN_NEXUS_USERNAME"] = username
-        env["LIBEUFIN_NEXUS_PASSWORD"] = password
-        env["LIBEUFIN_NEXUS_URL"] = nexus_url
-        return env
-
-    def get_sandbox_cli_env(
-        username, password
-    ):
-       env = os.environ.copy()
-       env["LIBEUFIN_SANDBOX_USERNAME"] = username
-       env["LIBEUFIN_SANDBOX_PASSWORD"] = password
-       return env
-
-    # Will be extended to include a SANDBOX_ADMIN_TOKEN
-    # that will obsolete the 'superuser' flag of ordinary
-    # user accounts.  Likewise, the client side will be
-    # modified to use such token.
-    def get_sandbox_server_env(db_file, base_url, admin_password):
-        env = os.environ.copy()
-        env["LIBEUFIN_SANDBOX_DB_CONNECTION"] = f"jdbc:sqlite:{db_file}"
-        env["LIBEUFIN_SANDBOX_BASE_URL"] = base_url
-        env["LIBEUFIN_SANDBOX_ADMIN_PASSWORD"] = admin_password
-        return env
-
-    def get_nexus_server_env(db_file, base_url):
-        env = os.environ.copy()
-        env["LIBEUFIN_NEXUS_DB_CONNECTION"] = f"jdbc:sqlite:{db_file}"
-        env["LIBEUFIN_NEXUS_BASE_URL"] = base_url
-        return env
-
-    def urljoin_nodrop(a, b):
-        a = a + "/" # urljoin will drop extra trailing slashes.
-        b = "/".join([x for x in b.split("/") if x != ""]) # remove leading 
slashes.
-        return urljoin(a, b)
-
-    def prepare_nexus_account(
-        ebics_url,
-        ebics_host_id,
-        ebics_partner_id,
-        ebics_user_id,
-        bank_connection_name,
-        bank_account_name_sandbox,
-        bank_account_name_nexus,
-        env
-    ):
-        # make connection
-        Command(
-            [
-                f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
-                "new-ebics-connection",
-                "--ebics-url", ebics_url,
-                "--host-id", ebics_host_id,
-                "--partner-id", ebics_partner_id,
-                "--ebics-user-id", ebics_user_id,
-                bank_connection_name
-            ],
-            env
-        ).run()
-        # connect
-        Command(
-            [
-                f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
-                "connect", bank_connection_name
-            ],
-            env
-        ).run()
-        # Import bank account
-        Command(
-            [
-                f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
-                "download-bank-accounts",
-                bank_connection_name
-            ],
-            env
-        ).run()
-        Command(
-            [
-                f"{TALER_PREFIX}/bin/libeufin-cli", "connections",
-                "import-bank-account",
-                "--offered-account-id",
-                bank_account_name_sandbox,
-                "--nexus-bank-account-id",
-                bank_account_name_nexus,
-                bank_connection_name
-            ],
-            env
-        ).run()
-        # Set background tasks.
-        Command(
-            [
-                f"{TALER_PREFIX}/bin/libeufin-cli", "accounts",
-                "task-schedule", bank_account_name_nexus,
-                "--task-type", "submit",
-                "--task-name", "submit-payments-each-second",
-                "--task-cronspec", "* * *"
-            ],
-            env
-        ).run()
-        Command(
-            [
-                f"{TALER_PREFIX}/bin/libeufin-cli", "accounts",
-                "task-schedule", bank_account_name_nexus,
-                "--task-type", "fetch",
-                "--task-name", "fetch-reports-each-second",
-                "--task-cronspec", "* * *",
-                "--task-param-level", "report",
-                "--task-param-range-type", "latest"
-            ],
-            env
-        ).run()
-
-    def get_sandbox_account_info(
-        sandbox_url,
-        bank_account_label,
-        password,
-    ):
-        customer_env = os.environ.copy()
-        customer_env["LIBEUFIN_SANDBOX_USERNAME"] = bank_account_label
-        customer_env["LIBEUFIN_SANDBOX_PASSWORD"] = password
-        demobank_url = urljoin_nodrop(sandbox_url, "/demobanks/default")
-        r = Command([
-            f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
-            "--sandbox-url", demobank_url,
-            "demobank", "info",
-            "--bank-account", bank_account_label],
-            env = customer_env,
-            capture_stdout=True
-        ).run()
-        return json.loads(r)
-
-    def prepare_sandbox_account(
-        sandbox_url,
-        ebics_host_id,
-        ebics_partner_id,
-        ebics_user_id,
-        person_name,
-        # This value is BOTH a username
-        # and a bank account label.
-        bank_account_name,
-        password,
-        is_public=False
-    ):
-        demobank_url = urljoin_nodrop(sandbox_url, "/demobanks/default")
-        user_env = os.environ.copy()
-        user_env["LIBEUFIN_SANDBOX_USERNAME"] = bank_account_name
-        user_env["LIBEUFIN_SANDBOX_PASSWORD"] = password
-        register_cmd = [
-            f"{TALER_PREFIX}/bin/libeufin-cli",
-            "sandbox", "--sandbox-url", demobank_url,
-            "demobank", "register"
-        ]
-        if is_public:
-            register_cmd.append("--public")
-        Command(register_cmd, env = user_env).run()
-        admin_env = os.environ.copy()
-        admin_env["LIBEUFIN_SANDBOX_USERNAME"] = SANDBOX_ADMIN_USERNAME
-        admin_env["LIBEUFIN_SANDBOX_PASSWORD"] = SANDBOX_ADMIN_PASSWORD
-        Command([
-                f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
-                "--sandbox-url", demobank_url,
-                "demobank", "new-ebicssubscriber",
-                "--host-id", ebics_host_id,
-                "--partner-id", ebics_partner_id,
-                "--user-id", ebics_user_id,
-                "--bank-account", bank_account_name
-            ],
-            env = admin_env
-        ).run()
-
-
-    WIRE_METHOD = "iban"
-    # euFin URLs
-    SANDBOX_URL = REV_PROXY_URL + "/sandbox"
-    NEXUS_URL = REV_PROXY_URL + "/nexus"
-
-    # Filesystem's paths
-    CFG_OUTDIR = TALER_ROOT_DIR / "config"
-    TALER_RUNTIME_DIR = TALER_ROOT_DIR / "runtime"
-    TALER_DATA_DIR = TALER_ROOT_DIR / "data"
-    TALER_UNIT_FILES_DIR = systemd_user_dir = Path.home() / ".config" / "systemd" / "user"
-
-    def get_link(path = ""):
-       return x_forwarded_proto + "://" + x_forwarded_host + path
-
-    def create_tip_reserve():
-        payto = Command([
-            f"{TALER_PREFIX}/bin/taler-merchant-setup-reserve",
-            "--amount", f"{CURRENCY}:20",
-            "--exchange-url", get_link("/exchange/"),
-            "--merchant-url", get_link("/merchant-backend/instances/survey/"),
-            "--apikey", f"Bearer {FRONTENDS_API_TOKEN}",
-            "--wire-method", WIRE_METHOD],
-            capture_stdout=True
-        ).run()
-
-        Command([
-            f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox", "--sandbox-url",
-            SANDBOX_URL + "/demobanks/default/", "demobank",
-            "new-transaction", "--bank-account", "sandbox-account-survey",
-            "--payto-with-subject", payto, "--amount", f"{CURRENCY}:20"],
-            env = get_sandbox_cli_env(
-                username = "sandbox-account-survey",
-                password = ALL_INSTANCES_BANK_PASSWORD
-            )).run()
-
-    def get_random_iban():
-        cc_no_check = 131400 # is "DE00"
-        bban = "".join(random.choices("0123456789", k=4))
-        check_digits = 98 - (int(f"{bban}{cc_no_check}") % 97)
-        return "DE" + (f"0{check_digits}"[-2:]) + bban
-
-    # IBANs
-
-    IBAN_MERCHANT_DEFAULT = get_random_iban()
-    IBAN_MERCHANT_DEMOSHOP = get_random_iban()
-
-    # Instances
-    INSTANCES = [
-        dict(name="GNUnet", isPublic=True),
-        dict(name="Taler", isPublic=True),
-        dict(name="Tor", isPublic=True),
-        dict(name="survey"),
-        dict(name="blog"),
-    ]
-
-    # Credentials / API keys
-    EXCHANGE_NEXUS_USERNAME = "exchange-nexus-user"
-    EXCHANGE_NEXUS_PASSWORD = "exchange-nexus-password"
-    FRONTENDS_API_TOKEN = "secret-token:secret"
-    TALER_MERCHANT_TOKEN = "secret-token:secret"
-    ALL_INSTANCES_BANK_PASSWORD = "secret"
-    EXCHANGE_BANK_ACCOUNT_SANDBOX = "sandbox-account-exchange"
-    EXCHANGE_BANK_ACCOUNT_PASSWORD = "secret"
-
-    # EBICS
-    EBICS_HOST_ID = "ebicsDeployedHost"
-    EXCHANGE_EBICS_USER_ID = "exchangeEbicsUserId"
-    EXCHANGE_EBICS_PARTNER_ID = "exchangeEbicsPartnerId"
-    EBICS_URL = REV_PROXY_URL + "/sandbox/ebicsweb"
-
-    # euFin
-    EXCHANGE_BANK_ACCOUNT_NEXUS = "exchange-imported-account-nexus"
-    EXCHANGE_BANK_CONNECTION = "exchange-ebics-connection"
-    NEXUS_DB_FILE = "/tmp/nexus.sqlite"
-    SANDBOX_DB_FILE = "/tmp/sandbox.sqlite"
-    EXCHANGE_FACADE_NAME = "exchange-taler-facade"
-    SANDBOX_ADMIN_USERNAME = "admin"
-    SANDBOX_ADMIN_PASSWORD = "secret"
-
-    class Command:
-        def __init__(
-                self, cmd, env=os.environ, log_dir=LOG_DIR,
-                custom_name=None, capture_stdout=False
-            ):
-            if len(cmd) == 0:
-                fail("Command to execute was given empty.")
-            self.name = custom_name if custom_name else basename(cmd[0])
-            self.cmd = cmd
-            self.capture_stdout = capture_stdout
-            self.log_dir = log_dir
-            self.env = env
-
-        def run(self):
-            self.do()
-            return_code = self.handle.wait()
-            self.cleanup() # Mainly closes the log file.
-            if return_code != 0:
-                fail(f"Command {self.name} failed. Logs in {self.log_dir}")
-            if self.capture_stdout:
-                return self.handle.communicate()[0].decode("utf-8").rstrip()
-
-        def get_log_filename(self):
-            return self.log_file.name
-
-        def cleanup(self):
-            self.log_file.flush()
-            self.log_file.close()
-
-        def do(self):
-            if not self.log_dir.is_dir():
-                os.makedirs(self.log_dir)
-            try:
-                log_filename = self.log_dir / f"{self.name}.log"
-                self.log_file = open(log_filename, "a+")
-            except Exception as error:
-                fail(f"Could not open log file: {log_filename}: {error}")
-            try:
-                self.handle = Popen(
-                    self.cmd, # list
-                    stdin=DEVNULL,
-                    stdout=self.log_file if not self.capture_stdout else PIPE,
-                    stderr=self.log_file,
-                    env=self.env
-                )
-            except Exception as error:
-                fail(f"Could not execute: {' '.join(self.cmd)}: {error}")
-
-    class ConfigFile:
-        def __init__(self, filename):
-            self.sections = OrderedDict()
-            self.filename = filename
-
-        def destroy(self):
-            del self.sections
-            self.sections = OrderedDict()
-
-        def cfg_put(self, section_name, key, value):
-            s = self.sections[section_name] = self.sections.get(section_name, OrderedDict())
-            s[key] = value
-
-        def cfg_write(self, outdir):
-            if outdir:
-                if not os.path.isdir(outdir):
-                    os.makedirs(outdir)
-                fstream = open(os.path.join(outdir, self.filename), "w")
-            else:
-                fstream = open(sys.stdout)
-
-            for section_name, section in self.sections.items():
-                fstream.write("[" + section_name + "]" + "\n")
-                for key, value in section.items():
-                    fstream.write(key + " = " + value + "\n")
-                fstream.write("\n")
-            fstream.close()
-
-    def config_specify_master_pub(
-            filename,
-            currency,
-            exchange_master_pub
-    ):
-        Command([
-            f"{TALER_PREFIX}/bin/taler-config", "-c", filename,
-            "-s", "exchange", "-o", "master_public_key",
-            "-V", exchange_master_pub
-        ]).run()
-        Command([
-            f"{TALER_PREFIX}/bin/taler-config", "-c", filename,
-            "-s", f"merchant-exchange-{currency}",
-            "-o", "master_key",
-            "-V", exchange_master_pub
-        ]).run()
-
-    # When called, there is no exchange master pub yet.
-    # taler-exchange-offline will produce the key _after_
-    # taler.conf is generated.  Only after that, we'll
-    # specify the master key where it is missing; namely
-    # in the merchant backend and exchange HTTP daemon sections.
-
-    def config_main(
-        filename,
-        outdir,
-        unix_sockets_dir,
-        currency,
-        rev_proxy_url,
-        wire_method,
-        exchange_wire_gateway_username,
-        exchange_wire_gateway_password,
-        frontend_api_key,
-        taler_runtime_dir,
-        postgres_db_name
-    ):
-        def coin(
-            obj,
-            currency,
-            name,
-            value,
-            d_withdraw="3 years",
-            d_spend="5 years",
-            d_legal="10 years",
-            f_withdraw="0.01",
-            f_deposit="0.01",
-            f_refresh="0.01",
-            f_refund="0.01",
-            rsa_keysize="2048",
-        ):
-            sec = "coin_" + currency + "_" + name
-            obj.cfg_put(sec, "cipher", "RSA")
-            obj.cfg_put(sec, "value", currency + ":" + value)
-            obj.cfg_put(sec, "duration_withdraw", d_withdraw)
-            obj.cfg_put(sec, "duration_spend", d_spend)
-            obj.cfg_put(sec, "duration_legal", d_legal)
-            obj.cfg_put(sec, "fee_withdraw", currency + ":" + f_withdraw)
-            obj.cfg_put(sec, "fee_refresh", currency + ":" + f_refresh)
-            obj.cfg_put(sec, "fee_refund", currency + ":" + f_refund)
-            obj.cfg_put(sec, "fee_deposit", currency + ":" + f_deposit)
-            obj.cfg_put(sec, "rsa_keysize", rsa_keysize)
-
-        obj = ConfigFile("taler.conf")
-        obj.cfg_put("paths", "TALER_DATA_HOME", str(TALER_DATA_DIR))
-        if not taler_runtime_dir.is_dir():
-            os.makedirs(taler_runtime_dir)
-        obj.cfg_put("paths", "TALER_RUNTIME_DIR", str(taler_runtime_dir))
-        obj.cfg_put("taler", "CURRENCY", currency)
-        obj.cfg_put("taler", "CURRENCY_ROUND_UNIT", f"{currency}:0.01")
-
-        obj.cfg_put("bank", "serve", "uwsgi")
-        obj.cfg_put("bank", "uwsgi_serve", "unix")
-        obj.cfg_put("bank", "uwsgi_unixpath", str(unix_sockets_dir / 
"bank.sock"))
-        obj.cfg_put("bank", "uwsgi_unixpath_mode", "660")
-        obj.cfg_put("bank", "database", "taler")
-        obj.cfg_put("bank", "max_debt", "%s:500.0" % currency)
-        obj.cfg_put("bank", "max_debt_bank", "%s:1000000000.0" % currency)
-        obj.cfg_put("bank", "allow_registrations", "YES")
-        obj.cfg_put("bank", "base_url", rev_proxy_url + "/bank/")
-        obj.cfg_put("bank", "database", f"postgres:///{postgres_db_name}")
-        obj.cfg_put("bank", "suggested_exchange", rev_proxy_url + "/exchange/")
-
-        obj.cfg_put("donations", "serve", "http")
-        obj.cfg_put("donations", "http_serve", "unix")
-        obj.cfg_put("donations", "http_unixpath", str(unix_sockets_dir / 
"donations.sock"))
-        obj.cfg_put("donations", "http_unixpath_mode", "660")
-
-        obj.cfg_put("landing", "serve", "http")
-        obj.cfg_put("landing", "http_serve", "unix")
-        obj.cfg_put("landing", "http_unixpath", str(unix_sockets_dir / 
"landing.sock"))
-        obj.cfg_put("landing", "http_unixpath_mode", "660")
-
-        obj.cfg_put("blog", "serve", "http")
-        obj.cfg_put("blog", "http_serve", "unix")
-        obj.cfg_put("blog", "http_unixpath", str(unix_sockets_dir / 
"blog.sock"))
-        obj.cfg_put("blog", "http_unixpath_mode", "660")
-
-        obj.cfg_put("survey", "serve", "http")
-        obj.cfg_put("survey", "http_serve", "unix")
-        obj.cfg_put("survey", "http_unixpath", str(unix_sockets_dir / 
"survey.sock"))
-        obj.cfg_put("survey", "http_unixpath_mode", "660")
-        obj.cfg_put("survey", "bank_password", "x")
-
-        obj.cfg_put("merchant", "serve", "unix")
-        obj.cfg_put("merchant", "unixpath", str(unix_sockets_dir / 
"merchant-backend.sock"))
-        obj.cfg_put("merchant", "wire_transfer_delay", "0 s")
-        obj.cfg_put("merchant", "default_max_wire_fee", currency + ":" + 
"0.01")
-        obj.cfg_put("merchant", "default_max_deposit_fee", currency + ":" + 
"0.05")
-        obj.cfg_put("merchantdb-postgres", "config", 
f"postgres:///{postgres_db_name}")
-
-        obj.cfg_put("frontends", "backend", rev_proxy_url + 
"/merchant-backend/")
-        obj.cfg_put(
-            "merchant-exchange-{}".format(currency),
-            "exchange_base_url", rev_proxy_url + "/exchange/",
-        )
-        obj.cfg_put(
-            "merchant-exchange-{}".format(currency),
-            "currency", currency
-        )
-        obj.cfg_put("auditor", "serve", "unix")
-        # FIXME: both below used?
-        obj.cfg_put("auditor", "base_url", rev_proxy_url + "/auditor")
-        obj.cfg_put("auditor", "auditor_url", rev_proxy_url + "/auditor")
-        obj.cfg_put("auditor", "unixpath", str(unix_sockets_dir / 
"auditor.sock"))
-        obj.cfg_put("auditor", "tiny_amount", currency + ":0.01")
-
-        obj.cfg_put(
-            "taler-exchange-secmod-eddsa",
-            "unixpath",
-            str(unix_sockets_dir / "exchange-secmod-eddsa.sock")
-        )
-        obj.cfg_put(
-            "taler-exchange-secmod-cs",
-            "unixpath",
-            str(unix_sockets_dir / "exchange-secmod-cs.sock")
-        )
-        obj.cfg_put("taler-exchange-secmod-cs", "sm_priv_key",
-                    "${TALER_DATA_HOME}/taler-exchange-secmod-cs/secmod-private-key"
-        )
-        obj.cfg_put(
-            "taler-exchange-secmod-rsa",
-            "unixpath",
-            str(unix_sockets_dir / "exchange-secmod-rsa.sock")
-        )
-        obj.cfg_put("taler-exchange-secmod-rsa", "sm_priv_key",
-                    "${TALER_DATA_HOME}/taler-exchange-secmod-rsa/secmod-private-key"
-        )
-        obj.cfg_put("exchange", "base_url", rev_proxy_url + "/exchange/")
-        obj.cfg_put("exchange", "serve", "unix")
-        obj.cfg_put("exchange", "unixpath", str(unix_sockets_dir / 
"exchange.sock"))
-        obj.cfg_put("exchange", "terms_etag", "0")
-        obj.cfg_put("exchange", "terms_dir", 
"$HOME/.local/share/taler-exchange/tos")
-        obj.cfg_put("exchange", "privacy_etag", "0")
-        obj.cfg_put("exchange", "privacy_dir", 
"$HOME/.local/share/taler-exchange/pp")
-
-        obj.cfg_put("exchangedb-postgres", "db_conn_str", 
f"postgres:///{postgres_db_name}")
-        obj.cfg_put("exchangedb-postgres", "config", 
f"postgres:///{postgres_db_name}")
-        obj.cfg_put("auditordb-postgres", "db_conn_str", 
f"postgres:///{postgres_db_name}")
-        obj.cfg_put("auditordb-postgres", "config", 
f"postgres:///{postgres_db_name}")
-        obj.cfg_put("exchange-account-1", "enable_debit", "yes")
-        obj.cfg_put("exchange-account-1", "enable_credit", "yes")
-        obj.cfg_put("merchant-account-merchant",
-                    "wire_response",
-                    "${TALER_DATA_HOME}/merchant/wire/merchant.json",
-        )
-        obj.cfg_put("merchant-account-merchant", "wire_file_mode", "770")
-
-        obj.cfg_put("frontends", "backend_apikey", f"{frontend_api_key}")
-        coin(obj, currency, "ct_10", "0.10")
-        coin(obj, currency, "1", "1")
-        coin(obj, currency, "2", "2")
-        coin(obj, currency, "5", "5")
-        coin(obj, currency, "10", "10")
-        coin(obj, currency, "1000", "1000")
-        obj.cfg_write(outdir)
-        return obj
-
-    def config_sync(
-            filename, outdir,
-            unix_sockets_dir,
-            currency, api_key,
-            rev_proxy_url,
-            postgres_db_name
-    ):
-        obj = ConfigFile(filename)
-        obj.cfg_put("taler", "currency", currency)
-        obj.cfg_put("sync", "serve", "unix")
-        obj.cfg_put("sync", "unixpath", str(unix_sockets_dir / "sync.sock"))
-        obj.cfg_put("sync", "apikey", f"Bearer secret-token:{api_key}")
-        obj.cfg_put("sync", "annual_fee", f"{currency}:0.1")
-        obj.cfg_put("sync", "fulfillment_url", "taler://fulfillment-success/")
-        obj.cfg_put("sync", "payment_backend_url", rev_proxy_url + 
"merchant-backend/instances/Taler/")
-        obj.cfg_put("syncdb-postgres", "config", 
f"postgres:///{postgres_db_name}")
-        obj.cfg_write(outdir)
-
-    def unit_file_content(description, cmd, env=None):
-        executable_name = cmd.split(" ")[0].split("/")[-1]
-        content = (
-            "[Unit]\n"
-            f"Description={description}\n"
-            "[Service]\n"
-            f"ExecStart={cmd}\n"
-            f"StandardOutput=append:{LOG_DIR / executable_name}.log\n"
-            f"StandardError=append:{LOG_DIR / executable_name}.log"
-        )
-        if env:
-            content += f"\nEnvironmentFile={env}"
-        return content
-
-
-    print_nn("Ensure no service is running...")
-    if is_serving(REV_PROXY_URL + "/", tries=3):
-        fail("Reverse proxy is unexpectedly running!")
-    if UNIX_SOCKETS_DIR.is_dir():
-        for left_socket in os.listdir(UNIX_SOCKETS_DIR):
-            s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
-            socket_file = str(UNIX_SOCKETS_DIR / left_socket)
-            if s.connect_ex(socket_file.encode("utf-8")) == 0:
-                fail(f"A service is unexpectedly running and bound to 
{socket_file}!")
-    print(" OK")
-
-    print_nn("Remove stale data and config...")
-    if TALER_DATA_DIR.exists():
-        shutil.rmtree(TALER_DATA_DIR)
-    if TALER_RUNTIME_DIR.exists():
-        shutil.rmtree(TALER_RUNTIME_DIR)
-    if CFG_OUTDIR.exists():
-        shutil.rmtree(CFG_OUTDIR)
-    print(" OK")
-
-    print_nn("Generate preliminary taler.conf...")
-    mc = config_main(
-        "taler.conf",
-        outdir=CFG_OUTDIR,
-        unix_sockets_dir=UNIX_SOCKETS_DIR,
-        currency=CURRENCY,
-        rev_proxy_url=get_link(), # Gets X-Forwarded-* compatible base URL.
-        wire_method=WIRE_METHOD,
-        exchange_wire_gateway_username=EXCHANGE_NEXUS_USERNAME,
-        exchange_wire_gateway_password=EXCHANGE_NEXUS_PASSWORD,
-        frontend_api_key=FRONTENDS_API_TOKEN,
-        taler_runtime_dir=TALER_RUNTIME_DIR,
-        postgres_db_name=postgres_db_name
-    )
-    print(" OK")
-
-    print_nn("Installing SystemD unit files...")
-    if not systemd_user_dir.exists():
-        systemd_user_dir.mkdir(parents=True, exist_ok=True)
-
-    if not TALER_UNIT_FILES_DIR.exists():
-        TALER_UNIT_FILES_DIR.mkdir(parents=True, exist_ok=True)
-
-    # Internal redirect of X-Forwarded-Host's port
-    # to the port Nginx binds to.  Allows clients
-    # connecting from within a container to still
-    # reach services at X-Forwarded-Host.
-    try:
-        x_forwarded_port = x_forwarded_host.split(":")[1]
-    except IndexError:
-        x_forwarded_port = None
-
-    need_redirect = (x_forwarded_port) and (x_forwarded_port != REV_PROXY_PORT)
-    with open(TALER_UNIT_FILES_DIR / "taler-local-port-redirect.service", "w") 
as port_redirect_unit:
-        port_redirect_unit.write(unit_file_content(
-            description = "Port redirect allowing configuration at 
X-Forwarded-Host",
-            cmd = f"socat TCP4-LISTEN:{x_forwarded_port},fork 
TCP4:{REV_PROXY_NETLOC}" if need_redirect else "true",
-        ))
-    with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-httpd.service", 
"w") as exchange_unit:
-        exchange_unit.write(unit_file_content(
-            description = "Taler Exchange HTTP daemon",
-            cmd = f"{TALER_PREFIX}/bin/taler-exchange-httpd -L DEBUG -c 
{CFG_OUTDIR / 'taler.conf'}",
-            env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if 
os.environ.get("PGPORT") else None
-        ))
-    with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-wirewatch.service", 
"w") as exchange_wirewatch_unit:
-        exchange_wirewatch_unit.write(unit_file_content(
-            description = "Taler Exchange Wirewatch",
-            cmd = f"{TALER_PREFIX}/bin/taler-exchange-wirewatch -L DEBUG -c 
{CFG_OUTDIR / 'taler.conf'}",
-            env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if 
os.environ.get("PGPORT") else None
-        ))
-    with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-aggregator.service", "w") as exchange_aggregator_unit:
-        exchange_aggregator_unit.write(unit_file_content(
-            description = "Taler Exchange Aggregator",
-            cmd = f"{TALER_PREFIX}/bin/taler-exchange-aggregator --kyc-off -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
-            env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
-        ))
-    with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-transfer.service", 
"w") as exchange_transfer_unit:
-        exchange_transfer_unit.write(unit_file_content(
-            description = "Taler Exchange Transfer",
-            cmd = f"{TALER_PREFIX}/bin/taler-exchange-transfer -L DEBUG -c 
{CFG_OUTDIR / 'taler.conf'}",
-            env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if 
os.environ.get("PGPORT") else None
-        ))
-    with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-cs.service", 
"w") as exchange_cs_unit:
-        exchange_cs_unit.write(unit_file_content(
-            description = "Taler Exchange CS security module",
-            cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-cs -L DEBUG -c 
{CFG_OUTDIR / 'taler.conf'}"
-        ))
-
-    with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-rsa.service", "w") as exchange_rsa_unit:
-        exchange_rsa_unit.write(unit_file_content(
-            description = "Taler Exchange RSA security module",
-            cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-rsa -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
-        ))
-    with open(TALER_UNIT_FILES_DIR / "taler-local-exchange-secmod-eddsa.service", "w") as exchange_eddsa_unit:
-        exchange_eddsa_unit.write(unit_file_content(
-            description = "Taler Exchange EDDSA security module",
-            cmd = f"{TALER_PREFIX}/bin/taler-exchange-secmod-eddsa -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}"
-        ))
-    with open(TALER_UNIT_FILES_DIR / "taler-local-merchant-backend.service", 
"w") as merchant_unit:
-        merchant_unit.write(unit_file_content(
-            description = "Taler Merchant backend",
-            cmd = f"{TALER_PREFIX}/bin/taler-merchant-httpd -L DEBUG -c 
{CFG_OUTDIR / 'taler.conf'}",
-            env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if 
os.environ.get("PGPORT") else None
-        ))
-    with open(TALER_UNIT_FILES_DIR / "taler-local-merchant-backend-token.service", "w") as merchant_token_unit:
-        merchant_token_unit.write(unit_file_content(
-            description = "Taler Merchant backend with auth token to allow default instance creation.",
-            cmd = f"{TALER_PREFIX}/bin/taler-merchant-httpd -a {TALER_MERCHANT_TOKEN} -L DEBUG -c {CFG_OUTDIR / 'taler.conf'}",
-            env = TALER_UNIT_FILES_DIR / "taler-local-postgres.env" if os.environ.get("PGPORT") else None
-        ))
-    # Custom Postgres connection.
-    if os.environ.get("PGPORT"):
-        with open(TALER_UNIT_FILES_DIR / "taler-local-postgres.env", "w") as 
postgres_env:
-            postgres_env.write(f"PGPORT={os.environ.get('PGPORT')}")
-
-    # euFin unit files.
-    with open(TALER_UNIT_FILES_DIR / "taler-local-sandbox.service", "w") as 
sandbox_unit:
-        sandbox_unit.write(unit_file_content(
-            description = "euFin Sandbox",
-            cmd = f"{TALER_PREFIX}/bin/libeufin-sandbox serve 
--with-unix-socket {UNIX_SOCKETS_DIR / 'sandbox.sock'}",
-            env = TALER_UNIT_FILES_DIR / "taler-local-sandbox.env"
-        ))
-    with open(TALER_UNIT_FILES_DIR / "taler-local-nexus.service", "w") as 
nexus_unit:
-        nexus_unit.write(unit_file_content(
-            description = "euFin Nexus",
-            cmd = f"{TALER_PREFIX}/bin/libeufin-nexus serve --with-unix-socket 
{UNIX_SOCKETS_DIR / 'nexus.sock'}",
-            env = TALER_UNIT_FILES_DIR / "taler-local-nexus.env"
-        ))
-    # euFin env files.
-    with open(TALER_UNIT_FILES_DIR / "taler-local-sandbox.env", "w") as 
sandbox_env:
-        
sandbox_env.write(f"LIBEUFIN_SANDBOX_DB_CONNECTION=jdbc:sqlite:{SANDBOX_DB_FILE}\n")
-        
sandbox_env.write(f"LIBEUFIN_SANDBOX_ADMIN_PASSWORD={SANDBOX_ADMIN_PASSWORD}\n")
-        sandbox_env.write(f"TALER_ENV_URL_INTRO={get_link('/landing/')}\n")
-        
sandbox_env.write(f"TALER_ENV_URL_BANK={get_link('/sandbox/demobanks/default')}\n")
-        
sandbox_env.write(f"TALER_ENV_URL_MERCHANT_BLOG={get_link('/blog/')}\n")
-        
sandbox_env.write(f"TALER_ENV_URL_MERCHANT_DONATIONS={get_link('/donations/')}\n")
-        
sandbox_env.write(f"TALER_ENV_URL_MERCHANT_SURVEY={get_link('/survey/')}\n")
-
-    with open(TALER_UNIT_FILES_DIR / "taler-local-nexus.env", "w") as 
nexus_env:
-        
nexus_env.write(f"LIBEUFIN_NEXUS_DB_CONNECTION=jdbc:sqlite:{NEXUS_DB_FILE}\n")
-        nexus_env.write((
-            f"TALER_ENV_URL_INTRO={get_link('/landing/')}\n"
-            f"TALER_ENV_URL_BANK={get_link('/sandbox/demobanks/default')}\n"
-            f"TALER_ENV_URL_MERCHANT_BLOG={get_link('/blog/')}\n"
-            f"TALER_ENV_URL_MERCHANT_DONATIONS={get_link('/donations/')}\n"
-            f"TALER_ENV_URL_MERCHANT_SURVEY={get_link('/survey/')}\n"
-        ))
-    with open(TALER_UNIT_FILES_DIR / "taler-local-donations.service", "w") as 
donations_unit:
-        donations_unit.write(unit_file_content(
-            description = "Donation Website that accepts Taler payments.",
-            cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos donations -c 
{CFG_OUTDIR / 'taler.conf'}",
-            env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
-        ))
-    with open(TALER_UNIT_FILES_DIR / "taler-local-blog.service", "w") as 
blog_unit:
-        blog_unit.write(unit_file_content(
-            description = "Blog that accepts Taler payments.",
-            cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos blog -c 
{CFG_OUTDIR / 'taler.conf'}",
-            env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
-        ))
-    with open(TALER_UNIT_FILES_DIR / "taler-local-survey.service", "w") as 
survey_unit:
-        survey_unit.write(unit_file_content(
-            description = "Survey Website awarding tips via Taler.",
-            cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos survey -c 
{CFG_OUTDIR / 'taler.conf'}",
-            env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
-        ))
-    with open(TALER_UNIT_FILES_DIR / "taler-local-landing.service", "w") as 
landing_unit:
-        landing_unit.write(unit_file_content(
-            description = "Landing Website of Taler demo.",
-            cmd = f"{TALER_PREFIX}/bin/taler-merchant-demos landing -c 
{CFG_OUTDIR / 'taler.conf'}",
-            env = TALER_UNIT_FILES_DIR / "taler-local-frontends.env"
-        ))
-    with open(TALER_UNIT_FILES_DIR / "taler-local-frontends.env", "w") as 
frontends_env:
-        frontends_env.write((
-            f"PATH={os.environ.get('PATH')}\n"
-            f"TALER_CONFIG_FILE={CFG_OUTDIR / 'taler.conf'}\n"
-            f"TALER_ENV_URL_INTRO={get_link('/landing/')}\n"
-            f"TALER_ENV_URL_BANK={get_link('/sandbox/demobanks/default/')}\n"
-            f"TALER_ENV_URL_MERCHANT_BLOG={get_link('/blog/')}\n"
-            f"TALER_ENV_URL_MERCHANT_DONATIONS={get_link('/donations/')}\n"
-            f"TALER_ENV_URL_MERCHANT_SURVEY={get_link('/survey/')}\n"
-        ))
-    with open(TALER_UNIT_FILES_DIR / "taler-local-nginx.service", "w") as 
nginx_unit:
-            nginx_unit.write(unit_file_content(
-                description = "Nginx: reverse proxy for taler-local.",
-                cmd = f"nginx -c {CFG_OUTDIR / 'nginx.conf'}",
-            ))
-    print(" OK")
-    print_nn("Reload SystemD...")
-    Command(["systemctl", "--user", "daemon-reload"]).run()
-    atexit.register(lambda: subprocess.run(
-            ["systemctl", "--user", "stop", "taler-local-*.service"],
-            check=True
-        )
-    )
-    print(" OK")
-    print_nn("Generate exchange's master key...")
-    EXCHANGE_MASTER_PUB = Command(
-        [
-            f"{TALER_PREFIX}/bin/taler-exchange-offline",
-            "-c", CFG_OUTDIR / "taler.conf",
-            "setup"
-        ],
-        capture_stdout=True
-    ).run()
-    print(" OK")
-    print_nn("Specify exchange master pub in taler.conf...")
-    config_specify_master_pub(
-        CFG_OUTDIR / "taler.conf",
-        CURRENCY,
-        EXCHANGE_MASTER_PUB
-    )
-    print(" OK")
-    print_nn("Generating sync.conf...")
-    config_sync(
-        "sync.conf",
-        outdir=CFG_OUTDIR,
-        unix_sockets_dir=UNIX_SOCKETS_DIR,
-        currency=CURRENCY,
-        api_key=FRONTENDS_API_TOKEN,
-        rev_proxy_url=get_link(),
-        postgres_db_name=postgres_db_name
-    )
-    print(" OK")
-    print_nn("Reset and init exchange DB..")
-    Command([
-        f"{TALER_PREFIX}/bin/taler-exchange-dbinit",
-        "-c", CFG_OUTDIR / "taler.conf",
-        "--reset"]
-    ).run()
-    print(" OK")
-
-    print_nn("Launching X-Forwarded-Host port redirect...")
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-port-redirect.service"], check=True)
-    time.sleep(1)
-    print(" OK")
-    print_nn("Launching the reverse proxy...")
-    with open(CFG_OUTDIR / "nginx.conf", "w") as nginx_conf:
-        nginx_conf.write((
-            f"error_log  {LOG_DIR / 'nginx.log'};\n"
-            f"pid {TALER_ROOT_DIR / 'nginx.pid'};\n"
-            "daemon off;\n"
-            "events {}\n"
-            "http {\n"
-              f"access_log {LOG_DIR / 'nginx.log'};\n"
-              "server {\n"
-                f"listen {REV_PROXY_PORT};\n"
-                f"listen [::]:{REV_PROXY_PORT};\n"
-                "location / {\n"
-                  "return 200 'Hello, I am Nginx - proxying taler-local\n';\n"
-                "}\n"
-                "location ~* ^/(?<component>[a-z\-]+)(/(?<taler_uri>.*))? {\n"
-                "proxy_redirect off;\n"
-                "proxy_set_header X-Forwarded-Prefix /$component;\n"
-                f"proxy_set_header X-Forwarded-Host {x_forwarded_host};\n"
-                f"proxy_set_header X-Forwarded-Proto {x_forwarded_proto};\n"
-                f"client_body_temp_path /tmp/taler-local-nginx;\n"
-                f"proxy_pass 
http://unix:{UNIX_SOCKETS_DIR}/$component.sock:/$taler_uri?$args;\n";
-                "}\n"
-              "}\n"
-            "}\n"
-        ))
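The location regex above splits every request path into a $component prefix and the remaining $taler_uri, then proxies to the matching per-component unix socket. As a rough illustration (not part of the removed script; Python spells named groups (?P<...>) where nginx/PCRE also accepts (?<...>)):

    import re

    # Same split as the nginx location block above, in Python regex syntax.
    pattern = re.compile(r"^/(?P<component>[a-z\-]+)(/(?P<taler_uri>.*))?")
    m = pattern.match("/exchange/keys")
    assert m.group("component") == "exchange"   # selects .../exchange.sock
    assert m.group("taler_uri") == "keys"       # request path on that socket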
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-nginx.service"], check=True)
-    if not is_serving(REV_PROXY_URL + "/"):
-        fail(f"Reverse proxy did not start correctly")
-    # Do check.
-    print(" OK")
-    print_nn("Launching the exchange RSA helper...")
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-exchange-secmod-rsa.service"])
-    print(" OK")
-    print_nn("Launching the exchange EDDSA helper...")
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-exchange-secmod-eddsa.service"])
-    print(" OK")
-    print_nn("Launching the exchange CS helper...")
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-exchange-secmod-cs.service"])
-    print(" OK")
-    print_nn("Launching the exchange...")
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-exchange-httpd.service"])
-    if not is_serving(REV_PROXY_URL + "/exchange/"):
-        fail(f"Exchange did not start correctly.")
-    print(" OK")
-    print_nn("exchange-offline: signing key material...")
-    Command([
-        f"{TALER_PREFIX}/bin/taler-exchange-offline",
-        "-c", CFG_OUTDIR / "taler.conf",
-        "download", "sign", "upload"
-    ]).run()
-    print(" OK")
-    # Set up wire fees for next 5 years
-    NOW = datetime.now()
-    YEAR = NOW.year
-    print_nn("Setting wire fees for the next 5 years...")
-    for year in range(YEAR, YEAR+5):
-        Command(
-            [
-                f"{TALER_PREFIX}/bin/taler-exchange-offline",
-                "-c", CFG_OUTDIR / "taler.conf",
-                "wire-fee",
-                str(year),
-                WIRE_METHOD,
-                CURRENCY + ":0.01",
-                CURRENCY + ":0.01",
-                CURRENCY + ":0.01",
-                "upload"
-            ],
-            custom_name="set-wire-fee"
-        ).run()
-    print(" OK")
-    print_nn("Reset and init auditor DB..")
-    Command([
-        f"{TALER_PREFIX}/bin/taler-auditor-dbinit",
-        "-c", CFG_OUTDIR / "taler.conf",
-        "--reset"]
-    ).run()
-    print(" OK")
-    print_nn("Add this exchange to the auditor...")
-    Command(
-        [
-            f"{TALER_PREFIX}/bin/taler-auditor-exchange",
-            "-c", CFG_OUTDIR / "taler.conf",
-            "-m", EXCHANGE_MASTER_PUB,
-            "-u", REV_PROXY_URL + "/exchange/"
-        ],
-    ).run()
-    print(" OK")
-    ## Step 4:  Set up euFin
-    print_nn("Resetting euFin databases...")
-    try:
-        remove(SANDBOX_DB_FILE)
-        remove(NEXUS_DB_FILE)
-    except OSError as error:
-        if error.errno != errno.ENOENT:
-            raise error
-    print(" OK")
-    # Make the 'default' demobank at Sandbox.  (No signup bonus)
-    Command([
-        f"{TALER_PREFIX}/bin/libeufin-sandbox",
-        "config", "--currency", "CHF", "--without-signup-bonus", "default"],
-        env={
-            "PATH": os.environ["PATH"],
-            "LIBEUFIN_SANDBOX_DB_CONNECTION": f"jdbc:sqlite:{SANDBOX_DB_FILE}"
-        }).run()
-    # This step transparently creates a default demobank.
-    print_nn("Launching Sandbox...")
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-sandbox.service"])
-    if not is_serving(SANDBOX_URL):
-        fail(f"Sandbox did not start correctly.")
-    print(" OK")
-    print_nn("Make Sandbox EBICS host...")
-    Command(
-        [
-            f"{TALER_PREFIX}/bin/libeufin-cli", "sandbox",
-            "--sandbox-url", SANDBOX_URL,
-            "ebicshost", "create",
-            "--host-id", EBICS_HOST_ID,
-        ],
-        env=get_sandbox_cli_env(
-            SANDBOX_ADMIN_USERNAME,
-            SANDBOX_ADMIN_PASSWORD,
-        ),
-        custom_name="sandbox-create-ebicshost",
-    ).run()
-    print(" OK")
-
-    print_nn("Create Exchange account at Sandbox...")
-    prepare_sandbox_account(
-        sandbox_url=SANDBOX_URL,
-        ebics_host_id=EBICS_HOST_ID,
-        ebics_partner_id=EXCHANGE_EBICS_PARTNER_ID,
-        ebics_user_id=EXCHANGE_EBICS_USER_ID,
-        person_name="Exchange Owner",
-        bank_account_name=EXCHANGE_BANK_ACCOUNT_SANDBOX,
-        password=EXCHANGE_BANK_ACCOUNT_PASSWORD
-    )
-    print(" OK")
-    print_nn("Getting exchange payto-URI from the bank...")
-    exchange_bank_account_info = get_sandbox_account_info(
-        SANDBOX_URL,
-        EXCHANGE_BANK_ACCOUNT_SANDBOX,
-        EXCHANGE_BANK_ACCOUNT_PASSWORD
-    )
-    EXCHANGE_PAYTO = exchange_bank_account_info["paytoUri"]
-    print(" OK")
-    print_nn("Specify own payto-URI to exchange's configuration..")
-    Command([
-        f"{TALER_PREFIX}/bin/taler-config", "-c", CFG_OUTDIR / 'taler.conf',
-        "-s", "exchange-account-1", "-o", "payto_uri", "-V",
-        EXCHANGE_PAYTO
-    ]).run()
-    print(" OK")
-    print_nn(f"exchange-offline: enabling {EXCHANGE_PAYTO}...")
-    Command([
-        f"{TALER_PREFIX}/bin/taler-exchange-offline",
-        "-c", CFG_OUTDIR / "taler.conf",
-        "enable-account", EXCHANGE_PAYTO, "upload"
-    ]).run()
-    print(" OK")
-
-    # Give each instance a Sandbox account (note: 'default'
-    # won't have one, as it should typically only manage other
-    # instances).
-    for instance in INSTANCES:
-        instance_id = instance["name"]
-        print_nn(f"Create account of {instance_id} at Sandbox...")
-        prepare_sandbox_account(
-            sandbox_url=SANDBOX_URL,
-            ebics_host_id=EBICS_HOST_ID,
-            ebics_partner_id="unusedMerchantEbicsPartnerId",
-            ebics_user_id=f"unused{instance_id}EbicsUserId",
-            person_name=f"Shop Owner of {instance_id}",
-            bank_account_name=f"sandbox-account-{instance_id.lower()}",
-            password=ALL_INSTANCES_BANK_PASSWORD,
-            is_public=instance.get("isPublic")
-        )
-        print(" OK")
-    print_nn("Create Customer account at Sandbox...")
-    prepare_sandbox_account(
-        sandbox_url=SANDBOX_URL,
-        ebics_host_id=EBICS_HOST_ID,
-        ebics_partner_id="unusedCustomerEbicsPartnerId",
-        ebics_user_id="unusedCustomerEbicsUserId",
-        person_name="Customer Person",
-        bank_account_name=CUSTOMER_BANK_ACCOUNT,
-        password=CUSTOMER_BANK_PASSWORD
-    )
-    print(" OK")
-    print_nn("Make Nexus superuser ...")
-    Command(
-        [
-            f"{TALER_PREFIX}/bin/libeufin-nexus", "superuser",
-            EXCHANGE_NEXUS_USERNAME,
-            "--password", EXCHANGE_NEXUS_PASSWORD
-        ],
-        env=get_nexus_server_env(
-            NEXUS_DB_FILE,
-            NEXUS_URL
-        ),
-        custom_name="nexus-superuser",
-    ).run()
-    print(" OK")
-
-    print_nn("Launching Nexus...")
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-nexus.service"])
-    if not is_serving(NEXUS_URL):
-        fail(f"Nexus did not start correctly")
-    print(" OK")
-
-    print_nn("Create Exchange account at Nexus...")
-    prepare_nexus_account(
-        ebics_url=EBICS_URL,
-        ebics_host_id=EBICS_HOST_ID,
-        ebics_partner_id=EXCHANGE_EBICS_PARTNER_ID,
-        ebics_user_id=EXCHANGE_EBICS_USER_ID,
-        bank_connection_name=EXCHANGE_BANK_CONNECTION,
-        bank_account_name_sandbox=EXCHANGE_BANK_ACCOUNT_SANDBOX,
-        bank_account_name_nexus=EXCHANGE_BANK_ACCOUNT_NEXUS,
-        env=get_nexus_cli_env(
-            EXCHANGE_NEXUS_USERNAME,
-            EXCHANGE_NEXUS_PASSWORD,
-            NEXUS_URL
-        )
-    )
-    print(" OK")
-
-    print_nn("Create Taler facade ...")
-    Command(
-        [
-            f"{TALER_PREFIX}/bin/libeufin-cli", "facades",
-            "new-taler-wire-gateway-facade",
-            "--currency", CURRENCY,
-            "--facade-name", EXCHANGE_FACADE_NAME,
-            EXCHANGE_BANK_CONNECTION,
-            EXCHANGE_BANK_ACCOUNT_NEXUS
-        ],
-        env=get_nexus_cli_env(
-            EXCHANGE_NEXUS_USERNAME,
-            EXCHANGE_NEXUS_PASSWORD,
-            NEXUS_URL
-        ),
-        custom_name="create-taler-facade",
-    ).run()
-    print(" OK")
-    try:
-        response = requests.get(
-            NEXUS_URL + "/facades",
-            auth=requests.auth.HTTPBasicAuth(
-                EXCHANGE_NEXUS_USERNAME,
-                EXCHANGE_NEXUS_PASSWORD
-            )
-        )
-        response.raise_for_status()
-    except Exception as error:
-        fail(error)
-    FACADE_URL = response.json().get("facades")[0].get("baseUrl")
-    print_nn("Set suggested exchange at Sandbox...")
-    Command([
-        f"{TALER_PREFIX}/bin/libeufin-sandbox",
-        "default-exchange",
-        get_link('/exchange/'),
-        EXCHANGE_PAYTO],
-        env={
-            "PATH": os.environ["PATH"],
-            "LIBEUFIN_SANDBOX_DB_CONNECTION": f"jdbc:sqlite:{SANDBOX_DB_FILE}"
-        }).run()
-    print(" OK")
-
-    # Point the exchange to the facade.
-    Command(
-        [
-            f"{TALER_PREFIX}/bin/taler-config",
-            "-c", CFG_OUTDIR / "taler.conf",
-            "-s", "exchange-accountcredentials-1",
-            "-o" "wire_gateway_auth_method",
-            "-V", "basic"
-        ],
-        custom_name="specify-wire-gateway-auth-method",
-    ).run()
-    Command(
-        [
-            f"{TALER_PREFIX}/bin/taler-config",
-            "-c", CFG_OUTDIR / "taler.conf",
-            "-s", "exchange-accountcredentials-1",
-            "-o" "wire_gateway_url",
-            "-V", FACADE_URL
-        ],
-        custom_name="specify-facade-url",
-    ).run()
-    Command(
-        [
-            f"{TALER_PREFIX}/bin/taler-config",
-            "-c", CFG_OUTDIR / "taler.conf",
-            "-s", "exchange-accountcredentials-1",
-            "-o" "username",
-            "-V", EXCHANGE_NEXUS_USERNAME
-        ],
-        custom_name="specify-username-for-facade",
-    ).run()
-    Command(
-        [
-            f"{TALER_PREFIX}/bin/taler-config",
-            "-c", CFG_OUTDIR / "taler.conf",
-            "-s", "exchange-accountcredentials-1",
-            "-o" "password",
-            "-V", EXCHANGE_NEXUS_PASSWORD
-        ],
-        custom_name="specify-password-for-facade",
-    ).run()
-
-    ## Step 6: Set up merchant
-
-    print_nn("Reset and init merchant database...")
-    Command([
-        f"{TALER_PREFIX}/bin/taler-merchant-dbinit",
-        "-c", CFG_OUTDIR / "taler.conf",
-        "--reset"
-    ]).run()
-    print(" OK")
-
-    def ensure_instance(
-            currency,
-            instance_id,
-            backend_url,
-            bank_hostname,
-            wire_method,
-            auth_token
-        ):
-        auth_header = {"Authorization": f"Bearer {auth_token}"}
-        resp = requests.get(
-            urljoin_nodrop(backend_url, f"management/instances/{instance_id}"),
-            headers = auth_header
-        )
-        bankaccount_info = get_sandbox_account_info(
-            SANDBOX_URL,
-            f"sandbox-account-{instance_id.lower()}",
-            ALL_INSTANCES_BANK_PASSWORD
-        )
-        req = dict(
-            id=instance_id,
-            name=f"Name of '{instance_id}'",
-            payto_uris=[bankaccount_info["paytoUri"]],
-            address=dict(),
-            jurisdiction=dict(),
-            default_max_wire_fee=f"{currency}:1",
-            default_wire_fee_amortization=3,
-            default_max_deposit_fee=f"{currency}:1",
-            default_wire_transfer_delay=dict(d_us="forever"),
-            default_pay_delay=dict(d_us="forever"),
-            auth=dict(method="token", token=auth_token),
-        )
-        http_method = requests.post
-        endpoint = "management/instances"
-
-        # Instance exists, patching it.
-        if resp.status_code == 200:
-            print(f"Patching instance '{instance_id}'")
-            http_method = requests.patch
-            endpoint = f"management/instances/{instance_id}"
-
-        resp = http_method(
-            urljoin_nodrop(backend_url, endpoint),
-            json=req,
-            headers = auth_header
-        )
-        if resp.status_code < 200 or resp.status_code >= 300:
-            print(f"Backend responds: {resp.status_code}/{resp.text}")
-            fail(f"Could not create (or patch) instance '{instance_id}'")
-
-    print_nn(f"Start merchant (with TALER_MERCHANT_TOKEN into the env)...")
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-merchant-backend-token.service"], check=True)
-    if not is_serving(REV_PROXY_URL + "/merchant-backend/config"):
-        fail(
-            f"Merchant backend did not start correctly.",
-        )
-    print(" OK")
-    print_nn("Give default instance a bank account...")
-    prepare_sandbox_account(
-        sandbox_url=SANDBOX_URL,
-        ebics_host_id=EBICS_HOST_ID,
-        ebics_partner_id="unusedMerchantEbicsPartnerId",
-        ebics_user_id=f"unusedDefaultInstanceEbicsUserId",
-        person_name=f"Shop Owner of default instance",
-        bank_account_name="sandbox-account-default",
-        password=ALL_INSTANCES_BANK_PASSWORD
-    )
-    print(" OK")
-    ensure_instance(
-        currency=CURRENCY,
-        instance_id="default",
-        backend_url = REV_PROXY_URL + "/merchant-backend",
-        bank_hostname = REV_PROXY_NETLOC + "/sandbox",
-        wire_method = WIRE_METHOD,
-        auth_token=FRONTENDS_API_TOKEN
-    )
-
-    print_nn("Restarting the merchant WITHOUT the auth-token in the env...")
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-merchant-backend.service"], check=True)
-    if not is_serving(REV_PROXY_URL + "/merchant-backend/config"):
-        # check_running logs errors already.
-        fail(f"Merchant backend did not re start correctly.")
-    print(" OK")
-
-    for instance in INSTANCES:
-        instance_id = instance["name"]
-        print_nn(f"Creating the {instance_id} instance...")
-        ensure_instance(
-            currency=CURRENCY,
-            instance_id=instance_id,
-            backend_url = REV_PROXY_URL + "/merchant-backend",
-            bank_hostname = REV_PROXY_NETLOC + "/sandbox",
-            wire_method = WIRE_METHOD,
-            auth_token=FRONTENDS_API_TOKEN
-        )
-        print(" OK")
-    print_nn("Creating tip reserve...")
-    create_tip_reserve()
-    print(" OK")
-    # 1 second to let Nexus read the payment from
-    # Sandbox, 1 second to let the Exchange Wirewatch
-    # to read the payment from Nexus.
-    print_nn("Sleep 2 seconds to let the tip reserve settle...")
-    time.sleep(2)
-    print(" OK")
-
-@cli.command()
-def launch():
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-port-redirect.service"], check=True)
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-nginx.service"], check=True)
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-exchange-secmod-rsa.service"], check=True)
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-exchange-secmod-eddsa.service"], check=True)
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-exchange-secmod-cs.service"], check=True)
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-exchange-httpd.service"], check=True)
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-exchange-wirewatch.service"], check=True)
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-exchange-aggregator.service"], check=True)
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-exchange-transfer.service"], check=True)
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-merchant-backend.service"], check=True)
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-sandbox.service"], check=True)
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-nexus.service"], check=True)
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-donations.service"], check=True)
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-blog.service"], check=True)
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-survey.service"], check=True)
-    subprocess.run(["systemctl", "--user", "start", 
"taler-local-landing.service"], check=True)
-
-    print((
-        "\n"
-        "Taler launched!\n\n"
-        f"Serving {REV_PROXY_URL + '/$service'}\n\n"
-        "Services:\n"
-        "  - landing\n"
-        "  - exchange\n"
-        "  - merchant-backend\n"
-        "  - sandbox\n"
-        "  - nexus\n"
-        "  - blog\n"
-        "  - survey\n"
-        "  - donations\n"
-    ))
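The sixteen subprocess.run() calls at the top of launch() differ only in the unit name; a compact equivalent (a sketch only, not in the removed script) would loop over the unit list:

    import subprocess

    UNITS = [
        "port-redirect", "nginx", "exchange-secmod-rsa",
        "exchange-secmod-eddsa", "exchange-secmod-cs", "exchange-httpd",
        "exchange-wirewatch", "exchange-aggregator", "exchange-transfer",
        "merchant-backend", "sandbox", "nexus",
        "donations", "blog", "survey", "landing",
    ]
    # Start every taler-local user unit in order, failing fast on errors.
    for unit in UNITS:
        subprocess.run(
            ["systemctl", "--user", "start", f"taler-local-{unit}.service"],
            check=True,
        )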
-
-@cli.command()
-def stop():
-    subprocess.run(["systemctl", "--user", "stop", "taler-local-*.service"], 
check=True)
-
-@cli.command()
-@click.option(
-    "--bank-host", metavar="HOST",
-    help="Host serving the bank, defaults to 'localhost:8080'.",
-    default="localhost:8080"
-)
-@click.option(
-    "--bank-proto", metavar="PROTO",
-    help="Protocol accepred by the bank, defaults to 'http'",
-    default="http"
-)
-@click.option(
-    "--with-qr-code", is_flag=True,
-    help="""When specified, it prints the QR code on screen,
-and waits the user's input before confirming the operation
-at the bank.""",
-    default=False
-)
-def withdraw(bank_host, bank_proto, with_qr_code):
-    print_nn("Create withdrawal operation...")
-    bank_base_url = bank_proto + "://" + bank_host
-    resp = requests.post(bank_base_url +
-        f"/sandbox/demobanks/default/access-api/accounts/{CUSTOMER_BANK_ACCOUNT}/withdrawals",
-        json = dict(amount=CURRENCY + ":5"),
-        auth = requests.auth.HTTPBasicAuth(CUSTOMER_BANK_ACCOUNT, CUSTOMER_BANK_PASSWORD)
-    )
-    try:
-        resp.raise_for_status()
-    except Exception as error:
-        print("Could not create withdrawal")
-        print(error)
-        exit(1)
-    withdrawal_id = resp.json()["withdrawal_id"]
-    withdraw_uri = resp.json()["taler_withdraw_uri"]
-    print(" OK")
-    print("Let wallet specify the reserve public key at the bank...")
-    # Let wallet give the reserve public key to the bank.
-    if with_qr_code:
-        withdraw_QR_code = qrcode.QRCode()
-        withdraw_QR_code.add_data(withdraw_uri)
-        withdraw_QR_code.print_ascii()
-        print(withdraw_uri)
-        input("After scanning the code, press ENTER to wire funds to the 
Exchange: ")
-    else:
-        subprocess.run(["taler-wallet-cli", "handle-uri", withdraw_uri], 
check=True)
-    # Let the user confirm the withdrawal operation and
-    # get the bank wire the funds.
-    print_nn("Confirm withdrawal operation at the bank...")
-    resp = requests.post(bank_base_url +
-        f"/sandbox/demobanks/default/access-api/accounts/{CUSTOMER_BANK_ACCOUNT}/withdrawals/{withdrawal_id}/confirm",
-        auth = requests.auth.HTTPBasicAuth(CUSTOMER_BANK_ACCOUNT, CUSTOMER_BANK_PASSWORD)
-    )
-    try:
-        resp.raise_for_status()
-    except Exception as error:
-        print("Could not create withdrawal")
-        print(error)
-        exit(1)
-    print(" OK")
-    if not with_qr_code:
-       print("Let wallet complete all pending operations")
-       # FIXME: Why the following operation twice?
-       subprocess.run(["taler-wallet-cli", "handle-uri", withdraw_uri], 
check=True)
-       subprocess.run(["taler-wallet-cli", "run-until-done"], check=True)
-
-if __name__ == "__main__":
-    cli()



