From: gnunet
Subject: [taler-deployment] 02/02: Developer utilities. Work in progress
Date: Thu, 23 Sep 2021 16:37:34 +0200

This is an automated email from the git hooks/post-receive script.

ms pushed a commit to branch master
in repository deployment.

commit e5b76632437824c77cbc881f23b2e6bb350e25d6
Author: ms <ms@taler.net>
AuthorDate: Thu Sep 23 16:30:59 2021 +0200

    Developer utilities.  Work in progress
    
    Drafting programs that will help build, configure and
    launch a whole Taler setup on the developer's machine
    without having to configure Nginx, version private keys,
    or source environment scripts, while generally minimizing
    the number of configuration values that must be provided.
---
 bin/WIP/taler-config-local  | 244 ++++++++++++++++++++++++++++
 bin/WIP/taler-local         | 380 ++++++++++++++++++++++++++++++++++++++++++++
 bin/WIP/taler-prepare-local | 285 +++++++++++++++++++++++++++++++++
 3 files changed, 909 insertions(+)
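For orientation, the intended developer flow across the three tools looks
roughly like this (a minimal sketch assembled from the commands and options
defined below; the values are illustrative and the scripts are still work in
progress, so flags and defaults may change):

    # Clone and build the Taler components under ~/.taler-sources and ~/.local
    taler-local bootstrap
    taler-local build

    # Generate taler.conf, sync.conf and anastasis.conf under ~/.config
    taler-config-local \
      --currency EUR \
      --outdir "$HOME/.config" \
      --exchange-pub <EXCHANGE_MASTER_PUB> \
      --exchange-wire-address EX00000000000000000000 \
      --merchant-wire-address ME00000000000000000001 \
      --rev-proxy-url http://localhost:8080 \
      --frontends-apitoken secret-token:secret

    # Prepare databases, exchange key material, euFin (Sandbox + Nexus) and the merchant
    taler-prepare-local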

diff --git a/bin/WIP/taler-config-local b/bin/WIP/taler-config-local
new file mode 100755
index 0000000..38144e4
--- /dev/null
+++ b/bin/WIP/taler-config-local
@@ -0,0 +1,244 @@
+#!/usr/bin/env python3
+import click
+import sys
+from collections import OrderedDict
+import json
+import os
+import urllib.parse
+import stat
+from taler_urls import get_urls, get_port
+
+
+class ConfigFile:
+    def __init__(self, filename):
+        self.sections = OrderedDict()
+        self.filename = filename
+
+    def destroy(self):
+        del self.sections
+        self.sections = OrderedDict()
+
+    def cfg_put(self, section_name, key, value):
+        s = self.sections[section_name] = self.sections.get(section_name, OrderedDict())
+        s[key] = value
+
+    def cfg_write(self, outdir):
+
+        if outdir:
+            fstream = open(os.path.join(outdir, self.filename), "w")
+        else:
+            fstream = sys.stdout
+
+        for section_name, section in self.sections.items():
+            fstream.write("[" + section_name + "]" + "\n")
+            for key, value in section.items():
+                fstream.write(key + " = " + value + "\n")
+            fstream.write("\n")
+        if outdir:
+            fstream.close()
+
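+# Emit one denomination ("coin") section for the exchange configuration,
+# with its value, validity durations and fees in the given currency.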
+def coin(
+    obj,
+    currency,
+    name,
+    value,
+    d_withdraw="3 years",
+    d_spend="5 years",
+    d_legal="10 years",
+    f_withdraw="0.01",
+    f_deposit="0.01",
+    f_refresh="0.01",
+    f_refund="0.01",
+    rsa_keysize="2048",
+):
+    sec = "coin_" + currency + "_" + name
+    obj.cfg_put(sec, "value", currency + ":" + value)
+    obj.cfg_put(sec, "duration_withdraw", d_withdraw)
+    obj.cfg_put(sec, "duration_spend", d_spend)
+    obj.cfg_put(sec, "duration_legal", d_legal)
+    obj.cfg_put(sec, "fee_withdraw", currency + ":" + f_withdraw)
+    obj.cfg_put(sec, "fee_refresh", currency + ":" + f_refresh)
+    obj.cfg_put(sec, "fee_refund", currency + ":" + f_refund)
+    obj.cfg_put(sec, "fee_deposit", currency + ":" + f_deposit)
+    obj.cfg_put(sec, "rsa_keysize", rsa_keysize)
+
+
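+# Fill in the bulk of taler.conf: paths, bank and shop frontends (served
+# over uWSGI unix sockets), merchant, auditor, exchange, account and coin
+# sections.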
+def config(
+        obj,
+        rev_proxy_url,
+        wire_method,
+        currency,
+        exchange_wire_address,
+        merchant_wire_address,
+        exchange_wire_gateway_username,
+        exchange_wire_gateway_password,
+):
+    obj.cfg_put("paths", "TALER_DATA_HOME", "${HOME}/.taler-data")
+    obj.cfg_put("paths", "TALER_RUNTIME_DIR", "${HOME}/.taler-runtime")
+    obj.cfg_put("taler", "CURRENCY", obj.currency)
+    obj.cfg_put("taler", "CURRENCY_ROUND_UNIT", f"{currency}:0.01")
+    obj.cfg_put("bank", "serve", "uwsgi")
+    obj.cfg_put("bank", "uwsgi_serve", "unix")
+    obj.cfg_put("bank", "uwsgi_unixpath", "/tmp/bank.sock")
+    obj.cfg_put("bank", "uwsgi_unixpath_mode", "660")
+    obj.cfg_put("bank", "database", "taler")
+    obj.cfg_put("bank", "max_debt", "%s:500.0" % obj.currency)
+    obj.cfg_put("bank", "max_debt_bank", "%s:1000000000.0" % obj.currency)
+    obj.cfg_put("bank", "allow_registrations", "YES")
+    obj.cfg_put("bank", "base_url", rev_proxy_url + "/bank/")
+    obj.cfg_put("bank", "database", "postgres:///taler")
+    obj.cfg_put("bank", "suggested_exchange", rev_proxy_url + "/exchange/")
+
+    obj.cfg_put("donations", "serve", "uwsgi")
+    obj.cfg_put("donations", "uwsgi_serve", "unix")
+    obj.cfg_put("donations", "uwsgi_unixpath", "/tmp/donations.sock")
+    obj.cfg_put("donations", "uwsgi_unixpath_mode", "660")
+
+    obj.cfg_put("landing", "serve", "uwsgi")
+    obj.cfg_put("landing", "uwsgi_serve", "unix")
+    obj.cfg_put("landing", "uwsgi_unixpath", "/tmp/landing.sock")
+    obj.cfg_put("landing", "uwsgi_unixpath_mode", "660")
+
+    obj.cfg_put("blog", "serve", "uwsgi")
+    obj.cfg_put("blog", "uwsgi_serve", "unix")
+    obj.cfg_put("blog", "uwsgi_unixpath", "/tmp/blog.sock")
+    obj.cfg_put("blog", "uwsgi_unixpath_mode", "660")
+
+    obj.cfg_put("survey", "serve", "uwsgi")
+    obj.cfg_put("survey", "uwsgi_serve", "unix")
+    obj.cfg_put("survey", "uwsgi_unixpath", "/tmp/survey.sock")
+    obj.cfg_put("survey", "uwsgi_unixpath_mode", "660")
+    obj.cfg_put("survey", "bank_password", "x")
+
+    obj.cfg_put("merchant", "serve", "unix")
+    obj.cfg_put("merchant", "unixpath", "/tmp/merchant-backend.sock")
+    obj.cfg_put("merchant", "wire_transfer_delay", "0 s")
+    obj.cfg_put("merchant", "default_max_wire_fee", obj.currency + ":" + 
"0.01")
+    obj.cfg_put("merchant", "default_max_deposit_fee", obj.currency + ":" + 
"0.05")
+    obj.cfg_put("merchantdb-postgres", "config", "postgres:///taler")
+
+    obj.cfg_put("frontends", "backend", rev_proxy_url + "/merchant-backend/")
+    obj.cfg_put(
+        "merchant-exchange-{}".format(currency),
+        "exchange_base_url", rev_proxy_url + "/exchange/",
+    )
+
+    obj.cfg_put("auditor", "serve", "unix")
+    # FIXME: both below used?
+    obj.cfg_put("auditor", "base_url", rev_proxy_url + "/auditor")
+    obj.cfg_put("auditor", "auditor_url", rev_proxy_url + "/auditor")
+    obj.cfg_put("auditor", "unixpath", "/tmp/auditor.sock")
+    obj.cfg_put("auditor", "tiny_amount", currency + ":0.01")
+
+    obj.cfg_put("taler-exchange-secmod-eddsa", "unixpath", 
"/tmp/exchange-secmod-eddsa.sock")
+    obj.cfg_put("taler-exchange-secmod-rsa", "unixpath", 
"/tmp/exchange-secmod-rsa.sock")
+    obj.cfg_put("taler-exchange-secmod-rsa", "sm_priv_key",
+                "/tmp/taler-data/taler-exchange-secmod-rsa/secmod-private-key"
+    )
+    obj.cfg_put("exchange", "base_url", rev_proxy_url + "/exchange/")
+
+    obj.cfg_put("exchange", "serve", "unix")
+    obj.cfg_put("exchange", "unixpath", "/tmp/exchange.sock")
+
+    obj.cfg_put("exchange", "terms_etag", "0")
+    obj.cfg_put("exchange", "terms_dir", 
"$HOME/local/share/taler-exchange/tos")
+    obj.cfg_put("exchange", "privacy_etag", "0")
+    obj.cfg_put("exchange", "privacy_dir", 
"$HOME/local/share/taler-exchange/pp")
+
+
+    obj.cfg_put("exchangedb-postgres", "db_conn_str", "postgres:///taler")
+    obj.cfg_put("exchangedb-postgres", "config", "postgres:///taler")
+    obj.cfg_put("auditordb-postgres", "db_conn_str", "postgres:///taler")
+    obj.cfg_put("auditordb-postgres", "config", "postgres:///taler")
+
+    obj.cfg_put(
+        "exchange-account-1",
+        "payto_uri",
+        f"payto://{wire_method}/{rev_proxy_url + 
'/bank'}/{exchange_wire_address}"
+    )
+    obj.cfg_put("exchange-account-1", "enable_debit", "yes")
+    obj.cfg_put("exchange-account-1", "enable_credit", "yes")
+    obj.cfg_put("merchant-account-merchant", "payto_uri",
+        f"payto://{wire_method}/{rev_proxy_url + 
'/bank'}/{merchant_wire_address}"
+    )
+    obj.cfg_put("merchant-account-merchant",
+                "wire_response",
+                "${TALER_DATA_HOME}/merchant/wire/merchant.json",
+    )
+    obj.cfg_put("merchant-account-merchant", "wire_file_mode", "770")
+
+    # The following block should be obsoleted by the new API to configure instances.
+    merchant_instance_names = ("default", "Tor", "GNUnet", "Taler", "FSF", "Tutorial")
+    for mi in merchant_instance_names:
+        obj.cfg_put("merchant-account-merchant", f"HONOR_{mi}", "YES")
+        obj.cfg_put("merchant-account-merchant", f"ACTIVE_{mi}", "YES")
+
+    coin(obj, currency, "ct_10", "0.10")
+    coin(obj, currency, "1", "1")
+    coin(obj, currency, "2", "2")
+    coin(obj, currency, "5", "5")
+    coin(obj, currency, "10", "10")
+    coin(obj, currency, "1000", "1000")
+
+@click.command()
+@click.option("--currency", required=True)
+@click.option("--outdir", required=True)
+@click.option("--exchange-pub", required=True)
+@click.option("--exchange-wire-address", required=True)
+@click.option("--merchant-wire-address", required=True)
+@click.option("--rev-proxy-url", required=True)
+# Expected to contain already the 'secret-token:' scheme.
+@click.option("--frontends-apitoken", required=True)
+def main(
+        currency,
+        outdir,
+        exchange_pub,
+        exchange_wire_address,
+        merchant_wire_address,
+        rev_proxy_url,
+        frontends_apitoken
+):
+
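+    # Three configuration files are produced under --outdir:
+    # taler.conf, sync.conf and anastasis.conf.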
+    config_files = []
+
+    mc = ConfigFile("taler.conf")
+    mc.cfg_put("frontends", "backend_apikey", f"{frontends_apitoken}")
+    config(mc)  # Fills most of taler.conf; FIXME (WIP): the remaining arguments are not passed yet.
+    config_files.append(mc)
+
+    sc = ConfigFile("sync.conf")
+    sc.cfg_put("taler", "currency", currency)
+    sc.cfg_put("sync", "serve", "unix")
+    sc.cfg_put("sync", "unixpath", "$HOME/sockets/sync.http")
+    sc.cfg_put("sync", "apikey", f"Bearer {frontends_apitoken}")
+    sc.cfg_put("sync", "annual_fee", f"{currency}:0.1")
+    sc.cfg_put("sync", "fulfillment_url", "taler://fulfillment-success/")
+    sc.cfg_put("sync", "payment_backend_url", rev_proxy_url + 
"merchant-backend/instances/Taler/")
+    sc.cfg_put("syncdb-postgres", "config", f"postgres:///taler")
+    config_files.append(sc)
+
+    ac = ConfigFile("anastasis.conf")
+    ac.cfg_put("taler", "currency", currency)
+    ac.cfg_put("anastasis", "serve", "unix")
+    ac.cfg_put("anastasis", "business_name", f"GNU Taler Demo Anastasis 
Provider")
+    ac.cfg_put("anastasis", "unixpath", "/tmp/anastasis.sock")
+    ac.cfg_put("anastasis", "annual_fee", f"{currency}:0")
+    ac.cfg_put("anastasis", "question_cost", f"{currency}:0")
+    ac.cfg_put("anastasis", "insurance", f"{currency}:0")
+    ac.cfg_put("anastasis", "truth_upload_fee", f"{currency}:0")
+    ac.cfg_put("anastasis", "fulfillment_url", "taler://fulfillment-success/")
+    ac.cfg_put("anastasis", "server_salt", "kreb3ia9dmj43gfa")
+    ac.cfg_put("stasis-postgres", "config", f"postgres:///taler")
+    ac.cfg_put("anastasis-merchant-backend",
+               "payment_backend_url",
+               rev_proxy_url "merchant-backend/instances/anastasis/"
+    )
+    ac.cfg_put("anastasis-merchant-backend", "api_key", f"Bearer 
{frontends_apitoken}")
+    ac.cfg_put("authorization-question", "cost", f"{currency}:0")
+    ac.cfg_put("authorization-question", "enabled", "yes")
+    config_files.append(ac)
+
+    assert 0 < len(config_files)
+    for obj in config_files:
+        obj.cfg_write(outdir)
+
+if __name__ == "__main__":
+    main()
diff --git a/bin/WIP/taler-local b/bin/WIP/taler-local
new file mode 100755
index 0000000..eb6589a
--- /dev/null
+++ b/bin/WIP/taler-local
@@ -0,0 +1,380 @@
+#!/usr/bin/env python3
+
+# This file is part of GNU Taler.
+#
+# GNU Taler is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# GNU Taler is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with GNU Taler.  If not, see <https://www.gnu.org/licenses/>.
+
+import click
+import types
+import os
+import sys
+import os.path
+import subprocess
+import time
+import random
+from os import listdir
+from os.path import isdir, join
+from pathlib import Path
+from dataclasses import dataclass
+from typing import List, Callable
+from shutil import copy
+from taler_urls import get_urls
+from string import ascii_letters, ascii_uppercase
+
+@dataclass
+class Repo:
+    name: str
+    url: str
+    deps: List[str]
+    builder: Callable[["Repo", Path], None]
+
+@click.group()
+def cli():
+    pass
+
+def split_repos_list(repos):
+    return [repo for repo in repos.split(",") if repo != ""]
+
+def update_checkout(r: Repo, p: Path):
+    """Clean the repository's working directory and
+    update it to the match the latest version of the upstream branch
+    that we are tracking."""
+    subprocess.run(["git", "-C", str(p), "clean", "-fdx"], check=True)
+    subprocess.run(["git", "-C", str(p), "fetch"], check=True)
+    subprocess.run(["git", "-C", str(p), "reset"], check=True)
+    res = subprocess.run(
+        [
+            "git",
+            "-C",
+            str(p),
+            "rev-parse",
+            "--abbrev-ref",
+            "--symbolic-full-name",
+            "@{u}",
+        ],
+        stderr=subprocess.DEVNULL,
+        stdout=subprocess.PIPE,
+        encoding="utf-8",
+    )
+    if res.returncode != 0:
+        ref = "HEAD"
+    else:
+        ref = res.stdout.strip("\n ")
+    print(f"resetting {r.name} to ref {ref}")
+    subprocess.run(["git", "-C", str(p), "reset", "--hard", ref], check=True)
+
+
+def default_configure(*extra):
+    pfx = Path.home() / ".local"
+    extra_list = list(extra)
+    subprocess.run(["./configure", f"--prefix={pfx}"] + extra_list, check=True)
+
+def pyconfigure(*extra):
+    """For python programs, --prefix doesn't work."""
+    subprocess.run(["./configure"] + list(extra), check=True)
+
+def build_libeufin(r: Repo, p: Path):
+    update_checkout(r, p)
+    subprocess.run(["./bootstrap"], check=True)
+    default_configure()
+    subprocess.run(["make", "install"], check=True)
+    (p / "taler-buildstamp").touch()
+
+def build_libmicrohttpd(r: Repo, p: Path):
+    update_checkout(r, p)
+    subprocess.run(["./bootstrap"], check=True)
+    # Debian gnutls packages are too old ...
+    default_configure("--with-gnutls=/usr/local")
+    subprocess.run(["make"], check=True)
+    subprocess.run(["make", "install"], check=True)
+    (p / "taler-buildstamp").touch()
+
+def build_gnunet(r: Repo, p: Path):
+    update_checkout(r, p)
+    subprocess.run(["./bootstrap"], check=True)
+    pfx = Path.home() / ".local"
+    default_configure(
+        "--enable-logging=verbose",
+        f"--with-microhttpd={pfx}",
+        "--disable-documentation",
+    )
+    subprocess.run(["make", "install"], check=True)
+    (p / "taler-buildstamp").touch()
+
+def build_exchange(r: Repo, p: Path):
+    update_checkout(r, p)
+    subprocess.run(["./bootstrap"], check=True)
+    pfx = Path.home() / ".local"
+    default_configure(
+        "CFLAGS=-ggdb -O0",
+        "--enable-logging=verbose",
+        f"--with-microhttpd={pfx}",
+        f"--with-gnunet={pfx}",
+    )
+    subprocess.run(["make", "install"], check=True)
+    (p / "taler-buildstamp").touch()
+
+def build_wallet(r, p):
+    update_checkout(r, p)
+    subprocess.run(["./bootstrap"], check=True)
+    default_configure()
+    subprocess.run(["make", "install"], check=True)
+    (p / "taler-buildstamp").touch()
+
+def build_twister(r, p):
+    update_checkout(r, p)
+    subprocess.run(["./bootstrap"], check=True)
+    pfx = Path.home() / ".local"
+    default_configure(
+        "CFLAGS=-ggdb -O0",
+        "--enable-logging=verbose",
+        f"--with-exchange={pfx}",
+        f"--with-gnunet={pfx}",
+    )
+    subprocess.run(["make", "install"], check=True)
+    (p / "taler-buildstamp").touch()
+
+
+def build_merchant(r, p):
+    update_checkout(r, p)
+    subprocess.run(["./bootstrap"], check=True)
+    pfx = Path.home() / ".local"
+    default_configure(
+        "CFLAGS=-ggdb -O0",
+        "--enable-logging=verbose",
+        f"--with-microhttpd={pfx}",
+        f"--with-exchange={pfx}",
+        f"--with-gnunet={pfx}",
+        "--disable-doc",
+    )
+    subprocess.run(["make", "install"], check=True)
+    (p / "taler-buildstamp").touch()
+
+def build_sync(r, p):
+    update_checkout(r, p)
+    subprocess.run(["./bootstrap"], check=True)
+    pfx = Path.home() / ".local"
+    default_configure(
+        "CFLAGS=-ggdb -O0",
+        "--enable-logging=verbose",
+        f"--with-microhttpd={pfx}",
+        f"--with-exchange={pfx}",
+        f"--with-merchant={pfx}",
+        f"--with-gnunet={pfx}",
+        "--disable-doc",
+    )
+    subprocess.run(["make", "install"], check=True)
+    (p / "taler-buildstamp").touch()
+
+
+def build_anastasis(r, p):
+    update_checkout(r, p)
+    subprocess.run(["./bootstrap"], check=True)
+    pfx = Path.home() / ".local"
+    default_configure(
+        "CFLAGS=-ggdb -O0",
+        "--enable-logging=verbose",
+        f"--with-microhttpd={pfx}",
+        f"--with-exchange={pfx}",
+        f"--with-merchant={pfx}",
+        f"--with-gnunet={pfx}",
+        "--disable-doc",
+    )
+    subprocess.run(["make", "install"], check=True)
+    (p / "taler-buildstamp").touch()
+
+
+def build_demos(r, p):
+    update_checkout(r, p)
+    pfx = Path.home() / ".local"
+    pyconfigure()
+    subprocess.run(["make", "install"], check=True)
+    (p / "taler-buildstamp").touch()
+
+def build_backoffice(r, p):
+    update_checkout(r, p)
+    subprocess.run(["./bootstrap"])
+    subprocess.run(["./configure"])
+    subprocess.run(["make", "build-single"])
+    (p / "taler-buildstamp").touch()
+
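+# Known repositories.  The dict order doubles as the build precedence:
+# entries listed earlier must be built before the ones that depend on them.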
+repos = {
+    "libmicrohttpd": Repo(
+        "libmicrohttpd",
+        "git://git.gnunet.org/libmicrohttpd.git",
+        [],
+        build_libmicrohttpd,
+    ),
+    "gnunet": Repo(
+        "gnunet",
+        "git://git.gnunet.org/gnunet.git",
+        ["libmicrohttpd"],
+        build_gnunet
+    ),
+    "exchange": Repo(
+        "exchange",
+        "git://git.taler.net/exchange",
+        ["gnunet", "libmicrohttpd"],
+        build_exchange,
+    ),
+    "merchant": Repo(
+        "merchant",
+        "git://git.taler.net/merchant",
+        ["exchange","libmicrohttpd","gnunet"],
+        build_merchant,
+    ),
+    "sync": Repo(
+       "sync",
+       "git://git.taler.net/sync",
+       ["exchange",
+        "merchant",
+        "gnunet",
+        "libmicrohttpd"],
+       build_sync,
+   ),
+    "anastasis": Repo(
+       "anastasis",
+       "git://git.taler.net/anastasis",
+       ["exchange",
+        "merchant",
+        "libmicrohttpd",
+        "gnunet"],
+       build_anastasis,
+    ),
+    "wallet-core": Repo(
+        "wallet-core",
+        "git://git.taler.net/wallet-core",
+        [],
+        build_wallet,
+    ),
+    "libeufin": Repo(
+        "libeufin",
+        "git://git.taler.net/libeufin.git",
+        [],
+        build_libeufin,
+    ),
+    "taler-merchant-demos": Repo(
+        "taler-merchant-demos",
+        "git://git.taler.net/taler-merchant-demos",
+        [],
+        build_demos,
+    ),
+    "twister": Repo(
+        "twister",
+        "git://git.taler.net/twister",
+        ["gnunet", "libmicrohttpd"],
+        build_twister,
+    ),
+}
+
+def get_repos_names() -> List[str]:
+    r_dir = Path.home() / ".taler-sources"
+    return [el for el in listdir(r_dir) if isdir(join(r_dir, el)) and repos.get(el)]
+
+# Map repository names to their Repo objects (unknown names are skipped).
+def load_repos(reposNames) -> List[Repo]:
+    return [repos.get(r) for r in reposNames if repos.get(r)]
+
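+# Fetch every repository and, when upstream has new commits, remove its
+# taler-buildstamp so that the build step treats it as stale.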
+def update_repos(repos: List[Repo]) -> None:
+    for r in repos:
+        r_dir = Path.home() / ".taler-sources" / r.name
+        subprocess.run(["git", "-C", str(r_dir), "fetch"], check=True)
+        res = subprocess.run(
+            ["git", "-C", str(r_dir), "status", "-sb"],
+            check=True,
+            stdout=subprocess.PIPE,
+            encoding="utf-8",
+        )
+        if "behind" in res.stdout:
+            print(f"new commits in {r}")
+            s = r_dir / "taler-buildstamp"
+            if s.exists():
+                s.unlink()
+
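+# A repository is stale when its taler-buildstamp is missing (never built,
+# or removed by update_repos above).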
+def get_stale_repos(repos: List[Repo]) -> List[Repo]:
+    timestamps = {}
+    stale = []
+    for r in repos:
+        r_dir = Path.home() / ".taler-sources" / r.name
+        s = r_dir / "taler-buildstamp"
+        if not s.exists():
+            timestamps[r.name] = time.time()
+            stale.append(r)
+    return stale
+
+@cli.command()
+@click.option(
+    "--without-repos", metavar="WITHOUT REPOS",
+    help="WITHOUT REPOS is a unspaced and comma-separated list \
+of the repositories to _exclude_ from compilation",
+    default="")
+def build(without_repos) -> None:
+    """Build the deployment from source."""
+    exclude = split_repos_list(without_repos)
+    # Get the repositories names from the source directory
+    repos_names = get_repos_names()
+    # Reorder the list of repositories so that the
+    # most fundamental dependencies appear left-most.
+    repos_keys = repos.keys()  # The dict order encodes the build precedence.
+    sorted_repos = sorted(
+        set(repos_keys).intersection(repos_names),
+        key=lambda x: list(repos_keys).index(x)
+    )
+    target_repos = load_repos(sorted_repos) # Get Repo objects
+    update_repos(target_repos)
+    stale = get_stale_repos(target_repos)
+    print(f"found stale repos: {[r.name for r in stale]}")
+    for r in stale:
+        if r.name in exclude:
+            print(f"not building: {r.name}")
+            continue
+        # Warn, if a dependency is not being built:
+        diff = set(r.deps) - set(repos_names)
+        if len(diff) > 0:
+            print(f"WARNING: those dependencies are not being built: {diff}")
+        p = Path.home() / ".taler-sources" / r.name
+        os.chdir(str(p))
+        r.builder(r, p)
+
+# Clone the given repositories under ~/.taler-sources.
+def checkout_repos(repos: List[Repo]):
+    if len(repos) == 0:
+        print("No repositories to check out.  Are the names spelled correctly?")
+        return
+    home = Path.home()
+    sources = home / ".taler-sources"
+    for r in repos:
+        r_dir = home / ".taler-sources" / r.name
+        if not r_dir.exists():
+            r_dir.mkdir(parents=True, exist_ok=True)
+            subprocess.run(["git", "-C", str(sources), "clone", r.url], 
check=True)
+
+@cli.command()
+@click.option(
+    "--repos", "-r",
+    metavar="REPOS",
+    help="REPOS is a unspaced and comma-separated list of the repositories to 
clone.",
+    
default="libmicrohttpd,gnunet,exchange,merchant,wallet-core,taler-merchant-demos,sync,anastasis,libeufin",
+    show_default=True,
+)
+def bootstrap(repos) -> None:
+    """Clone all the specified repositories."""
+    reposList = split_repos_list(repos)
+    checkout_repos(load_repos(reposList))
+
+if __name__ == "__main__":
+    cli()
diff --git a/bin/WIP/taler-prepare-local b/bin/WIP/taler-prepare-local
new file mode 100755
index 0000000..467fd56
--- /dev/null
+++ b/bin/WIP/taler-prepare-local
@@ -0,0 +1,285 @@
+#!/bin/bash
+
+# Values needed:
+#
+# currency
+
+set -eu
+
+# The script stops everything it started along the way.
+# This function is a safety net against processes that are
+# somehow left running.
+function stop_running() {
+  for n in `jobs -p`
+  do
+      kill $n 2> /dev/null || true
+  done
+  wait
+}
+
+trap "stop_running" EXIT
+
+CURRENCY="EUR"
+IBAN_EXCHANGE="EX00000000000000000000"
+IBAN_MERCHANT="ME00000000000000000001"
+IBAN_CUSTOMER="WA00000000000000000000"
+FRONTENDS_APITOKEN="secret"
+WIRE_METHOD="sepa"
+NEXUS_DB_FILESYSTEM="/tmp/nexus.sqlite"
+SANDBOX_DB_FILESYSTEM="/tmp/sandbox.sqlite"
+SANDBOX_ADMIN_PASSWORD="secret"
+REV_PROXY="http://localhost:8080";
+SANDBOX_URL="${REV_PROXY}/sandbox"
+NEXUS_URL="${REV_PROXY}/nexus"
+EBICS_HOST="ebicsDeployedHost"
+# Only the exchange needs Nexus.
+EXCHANGE_NEXUS_USERNAME=exchange-nexus-user
+EXCHANGE_NEXUS_PASSWORD=exchange-nexus-password
+# Needed to create the default instance
+TALER_MERCHANT_TOKEN="secret"
+
+function generate_preliminary_config() {
+  # Known later:
+
+  # exchange pub, needed by merchant
+  # wire gateway URL, credentials, method
+
+  mkdir -p "$HOME/.config"
+  taler-config-local \
+    --currency "$CURRENCY" \
+    --outdir "$HOME/.config" \
+    --exchange-wire-address "$IBAN_EXCHANGE" \
+    --merchant-wire-address "$IBAN_MERCHANT" \
+    --rev-proxy-url "$REV_PROXY" \
+    --frontends-apitoken "$FRONTENDS_APITOKEN"
+}
+
+generate_preliminary_config
+
+echo -n "Reset and init exchange DB.."
+taler-exchange-dbinit --reset
+echo " OK"
+
+echo -n "Remove previous key and data files.."
+rm -fr ~/.taler-data/*
+echo " OK"
+
+##
+## Step 3: Set up the exchange key material
+##
+
+echo -n "Setup exchange's key material.."
+
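+# The exchange HTTP daemon and the two crypto helpers (RSA and EdDSA secmods)
+# are started temporarily so that taler-exchange-offline below can download,
+# sign and upload the freshly generated keys; everything is stopped again at
+# the end of this step.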
+# New version
+EXCHANGE_PID=$(taler-exchange-httpd > /tmp/exchange.log 2>&1 & echo $!)
+SECMOD_RSA_PID=$(taler-exchange-secmod-rsa > /tmp/exchange-secmod-rsa.log 2>&1 & echo $!)
+SECMOD_EDDSA_PID=$(taler-exchange-secmod-eddsa > /tmp/exchange-secmod-eddsa.log 2>&1 & echo $!)
+
+# FIXME: check the three services above are running correctly.
+
+taler-exchange-offline download sign upload
+
+PAYTO_URI=$(taler-config -s exchange-account-1 -o payto_uri)
+taler-exchange-offline enable-account "$PAYTO_URI" upload
+
+# Set up wire fees for next 5 years
+YEAR=$(date +%Y)
+for y in $(seq $YEAR $((YEAR + 5))); do
+  taler-exchange-offline wire-fee $y $WIRE_METHOD "$CURRENCY:0.01" "$CURRENCY:0.01" upload
+done
+
+echo " OK"
+echo -n "Getting exchange public key via /keys.."
+EXCHANGE_URL=$(taler-config -s exchange -o base_url)
+EXCHANGE_MASTER_PUB=$(curl -s "$EXCHANGE_URL/keys" | jq -r .master_public_key)
+echo " OK"
+echo -n "Add this exchange to the auditor..."
+taler-auditor-exchange \
+  -m "$EXCHANGE_MASTER_PUB" \
+  -u "$(taler-config -s exchange -o base_url)" || true
+# Make configuration accessible to auditor
+chmod 750 "$HOME/.config"
+echo " OK"
+stop_running
+
+##
+## Step 4:  Set up euFin
+##
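+# euFin provides the banking side: the Sandbox emulates an EBICS bank, while
+# Nexus speaks EBICS to it and exposes the Taler wire gateway facade that the
+# exchange later uses for wire transfers.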
+
+echo -n "Resetting euFin databases.."
+rm -f $SANDBOX_DB_FILESYSTEM
+rm -f $NEXUS_DB_FILESYSTEM
+echo " OK"
+export LIBEUFIN_SANDBOX_USERNAME="admin"
+export LIBEUFIN_SANDBOX_PASSWORD=$SANDBOX_ADMIN_PASSWORD
+# $1 = ebics user id, $2 = ebics partner, $3 = bank connection name
+# $4 = bank account name local to Nexus, $5 = bank account name as known
+# by Sandbox
+function prepare_nexus_account() {
+  echo -n "Making bank connection $3 ..."
+  libeufin-cli connections new-ebics-connection \
+    --ebics-url="${SANDBOX_URL}ebicsweb" \
+    --host-id=$EBICS_HOST \
+    --partner-id=$2 \
+    --ebics-user-id=$1 \
+    $3 > /dev/null
+  echo " OK"
+  echo -n "Connecting $3 ..."
+  libeufin-cli connections connect $3 > /dev/null
+  echo " OK"
+  echo -n "Importing Sandbox bank account ($5) to Nexus ($4) ..."
+  libeufin-cli connections download-bank-accounts $3 > /dev/null
+  libeufin-cli connections import-bank-account \
+    --offered-account-id=$5 --nexus-bank-account-id=$4 $3 > /dev/null
+  echo " OK"
+  # Schedule recurring background tasks: submit prepared payments and fetch new reports.
+  echo -n "Setting background payment initiator.."
+  libeufin-cli accounts task-schedule $4 \
+    --task-type="submit" \
+    --task-name='submit-payments-every-second' \
+    --task-cronspec='* * *'
+  echo " OK"
+  echo -n "Setting background history fetch.." 
+  libeufin-cli accounts task-schedule $4 \
+    --task-type="fetch" \
+    --task-name='fetch-reports-every-second' \
+    --task-cronspec='* * *' \
+    --task-param-level=report \
+    --task-param-range-type=latest
+  echo " OK"
+}
+
+# $1=ebics username, $2=ebics partner name,
+# $3=person name, $4=sandbox bank account name, $5=iban
+function prepare_sandbox_account() {
+  echo -n "Activating ebics subscriber $1 at the sandbox ..."
+  libeufin-cli \
+    sandbox --sandbox-url=$SANDBOX_URL \
+      ebicssubscriber create \
+        --host-id=$EBICS_HOST \
+        --partner-id=$2 \
+        --user-id=$1
+  echo " OK"
+  echo -n "Giving a bank account ($4) to $1 ..."
+  libeufin-cli \
+    sandbox --sandbox-url=$SANDBOX_URL \
+      ebicsbankaccount create \
+        --iban=$5 \
+        --bic="BCMAESM1XXX"\
+        --person-name="$3" \
+        --account-name=$4 \
+        --ebics-user-id=$1 \
+        --ebics-host-id=$EBICS_HOST \
+        --ebics-partner-id=$2 \
+        --currency=$CURRENCY
+  echo " OK"
+}
+
+echo -n "Making Sandbox superuser..."
+libeufin-sandbox superuser admin --password=${SANDBOX_ADMIN_PASSWORD}
+echo " OK"
+
+echo -n "Lunching Sandbox..."
+SANDBOX_PID=$(libeufin-sandbox serve --with-unix-socket > /tmp/sandbox.log 2>&1 & echo $!)
+if ! curl -s --retry 5 --retry-connrefused $SANDBOX_URL > /dev/null; then
+  echo "Could not launch Sandbox"
+  stop_running
+  exit 1
+fi
+echo " OK"
+
+echo -n "Launching Nexus..."
+NEXUS_PID=$(libeufin-nexus serve --with-unix-socket > /tmp/nexus.log 2>&1 & echo $!)
+if ! curl -s --retry 5 --retry-connrefused $NEXUS_URL > /dev/null; then
+  echo "Could not launch Nexus"
+  stop_running
+  exit 1
+fi
+echo " OK"
+
+echo -n "Make Sandbox EBICS host..."
+libeufin-cli \
+  sandbox --sandbox-url=$SANDBOX_URL \
+    ebicshost create \
+      --host-id=$EBICS_HOST
+echo " OK"
+
+# note: the EBICS schema doesn't allow dashed names.
+prepare_sandbox_account \
+  ebicsuserExchange \
+  ebicspartnerExchange \
+  "Person Exchange" \
+  sandbox-account-exchange \
+  $IBAN_EXCHANGE
+prepare_sandbox_account \
+  ebicsuserMerchant \
+  ebicspartnerMerchant \
+  "Person Merchant" \
+  sandbox-account-merchant \
+  $IBAN_MERCHANT
+prepare_sandbox_account \
+  ebicsuserCustomer \
+  ebicspartnerCustomer \
+  "Person Customer" \
+  sandbox-account-customer \
+  $IBAN_CUSTOMER
+
+echo -n "Make Nexus superuser ..."
+libeufin-nexus superuser $EXCHANGE_NEXUS_USERNAME --password=$EXCHANGE_NEXUS_PASSWORD
+echo " OK"
+
+export LIBEUFIN_NEXUS_URL=$NEXUS_URL
+export LIBEUFIN_NEXUS_USERNAME=$EXCHANGE_NEXUS_USERNAME
+export LIBEUFIN_NEXUS_PASSWORD=$EXCHANGE_NEXUS_PASSWORD
+prepare_nexus_account \
+  ebicsuserExchange \
+  ebicspartnerExchange \
+  bankconnection-exchange \
+  nexus-bankaccount-exchange \
+  sandbox-account-exchange
+
+echo -n "Create Taler facade ..."
+libeufin-cli facades new-taler-wire-gateway-facade \
+  --currency=$CURRENCY \
+  --facade-name=facade-exchange \
+  bankconnection-exchange nexus-bankaccount-exchange
+echo " OK"
+FACADE_URL=$(libeufin-cli facades list | jq .facades[0].baseUrl | tr -d \")
+
+stop_running
+
+# Finish configuration now:
+taler-config -s merchant-exchange-$CURRENCY \
+            -o master_key -V $EXCHANGE_MASTER_PUB
+
+# Point the exchange to the facade.
+taler-config -s exchange-accountcredentials-1 \
+             -o WIRE_GATEWAY_URL \
+             -V "${FACADE_URL}"
+
+taler-config -s exchange-accountcredentials-1 \
+             -o USERNAME \
+            -V "${EXCHANGE_NEXUS_USERNAME}"
+
+taler-config -s exchange-accountcredentials-1 \
+             -o PASSWORD \
+            -V "${EXCHANGE_NEXUS_PASSWORD}"
+##
+## Step 6: Set up merchant
+##
+
+echo -n "Reset and init merchant database.."
+taler-merchant-dbinit --reset
+echo " OK"
+
+echo "Configuring instances"
+# Obsoleted: do all here.
+# taler-deployment-config-instances-iban
+export TALER_MERCHANT_TOKEN
+MERCHANT_BACKEND_PID=$(taler-merchant-httpd > /tmp/merchant.log 2>&1 & echo $!)
+
+# TODO: create the default instance (and further demo instances).
+
+echo "Stopping all the services"
+stop_running

-- 
To stop receiving notification emails like this one, please contact
gnunet@gnunet.org.


