summaryrefslogtreecommitdiff
path: root/bin/WIP
diff options
context:
space:
mode:
authorms <ms@taler.net>2022-04-17 19:14:08 +0200
committerms <ms@taler.net>2022-04-17 19:19:43 +0200
commit5c4bfbb289d86d000ffd9a2def4f089becab57db (patch)
tree9ecca22e7456a0fb0ca1ffe28458d3ab87d1067f /bin/WIP
parentb425532268ffacb23eb95ee78a875dafbe603265 (diff)
downloaddeployment-5c4bfbb289d86d000ffd9a2def4f089becab57db.tar.gz
deployment-5c4bfbb289d86d000ffd9a2def4f089becab57db.tar.bz2
deployment-5c4bfbb289d86d000ffd9a2def4f089becab57db.zip
parse 'envcfg files' in taler-local
Diffstat (limited to 'bin/WIP')
-rwxr-xr-xbin/WIP/taler-local167
1 files changed, 132 insertions, 35 deletions
diff --git a/bin/WIP/taler-local b/bin/WIP/taler-local
index 7ec4dc4..51f2cf5 100755
--- a/bin/WIP/taler-local
+++ b/bin/WIP/taler-local
@@ -57,26 +57,36 @@ def print_nn(msg):
sys.stdout.flush()
class Repo:
- def __init__(self, name, url, deps, builder):
+ def __init__(self, name, url, deps, builder, version="master"):
self.name = name
self.url = url
self.deps = deps
self.builder = builder
+ self.version = version
@click.group()
def cli():
pass
+# Parses the command-line-given and comma-separated repos list
+# into a list of names.
def split_repos_list(repos):
return [repo for repo in repos.split(",") if repo != ""]
+# fetch the remote. No timestamp deletion here
def update_checkout(r: Repo, p: Path):
"""Clean the repository's working directory and
update it to match the latest version of the upstream branch
that we are tracking."""
- subprocess.run(["git", "-C", str(p), "clean", "-fdx"], check=True)
+ subprocess.run(["git", "-C", str(p), "clean", "-fdx"], check=True) # remove unversioned files.
+
+ # Equivalent to "git pull". Does nothing if in detached HEAD
+ # but pulls new code into the local copy otherwise.
subprocess.run(["git", "-C", str(p), "fetch"], check=True)
subprocess.run(["git", "-C", str(p), "reset"], check=True)
+
+ # Makes the last step "--hard", namely removes files not
+ # belonging to the current version.
res = subprocess.run(
[
"git",
@@ -278,13 +288,49 @@ repos = {
def get_repos_names() -> List[str]:
r_dir = TALER_ROOT_DIR / "sources"
+ if not r_dir.is_dir():
+ print(f"'{r_dir}' not found. Did bootstrap run?")
+ return []
return [el for el in listdir(r_dir) if isdir(join(r_dir, el)) and repos.get(el)]
-# Get the installed repositories from the sources directory.
+# Get 'Repo' objects (globally defined),
+# using their names as index.
def load_repos(reposNames) -> List[Repo]:
- return [repos.get(r) for r in reposNames if repos.get(r)]
-
-def update_repos(repos: List[Repo]) -> None:
+ ret = []
+ for repo in repos.keys():
+ if repo in reposNames:
+ ret.append(repos[repo])
+ return ret
+
+# Return the list of repos (equipped with their version)
+# to install.
+def load_repos_with_envcfg(envcfg_path) -> List[Repo]:
+ envcfg_path = Path(envcfg_path)
+ if not os.path.isfile(envcfg_path):
+ print(f"{envcfg_path} is not a file")
+ sys.exit(1)
+ cfgtext = envcfg_path.read_text()
+ cfg = types.ModuleType("taler_deployment_cfg")
+ try:
+ exec(cfgtext, cfg.__dict__)
+ except SyntaxError:
+ print(f"{envcfg_path} is not Python.")
+ exit(1)
+ ret = []
+ for repo in repos.keys():
+ try:
+ envcfg_entry = getattr(cfg, "tag_" + repo.replace("-", "_"))
+ except AttributeError:
+            # The 'env' file doesn't have this repo; continue looping.
+ continue
+ repos[repo].version = envcfg_entry
+ ret.append(repos[repo])
+ return ret
+
+# Flag as stale the projects set on 'master' that
+# aren't in line with upstream. Detached head projects
+# aren't affected.
+def update_repos(repos: List[Repo], force) -> None:
for r in repos:
r_dir = TALER_ROOT_DIR / "sources" / r.name
subprocess.run(["git", "-C", str(r_dir), "fetch"], check=True)
@@ -294,12 +340,14 @@ def update_repos(repos: List[Repo]) -> None:
stdout=subprocess.PIPE,
encoding="utf-8",
)
- if "behind" in res.stdout:
- print(f"new commits in {r}")
+ if "behind" in res.stdout or force:
+ print(f"{r.name} will be compiled")
s = r_dir / "taler-buildstamp"
if s.exists():
s.unlink()
+# Projects without the build timestamp are considered stale,
+# as are projects one of whose dependencies got marked as stale.
def get_stale_repos(repos: List[Repo]) -> List[Repo]:
timestamps = {}
stale = []
@@ -330,10 +378,24 @@ of the repositories to _exclude_ from compilation",
help="ONLY REPOS is a unspaced and comma-separated exclusive list \
of the repositories to include in the compilation",
default="")
-def build(without_repos, only_repos) -> None:
-
+@click.option(
+ "--dry/--no-dry", default=False,
+ help="Only getting changes, without actual build."
+)
+@click.option(
+ "--with-envcfg", metavar="PATH",
+ help="python file pinning each codebase version.",
+)
+# Normally, we don't rebuild dependent projects when one
+# of their dependencies changed.  This option lets one check
+# whether non-breaking changes really are so: it overrides
+# that policy by letting all the codebases be compiled.
+@click.option(
+ "--force/--no-force", default=False,
+ help="build all the projects.",
+)
+def build(without_repos, only_repos, dry, with_envcfg, force) -> None:
"""Build the deployment from source."""
-
if only_repos != "" and without_repos != "":
print("Either use --only-repos or --without-repos")
exit(1)
@@ -348,27 +410,37 @@ def build(without_repos, only_repos) -> None:
lambda x: x not in split_repos_list(without_repos),
repos_names
))
-
- # Reorder the list of repositories so that the
- # most fundamental dependecies appear left-most.
- repos_keys = repos.keys()
- sorted_repos = sorted(
- set(repos_keys).intersection(repos_names),
- key=lambda x: list(repos_keys).index(x)
- )
- target_repos = load_repos(sorted_repos) # Get Repo objects
- update_repos(target_repos)
+ if with_envcfg:
+ target_repos = load_repos_with_envcfg(with_envcfg)
+ else:
+ target_repos = load_repos(repos_names)
+ # enforce version here.
+ sources = TALER_ROOT_DIR / "sources"
+ for r in target_repos:
+ subprocess.run(
+ ["git", "-C", str(sources / r.name),
+ "checkout", "-q", "-f",
+ r.version, "--"], check=True
+ )
+ update_repos(target_repos, force)
stale = get_stale_repos(target_repos)
print(f"found stale repos: {[r.name for r in stale]}")
for r in stale:
- # Warn, if a dependency is not being built:
+ # Inform, if a dependency is not being built:
diff = set(r.deps) - set(repos_names)
if len(diff) > 0:
- print(f"WARNING: those dependencies are not being built: {diff}")
+ print(f"Info: those dependencies are not being built: {diff}")
p = TALER_ROOT_DIR / "sources" / r.name
os.chdir(str(p))
+ if dry:
+ print("dry running")
+ continue
r.builder(r, p)
+# Only git-clone the codebases.  The 'build' step
+# will run all the update logic.  At this point, an
+# 'env' file - as well as the --repos option - only
+# expresses which codebases to clone.
@cli.command()
@click.option(
"--repos", "-r",
@@ -381,29 +453,58 @@ def build(without_repos, only_repos) -> None:
"--list-repos/--no-list-repos", default=False,
help="Lists the repositories that were bootstrapped.",
)
-def bootstrap(list_repos, repos) -> None:
-
+@click.option(
+ "--with-envcfg", metavar="PATH",
+ help="python file pinning each codebase version.",
+)
+@click.option(
+ "--dry/--no-dry", default=False,
+ help="Print steps, without downloading any repository.",
+)
+def bootstrap(list_repos, repos, with_envcfg, dry) -> None:
"""Clone all the specified repositories."""
-
+    # Only say _which_ repos were installed.  No further action.
if list_repos:
for repo in get_repos_names():
print(repo)
return
- # Download the repository.
- def checkout_repos(repos: List[Repo]):
+ # Download the repositories.
+ def clone_repos(repos: List[Repo]):
if len(repos) == 0:
print("No repositories can be checked out. Spelled correctly?")
return
sources = TALER_ROOT_DIR / "sources"
for r in repos:
+ print(f"Bootstrapping '{r.name}', at version '{r.version}'")
+ if dry:
+ print("dry running")
+ continue
r_dir = sources / r.name
if not r_dir.exists():
r_dir.mkdir(parents=True, exist_ok=True)
- subprocess.run(["git", "-C", str(sources), "clone", r.url], check=True)
+ subprocess.run(
+ ["git", "-C", str(sources),
+ "clone", r.url], check=True
+ )
+ subprocess.run(
+ ["git", "-C", str(r_dir),
+ "checkout", "-q", "-f",
+ r.version, "--"], check=True
+ )
- reposList = split_repos_list(repos)
- checkout_repos(load_repos(reposList))
+ # Get list of to-be-cloned repos from the 'env' file.
+ if with_envcfg:
+ # 'with_envcfg' is a path to a "envcfg.py" file.
+ preparedRepos = load_repos_with_envcfg(with_envcfg)
+ # Get list of to-be-cloned repos from the command line
+ # (or its default)
+ else:
+ # 'repos' is here "repo1,repo2,.."
+ reposList = split_repos_list(repos)
+ # 'reposList' is here ["repo1", "repo2", ...]
+ preparedRepos = load_repos(reposList)
+ clone_repos(preparedRepos)
# Globals shared across multiple sub-commands:
# needed to configure and launch the reverse proxy.
@@ -439,9 +540,7 @@ CURRENCY = "EUR"
default="taler"
)
def prepare(x_forwarded_host, x_forwarded_proto, postgres_db_name):
-
"""Generate configuration, run-time blobs, instances, euFin accounts."""
-
def is_serving(check_url, tries=10):
for i in range(tries):
try:
@@ -463,8 +562,6 @@ def prepare(x_forwarded_host, x_forwarded_proto, postgres_db_name):
break
return True
-
-
def fail(reason=None):
if reason:
print("ERROR:", reason)