Add external stack support (#806)
Reviewed-on: https://git.vdb.to/cerc-io/stack-orchestrator/pulls/806
Co-authored-by: David Boreham <david@bozemanpass.com>
Co-committed-by: David Boreham <david@bozemanpass.com>
dboreham authored and David Boreham committed Apr 18, 2024
1 parent 9043a67 commit 6e4dae9
Showing 12 changed files with 339 additions and 47 deletions.
58 changes: 58 additions & 0 deletions .gitea/workflows/test-external-stack.yml
@@ -0,0 +1,58 @@
name: External Stack Test

on:
push:
branches: '*'
paths:
- '!**'
- '.gitea/workflows/triggers/test-external-stack'
- '.gitea/workflows/test-external-stack.yml'
- 'tests/external-stack/run-test.sh'
schedule: # Note: coordinate with other tests to not overload runners at the same time of day
- cron: '8 19 * * *'

jobs:
test:
name: "Run external stack test suite"
runs-on: ubuntu-latest
steps:
- name: "Clone project repository"
uses: actions/checkout@v3
# At present the stock setup-python action fails on Linux/aarch64
# Conditional steps below work around this by using deadsnakes for that case only
- name: "Install Python for ARM on Linux"
if: ${{ runner.arch == 'arm64' && runner.os == 'Linux' }}
uses: deadsnakes/action@v3.0.1
with:
python-version: '3.8'
- name: "Install Python cases other than ARM on Linux"
if: ${{ ! (runner.arch == 'arm64' && runner.os == 'Linux') }}
uses: actions/setup-python@v4
with:
python-version: '3.8'
- name: "Print Python version"
run: python3 --version
- name: "Install shiv"
run: pip install shiv
- name: "Generate build version file"
run: ./scripts/create_build_tag_file.sh
- name: "Build local shiv package"
run: ./scripts/build_shiv_package.sh
- name: "Run external stack tests"
run: ./tests/external-stack/run-test.sh
- name: Notify Vulcanize Slack on CI failure
if: ${{ always() && github.ref_name == 'main' }}
uses: ravsamhq/notify-slack-action@v2
with:
status: ${{ job.status }}
notify_when: 'failure'
env:
SLACK_WEBHOOK_URL: ${{ secrets.VULCANIZE_SLACK_CI_ALERTS }}
- name: Notify DeepStack Slack on CI failure
if: ${{ always() && github.ref_name == 'main' }}
uses: ravsamhq/notify-slack-action@v2
with:
status: ${{ job.status }}
notify_when: 'failure'
env:
SLACK_WEBHOOK_URL: ${{ secrets.DEEPSTACK_SLACK_CI_ALERTS }}
1 change: 1 addition & 0 deletions .gitea/workflows/triggers/test-external-stack
@@ -0,0 +1 @@
Change this file to trigger running the external-stack CI job
2 changes: 1 addition & 1 deletion stack_orchestrator/build/build_containers.py
@@ -71,7 +71,7 @@ def process_container(build_context: BuildContext) -> bool:

# Check if this is in an external stack
if stack_is_external(build_context.stack):
container_parent_dir = Path(build_context.stack).joinpath("container-build")
container_parent_dir = Path(build_context.stack).parent.parent.joinpath("container-build")
temp_build_dir = container_parent_dir.joinpath(build_context.container.replace("/", "-"))
temp_build_script_filename = temp_build_dir.joinpath("build.sh")
# Now check if the container exists in the external stack.
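For an external stack, `build_context.stack` holds the path to the stack directory inside the fetched repository, so `container-build` is found two directory levels above it rather than alongside it; the same grandparent convention is applied to `compose` in `resolve_compose_file` further down. A minimal sketch of the implied layout, assuming a hypothetical repository fetched under `~/cerc` (the intermediate directory names are illustrative; only the two-levels-up relationship comes from the diff):

```python
# Minimal sketch of the directory layout the path arithmetic above assumes.
# All concrete names here are hypothetical.
from pathlib import Path

# An external stack passed via --stack, e.g. fetched by `fetch-stack`:
stack = Path.home() / "cerc" / "my-stacks-repo" / "stack-orchestrator" / "stacks" / "my-stack"

container_parent_dir = stack.parent.parent / "container-build"  # build scripts for the stack's containers
compose_dir = stack.parent.parent / "compose"                   # docker-compose-<pod>.yml files

print(container_parent_dir)
print(compose_dir)
```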
@@ -2,4 +2,4 @@
# Build cerc/test-container
source ${CERC_CONTAINER_BASE_DIR}/build-base.sh
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
docker build -t cerc/test-container:local -f ${SCRIPT_DIR}/Dockerfile ${build_command_args} $SCRIPT_DIR
docker build -t cerc/test-container:local -f ${SCRIPT_DIR}/Dockerfile ${build_command_args} $SCRIPT_DIR
7 changes: 6 additions & 1 deletion stack_orchestrator/deploy/deploy.py
@@ -27,6 +27,7 @@
from stack_orchestrator import constants
from stack_orchestrator.opts import opts
from stack_orchestrator.util import include_exclude_check, get_parsed_stack_config, global_options2, get_dev_root_path
from stack_orchestrator.util import resolve_compose_file
from stack_orchestrator.deploy.deployer import Deployer, DeployerException
from stack_orchestrator.deploy.deployer_factory import getDeployer
from stack_orchestrator.deploy.deploy_types import ClusterContext, DeployCommandContext
@@ -324,7 +325,10 @@ def _make_cluster_context(ctx, stack, include, exclude, cluster, env_file):
pod_path = pod["path"]
if include_exclude_check(pod_name, include, exclude):
if pod_repository is None or pod_repository == "internal":
compose_file_name = os.path.join(compose_dir, f"docker-compose-{pod_path}.yml")
if deployment:
compose_file_name = os.path.join(compose_dir, f"docker-compose-{pod_path}.yml")
else:
compose_file_name = resolve_compose_file(stack, pod_name)
else:
if deployment:
compose_file_name = os.path.join(compose_dir, f"docker-compose-{pod_name}.yml")
@@ -336,6 +340,7 @@ def _make_cluster_context(ctx, stack, include, exclude, cluster, env_file):
if pod_post_start_command is not None:
post_start_commands.append(os.path.join(script_dir, pod_post_start_command))
else:
# TODO: fix this code for external stack with scripts
pod_root_dir = os.path.join(dev_root_path, pod_repository.split("/")[-1], pod["path"])
compose_file_name = os.path.join(pod_root_dir, f"docker-compose-{pod_name}.yml")
pod_pre_start_command = pod.get("pre_start_command")
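In the hunk above, pods that belong to the stack itself (`pod_repository` is `None` or `"internal"`) now take one of two routes: when operating on a deployment, the compose file is read from the deployment's own compose directory; otherwise it is resolved against the stack via the new `resolve_compose_file` helper (external stack first, bundled files second). A minimal sketch of that branching, with hypothetical paths and names:

```python
# Minimal sketch (hypothetical names) of the selection logic added above for
# pods defined by the stack itself.
import os

def select_compose_file(deployment: bool, compose_dir: str, pod_path: str,
                        stack_resolved_file: str) -> str:
    if deployment:
        # A deployment directory carries its own copies of the compose files.
        return os.path.join(compose_dir, f"docker-compose-{pod_path}.yml")
    # Otherwise defer to the stack-aware lookup (see resolve_compose_file below).
    return stack_resolved_file

print(select_compose_file(True, "/srv/deployments/demo/compose", "my-pod", ""))
print(select_compose_file(False, "", "my-pod",
                          "/home/me/cerc/my-stacks-repo/stack-orchestrator/compose/docker-compose-my-pod.yml"))
```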
4 changes: 2 additions & 2 deletions stack_orchestrator/deploy/deploy_util.py
@@ -16,7 +16,7 @@
import os
from typing import List, Any
from stack_orchestrator.deploy.deploy_types import DeployCommandContext, VolumeMapping
from stack_orchestrator.util import get_parsed_stack_config, get_yaml, get_compose_file_dir, get_pod_list
from stack_orchestrator.util import get_parsed_stack_config, get_yaml, get_pod_list, resolve_compose_file
from stack_orchestrator.opts import opts


@@ -27,7 +27,7 @@ def _container_image_from_service(stack: str, service: str):
pods = get_pod_list(parsed_stack)
yaml = get_yaml()
for pod in pods:
pod_file_path = os.path.join(get_compose_file_dir(), f"docker-compose-{pod}.yml")
pod_file_path = resolve_compose_file(stack, pod)
parsed_pod_file = yaml.load(open(pod_file_path, "r"))
if "services" in parsed_pod_file:
services = parsed_pod_file["services"]
6 changes: 3 additions & 3 deletions stack_orchestrator/deploy/deployment_create.py
@@ -43,7 +43,7 @@ def _get_ports(stack):
pods = get_pod_list(parsed_stack)
yaml = get_yaml()
for pod in pods:
pod_file_path = get_pod_file_path(parsed_stack, pod)
pod_file_path = get_pod_file_path(stack, parsed_stack, pod)
parsed_pod_file = yaml.load(open(pod_file_path, "r"))
if "services" in parsed_pod_file:
for svc_name, svc in parsed_pod_file["services"].items():
@@ -79,7 +79,7 @@ def find_vol_usage(parsed_pod_file, vol):
return ret

for pod in pods:
pod_file_path = get_pod_file_path(parsed_stack, pod)
pod_file_path = get_pod_file_path(stack, parsed_stack, pod)
parsed_pod_file = yaml.load(open(pod_file_path, "r"))
if "volumes" in parsed_pod_file:
volumes = parsed_pod_file["volumes"]
@@ -483,7 +483,7 @@ def create_operation(deployment_command_context, spec_file, deployment_dir, netw
data_dir = Path(__file__).absolute().parent.parent.joinpath("data")
yaml = get_yaml()
for pod in pods:
pod_file_path = get_pod_file_path(parsed_stack, pod)
pod_file_path = get_pod_file_path(stack_name, parsed_stack, pod)
parsed_pod_file = yaml.load(open(pod_file_path, "r"))
extra_config_dirs = _find_extra_config_dirs(parsed_pod_file, pod)
destination_pod_dir = destination_pods_dir.joinpath(pod)
2 changes: 2 additions & 0 deletions stack_orchestrator/main.py
@@ -17,6 +17,7 @@

from stack_orchestrator.command_types import CommandOptions
from stack_orchestrator.repos import setup_repositories
from stack_orchestrator.repos import fetch_stack
from stack_orchestrator.build import build_containers, fetch_containers
from stack_orchestrator.build import build_npms
from stack_orchestrator.build import build_webapp
@@ -50,6 +51,7 @@ def cli(ctx, stack, quiet, verbose, dry_run, local_stack, debug, continue_on_err
ctx.obj = command_options


cli.add_command(fetch_stack.command, "fetch-stack")
cli.add_command(setup_repositories.command, "setup-repositories")
cli.add_command(build_containers.command, "build-containers")
cli.add_command(fetch_containers.command, "fetch-containers")
45 changes: 45 additions & 0 deletions stack_orchestrator/repos/fetch_stack.py
@@ -0,0 +1,45 @@
# Copyright © 2022, 2023 Vulcanize

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http:#www.gnu.org/licenses/>.

# env vars:
# CERC_REPO_BASE_DIR defaults to ~/cerc


import click
import os

from decouple import config
from git import exc

from stack_orchestrator.opts import opts
from stack_orchestrator.repos.setup_repositories import process_repo
from stack_orchestrator.util import error_exit


@click.command()
@click.argument('stack-locator')
@click.option('--git-ssh', is_flag=True, default=False)
@click.option('--check-only', is_flag=True, default=False)
@click.option('--pull', is_flag=True, default=False)
@click.pass_context
def command(ctx, stack_locator, git_ssh, check_only, pull):
'''optionally resolve then git clone a repository containing one or more stack definitions'''
dev_root_path = os.path.expanduser(config("CERC_REPO_BASE_DIR", default="~/cerc"))
if not opts.o.quiet:
print(f"Dev Root is: {dev_root_path}")
try:
process_repo(pull, check_only, git_ssh, dev_root_path, None, stack_locator)
except exc.GitCommandError as error:
error_exit(f"\n******* git command returned error exit status:\n{error}")
54 changes: 18 additions & 36 deletions stack_orchestrator/repos/setup_repositories.py
@@ -26,6 +26,7 @@
from pathlib import Path
import yaml
from stack_orchestrator.constants import stack_file_name
from stack_orchestrator.opts import opts
from stack_orchestrator.util import include_exclude_check, stack_is_external, error_exit, warn_exit


@@ -87,8 +88,8 @@ def _get_repo_current_branch_or_tag(full_filesystem_repo_path):


# TODO: fix the messy arg list here
def process_repo(verbose, quiet, dry_run, pull, check_only, git_ssh, dev_root_path, branches_array, fully_qualified_repo):
if verbose:
def process_repo(pull, check_only, git_ssh, dev_root_path, branches_array, fully_qualified_repo):
if opts.o.verbose:
print(f"Processing repo: {fully_qualified_repo}")
repo_host, repo_path, repo_branch = host_and_path_for_repo(fully_qualified_repo)
git_ssh_prefix = f"git@{repo_host}:"
@@ -100,7 +101,7 @@ def process_repo(verbose, quiet, dry_run, pull, check_only, git_ssh, dev_root_pa
(current_repo_branch_or_tag, is_branch) = _get_repo_current_branch_or_tag(
full_filesystem_repo_path
) if is_present else (None, None)
if not quiet:
if not opts.o.quiet:
present_text = f"already exists active {'branch' if is_branch else 'tag'}: {current_repo_branch_or_tag}" if is_present \
else 'Needs to be fetched'
print(f"Checking: {full_filesystem_repo_path}: {present_text}")
@@ -111,25 +112,25 @@ def process_repo(verbose, quiet, dry_run, pull, check_only, git_ssh, dev_root_pa
sys.exit(1)
else:
if pull:
if verbose:
if opts.o.verbose:
print(f"Running git pull for {full_filesystem_repo_path}")
if not check_only:
if is_branch:
git_repo = git.Repo(full_filesystem_repo_path)
origin = git_repo.remotes.origin
origin.pull(progress=None if quiet else GitProgress())
origin.pull(progress=None if opts.o.quiet else GitProgress())
else:
print("skipping pull because this repo checked out a tag")
else:
print("(git pull skipped)")
if not is_present:
# Clone
if verbose:
if opts.o.verbose:
print(f'Running git clone for {full_github_repo_path} into {full_filesystem_repo_path}')
if not dry_run:
if not opts.o.dry_run:
git.Repo.clone_from(full_github_repo_path,
full_filesystem_repo_path,
progress=None if quiet else GitProgress())
progress=None if opts.o.quiet else GitProgress())
else:
print("(git clone skipped)")
# Checkout the requested branch, if one was specified
@@ -150,13 +151,13 @@ def process_repo(verbose, quiet, dry_run, pull, check_only, git_ssh, dev_root_pa
current_repo_branch_or_tag and (
current_repo_branch_or_tag != branch_to_checkout)
):
if not quiet:
if not opts.o.quiet:
print(f"switching to branch {branch_to_checkout} in repo {repo_path}")
git_repo = git.Repo(full_filesystem_repo_path)
# git checkout works for both branches and tags
git_repo.git.checkout(branch_to_checkout)
else:
if verbose:
if opts.o.verbose:
print(f"repo {repo_path} is already on branch/tag {branch_to_checkout}")


@@ -182,36 +183,18 @@ def parse_branches(branches_string):
@click.option('--check-only', is_flag=True, default=False)
@click.option('--pull', is_flag=True, default=False)
@click.option("--branches", help="override branches for repositories")
@click.option('--branches-file', help="checkout branches specified in this file")
@click.pass_context
def command(ctx, include, exclude, git_ssh, check_only, pull, branches, branches_file):
def command(ctx, include, exclude, git_ssh, check_only, pull, branches):
'''git clone the set of repositories required to build the complete system from source'''

quiet = ctx.obj.quiet
verbose = ctx.obj.verbose
dry_run = ctx.obj.dry_run
stack = ctx.obj.stack
quiet = opts.o.quiet
verbose = opts.o.verbose
stack = opts.o.stack

branches_array = []

# TODO: branches file needs to be re-worked in the context of stacks
if branches_file:
if branches:
print("Error: can't specify both --branches and --branches-file")
sys.exit(1)
else:
if verbose:
print(f"loading branches from: {branches_file}")
with open(branches_file) as branches_file_open:
branches_array = branches_file_open.read().splitlines()

print(f"branches: {branches}")
if branches:
if branches_file:
print("Error: can't specify both --branches and --branches-file")
sys.exit(1)
else:
branches_array = parse_branches(branches)
branches_array = parse_branches(branches)

if branches_array and verbose:
print(f"Branches are: {branches_array}")
@@ -271,7 +254,6 @@ def command(ctx, include, exclude, git_ssh, check_only, pull, branches, branches

for repo in repos:
try:
process_repo(verbose, quiet, dry_run, pull, check_only, git_ssh, dev_root_path, branches_array, repo)
process_repo(pull, check_only, git_ssh, dev_root_path, branches_array, repo)
except git.exc.GitCommandError as error:
print(f"\n******* git command returned error exit status:\n{error}")
sys.exit(1)
error_exit(f"\n******* git command returned error exit status:\n{error}")
20 changes: 17 additions & 3 deletions stack_orchestrator/util.py
@@ -94,10 +94,24 @@ def get_plugin_code_paths(stack) -> List[Path]:
return list(result)


def get_pod_file_path(parsed_stack, pod_name: str):
# Find a compose file, looking first in any external stack
# and if not found there, internally
def resolve_compose_file(stack, pod_name: str):
if stack_is_external(stack):
# First try looking in the external stack for the compose file
compose_base = Path(stack).parent.parent.joinpath("compose")
proposed_file = compose_base.joinpath(f"docker-compose-{pod_name}.yml")
if proposed_file.exists():
return proposed_file
# If we don't find it fall through to the internal case
compose_base = get_internal_compose_file_dir()
return compose_base.joinpath(f"docker-compose-{pod_name}.yml")


def get_pod_file_path(stack, parsed_stack, pod_name: str):
pods = parsed_stack["pods"]
if type(pods[0]) is str:
result = os.path.join(get_compose_file_dir(), f"docker-compose-{pod_name}.yml")
result = resolve_compose_file(stack, pod_name)
else:
for pod in pods:
if pod["name"] == pod_name:
@@ -131,7 +145,7 @@ def pod_has_scripts(parsed_stack, pod_name: str):
return result


def get_compose_file_dir():
def get_internal_compose_file_dir():
# TODO: refactor to use common code with deploy command
# See: https://stackoverflow.com/questions/25389095/python-get-path-of-root-project-structure
data_dir = Path(__file__).absolute().parent.joinpath("data")
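`resolve_compose_file` gives an external stack first claim on a pod's compose file and falls back to the copies bundled with `stack_orchestrator` when no external file exists. A hedged usage sketch, assuming `--stack` was given the path to the external stack directory (the path and pod name below are hypothetical):

```python
# Hedged usage sketch of the new resolver (hypothetical stack path and pod name).
from stack_orchestrator.util import resolve_compose_file

external_stack = "/home/me/cerc/my-stacks-repo/stack-orchestrator/stacks/my-stack"
compose_path = resolve_compose_file(external_stack, "my-pod")

# Returns .../my-stacks-repo/stack-orchestrator/compose/docker-compose-my-pod.yml
# when that file exists, and otherwise the docker-compose-my-pod.yml shipped in
# the package's internal compose directory.
print(compose_path)
```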