"""Build and manage the Docker environment used to develop the Airflow site.

Provides a small CLI (shell / info / mysql / stop / build-image) that wraps
``docker`` subcommands for the ``airflow-site`` image and its container.
"""
from pathlib import Path
import argparse
import os
import subprocess
import sys
from typing import List, Mapping, Optional, Union

IMAGE_NAME = "airflow-site"
CONTAINER_NAME = "airflow-site-c"
SITE_DIST = "landing-pages/dist"
THEME_GEN = "sphinx_airflow_theme/sphinx_airflow_theme/static/_gen"

# A command either completes (CompletedProcess) or fails (CalledProcessError);
# run_command returns whichever happened instead of propagating the exception.
RunCommandResult = Union[subprocess.CompletedProcess, subprocess.CalledProcessError]


def run_command(
    cmd: List[str],
    env: Optional[Mapping[str, str]] = None,
    cwd: Optional[Path] = None,
    input: Optional[str] = None,  # name shadows builtin but is kept for backward compatibility
    check: bool = True,
    **kwargs,
) -> RunCommandResult:
    """Run *cmd* as a subprocess, layering *env* on top of the current environment.

    :param cmd: command and arguments as a list (no shell is involved).
    :param env: extra environment variables merged over ``os.environ``.
    :param cwd: working directory; defaults to the current directory.
    :param input: text piped to the child's stdin.
    :param check: when True, a non-zero exit raises CalledProcessError,
        which is caught, printed, and returned.
    :param kwargs: forwarded verbatim to ``subprocess.run``
        (e.g. ``text=True``, ``capture_output=True``).
    :return: the CompletedProcess on success, or the CalledProcessError
        object on failure (its stdout/stderr are printed first).
    """
    workdir: str = str(cwd) if cwd else os.getcwd()
    cmd_env = os.environ.copy()
    # Some docker invocations rely on HOME being set (e.g. config lookup).
    cmd_env.setdefault("HOME", str(Path.home()))
    if env:
        cmd_env.update(env)
    try:
        return subprocess.run(cmd, input=input, check=check, env=cmd_env, cwd=workdir, **kwargs)
    except subprocess.CalledProcessError as ex:
        # Surface the child's output so the failure is visible, then hand the
        # error object back to the caller instead of aborting the script.
        if ex.stdout:
            print(ex.stdout)
        if ex.stderr:
            print(ex.stderr)
        return ex


def check_docker_environment() -> bool:
    """Return True when this script is running inside a Docker container.

    Docker creates ``/.dockerenv`` at the container's filesystem root; the
    previous relative ``.dockerenv`` check only worked when the current
    working directory happened to be ``/``.
    """
    return Path("/.dockerenv").is_file()


def prevent_docker() -> None:
    """Exit with status 1 when run inside Docker — these commands need the host."""
    if check_docker_environment():
        print("This command does not run in docker environment. Run this command from the host system")
        sys.exit(1)


def build_image() -> None:
    """Build the site Docker image from the Dockerfile in the current directory."""
    run_command(["docker", "build", "-t", IMAGE_NAME, "."], text=True, capture_output=True)


def ensure_image_exists() -> None:
    """Build the image if ``docker images`` does not already list it.

    ``docker images <name> -q`` exits 0 even when the image is absent;
    absence is signalled by EMPTY stdout, not by the return code
    (the previous ``returncode != 0`` test could never trigger).
    """
    result = run_command(["docker", "images", IMAGE_NAME, "-q"], text=True, capture_output=True)
    if not (result.stdout or "").strip():
        print("Image does not exist")
        build_image()


def build_container() -> None:
    """Create (but do not start) the site container from the image.

    The previous implementation re-ran ``docker build`` here, which produces
    an image, not a container; ``docker create`` is what makes
    ``CONTAINER_NAME`` exist.
    """
    run_command(
        ["docker", "create", "--name", CONTAINER_NAME, IMAGE_NAME],
        text=True,
        capture_output=True,
    )


def ensure_container_exists() -> None:
    """Create the container if ``docker container ls`` does not list it.

    As with images, absence means empty stdout rather than a non-zero exit
    code, and the docker list-filter key is lowercase ``name``.
    """
    cmd = ["docker", "container", "ls", "-a", "--filter", "name={}".format(CONTAINER_NAME), "-q"]
    result = run_command(cmd, text=True, capture_output=True)
    if not (result.stdout or "").strip():
        print("Container does not exist")
        build_container()


def ensure_container_running() -> bool:
    """Return True when the container's inspected state is ``running``."""
    cmd = ["docker", "inspect", CONTAINER_NAME, "--format", "{{.State.Status}}"]
    status = run_command(cmd, text=True, capture_output=True)
    return bool(status.stdout) and status.stdout.strip() == "running"


def run_container() -> None:
    """Start the container unless it is already running.

    The previous condition was inverted: it issued ``docker start`` only when
    the container was ALREADY running.
    """
    if ensure_container_running():
        print("Container is already running")
    else:
        run_command(["docker", "start", CONTAINER_NAME])


def remove_docker_container() -> None:
    """Remove the site container."""
    run_command(["docker", "rm", CONTAINER_NAME])


def remove_docker_image() -> None:
    """Remove the site image."""
    run_command(["docker", "rmi", IMAGE_NAME])


def kill_container() -> None:
    """Kill the running site container."""
    run_command(["docker", "kill", CONTAINER_NAME])


def clean_up_environment() -> None:
    """Kill the container if running, then remove the container and the image.

    The previous version ``sys.exit(0)``-ed right after killing and built the
    removal command lists without ever executing them, so no cleanup happened.
    """
    if ensure_container_running():
        kill_container()
    remove_docker_container()
    remove_docker_image()


def prepare_environment() -> None:
    """From the host, make sure the image and container exist (no-op in Docker)."""
    if not check_docker_environment():
        ensure_image_exists()
        ensure_container_exists()


def run_build_image(args: argparse.Namespace) -> None:
    """Build the site Docker image (host only)."""
    if not check_docker_environment():
        build_image()


def run_shell(args: argparse.Namespace) -> None:
    """
    Open shell access to Airflow's worker. This allows you to test commands in the context of
    the Airflow instance.
    """
    # Bail out before any setup: exec'ing into the container needs the host.
    prevent_docker()
    prepare_environment()
    run_command(["docker", "exec", "-ti", CONTAINER_NAME, "bash"])


def run_stop(args: argparse.Namespace) -> None:
    """
    Stops the Airflow worker container if it is running.
    """
    # Note: the original file defined run_stop twice; the first (docker-env
    # check + sys.exit) was silently shadowed by this one, so it was dropped.
    prevent_docker()
    kill_container()


def run_info(args: argparse.Namespace) -> None:
    """
    Run arbitrary command on the Airflow worker.
    Example:
    To list current running process, run:
    pianka.py run -- ps -aux
    To list DAGs, run:
    pianka.py run -- airflow list_dags
    """
    print("Info", args)


def run_mysql(args: argparse.Namespace) -> None:
    """
    Starts the MySQL console. Additional parameters are passed to the mysql client.
    Tip:
    If you want to execute "SELECT 123" query, run following command:
    pianka.sh mysql -- --execute="SELECT 123"
    """
    print("MySQL", args)


def ger_parser() -> argparse.ArgumentParser:
    """Build the CLI argument parser.

    The misspelled name is kept for backward compatibility with any caller.
    """
    parser = argparse.ArgumentParser(
        description="Various commands used to build the airflow site",
        formatter_class=argparse.RawTextHelpFormatter,
    )
    subparsers = parser.add_subparsers(help="sub-command help", metavar="COMMAND")
    subparsers.required = True

    # One entry per sub-command; "mysql" previously dispatched to run_info.
    for name, handler in (
        ("shell", run_shell),
        ("info", run_info),
        ("mysql", run_mysql),
        ("stop", run_stop),
        ("build-image", run_build_image),
    ):
        sub = subparsers.add_parser(name, help=handler.__doc__)
        sub.set_defaults(func=handler)
    return parser


def main() -> None:
    """CLI entry point: parse arguments and dispatch to the chosen command."""
    parser = ger_parser()
    args = parser.parse_args()
    args.func(args)


# Guarded so the module can be imported without parsing sys.argv; the original
# ran the parser at import time as a module-level side effect.
if __name__ == "__main__":
    main()