Move a dataset's Terraform files into their own folder (#2)
adlersantos committed Apr 26, 2021
1 parent 0e21d45 commit 73141df
Showing 15 changed files with 41 additions and 22 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -96,7 +96,7 @@ $ python scripts/generate_terraform.py \
[--impersonating-acct] IMPERSONATING_SERVICE_ACCT
```

This generates Terraform files (`*.tf`) in the dataset directory to represent which GCP resources need to be actuated. If you passed in the `--tf-apply` parameter, the command will also run `terraform apply` to actuate those resources.
This generates Terraform files (`*.tf`) in a `_terraform` directory inside that dataset. The files contain infrastructure-as-code describing which GCP resources need to be actuated for the pipelines to use. If you passed in the `--tf-apply` parameter, the command will also run `terraform apply` to actuate those resources.

In addition, the command above creates a "dot" directory in the project root. The directory name is the value you pass to the `--env` parameter of the command. If no `--env` argument was passed, the value defaults to `dev` (which generates the `.dev` folder).

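For context on the two README paragraphs above, here is a minimal, hypothetical sketch (not part of this commit) of where the generated files are expected to land after this change, assuming a dataset id of `my_dataset` and the default `--env=dev`:

```python
# Sketch only: dataset id "my_dataset" and the default --env=dev are
# hypothetical choices; paths mirror the directories touched in this commit.
import pathlib

PROJECT_ROOT = pathlib.Path(".")  # repository root
dataset_id = "my_dataset"         # hypothetical dataset id

expected_paths = [
    # checked-in copies under the project tree
    PROJECT_ROOT / "datasets" / dataset_id / "_terraform" / "provider.tf",
    PROJECT_ROOT / "datasets" / dataset_id / "_terraform" / "variables.tf",
    PROJECT_ROOT / "datasets" / dataset_id / "_terraform" / f"{dataset_id}_dataset.tf",
    # gitignored copy of the tfvars file under the "dot" env directory
    PROJECT_ROOT / ".dev" / "datasets" / dataset_id / "_terraform" / "terraform.tfvars",
]

for path in expected_paths:
    print(path, "exists" if path.exists() else "missing")
```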
File renamed without changes.
File renamed without changes.
21 changes: 15 additions & 6 deletions scripts/generate_terraform.py
@@ -159,7 +159,7 @@ def generate_tfvars_file(
TEMPLATE_PATHS["tfvars"], {"tf_vars": tf_vars}
)

target_path = env_path / "datasets" / dataset_id / "terraform.tfvars"
target_path = env_path / "datasets" / dataset_id / "_terraform" / "terraform.tfvars"
write_to_file(contents + "\n", target_path)
terraform_fmt(target_path)
print_created_files([target_path])
@@ -202,14 +202,24 @@ def uppercase_bq_schema_types(schema: list) -> list:


def create_gitignored_env_path(dataset_id: str, env_path: pathlib.Path):
if not (PROJECT_ROOT / "datasets" / dataset_id).exists():
raise FileNotFoundError(
f"Directory {PROJECT_ROOT / 'datasets' / dataset_id} doesn't exist"
)
(env_path / "datasets" / dataset_id).mkdir(parents=True, exist_ok=True)


def create_file_in_dot_and_project_dirs(
dataset_id: str, contents: str, filename: str, env_path: pathlib.Path
):
filepaths = []
for prefix in (env_path / "datasets" / dataset_id, DATASETS_PATH / dataset_id):
for prefix in (
env_path / "datasets" / dataset_id / "_terraform",
DATASETS_PATH / dataset_id / "_terraform",
):
if not prefix.exists():
prefix.mkdir(parents=True, exist_ok=True)

target_path = prefix / filename
write_to_file(contents + "\n", target_path)
terraform_fmt(target_path)
@@ -243,10 +253,9 @@ def terraform_fmt(target_file: pathlib.Path):


def actuate_terraform_resources(dataset_id: str, env_path: pathlib.Path):
subprocess.check_call(["terraform", "init"], cwd=env_path / "datasets" / dataset_id)
subprocess.check_call(
["terraform", "apply"], cwd=env_path / "datasets" / dataset_id
)
cwd = env_path / "datasets" / dataset_id / "_terraform"
subprocess.check_call(["terraform", "init"], cwd=cwd)
subprocess.check_call(["terraform", "apply"], cwd=cwd)


def apply_substitutions_to_template(template: pathlib.Path, subs: dict) -> str:
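Taken together, the changes to this script mean that every generated Terraform file is written into a `_terraform` subfolder in both the gitignored env directory and the project's `datasets` directory, and that `terraform init`/`apply` now run from that subfolder. The snippet below is a standalone sketch of that behavior, not the script itself; `write_generated_file` and `run_terraform` are illustrative names, and a plain `write_text` stands in for the script's `write_to_file` and `terraform_fmt` helpers.

```python
# Illustrative sketch of the new path handling; not the actual script.
import pathlib
import subprocess


def write_generated_file(
    dataset_id: str,
    contents: str,
    filename: str,
    env_path: pathlib.Path,
    datasets_path: pathlib.Path,
) -> None:
    # Write the same generated file into both _terraform directories.
    for prefix in (
        env_path / "datasets" / dataset_id / "_terraform",
        datasets_path / dataset_id / "_terraform",
    ):
        prefix.mkdir(parents=True, exist_ok=True)  # create _terraform if missing
        (prefix / filename).write_text(contents + "\n")


def run_terraform(dataset_id: str, env_path: pathlib.Path) -> None:
    # Terraform now runs inside the dataset's _terraform subfolder.
    cwd = env_path / "datasets" / dataset_id / "_terraform"
    subprocess.check_call(["terraform", "init"], cwd=cwd)
    subprocess.check_call(["terraform", "apply"], cwd=cwd)
```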
40 changes: 25 additions & 15 deletions tests/scripts/test_generate_terraform.py
@@ -116,15 +116,17 @@ def test_main_generates_tf_files(
)

for path_prefix in (
ENV_DATASETS_PATH / dataset_path.name,
generate_terraform.DATASETS_PATH / dataset_path.name,
ENV_DATASETS_PATH / dataset_path.name / "_terraform",
generate_terraform.DATASETS_PATH / dataset_path.name / "_terraform",
):
assert (path_prefix / "provider.tf").exists()
assert (path_prefix / f"{dataset_path.name}_dataset.tf").exists()
assert (path_prefix / f"{pipeline_path.name}_pipeline.tf").exists()
assert (path_prefix / "variables.tf").exists()

assert (ENV_DATASETS_PATH / dataset_path.name / "terraform.tfvars").exists()
assert (
ENV_DATASETS_PATH / dataset_path.name / "_terraform" / "terraform.tfvars"
).exists()


pipeline_path_2 = pipeline_path
@@ -151,16 +153,18 @@ def test_main_with_multiple_pipelines(
)

for path_prefix in (
ENV_DATASETS_PATH / dataset_path.name,
generate_terraform.DATASETS_PATH / dataset_path.name,
ENV_DATASETS_PATH / dataset_path.name / "_terraform",
generate_terraform.DATASETS_PATH / dataset_path.name / "_terraform",
):
assert (path_prefix / "provider.tf").exists()
assert (path_prefix / f"{dataset_path.name}_dataset.tf").exists()
assert (path_prefix / f"{pipeline_path.name}_pipeline.tf").exists()
assert (path_prefix / f"{pipeline_path_2.name}_pipeline.tf").exists()
assert (path_prefix / "variables.tf").exists()

assert (ENV_DATASETS_PATH / dataset_path.name / "terraform.tfvars").exists()
assert (
ENV_DATASETS_PATH / dataset_path.name / "_terraform" / "terraform.tfvars"
).exists()


def test_dataset_without_any_pipelines(
@@ -173,8 +177,8 @@ def test_dataset_without_any_pipelines(
)

for path_prefix in (
ENV_DATASETS_PATH / dataset_path.name,
generate_terraform.DATASETS_PATH / dataset_path.name,
ENV_DATASETS_PATH / dataset_path.name / "_terraform",
generate_terraform.DATASETS_PATH / dataset_path.name / "_terraform",
):
assert (path_prefix / "provider.tf").exists()
assert (path_prefix / f"{dataset_path.name}_dataset.tf").exists()
@@ -210,8 +214,8 @@ def test_generated_tf_files_contain_license_headers(
).read_text()

for path_prefix in (
ENV_DATASETS_PATH / dataset_path.name,
generate_terraform.DATASETS_PATH / dataset_path.name,
ENV_DATASETS_PATH / dataset_path.name / "_terraform",
generate_terraform.DATASETS_PATH / dataset_path.name / "_terraform",
):
assert (path_prefix / "provider.tf").read_text().count(license_header) == 1
assert (path_prefix / f"{dataset_path.name}_dataset.tf").read_text().count(
@@ -223,7 +227,7 @@
assert (path_prefix / "variables.tf").read_text().count(license_header) == 1

assert (
ENV_DATASETS_PATH / dataset_path.name / "terraform.tfvars"
ENV_DATASETS_PATH / dataset_path.name / "_terraform" / "terraform.tfvars"
).read_text().count(license_header) == 1


@@ -256,8 +260,10 @@ def test_validation_on_generated_tf_files_in_dot_env_dir(
)
env_dataset_path = ENV_DATASETS_PATH / dataset_path.name

subprocess.check_call(["terraform", "init"], cwd=env_dataset_path)
subprocess.check_call(["terraform", "validate"], cwd=env_dataset_path)
subprocess.check_call(["terraform", "init"], cwd=env_dataset_path / "_terraform")
subprocess.check_call(
["terraform", "validate"], cwd=env_dataset_path / "_terraform"
)


def test_validation_on_generated_tf_files_in_project_dir(
@@ -277,5 +283,9 @@
)
project_dataset_path = generate_terraform.DATASETS_PATH / dataset_path.name

subprocess.check_call(["terraform", "init"], cwd=(project_dataset_path))
subprocess.check_call(["terraform", "validate"], cwd=(project_dataset_path))
subprocess.check_call(
["terraform", "init"], cwd=(project_dataset_path / "_terraform")
)
subprocess.check_call(
["terraform", "validate"], cwd=(project_dataset_path / "_terraform")
)
