Merge branch 'main' into bls-dataset
adlersantos committed Jun 24, 2021
2 parents 3c83177 + ef01fe6 commit 0ee4376
Showing 6 changed files with 213 additions and 131 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -32,7 +32,7 @@ repos:
     hooks:
       - id: flake8
   - repo: https://github.com/pycqa/isort
-    rev: '5.8.0'
+    rev: '5.9.1'
     hooks:
       - id: isort
         args: ["--profile", "black", "--filter-files"]
2 changes: 1 addition & 1 deletion Pipfile
@@ -15,7 +15,7 @@ kubernetes = "*"
 pandas-gbq = "==0.14.1"
 pytest-mock = "*"
 pytest = "*"
-"ruamel.yaml" = "==0.17.9"
+"ruamel.yaml" = "==0.17.10"
 Jinja2 = "*"
 SQLAlchemy = "==1.3.15"

261 changes: 134 additions & 127 deletions Pipfile.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion templates/terraform/google_bigquery_dataset.tf.jinja2
@@ -22,7 +22,7 @@ resource "google_bigquery_dataset" "{{ dataset_id }}" {
   friendly_name = "{{ friendly_name }}"
   {% endif -%}
   {% if description -%}
-  description = "{{ description }}"
+  description = {{ description|tojson }}
   {% endif -%}
   {% if location -%}
   location = "{{ location }}"
2 changes: 1 addition & 1 deletion templates/terraform/google_bigquery_table.tf.jinja2
@@ -21,7 +21,7 @@ resource "google_bigquery_table" "{{ tf_resource_name }}" {
   table_id = "{{ table_id }}"

   {% if description -%}
-  description = "{{ description }}"
+  description = {{ description|tojson }}
   {%- endif %}
   {% if schema -%}
   schema = <<EOF
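Both template changes fix the same issue: wrapping `{{ description }}` in literal double quotes produces invalid HCL whenever the description contains newlines or embedded quotes. Jinja2's built-in `tojson` filter (available since Jinja2 2.9) serializes the value to a JSON string literal, which is also a valid quoted Terraform string, so those characters come out escaped. A minimal standalone sketch of the difference (not code from this repository):

```python
# Standalone sketch; assumes Jinja2 >= 2.9, where `tojson` is a built-in filter.
from jinja2 import Environment

env = Environment()
description = 'Multiline\nstring with\n"quotes"'

broken = env.from_string('description = "{{ description }}"').render(description=description)
fixed = env.from_string('description = {{ description|tojson }}').render(description=description)

print(broken)  # raw newlines and quotes land inside the HCL string -> invalid Terraform
print(fixed)   # description = "Multiline\nstring with\n\"quotes\"" -> valid Terraform
```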
75 changes: 75 additions & 0 deletions tests/scripts/test_generate_terraform.py
@@ -454,6 +454,44 @@ def test_dataset_tf_file_contains_description_when_specified(
     assert re.search(r"description\s+\=", result.group(1))


+def test_bq_dataset_can_have_a_description_with_newlines_and_quotes(
+    dataset_path,
+    pipeline_path,
+    project_id,
+    bucket_name_prefix,
+    region,
+    impersonating_acct,
+    env,
+):
+    shutil.copyfile(SAMPLE_YAML_PATHS["dataset"], dataset_path / "dataset.yaml")
+    shutil.copyfile(SAMPLE_YAML_PATHS["pipeline"], pipeline_path / "pipeline.yaml")
+
+    config = yaml.load(open(dataset_path / "dataset.yaml"))
+
+    # Get a bigquery_dataset resource and modify the `description` field
+    bq_dataset = next(
+        (r for r in config["resources"] if r["type"] == "bigquery_dataset"), None
+    )
+    test_description = 'Multiline\nstring with\n"quotes"'
+    bq_dataset["description"] = test_description
+    with open(dataset_path / "dataset.yaml", "w") as file:
+        yaml.dump(config, file)
+
+    generate_terraform.main(
+        dataset_path.name,
+        project_id,
+        bucket_name_prefix,
+        region,
+        impersonating_acct,
+        env,
+        None,
+        None,
+    )
+
+    env_dataset_path = ENV_DATASETS_PATH / dataset_path.name
+    subprocess.check_call(["terraform", "fmt"], cwd=env_dataset_path / "_terraform")
+
+
 def test_dataset_tf_has_no_bq_dataset_description_when_unspecified(
     dataset_path,
     pipeline_path,
@@ -601,6 +639,43 @@ def test_pipeline_tf_has_no_bq_table_description_when_unspecified(
     assert not re.search(r"description\s+\=", result.group(1))


+def test_bq_table_can_have_a_description_with_newlines_and_quotes(
+    dataset_path,
+    pipeline_path,
+    project_id,
+    bucket_name_prefix,
+    region,
+    impersonating_acct,
+    env,
+):
+    shutil.copyfile(SAMPLE_YAML_PATHS["dataset"], dataset_path / "dataset.yaml")
+    shutil.copyfile(SAMPLE_YAML_PATHS["pipeline"], pipeline_path / "pipeline.yaml")
+
+    config = yaml.load(open(pipeline_path / "pipeline.yaml"))
+
+    # Get a bigquery_table resource and modify the `description` field
+    bq_table = next(
+        (r for r in config["resources"] if r["type"] == "bigquery_table"), None
+    )
+    bq_table["description"] = 'Multiline\nstring with\n"quotes"'
+    with open(pipeline_path / "pipeline.yaml", "w") as file:
+        yaml.dump(config, file)
+
+    generate_terraform.main(
+        dataset_path.name,
+        project_id,
+        bucket_name_prefix,
+        region,
+        impersonating_acct,
+        env,
+        None,
+        None,
+    )
+
+    env_dataset_path = ENV_DATASETS_PATH / dataset_path.name
+    subprocess.check_call(["terraform", "fmt"], cwd=env_dataset_path / "_terraform")
+
+
 def test_bq_table_name_starts_with_digits_but_tf_resource_name_does_not(
     dataset_path,
     pipeline_path,
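Note that neither new test contains an explicit `assert`: the trailing `subprocess.check_call(["terraform", "fmt"], ...)` is the check. `check_call` raises `CalledProcessError` when the command exits non-zero, and `terraform fmt` fails on `.tf` files it cannot parse, so an unescaped multiline description breaks the test. A small sketch of that mechanism (the directory path here is hypothetical):

```python
# Sketch only: why a bare `terraform fmt` call acts as the test assertion.
import subprocess

try:
    # terraform fmt parses every .tf file it formats; invalid HCL (e.g. a raw
    # newline inside a quoted string) makes it exit with a non-zero code.
    subprocess.check_call(["terraform", "fmt"], cwd="datasets/example/_terraform")  # hypothetical path
except subprocess.CalledProcessError as exc:
    # check_call turns the non-zero exit into an exception, which fails the test.
    print(f"terraform fmt failed with exit code {exc.returncode}")
```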
