Skip to content

Commit

Permalink
Merge branch 'main' into setting-up-new-data-provider
Browse files Browse the repository at this point in the history
  • Loading branch information
benhammondmusic committed May 29, 2024
2 parents 495fd62 + 16a9ab4 commit 77025ef
Show file tree
Hide file tree
Showing 57 changed files with 111,431 additions and 113,228 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/runSuperLinter.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ jobs:
fetch-depth: 0
# Lint changed files of these languages using Super Linter shared action
- name: Lint Code Base
uses: super-linter/super-linter/slim@v6.5.0
uses: super-linter/super-linter/slim@v6.5.1
env:
DEFAULT_BRANCH: main
VALIDATE_ALL_CODEBASE: false
Expand Down
6 changes: 3 additions & 3 deletions airflow/dags/cdc_restricted.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from airflow import DAG # type: ignore
from airflow.models import Variable # type: ignore
from airflow.utils.dates import days_ago # type: ignore
from airflow import DAG # pylint: disable=no-name-in-module
from airflow.models import Variable # pylint: disable=no-name-in-module
from airflow.utils.dates import days_ago # pylint: disable=no-name-in-module

import util

Expand Down
28 changes: 28 additions & 0 deletions airflow/dags/chr.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
"""Airflow ingestion DAG for County Health Rankings (CHR) data.

Runs the CHR BigQuery ingestion step, then the exporter for the resulting
dataset. Mirrors the structure of the other single-source ingestion DAGs
in this directory.
"""
# Ignore the Airflow module, it is installed in both dev and prod
from airflow import DAG  # pylint: disable=no-name-in-module
from airflow.utils.dates import days_ago  # pylint: disable=no-name-in-module

import util

# Must match the workflow ID used by the ingestion service and the
# `chr_data` BigQuery dataset declared in config/data_sources/chr_data.tf.
_CHR_WORKFLOW_ID = 'CHR_DATA'
_CHR_DATASET_NAME = 'chr_data'

default_args = {
    'start_date': days_ago(0),
}

data_ingestion_dag = DAG(
    'chr_ingestion_dag',
    default_args=default_args,
    # Manually triggered only; no cron schedule.
    schedule_interval=None,
    description='Ingestion configuration for CHR',
)

# Ingest CHR data into BigQuery (this source provides the 'race' demographic).
chr_bq_payload = util.generate_bq_payload(_CHR_WORKFLOW_ID, _CHR_DATASET_NAME, demographic='race')
# NOTE: renamed from `chr_pop_bq_operator` — the "pop" was a copy/paste
# artifact from a population DAG; this operator ingests CHR data.
chr_bq_operator = util.create_bq_ingest_operator('chr_to_bq', chr_bq_payload, data_ingestion_dag)

# Export the finished dataset once ingestion completes.
chr_exporter_payload = {'dataset_name': _CHR_DATASET_NAME}
chr_exporter_operator = util.create_exporter_operator('chr_exporter', chr_exporter_payload, data_ingestion_dag)

# Ingestion DAG: BigQuery ingest must finish before the export runs.
chr_bq_operator >> chr_exporter_operator
7 changes: 7 additions & 0 deletions config/data_sources/chr_data.tf
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Resources and routines for CHR Data ingestion.

# Create a BigQuery dataset for CHR data.
# The dataset_id must match the `_CHR_DATASET_NAME` value ('chr_data') used
# by the CHR ingestion DAG (airflow/dags/chr.py) so the pipeline writes into
# this dataset.
resource "google_bigquery_dataset" "chr" {
dataset_id = "chr_data"
location = "US"
}
6,287 changes: 3,144 additions & 3,143 deletions data/cdc_svi_county/cdc_svi_county_totals.csv

Large diffs are not rendered by default.

Binary file not shown.
8 changes: 4 additions & 4 deletions exporter/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ google-auth==2.29.0
# google-cloud-storage
google-cloud==0.34.0
# via -r requirements.in
google-cloud-bigquery==3.23.0
google-cloud-bigquery==3.23.1
# via -r requirements.in
google-cloud-core==2.4.1
# via
Expand Down Expand Up @@ -94,7 +94,7 @@ packaging==21.3
# db-dtypes
# google-cloud-bigquery
# gunicorn
pandas==2.0.3
pandas==2.2.2
# via
# -r requirements.in
# db-dtypes
Expand All @@ -117,13 +117,13 @@ pyasn1-modules==0.2.8
# via google-auth
pyparsing==3.0.9
# via packaging
python-dateutil==2.8.2
python-dateutil==2.9.0.post0
# via
# google-cloud-bigquery
# pandas
pytz==2022.2.1
# via pandas
requests==2.32.0
requests==2.32.2
# via
# -r requirements.in
# google-api-core
Expand Down
Loading

0 comments on commit 77025ef

Please sign in to comment.