Tidy up execution timings logic (#389)
* Minor tidy up

* Simplifying timing extract
glsdown committed Oct 5, 2023
1 parent 039004a commit 5d2df3f
Showing 4 changed files with 33 additions and 160 deletions.
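
In each macro below, the change swaps a nested loop-and-branch block for a single Jinja expression per timestamp. As a rough standalone sketch of that pattern (node here is a hypothetical stand-in for the model/test loop variable used in the actual macros):

    {# Pull one stage out of the node's timing list. If the stage is missing,
       "first" returns undefined, default({}) turns that into an empty dict, and
       the key lookup then comes back falsy, so the template emits SQL null. #}
    {% set compile_started_at = (node.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %}
    {% set query_completed_at = (node.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %}

    {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #}
    {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #}

One reason the previous version was fragile: a filtered for loop with no matching items never runs its body, so its "loop.length == 0" branch could never actually render a null.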
1 change: 1 addition & 0 deletions integration_test_project/example-env.sh
@@ -18,6 +18,7 @@ export DBT_ENV_SPARK_DRIVER_PATH= # /Library/simba/spark/lib/libsparkodbc_sbu.dy
export DBT_ENV_SPARK_ENDPOINT= # The endpoint ID from the Databricks HTTP path

# dbt environment variables, change these
export DBT_VERSION="1_5_0"
export DBT_CLOUD_PROJECT_ID=
export DBT_CLOUD_JOB_ID=
export DBT_CLOUD_RUN_ID=
72 changes: 12 additions & 60 deletions macros/upload_individual_datasets/upload_seed_executions.sql
@@ -38,26 +38,10 @@
'{{ model.thread_id }}', {# thread_id #}
'{{ model.status }}', {# status #}

{% if model.timing != [] %}
{% for stage in model.timing if stage.name == "compile" %}
{% if loop.length == 0 %}
null, {# compile_started_at #}
{% else %}
'{{ stage.started_at }}', {# compile_started_at #}
{% endif %}
{% endfor %}

{% for stage in model.timing if stage.name == "execute" %}
{% if loop.length == 0 %}
null, {# query_completed_at #}
{% else %}
'{{ stage.completed_at }}', {# query_completed_at #}
{% endif %}
{% endfor %}
{% else %}
null, {# compile_started_at #}
null, {# query_completed_at #}
{% endif %}
{% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %}
{% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #}
{% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %}
{% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #}

{{ model.execution_time }}, {# total_node_runtime #}
null, -- rows_affected not available {# Only available in Snowflake #}
@@ -95,26 +79,10 @@
'{{ model.thread_id }}', {# thread_id #}
'{{ model.status }}', {# status #}

{% if model.timing != [] %}
{% for stage in model.timing if stage.name == "compile" %}
{% if loop.length == 0 %}
null, {# compile_started_at #}
{% else %}
'{{ stage.started_at }}', {# compile_started_at #}
{% endif %}
{% endfor %}

{% for stage in model.timing if stage.name == "execute" %}
{% if loop.length == 0 %}
null, {# query_completed_at #}
{% else %}
'{{ stage.completed_at }}', {# query_completed_at #}
{% endif %}
{% endfor %}
{% else %}
null, {# compile_started_at #}
null, {# query_completed_at #}
{% endif %}
{% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %}
{% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #}
{% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %}
{% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #}

{{ model.execution_time }}, {# total_node_runtime #}
null, -- rows_affected not available {# Databricks #}
@@ -170,26 +138,10 @@
'{{ model.thread_id }}', {# thread_id #}
'{{ model.status }}', {# status #}

{% if model.timing != [] %}
{% for stage in model.timing if stage.name == "compile" %}
{% if loop.length == 0 %}
null, {# compile_started_at #}
{% else %}
'{{ stage.started_at }}', {# compile_started_at #}
{% endif %}
{% endfor %}

{% for stage in model.timing if stage.name == "execute" %}
{% if loop.length == 0 %}
null, {# query_completed_at #}
{% else %}
'{{ stage.completed_at }}', {# query_completed_at #}
{% endif %}
{% endfor %}
{% else %}
null, {# compile_started_at #}
null, {# query_completed_at #}
{% endif %}
{% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %}
{% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #}
{% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %}
{% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #}

{{ model.execution_time }}, {# total_node_runtime #}
try_cast('{{ model.adapter_response.rows_affected }}' as int), {# rows_affected #}
72 changes: 12 additions & 60 deletions macros/upload_individual_datasets/upload_snapshot_executions.sql
@@ -38,26 +38,10 @@
'{{ model.thread_id }}', {# thread_id #}
'{{ model.status }}', {# status #}

{% if model.timing != [] %}
{% for stage in model.timing if stage.name == "compile" %}
{% if loop.length == 0 %}
null, {# compile_started_at #}
{% else %}
'{{ stage.started_at }}', {# compile_started_at #}
{% endif %}
{% endfor %}

{% for stage in model.timing if stage.name == "execute" %}
{% if loop.length == 0 %}
null, {# query_completed_at #}
{% else %}
'{{ stage.completed_at }}', {# query_completed_at #}
{% endif %}
{% endfor %}
{% else %}
null, {# compile_started_at #}
null, {# query_completed_at #}
{% endif %}
{% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %}
{% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #}
{% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %}
{% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #}

{{ model.execution_time }}, {# total_node_runtime #}
null, -- rows_affected not available {# Only available in Snowflake #}
@@ -95,26 +79,10 @@
'{{ model.thread_id }}', {# thread_id #}
'{{ model.status }}', {# status #}

{% if model.timing != [] %}
{% for stage in model.timing if stage.name == "compile" %}
{% if loop.length == 0 %}
null, {# compile_started_at #}
{% else %}
'{{ stage.started_at }}', {# compile_started_at #}
{% endif %}
{% endfor %}

{% for stage in model.timing if stage.name == "execute" %}
{% if loop.length == 0 %}
null, {# query_completed_at #}
{% else %}
'{{ stage.completed_at }}', {# query_completed_at #}
{% endif %}
{% endfor %}
{% else %}
null, {# compile_started_at #}
null, {# query_completed_at #}
{% endif %}
{% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %}
{% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #}
{% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %}
{% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #}

{{ model.execution_time }}, {# total_node_runtime #}
null, -- rows_affected not available {# Databricks #}
@@ -170,26 +138,10 @@
'{{ model.thread_id }}', {# thread_id #}
'{{ model.status }}', {# status #}

{% if model.timing != [] %}
{% for stage in model.timing if stage.name == "compile" %}
{% if loop.length == 0 %}
null, {# compile_started_at #}
{% else %}
'{{ stage.started_at }}', {# compile_started_at #}
{% endif %}
{% endfor %}

{% for stage in model.timing if stage.name == "execute" %}
{% if loop.length == 0 %}
null, {# query_completed_at #}
{% else %}
'{{ stage.completed_at }}', {# query_completed_at #}
{% endif %}
{% endfor %}
{% else %}
null, {# compile_started_at #}
null, {# query_completed_at #}
{% endif %}
{% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %}
{% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #}
{% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %}
{% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #}

{{ model.execution_time }}, {# total_node_runtime #}
try_cast('{{ model.adapter_response.rows_affected }}' as int), {# rows_affected #}
48 changes: 8 additions & 40 deletions macros/upload_individual_datasets/upload_test_executions.sql
@@ -35,26 +35,10 @@
'{{ test.thread_id }}', {# thread_id #}
'{{ test.status }}', {# status #}

{% if test.timing != [] %}
{% for stage in test.timing if stage.name == "compile" %}
{% if loop.length == 0 %}
null, {# compile_started_at #}
{% else %}
'{{ stage.started_at }}', {# compile_started_at #}
{% endif %}
{% endfor %}

{% for stage in test.timing if stage.name == "execute" %}
{% if loop.length == 0 %}
null, {# query_completed_at #}
{% else %}
'{{ stage.completed_at }}', {# query_completed_at #}
{% endif %}
{% endfor %}
{% else %}
null, {# compile_started_at #}
null, {# query_completed_at #}
{% endif %}
{% set compile_started_at = (test.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %}
{% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #}
{% set query_completed_at = (test.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %}
{% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #}

{{ test.execution_time }}, {# total_node_runtime #}
null, {# rows_affected not available in Databricks #}
@@ -89,26 +73,10 @@
'{{ test.thread_id }}', {# thread_id #}
'{{ test.status }}', {# status #}

{% if test.timing != [] %}
{% for stage in test.timing if stage.name == "compile" %}
{% if loop.length == 0 %}
null, {# compile_started_at #}
{% else %}
'{{ stage.started_at }}', {# compile_started_at #}
{% endif %}
{% endfor %}

{% for stage in test.timing if stage.name == "execute" %}
{% if loop.length == 0 %}
null, {# query_completed_at #}
{% else %}
'{{ stage.completed_at }}', {# query_completed_at #}
{% endif %}
{% endfor %}
{% else %}
null, {# compile_started_at #}
null, {# query_completed_at #}
{% endif %}
{% set compile_started_at = (test.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %}
{% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #}
{% set query_completed_at = (test.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %}
{% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #}

{{ test.execution_time }}, {# total_node_runtime #}
null, {# rows_affected not available in Databricks #}
