diff --git a/setup.py b/setup.py index d45480c13e..d44ae54b47 100644 --- a/setup.py +++ b/setup.py @@ -94,6 +94,7 @@ "ruff~=0.7.0", "snowflake-connector-python[pandas,secure-local-storage]>=3.0.2", "sqlalchemy-stubs", + "time-machine", "types-croniter", "types-dateparser", "types-python-dateutil", diff --git a/tests/cli/test_cli.py b/tests/cli/test_cli.py index fe3c07a694..85b4cf0877 100644 --- a/tests/cli/test_cli.py +++ b/tests/cli/test_cli.py @@ -6,7 +6,7 @@ import pytest from click.testing import CliRunner -from freezegun import freeze_time +import time_machine from sqlmesh.cli.example_project import ProjectTemplate, init_example_project from sqlmesh.cli.main import cli @@ -14,7 +14,7 @@ from sqlmesh.integrations.dlt import generate_dlt_models from sqlmesh.utils.date import yesterday_ds -FREEZE_TIME = "2023-01-01 00:00:00" +FREEZE_TIME = "2023-01-01 00:00:00 UTC" pytestmark = pytest.mark.slow @@ -658,7 +658,7 @@ def test_run_no_prod(runner, tmp_path): @pytest.mark.parametrize("flag", ["--skip-backfill", "--dry-run"]) -@freeze_time(FREEZE_TIME) +@time_machine.travel(FREEZE_TIME) def test_run_dev(runner, tmp_path, flag): create_example_project(tmp_path) @@ -676,7 +676,7 @@ def test_run_dev(runner, tmp_path, flag): assert_model_batches_executed(result) -@freeze_time(FREEZE_TIME) +@time_machine.travel(FREEZE_TIME) def test_run_cron_not_elapsed(runner, tmp_path, caplog): create_example_project(tmp_path) init_prod_and_backfill(runner, tmp_path) @@ -692,11 +692,12 @@ def test_run_cron_elapsed(runner, tmp_path): create_example_project(tmp_path) # Create and backfill `prod` environment - with freeze_time("2023-01-01 23:59:00"): + with time_machine.travel("2023-01-01 23:59:00 UTC", tick=False) as traveler: + runner = CliRunner() init_prod_and_backfill(runner, tmp_path) - # Run `prod` environment with daily cron elapsed - with freeze_time("2023-01-02 00:01:00"): + # Run `prod` environment with daily cron elapsed + traveler.move_to("2023-01-02 00:01:00 UTC") result = 
runner.invoke(cli, ["--log-file-dir", tmp_path, "--paths", tmp_path, "run"]) assert result.exit_code == 0 diff --git a/tests/core/test_context.py b/tests/core/test_context.py index 038d4a4de5..89ea49e75e 100644 --- a/tests/core/test_context.py +++ b/tests/core/test_context.py @@ -6,7 +6,7 @@ from tempfile import TemporaryDirectory from unittest.mock import PropertyMock, call, patch -import freezegun +import time_machine import pytest import pandas as pd from pathlib import Path @@ -850,7 +850,7 @@ def test_plan_default_end(sushi_context_pre_scheduling: Context): @pytest.mark.slow def test_plan_start_ahead_of_end(copy_to_temp_path): path = copy_to_temp_path("examples/sushi") - with freezegun.freeze_time("2024-01-02 00:00:00"): + with time_machine.travel("2024-01-02 00:00:00 UTC"): context = Context(paths=path, gateway="duckdb_persistent") context.plan("prod", no_prompts=True, auto_apply=True) assert all( @@ -858,7 +858,7 @@ def test_plan_start_ahead_of_end(copy_to_temp_path): for i in context.state_sync.max_interval_end_per_model("prod").values() ) context.close() - with freezegun.freeze_time("2024-01-03 00:00:00"): + with time_machine.travel("2024-01-03 00:00:00 UTC"): context = Context(paths=path, gateway="duckdb_persistent") expression = d.parse( """ diff --git a/tests/core/test_integration.py b/tests/core/test_integration.py index d5fa808e1f..5e44c59c7d 100644 --- a/tests/core/test_integration.py +++ b/tests/core/test_integration.py @@ -9,7 +9,7 @@ import pandas as pd import pytest from pathlib import Path -from freezegun import freeze_time +import time_machine from pytest_mock.plugin import MockerFixture from sqlglot import exp from sqlglot.expressions import DataType @@ -75,7 +75,7 @@ def plan_choice(plan_builder: PlanBuilder, choice: SnapshotChangeCategory) -> No plan_builder.set_choice(snapshot, choice) -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") @pytest.mark.parametrize( "context_fixture", ["sushi_context", 
"sushi_no_default_catalog"], @@ -215,7 +215,7 @@ def test_forward_only_plan_with_effective_date(context_fixture: Context, request ] -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_forward_only_model_regular_plan(init_and_plan_context: t.Callable): context, plan = init_and_plan_context("examples/sushi") context.apply(plan) @@ -308,7 +308,7 @@ def test_forward_only_model_regular_plan(init_and_plan_context: t.Callable): assert not prod_df["event_date"].tolist() -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_forward_only_model_regular_plan_preview_enabled(init_and_plan_context: t.Callable): context, plan = init_and_plan_context("examples/sushi") context.apply(plan) @@ -358,7 +358,7 @@ def test_forward_only_model_regular_plan_preview_enabled(init_and_plan_context: assert dev_df["event_date"].tolist() == [pd.to_datetime("2023-01-07")] -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_full_history_restatement_model_regular_plan_preview_enabled( init_and_plan_context: t.Callable, ): @@ -431,7 +431,7 @@ def test_full_history_restatement_model_regular_plan_preview_enabled( context.apply(plan) -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_metadata_changed_regular_plan_preview_enabled(init_and_plan_context: t.Callable): context, plan = init_and_plan_context("examples/sushi") context.apply(plan) @@ -459,7 +459,7 @@ def test_metadata_changed_regular_plan_preview_enabled(init_and_plan_context: t. assert not plan.restatements -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_hourly_model_with_lookback_no_backfill_in_dev(init_and_plan_context: t.Callable): context, plan = init_and_plan_context("examples/sushi") @@ -486,7 +486,7 @@ def test_hourly_model_with_lookback_no_backfill_in_dev(init_and_plan_context: t. 
context.get_snapshot(model, raise_if_missing=True) top_waiters_snapshot = context.get_snapshot("sushi.top_waiters", raise_if_missing=True) - with freeze_time(now() + timedelta(hours=2)): + with time_machine.travel(now() + timedelta(hours=2)): plan = context.plan("dev", no_prompts=True, skip_tests=True) # Make sure the waiter_revenue_by_day model is not backfilled. assert plan.missing_intervals == [ @@ -505,7 +505,7 @@ def test_hourly_model_with_lookback_no_backfill_in_dev(init_and_plan_context: t. ] -@freeze_time("2023-01-08 00:00:00") +@time_machine.travel("2023-01-08 00:00:00 UTC", tick=False) def test_parent_cron_after_child(init_and_plan_context: t.Callable): context, plan = init_and_plan_context("examples/sushi") @@ -532,7 +532,7 @@ def test_parent_cron_after_child(init_and_plan_context: t.Callable): top_waiters_snapshot = context.get_snapshot("sushi.top_waiters", raise_if_missing=True) - with freeze_time("2023-01-08 23:55:00"): # Past parent's cron, but before child's + with time_machine.travel("2023-01-08 23:55:00 UTC"): # Past parent's cron, but before child's plan = context.plan("dev", no_prompts=True, skip_tests=True) # Make sure the waiter_revenue_by_day model is not backfilled. assert plan.missing_intervals == [ @@ -551,7 +551,7 @@ def test_parent_cron_after_child(init_and_plan_context: t.Callable): ] -@freeze_time("2023-01-08 00:00:00") +@time_machine.travel("2023-01-08 00:00:00 UTC") @pytest.mark.parametrize( "forward_only, expected_intervals", [ @@ -606,7 +606,7 @@ def test_cron_not_aligned_with_day_boundary( "sushi.waiter_revenue_by_day", raise_if_missing=True ) - with freeze_time("2023-01-08 00:10:00"): # Past model's cron. + with time_machine.travel("2023-01-08 00:10:00 UTC"): # Past model's cron. 
plan = context.plan( "dev", select_models=[model.name], no_prompts=True, skip_tests=True, enable_preview=True ) @@ -618,7 +618,7 @@ def test_cron_not_aligned_with_day_boundary( ] -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_forward_only_parent_created_in_dev_child_created_in_prod( init_and_plan_context: t.Callable, ): @@ -672,7 +672,7 @@ def test_forward_only_parent_created_in_dev_child_created_in_prod( context.apply(plan) -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_plan_set_choice_is_reflected_in_missing_intervals(init_and_plan_context: t.Callable): context, plan = init_and_plan_context("examples/sushi") context.apply(plan) @@ -817,7 +817,7 @@ def test_plan_set_choice_is_reflected_in_missing_intervals(init_and_plan_context ] -@freeze_time("2023-01-08 15:00:00", tick=True) +@time_machine.travel("2023-01-08 15:00:00 UTC", tick=True) @pytest.mark.parametrize("has_view_binding", [False, True]) def test_non_breaking_change_after_forward_only_in_dev( init_and_plan_context: t.Callable, has_view_binding: bool @@ -843,7 +843,7 @@ def test_non_breaking_change_after_forward_only_in_dev( plan.context_diff.snapshots[top_waiters_snapshot.snapshot_id].change_category == SnapshotChangeCategory.FORWARD_ONLY ) - assert plan.start == pd.to_datetime("2023-01-07") + assert plan.start == to_date("2023-01-07") assert plan.missing_intervals == [ SnapshotIntervals( snapshot_id=top_waiters_snapshot.snapshot_id, @@ -933,7 +933,7 @@ def test_non_breaking_change_after_forward_only_in_dev( assert prod_df.empty -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_indirect_non_breaking_change_after_forward_only_in_dev(init_and_plan_context: t.Callable): context, plan = init_and_plan_context("examples/sushi") context.apply(plan) @@ -1055,7 +1055,7 @@ def test_indirect_non_breaking_change_after_forward_only_in_dev(init_and_plan_co assert not 
context.plan("prod", no_prompts=True, skip_tests=True).requires_backfill -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_forward_only_precedence_over_indirect_non_breaking(init_and_plan_context: t.Callable): context, plan = init_and_plan_context("examples/sushi") context.apply(plan) @@ -1128,14 +1128,14 @@ def test_forward_only_precedence_over_indirect_non_breaking(init_and_plan_contex assert not context.plan("prod", no_prompts=True, skip_tests=True).requires_backfill -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_run_with_select_models( init_and_plan_context: t.Callable, ): context, plan = init_and_plan_context("examples/sushi") context.apply(plan) - with freeze_time("2023-01-09 00:00:00"): + with time_machine.travel("2023-01-09 00:00:00 UTC"): assert context.run(select_models=["*waiter_revenue_by_day"]) snapshots = context.state_sync.state_sync.get_snapshots(context.snapshots.values()) @@ -1159,7 +1159,7 @@ def test_run_with_select_models( } -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_run_with_select_models_no_auto_upstream( init_and_plan_context: t.Callable, ): @@ -1171,7 +1171,7 @@ def test_run_with_select_models_no_auto_upstream( context.plan("prod", no_prompts=True, skip_tests=True, auto_apply=True) - with freeze_time("2023-01-09 00:00:00"): + with time_machine.travel("2023-01-09 00:00:00 UTC"): assert context.run(select_models=["*waiter_revenue_by_day"], no_auto_upstream=True) snapshots = context.state_sync.state_sync.get_snapshots(context.snapshots.values()) @@ -1195,7 +1195,7 @@ def test_run_with_select_models_no_auto_upstream( } -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_select_models(init_and_plan_context: t.Callable): context, plan = init_and_plan_context("examples/sushi") context.apply(plan) @@ -1273,7 +1273,7 @@ def test_select_models(init_and_plan_context: 
t.Callable): ) -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_select_unchanged_model_for_backfill(init_and_plan_context: t.Callable): context, plan = init_and_plan_context("examples/sushi") context.apply(plan) @@ -1347,14 +1347,14 @@ def test_select_unchanged_model_for_backfill(init_and_plan_context: t.Callable): assert {o.name for o in schema_objects} == {"waiter_revenue_by_day", "top_waiters"} -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_max_interval_end_per_model_not_applied_when_end_is_provided( init_and_plan_context: t.Callable, ): context, plan = init_and_plan_context("examples/sushi") context.apply(plan) - with freeze_time("2023-01-09 00:00:00"): + with time_machine.travel("2023-01-09 00:00:00 UTC"): context.run() plan = context.plan( @@ -1363,7 +1363,7 @@ def test_max_interval_end_per_model_not_applied_when_end_is_provided( context.apply(plan) -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_select_models_for_backfill(init_and_plan_context: t.Callable): context, _ = init_and_plan_context("examples/sushi") @@ -1431,7 +1431,7 @@ def test_select_models_for_backfill(init_and_plan_context: t.Callable): ) -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_dbt_select_star_is_directly_modified(sushi_test_dbt_context: Context): context = sushi_test_dbt_context @@ -1476,7 +1476,7 @@ def test_model_attr(sushi_test_dbt_context: Context, assert_exp_eq): ) -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_incremental_by_partition(init_and_plan_context: t.Callable): context, plan = init_and_plan_context("examples/sushi") context.apply(plan) @@ -1530,7 +1530,7 @@ def test_incremental_by_partition(init_and_plan_context: t.Callable): ] -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def 
test_custom_materialization(init_and_plan_context: t.Callable): context, _ = init_and_plan_context("examples/sushi") @@ -1565,7 +1565,7 @@ def insert( assert custom_insert_called -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_unaligned_start_snapshot_with_non_deployable_downstream(init_and_plan_context: t.Callable): context, _ = init_and_plan_context("examples/sushi") @@ -1620,7 +1620,7 @@ def test_unaligned_start_snapshot_with_non_deployable_downstream(init_and_plan_c assert snapshot_interval.intervals[0][0] == to_timestamp("2023-01-07") -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_restatement_plan_ignores_changes(init_and_plan_context: t.Callable): context, plan = init_and_plan_context("examples/sushi") context.apply(plan) @@ -1651,7 +1651,7 @@ def test_restatement_plan_ignores_changes(init_and_plan_context: t.Callable): context.apply(plan) -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_plan_against_expired_environment(init_and_plan_context: t.Callable): context, plan = init_and_plan_context("examples/sushi") context.apply(plan) @@ -1682,7 +1682,7 @@ def test_plan_against_expired_environment(init_and_plan_context: t.Callable): context.apply(plan) -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_new_forward_only_model_concurrent_versions(init_and_plan_context: t.Callable): context, plan = init_and_plan_context("examples/sushi") context.apply(plan) @@ -1774,7 +1774,7 @@ def test_new_forward_only_model_concurrent_versions(init_and_plan_context: t.Cal assert df.to_dict() == {"ds": {0: "2023-01-07"}, "b": {0: None}} -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_new_forward_only_model_same_dev_environment(init_and_plan_context: t.Callable): context, plan = init_and_plan_context("examples/sushi") context.apply(plan) @@ -1877,7 
+1877,7 @@ def test_plan_twice_with_star_macro_yields_no_diff(tmp_path: Path): assert not new_plan.new_snapshots -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_create_environment_no_changes_with_selector(init_and_plan_context: t.Callable): context, plan = init_and_plan_context("examples/sushi") context.apply(plan) @@ -1893,7 +1893,7 @@ def test_create_environment_no_changes_with_selector(init_and_plan_context: t.Ca assert {o.name for o in schema_objects} == {"top_waiters"} -@freeze_time("2023-01-08 15:00:00") +@time_machine.travel("2023-01-08 15:00:00 UTC") def test_empty_bacfkill(init_and_plan_context: t.Callable): context, _ = init_and_plan_context("examples/sushi") diff --git a/tests/core/test_plan.py b/tests/core/test_plan.py index 273df0b8f5..dbccb998b5 100644 --- a/tests/core/test_plan.py +++ b/tests/core/test_plan.py @@ -4,7 +4,7 @@ from unittest.mock import patch import pytest -from freezegun import freeze_time +import time_machine from pytest_mock.plugin import MockerFixture from sqlglot import parse_one @@ -727,7 +727,7 @@ def test_missing_intervals_lookback(make_snapshot, mocker: MockerFixture): @pytest.mark.slow -@freeze_time() +@time_machine.travel(now(), tick=False) def test_restate_models(sushi_context_pre_scheduling: Context): plan = sushi_context_pre_scheduling.plan( restate_models=["sushi.waiter_revenue_by_day", "tag:expensive"], no_prompts=True @@ -781,7 +781,7 @@ def test_restate_models(sushi_context_pre_scheduling: Context): @pytest.mark.slow -@freeze_time() +@time_machine.travel(now(), tick=False) def test_restate_models_with_existing_missing_intervals(sushi_context: Context): yesterday_ts = to_timestamp(yesterday_ds()) diff --git a/tests/core/test_state_sync.py b/tests/core/test_state_sync.py index aee24e1058..fe4c7700fc 100644 --- a/tests/core/test_state_sync.py +++ b/tests/core/test_state_sync.py @@ -7,7 +7,7 @@ import duckdb import pandas as pd import pytest -from freezegun import freeze_time 
+import time_machine from pytest_mock.plugin import MockerFixture from sqlglot import exp @@ -939,7 +939,7 @@ def test_promote_snapshots_no_gaps(state_sync: EngineAdapterStateSync, make_snap ) -@freeze_time("2023-01-08 16:00:00") +@time_machine.travel("2023-01-08 16:00:00 UTC", tick=False) def test_promote_snapshots_no_gaps_lookback( state_sync: EngineAdapterStateSync, make_snapshot: t.Callable ): diff --git a/tests/dbt/test_integration.py b/tests/dbt/test_integration.py index 4c68f2f785..452be016dc 100644 --- a/tests/dbt/test_integration.py +++ b/tests/dbt/test_integration.py @@ -8,7 +8,7 @@ import pandas as pd import pytest from dbt.cli.main import dbtRunner -from freezegun import freeze_time +import time_machine from sqlmesh import Context from sqlmesh.core.config.connection import DuckDBConnectionConfig @@ -289,7 +289,7 @@ def _init_test( adapter.create_schema("sushi") if test_type.is_sqlmesh_runtime: self._replace_source_table(adapter, []) - with freeze_time("2019-12-31 00:00:00"): + with time_machine.travel("2019-12-31 00:00:00 UTC"): context.plan("prod", auto_apply=True, no_prompts=True) # type: ignore return run, adapter, context @@ -316,7 +316,7 @@ def test_scd_type_2_by_time( t.List[t.Tuple[int, str, str]], t.List[t.Tuple[int, str, str, str, t.Optional[str]]] ], ] = { - "2020-01-01 00:00:00": ( + "2020-01-01 00:00:00 UTC": ( [ (1, "a", "2020-01-01 00:00:00"), (2, "b", "2020-01-01 00:00:00"), @@ -328,7 +328,7 @@ def test_scd_type_2_by_time( (3, "c", "2020-01-01 00:00:00", "2020-01-01 00:00:00", None), ], ), - "2020-01-02 00:00:00": ( + "2020-01-02 00:00:00 UTC": ( [ # Update to "x" (1, "x", "2020-01-02 00:00:00"), @@ -353,7 +353,7 @@ def test_scd_type_2_by_time( (4, "d", "2020-01-02 00:00:00", "2020-01-02 00:00:00", None), ], ), - "2020-01-04 00:00:00": ( + "2020-01-04 00:00:00 UTC": ( [ # Update to "y" (1, "y", "2020-01-03 00:00:00"), @@ -399,7 +399,7 @@ def test_scd_type_2_by_time( time_start_end_mapping = {} for time, (starting_source_data, 
expected_table_data) in time_expected_mapping.items(): self._replace_source_table(adapter, starting_source_data) - with freeze_time(time): + with time_machine.travel(time): start_time = self._get_duckdb_now(adapter) run() end_time = self._get_duckdb_now(adapter) @@ -437,7 +437,7 @@ def test_scd_type_2_by_column( t.List[t.Tuple[int, str, str]], t.List[t.Tuple[int, str, str, str, t.Optional[str]]] ], ] = { - "2020-01-01 00:00:00": ( + "2020-01-01 00:00:00 UTC": ( [ (1, "a", "2020-01-01 00:00:00"), (2, "b", "2020-01-01 00:00:00"), @@ -449,7 +449,7 @@ def test_scd_type_2_by_column( (3, "c", "2020-01-01 00:00:00", "2020-01-01 00:00:00", None), ], ), - "2020-01-02 00:00:00": ( + "2020-01-02 00:00:00 UTC": ( [ # Update to "x" (1, "x", "2020-01-02 00:00:00"), @@ -474,7 +474,7 @@ def test_scd_type_2_by_column( (4, "d", "2020-01-02 00:00:00", "2020-01-02 00:00:00", None), ], ), - "2020-01-04 00:00:00": ( + "2020-01-04 00:00:00 UTC": ( [ # Update to "y" (1, "y", "2020-01-03 00:00:00"), @@ -516,7 +516,7 @@ def test_scd_type_2_by_column( time_start_end_mapping = {} for time, (starting_source_data, expected_table_data) in time_expected_mapping.items(): self._replace_source_table(adapter, starting_source_data) - with freeze_time(time): + with time_machine.travel(time, tick=False): start_time = self._get_duckdb_now(adapter) run() end_time = self._get_duckdb_now(adapter) diff --git a/tests/dbt/test_transformation.py b/tests/dbt/test_transformation.py index 7de94c5386..a366c8d87d 100644 --- a/tests/dbt/test_transformation.py +++ b/tests/dbt/test_transformation.py @@ -9,7 +9,7 @@ import pytest from dbt.adapters.base import BaseRelation from dbt.exceptions import CompilationError -from freezegun import freeze_time +import time_machine from pytest_mock.plugin import MockerFixture from sqlglot import exp from sqlmesh.core import dialect as d @@ -1233,7 +1233,7 @@ def test_snapshot_json_payload(): @pytest.mark.xdist_group("dbt_manifest") -@freeze_time("2023-01-08 00:00:00") 
+@time_machine.travel("2023-01-08 00:00:00 UTC") def test_dbt_package_macros(sushi_test_project: Project): context = sushi_test_project.context diff --git a/tests/integrations/github/cicd/test_integration.py b/tests/integrations/github/cicd/test_integration.py index 1c8b4aaa93..65d35d7174 100644 --- a/tests/integrations/github/cicd/test_integration.py +++ b/tests/integrations/github/cicd/test_integration.py @@ -9,7 +9,7 @@ from unittest import mock import pytest -from freezegun import freeze_time +import time_machine from pytest_mock.plugin import MockerFixture from sqlglot import exp @@ -52,7 +52,7 @@ def get_columns( return controller._context.engine_adapter.columns(table) -@freeze_time("2023-01-01 15:00:00") +@time_machine.travel("2023-01-01 15:00:00 UTC") def test_merge_pr_has_non_breaking_change( github_client, make_controller, @@ -248,7 +248,7 @@ def test_merge_pr_has_non_breaking_change( ) -@freeze_time("2023-01-01 15:00:00") +@time_machine.travel("2023-01-01 15:00:00 UTC") def test_merge_pr_has_non_breaking_change_diff_start( github_client, make_controller, @@ -447,7 +447,7 @@ def test_merge_pr_has_non_breaking_change_diff_start( ) -@freeze_time("2023-01-01 15:00:00") +@time_machine.travel("2023-01-01 15:00:00 UTC") def test_merge_pr_has_non_breaking_change_no_categorization( github_client, make_controller, @@ -757,7 +757,7 @@ def test_merge_pr_has_no_changes( ) -@freeze_time("2023-01-01 15:00:00") +@time_machine.travel("2023-01-01 15:00:00 UTC") def test_no_merge_since_no_deploy_signal( github_client, make_controller, @@ -938,7 +938,7 @@ def test_no_merge_since_no_deploy_signal( ) -@freeze_time("2023-01-01 15:00:00") +@time_machine.travel("2023-01-01 15:00:00 UTC") def test_no_merge_since_no_deploy_signal_no_approvers_defined( github_client, make_controller, @@ -1098,7 +1098,7 @@ def test_no_merge_since_no_deploy_signal_no_approvers_defined( ) -@freeze_time("2023-01-01 15:00:00") +@time_machine.travel("2023-01-01 15:00:00 UTC") def 
test_deploy_comment_pre_categorized( github_client, make_controller, @@ -1285,7 +1285,7 @@ def test_deploy_comment_pre_categorized( ) -@freeze_time("2023-01-01 15:00:00") +@time_machine.travel("2023-01-01 15:00:00 UTC") def test_error_msg_when_applying_plan_with_bug( github_client, make_controller, @@ -1438,7 +1438,7 @@ def test_error_msg_when_applying_plan_with_bug( ) -@freeze_time("2023-01-01 15:00:00") +@time_machine.travel("2023-01-01 15:00:00 UTC") def test_overlapping_changes_models( github_client, make_controller, @@ -1644,7 +1644,7 @@ def test_overlapping_changes_models( ) -@freeze_time("2023-01-01 15:00:00") +@time_machine.travel("2023-01-01 15:00:00 UTC") def test_pr_delete_model( github_client, make_controller, diff --git a/tests/integrations/jupyter/example_outputs.ipynb b/tests/integrations/jupyter/example_outputs.ipynb index 59190d84e9..cfe3aa2457 100644 --- a/tests/integrations/jupyter/example_outputs.ipynb +++ b/tests/integrations/jupyter/example_outputs.ipynb @@ -12,7 +12,7 @@ "import shutil\n", "import pathlib\n", "\n", - "from freezegun import freeze_time\n", + "import time_machine\n", "\n", "# import to register magics\n", "import sqlmesh" @@ -25,7 +25,7 @@ "metadata": {}, "outputs": [], "source": [ - "freezer = freeze_time(\"2032-01-01 00:00:00\")\n", + "freezer = time_machine.travel(\"2032-01-01 00:00:00 UTC\")\n", "freezer.start()" ] }, @@ -51,7 +51,7 @@ " prev_parent_stem = parent.stem\n", "else:\n", " raise RuntimeError(\"Couldn't find root dir\")\n", - " \n", + "\n", "EXAMPLE_SUSHI_DIR = pathlib.Path(root_dir) / \"examples\" / \"sushi\"\n", "str(EXAMPLE_SUSHI_DIR)" ] @@ -336,7 +336,7 @@ "metadata": {}, "outputs": [], "source": [ - "freezer = freeze_time(\"2032-01-02 00:00:00\")\n", + "freezer = time_machine.travel(\"2032-01-02 00:00:00 UTC\")\n", "freezer.start()" ] }, diff --git a/tests/integrations/jupyter/test_magics.py b/tests/integrations/jupyter/test_magics.py index 8d85cabb07..90421dafa4 100644 --- 
a/tests/integrations/jupyter/test_magics.py +++ b/tests/integrations/jupyter/test_magics.py @@ -5,7 +5,7 @@ import pytest from bs4 import BeautifulSoup -from freezegun import freeze_time +import time_machine from hyperscript import h from IPython.core.error import UsageError from IPython.testing.globalipapp import start_ipython @@ -23,7 +23,7 @@ SUCCESS_STYLE = "color: #008000; text-decoration-color: #008000" NEUTRAL_STYLE = "color: #008080; text-decoration-color: #008080" RICH_PRE_STYLE = "white-space:pre;overflow-x:auto;line-height:normal;font-family:Menlo,'DejaVu Sans Mono',consolas,'Courier New',monospace" -FREEZE_TIME = "2023-01-01 00:00:00" +FREEZE_TIME = "2023-01-01 00:00:00 UTC" pytestmark = pytest.mark.jupyter @@ -55,7 +55,7 @@ def sushi_context(copy_to_temp_path, notebook, tmp_path) -> Context: @pytest.fixture -@freeze_time(FREEZE_TIME) +@time_machine.travel(FREEZE_TIME) def loaded_sushi_context(sushi_context) -> Context: with capture_output(): sushi_context.plan(no_prompts=True, auto_apply=True) @@ -183,7 +183,7 @@ def test_render_no_format( @pytest.mark.slow -@freeze_time(FREEZE_TIME) +@time_machine.travel(FREEZE_TIME) def test_evaluate(notebook, loaded_sushi_context): with capture_output() as output: notebook.run_line_magic(magic_name="evaluate", line="sushi.top_waiters") @@ -313,7 +313,7 @@ def test_plan( @pytest.mark.slow -@freeze_time("2023-01-03 00:00:00") +@time_machine.travel("2023-01-03 00:00:00 UTC") def test_run_dag( notebook, loaded_sushi_context, convert_all_html_output_to_text, get_all_html_output ): @@ -366,7 +366,7 @@ def test_run_dag( @pytest.mark.slow -@freeze_time(FREEZE_TIME) +@time_machine.travel(FREEZE_TIME) def test_invalidate( notebook, loaded_sushi_context, convert_all_html_output_to_text, get_all_html_output ): @@ -628,7 +628,7 @@ def test_create_external_models(notebook, loaded_sushi_context): @pytest.mark.slow -@freeze_time(FREEZE_TIME) +@time_machine.travel(FREEZE_TIME) def test_table_diff(notebook, loaded_sushi_context, 
convert_all_html_output_to_text): with capture_output(): loaded_sushi_context.plan("dev", no_prompts=True, auto_apply=True, include_unmodified=True) @@ -650,7 +650,7 @@ def test_table_diff(notebook, loaded_sushi_context, convert_all_html_output_to_t @pytest.mark.slow -@freeze_time(FREEZE_TIME) +@time_machine.travel(FREEZE_TIME) def test_table_name(notebook, loaded_sushi_context, convert_all_html_output_to_text): with capture_output() as output: notebook.run_line_magic(magic_name="table_name", line="sushi.orders") diff --git a/tests/utils/test_date.py b/tests/utils/test_date.py index e33d115697..73810bc83f 100644 --- a/tests/utils/test_date.py +++ b/tests/utils/test_date.py @@ -2,7 +2,7 @@ from datetime import date, datetime import pytest -from freezegun import freeze_time +import time_machine from sqlglot import exp from sqlmesh.utils.date import ( @@ -57,7 +57,7 @@ def test_to_datetime() -> None: ], ) def test_to_datetime_with_expressions(expression, result) -> None: - with freeze_time("2023-01-20 12:30:30"): + with time_machine.travel("2023-01-20 12:30:30 UTC", tick=False): assert to_datetime(expression) == result