Upgrade pendulum to latest major version ~2.0 (#9184)
crhyatt committed Jun 10, 2020
1 parent e0c0e01 commit c41192f
Showing 24 changed files with 89 additions and 79 deletions.
8 changes: 4 additions & 4 deletions airflow/models/dag.py
@@ -412,15 +412,15 @@ def following_schedule(self, dttm):
if not self.is_fixed_time_schedule():
# relative offset (eg. every 5 minutes)
delta = cron.get_next(datetime) - naive
following = dttm.in_timezone(self.timezone).add_timedelta(delta)
following = dttm.in_timezone(self.timezone) + delta
else:
# absolute (e.g. 3 AM)
naive = cron.get_next(datetime)
tz = pendulum.timezone(self.timezone.name)
following = timezone.make_aware(naive, tz)
return timezone.convert_to_utc(following)
elif self.normalized_schedule_interval is not None:
return dttm + self.normalized_schedule_interval
return timezone.convert_to_utc(dttm + self.normalized_schedule_interval)

def previous_schedule(self, dttm):
"""
@@ -440,15 +440,15 @@ def previous_schedule(self, dttm):
if not self.is_fixed_time_schedule():
# relative offset (eg. every 5 minutes)
delta = naive - cron.get_prev(datetime)
previous = dttm.in_timezone(self.timezone).subtract_timedelta(delta)
previous = dttm.in_timezone(self.timezone) - delta
else:
# absolute (e.g. 3 AM)
naive = cron.get_prev(datetime)
tz = pendulum.timezone(self.timezone.name)
previous = timezone.make_aware(naive, tz)
return timezone.convert_to_utc(previous)
elif self.normalized_schedule_interval is not None:
return dttm - self.normalized_schedule_interval
return timezone.convert_to_utc(dttm - self.normalized_schedule_interval)

def get_run_dates(self, start_date, end_date=None):
"""
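The dag.py hunks replace pendulum 1.x's `add_timedelta()`/`subtract_timedelta()` helpers with plain `+`/`-` and route both return paths through `convert_to_utc`. A minimal sketch of the 2.x-style arithmetic, assuming pendulum>=2.0 is installed (values are illustrative):

```python
import datetime
import pendulum

dttm = pendulum.datetime(2020, 6, 10, tz="UTC")
delta = datetime.timedelta(minutes=5)

# plain +/- with a timedelta keeps the result a timezone-aware pendulum.DateTime
following = dttm.in_timezone("UTC") + delta   # 1.x: .add_timedelta(delta)
previous = dttm.in_timezone("UTC") - delta    # 1.x: .subtract_timedelta(delta)
print(following, previous)
```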
6 changes: 3 additions & 3 deletions airflow/models/taskinstance.py
@@ -640,7 +640,7 @@ def get_previous_execution_date(
self,
state: Optional[str] = None,
session: Session = None,
) -> Optional[pendulum.datetime]:
) -> Optional[pendulum.DateTime]:
"""
The execution date from property previous_ti_success.
@@ -655,7 +655,7 @@ def get_previous_start_date(
self,
state: Optional[str] = None,
session: Session = None
) -> Optional[pendulum.datetime]:
) -> Optional[pendulum.DateTime]:
"""
The start date from property previous_ti_success.
@@ -666,7 +666,7 @@ def get_previous_start_date(
return prev_ti and prev_ti.start_date

@property
def previous_start_date_success(self) -> Optional[pendulum.datetime]:
def previous_start_date_success(self) -> Optional[pendulum.DateTime]:
"""
This attribute is deprecated.
Please use `airflow.models.taskinstance.TaskInstance.get_previous_start_date` method.
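These annotation changes track the pendulum 2.x rename: the datetime class is `pendulum.DateTime` (1.x: `Pendulum`), while `pendulum.datetime` is a factory function and so was never a valid type. A minimal sketch, assuming pendulum>=2.0; the standalone function is hypothetical and only illustrates the annotation:

```python
import datetime
from typing import Optional

import pendulum


def get_previous_execution_date() -> Optional[pendulum.DateTime]:
    # pendulum.now() returns a pendulum.DateTime in 2.x
    return pendulum.now("UTC")


dt = get_previous_execution_date()
assert isinstance(dt, pendulum.DateTime)   # the 2.x class (1.x: Pendulum)
assert isinstance(dt, datetime.datetime)   # still a stdlib datetime subclass
```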
4 changes: 2 additions & 2 deletions airflow/models/xcom.py
@@ -21,7 +21,7 @@
import pickle
from typing import Any, Iterable, Optional, Union

from pendulum import pendulum
import pendulum
from sqlalchemy import Column, LargeBinary, String, and_
from sqlalchemy.orm import Query, Session, reconstructor

@@ -120,7 +120,7 @@ def set(
@classmethod
@provide_session
def get_many(cls,
execution_date: pendulum.datetime,
execution_date: pendulum.DateTime,
key: Optional[str] = None,
task_ids: Optional[Union[str, Iterable[str]]] = None,
dag_ids: Optional[Union[str, Iterable[str]]] = None,
2 changes: 1 addition & 1 deletion airflow/operators/latest_only_operator.py
@@ -48,7 +48,7 @@ def choose_branch(self, context: Dict) -> Union[str, Iterable[str]]:
"Externally triggered DAG_Run: allowing execution to proceed.")
return list(context['task'].get_direct_relative_ids(upstream=False))

now = pendulum.utcnow()
now = pendulum.now('UTC')
left_window = context['dag'].following_schedule(
context['execution_date'])
right_window = context['dag'].following_schedule(left_window)
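Here the commit swaps `pendulum.utcnow()` for `pendulum.now('UTC')`, the 2.x way of getting an aware "now" in a given zone. A short sketch, assuming pendulum>=2.0:

```python
import pendulum

now = pendulum.now("UTC")        # replaces pendulum 1.x pendulum.utcnow()
print(now.timezone_name)         # 'UTC'
print(now.isoformat())
```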
2 changes: 1 addition & 1 deletion airflow/providers/google/cloud/operators/dataproc.py
@@ -295,7 +295,7 @@ def _build_lifecycle_config(self, cluster_data):
if self.auto_delete_time:
utc_auto_delete_time = timezone.convert_to_utc(self.auto_delete_time)
cluster_data['config']['lifecycle_config']['auto_delete_time'] = \
utc_auto_delete_time.format('%Y-%m-%dT%H:%M:%S.%fZ', formatter='classic')
utc_auto_delete_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
elif self.auto_delete_ttl:
cluster_data['config']['lifecycle_config']['auto_delete_ttl'] = \
"{}s".format(self.auto_delete_ttl)
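pendulum 2.x dropped the `formatter='classic'` argument to `format()`, but `DateTime` still subclasses `datetime.datetime`, so the stdlib `strftime()` accepts the same `%`-style directives. A minimal sketch, assuming pendulum>=2.0 and an illustrative timestamp:

```python
import pendulum

utc_auto_delete_time = pendulum.datetime(2020, 6, 10, 3, 0, 0, tz="UTC")

# 1.x: utc_auto_delete_time.format('%Y-%m-%dT%H:%M:%S.%fZ', formatter='classic')
# 2.x: fall back to the stdlib strftime(), which takes the same %-directives
print(utc_auto_delete_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
# 2020-06-10T03:00:00.000000Z
```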
9 changes: 6 additions & 3 deletions airflow/serialization/serialized_objects.py
@@ -25,6 +25,7 @@
import cattr
import pendulum
from dateutil import relativedelta
from pendulum.tz.timezone import Timezone

from airflow.exceptions import AirflowException
from airflow.models import Connection
@@ -186,7 +187,7 @@ def _serialize(cls, var: Any) -> Any: # Unfortunately there is no support for r
return cls._encode(var.timestamp(), type_=DAT.DATETIME)
elif isinstance(var, datetime.timedelta):
return cls._encode(var.total_seconds(), type_=DAT.TIMEDELTA)
elif isinstance(var, (pendulum.tz.Timezone, pendulum.tz.timezone_info.TimezoneInfo)):
elif isinstance(var, (Timezone)):
return cls._encode(str(var.name), type_=DAT.TIMEZONE)
elif isinstance(var, relativedelta.relativedelta):
encoded = {k: v for k, v in var.__dict__.items() if not k.startswith("_") and v}
@@ -212,6 +213,7 @@ def _serialize(cls, var: Any) -> Any: # Unfortunately there is no support for r
except Exception: # pylint: disable=broad-except
log.error('Failed to stringify.', exc_info=True)
return FAILED

# pylint: enable=too-many-return-statements

@classmethod
@@ -239,7 +241,7 @@ def _deserialize(cls, encoded_var: Any) -> Any: # pylint: disable=too-many-retu
elif type_ == DAT.TIMEDELTA:
return datetime.timedelta(seconds=var)
elif type_ == DAT.TIMEZONE:
return pendulum.timezone(var)
return Timezone(var)
elif type_ == DAT.RELATIVEDELTA:
if 'weekday' in var:
var['weekday'] = relativedelta.weekday(*var['weekday']) # type: ignore
@@ -252,7 +254,7 @@ def _deserialize(cls, encoded_var: Any) -> Any: # pylint: disable=too-many-retu
raise TypeError('Invalid type {!s} in deserialization.'.format(type_))

_deserialize_datetime = pendulum.from_timestamp
_deserialize_timezone = pendulum.timezone
_deserialize_timezone = pendulum.tz.timezone

@classmethod
def _deserialize_timedelta(cls, seconds: int) -> datetime.timedelta:
@@ -538,6 +540,7 @@ def __get_constructor_defaults(): # pylint: disable=no-method-argument
param_to_attr.get(k, k): v.default for k, v in signature(DAG).parameters.items()
if v.default is not v.empty
}

_CONSTRUCTOR_PARAMS = __get_constructor_defaults.__func__() # type: ignore
del __get_constructor_defaults

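The serialization hunks collapse the timezone check to the single `Timezone` class that pendulum 2.x exposes (the 1.x `TimezoneInfo` distinction is gone) and rebuild the zone from its stored name on deserialization. A minimal sketch of that round-trip, assuming pendulum>=2.1 (the version pinned below) and an illustrative zone name:

```python
import pendulum
from pendulum.tz.timezone import Timezone

tz = pendulum.tz.timezone("Europe/Amsterdam")
assert isinstance(tz, Timezone)

encoded = str(tz.name)        # serialize: keep only the IANA name
decoded = Timezone(encoded)   # deserialize: rebuild the zone from the name
assert decoded.name == tz.name
```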
8 changes: 4 additions & 4 deletions airflow/settings.py
@@ -39,13 +39,13 @@
log = logging.getLogger(__name__)


TIMEZONE = pendulum.timezone('UTC')
TIMEZONE = pendulum.tz.timezone('UTC')
try:
tz = conf.get("core", "default_timezone")
if tz == "system":
TIMEZONE = pendulum.local_timezone()
TIMEZONE = pendulum.tz.local_timezone()
else:
TIMEZONE = pendulum.timezone(tz)
TIMEZONE = pendulum.tz.timezone(tz)
except Exception:
pass
log.info("Configured default timezone %s" % TIMEZONE)
@@ -214,7 +214,7 @@ def dispose_orm():


def configure_adapters():
from pendulum import Pendulum
from pendulum import DateTime as Pendulum
try:
from sqlite3 import register_adapter
register_adapter(Pendulum, lambda val: val.isoformat(' '))
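The settings changes move the timezone helpers to the `pendulum.tz` namespace and import the renamed `DateTime` class (aliased back to `Pendulum` so the sqlite adapter registration keeps its old shape). A minimal sketch, assuming pendulum>=2.0:

```python
import pendulum
from pendulum import DateTime as Pendulum   # 2.x name for the 1.x Pendulum class

TIMEZONE = pendulum.tz.timezone("UTC")      # this commit's spelling; 1.x used pendulum.timezone("UTC")
LOCAL_TZ = pendulum.tz.local_timezone()     # 1.x: pendulum.local_timezone()

# the adapter registered in configure_adapters() keeps working because
# DateTime still provides datetime's isoformat()
val = pendulum.now(TIMEZONE)
print(isinstance(val, Pendulum), val.isoformat(" "))
```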
2 changes: 1 addition & 1 deletion airflow/ti_deps/dep_context.py
@@ -83,7 +83,7 @@ def __init__(
self.ignore_ti_state = ignore_ti_state
self.finished_tasks = finished_tasks

def ensure_finished_tasks(self, dag, execution_date: pendulum.datetime, session: Session):
def ensure_finished_tasks(self, dag, execution_date: pendulum.DateTime, session: Session):
"""
This method makes sure finished_tasks is populated if it's currently None.
This is for the strange feature of running tasks without dag_run.
2 changes: 1 addition & 1 deletion airflow/utils/sqlalchemy.py
@@ -32,7 +32,7 @@

log = logging.getLogger(__name__)

utc = pendulum.timezone('UTC')
utc = pendulum.tz.timezone('UTC')

using_mysql = conf.get('core', 'sql_alchemy_conn').lower().startswith('mysql')

4 changes: 2 additions & 2 deletions airflow/utils/timezone.py
@@ -23,7 +23,7 @@
from airflow.settings import TIMEZONE

# UTC time zone as a tzinfo instance.
utc = pendulum.timezone('UTC')
utc = pendulum.tz.timezone('UTC')


def is_localized(value):
@@ -176,4 +176,4 @@ def parse(string, timezone=None):
:param string: time string
"""
return pendulum.parse(string, tz=timezone or TIMEZONE)
return pendulum.parse(string, tz=timezone or TIMEZONE, strict=False)
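pendulum 2.x's `parse()` is strict ISO 8601 by default and raises on looser input, so the commit passes `strict=False` to keep 1.x-style lenient behaviour (here and in `www/decorators.py` below). A short sketch, assuming pendulum>=2.0, with illustrative input strings:

```python
import pendulum

# ISO 8601 input parses with the default strict=True
print(pendulum.parse("2020-06-10T03:00:00+00:00"))

# looser, non-ISO input needs strict=False, which falls back to a more
# permissive parser (this mirrors the strict=False added in the hunk above)
print(pendulum.parse("Jun 10 2020 03:00", tz="UTC", strict=False))
```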
2 changes: 1 addition & 1 deletion airflow/www/decorators.py
@@ -50,7 +50,7 @@ def wrapper(*args, **kwargs):

if 'execution_date' in request.values:
log.execution_date = pendulum.parse(
request.values.get('execution_date'))
request.values.get('execution_date'), strict=False)

session.add(log)

18 changes: 9 additions & 9 deletions requirements/requirements-python3.6.txt
@@ -45,7 +45,7 @@ apispec==1.3.3
appdirs==1.4.4
argcomplete==1.11.1
asn1crypto==1.3.0
astroid==2.3.3
astroid==2.4.2
async-generator==1.10
async-timeout==3.0.1
atlasclient==1.0.0
@@ -66,15 +66,15 @@ azure-nspkg==3.0.2
azure-storage-blob==2.1.0
azure-storage-common==2.1.0
azure-storage==0.36.0
backcall==0.1.0
backcall==0.2.0
bcrypt==3.1.7
beautifulsoup4==4.7.1
billiard==3.6.3.0
black==19.10b0
blinker==1.4
boto3==1.13.25
boto3==1.13.26
boto==2.49.0
botocore==1.16.25
botocore==1.16.26
bowler==0.8.0
cached-property==1.5.1
cachetools==4.1.0
@@ -145,7 +145,7 @@ google-auth-oauthlib==0.4.1
google-auth==1.16.1
google-cloud-automl==0.10.0
google-cloud-bigquery-datatransfer==1.0.0
google-cloud-bigquery==1.24.0
google-cloud-bigquery==1.25.0
google-cloud-bigtable==1.2.1
google-cloud-container==0.5.0
google-cloud-core==1.3.0
@@ -187,7 +187,7 @@ ijson==2.6.1
imagesize==1.2.0
immutables==0.14
importlib-metadata==1.6.1
importlib-resources==2.0.0
importlib-resources==1.5.0
inflection==0.5.0
ipdb==0.13.2
ipython-genutils==0.2.0
@@ -223,7 +223,7 @@ monotonic==1.5
more-itertools==8.3.0
moto==1.3.14
msgpack==1.0.0
msrest==0.6.15
msrest==0.6.16
msrestazure==0.6.3
multi-key-dict==2.0.3
multidict==4.7.6
@@ -233,7 +233,7 @@ mysql-connector-python==8.0.18
mysqlclient==1.3.14
natsort==7.0.1
nbclient==0.3.1
nbformat==5.0.6
nbformat==5.0.7
nest-asyncio==1.3.3
networkx==2.4
nodeenv==1.4.0
@@ -252,7 +252,7 @@ paramiko==2.7.1
parso==0.7.0
pathspec==0.8.0
pbr==5.4.5
pendulum==1.4.4
pendulum==2.1.0
pep562==1.0
pexpect==4.8.0
pickleshare==0.7.5
16 changes: 8 additions & 8 deletions requirements/requirements-python3.7.txt
@@ -66,15 +66,15 @@ azure-nspkg==3.0.2
azure-storage-blob==2.1.0
azure-storage-common==2.1.0
azure-storage==0.36.0
backcall==0.1.0
backcall==0.2.0
bcrypt==3.1.7
beautifulsoup4==4.7.1
billiard==3.6.3.0
black==19.10b0
blinker==1.4
boto3==1.13.25
boto3==1.13.26
boto==2.49.0
botocore==1.16.25
botocore==1.16.26
bowler==0.8.0
cached-property==1.5.1
cachetools==4.1.0
@@ -144,7 +144,7 @@ google-auth-oauthlib==0.4.1
google-auth==1.16.1
google-cloud-automl==0.10.0
google-cloud-bigquery-datatransfer==1.0.0
google-cloud-bigquery==1.24.0
google-cloud-bigquery==1.25.0
google-cloud-bigtable==1.2.1
google-cloud-container==0.5.0
google-cloud-core==1.3.0
@@ -219,7 +219,7 @@ monotonic==1.5
more-itertools==8.3.0
moto==1.3.14
msgpack==1.0.0
msrest==0.6.15
msrest==0.6.16
msrestazure==0.6.3
multi-key-dict==2.0.3
multidict==4.7.6
@@ -229,7 +229,7 @@ mysql-connector-python==8.0.18
mysqlclient==1.3.14
natsort==7.0.1
nbclient==0.3.1
nbformat==5.0.6
nbformat==5.0.7
nest-asyncio==1.3.3
networkx==2.4
nodeenv==1.4.0
Expand All @@ -248,7 +248,7 @@ paramiko==2.7.1
parso==0.7.0
pathspec==0.8.0
pbr==5.4.5
pendulum==1.4.4
pendulum==2.1.0
pexpect==4.8.0
pickleshare==0.7.5
pinotdb==0.1.1
@@ -378,7 +378,7 @@ virtualenv==20.0.21
watchtower==0.7.3
wcwidth==0.2.4
websocket-client==0.57.0
wrapt==1.12.1
wrapt==1.11.2
xmltodict==0.12.0
yamllint==1.23.0
yandexcloud==0.41.0