MNT: Use tz-aware datetime methods
Python 3.12 deprecated the tz-naive UTC methods utcnow() and
utcfromtimestamp(), so switch to their tz-aware equivalents. Also add a
filter for one import-time warning that still comes from dateutil.
dopplershift committed Nov 8, 2023
1 parent 135390d commit 0746c1a
Showing 8 changed files with 50 additions and 33 deletions.
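
The pattern applied across all eight files is the same: construct aware datetimes in UTC instead of calling the deprecated naive constructors. A minimal sketch of the substitution (illustrative only, not taken from the diff below):

    from datetime import datetime, timezone

    # Deprecated since Python 3.12; both return tz-naive datetimes:
    #   datetime.utcnow()
    #   datetime.utcfromtimestamp(ts)

    # tz-aware equivalents used throughout this commit:
    now = datetime.now(timezone.utc)
    stamp = datetime.fromtimestamp(1_700_000_000, tz=timezone.utc)

    assert now.tzinfo is timezone.utc
    assert stamp.tzinfo is timezone.utc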
4 changes: 2 additions & 2 deletions docs/conf.py
@@ -9,7 +9,7 @@
 # All configuration values have a default; values that are commented out
 # serve to show the default.

-from datetime import datetime
+from datetime import datetime, timezone
 import os
 from pathlib import Path
 import re
@@ -128,7 +128,7 @@

 # The encoding of source files.
 # source_encoding = 'utf-8-sig'
-cur_date = datetime.utcnow()
+cur_date = datetime.now(timezone.utc)

 # The main toctree document.
 master_doc = 'index'
2 changes: 1 addition & 1 deletion examples/meteogram_metpy.py
@@ -46,7 +46,7 @@ def __init__(self, fig, dates, probeid, time=None, axis=0):
         axis: number that controls the new axis to be plotted (FOR FUTURE)
         """
         if not time:
-            time = dt.datetime.utcnow()
+            time = dt.datetime.now(dt.timezone.utc)
         self.start = dates[0]
         self.fig = fig
         self.end = dates[-1]
7 changes: 6 additions & 1 deletion pyproject.toml
@@ -102,7 +102,12 @@ markers = "xfail_dask: marks tests as expected to fail with Dask arrays"
 norecursedirs = "build docs .idea"
 doctest_optionflags = "NORMALIZE_WHITESPACE"
 mpl-results-path = "test_output"
-filterwarnings = ["ignore:numpy.ndarray size changed:RuntimeWarning"]
+filterwarnings = [
+    "ignore:numpy.ndarray size changed:RuntimeWarning",
+    # To be removed in the next python-dateutil release.
+    # See: https://github.com/dateutil/dateutil/issues/1314
+    'ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:dateutil.tz.tz:37'
+]

 [tool.ruff]
 line-length = 95
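
The new filter entry follows pytest's action:message:category:module:lineno warning-filter syntax, where the message and module fields are interpreted as regular expressions (hence the escaped parentheses) and 37 pins the filter to the specific line in dateutil/tz/tz.py that emits the warning. Roughly the equivalent programmatic filter, shown only as a sketch of what the config entry means:

    import warnings

    warnings.filterwarnings(
        'ignore',
        message=r'datetime.datetime.utcfromtimestamp\(\) is deprecated',
        category=DeprecationWarning,
        module=r'dateutil.tz.tz',
        lineno=37,  # only ignore the warning raised from this line of dateutil/tz/tz.py
    )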
5 changes: 3 additions & 2 deletions src/metpy/io/nexrad.py
@@ -6,7 +6,7 @@
 import bz2
 from collections import defaultdict, namedtuple, OrderedDict
 import contextlib
-import datetime
+from datetime import datetime, timezone
 import logging
 import pathlib
 import re
@@ -75,7 +75,8 @@ def bzip_blocks_decompress_all(data):
 def nexrad_to_datetime(julian_date, ms_midnight):
     """Convert NEXRAD date time format to python `datetime.datetime`."""
     # Subtracting one from julian_date is because epoch date is 1
-    return datetime.datetime.utcfromtimestamp((julian_date - 1) * day + ms_midnight * milli)
+    return datetime.fromtimestamp((julian_date - 1) * day + ms_midnight * milli,
+                                  tz=timezone.utc)


 def remap_status(val):
4 changes: 2 additions & 2 deletions src/metpy/io/text.py
@@ -4,7 +4,7 @@
 """Support reading information from various text file formats."""

 import contextlib
-from datetime import datetime
+from datetime import datetime, timezone
 import re
 import string

@@ -95,7 +95,7 @@ def parse_wpc_surface_bulletin(bulletin, year=None):
         text = file.read().decode('utf-8')

     parsed_text = []
-    valid_time = datetime.utcnow()
+    valid_time = datetime.now(timezone.utc)
     for parts in _regroup_lines(text.splitlines()):
         # A single file may have multiple sets of data that are valid at different times. Set
         # the valid_time string that will correspond to all the following lines parsed, until
6 changes: 3 additions & 3 deletions src/metpy/plots/_util.py
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: BSD-3-Clause
 """Utilities for use in making plots."""

-from datetime import datetime
+from datetime import datetime, timezone

 from matplotlib.collections import LineCollection
 import matplotlib.patheffects as mpatheffects
@@ -24,7 +24,7 @@ def add_timestamp(ax, time=None, x=0.99, y=-0.04, ha='right', high_contrast=Fals
     ax : `matplotlib.axes.Axes`
         The `Axes` instance used for plotting
     time : `datetime.datetime` (or any object with a compatible ``strftime`` method)
-        Specific time to be plotted - datetime.utcnow will be use if not specified
+        Specific time to be plotted - ``datetime.now(UTC)`` will be use if not specified
     x : float
         Relative x position on the axes of the timestamp
     y : float
@@ -52,7 +52,7 @@ def add_timestamp(ax, time=None, x=0.99, y=-0.04, ha='right', high_contrast=Fals
     text_args = {}
     text_args.update(**kwargs)
     if not time:
-        time = datetime.utcnow()
+        time = datetime.now(timezone.utc)
     timestr = time.strftime(time_format)
     # If we don't have a time string after that, assume xarray/numpy and see if item
     if not isinstance(timestr, str):
49 changes: 30 additions & 19 deletions tests/io/test_nexrad.py
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: BSD-3-Clause
 """Test the `nexrad` module."""
 import contextlib
-from datetime import datetime
+from datetime import datetime, timezone
 from io import BytesIO
 import logging
 from pathlib import Path
@@ -26,18 +26,24 @@
 # KTLX 20150530 has missing segments for message 18, which was causing exception
 # KICX has message type 29 (MDM)
 # KVWX and KLTX have some legacy "quirks"; KLTX was crashing the parser
-level2_files = [('KTLX20130520_201643_V06.gz', datetime(2013, 5, 20, 20, 16, 46), 17, 4, 6, 0),
-                ('KTLX19990503_235621.gz', datetime(1999, 5, 3, 23, 56, 21), 16, 1, 3, 0),
-                ('Level2_KFTG_20150430_1419.ar2v', datetime(2015, 4, 30, 14, 19, 11),
-                 12, 4, 6, 0),
-                ('KTLX20150530_000802_V06.bz2', datetime(2015, 5, 30, 0, 8, 3), 14, 4, 6, 2),
-                ('KICX_20170712_1458', datetime(2017, 7, 12, 14, 58, 5), 14, 4, 6, 1),
-                ('TDAL20191021021543V08.raw.gz', datetime(2019, 10, 21, 2, 15, 43), 10, 1,
-                 3, 0),
-                ('Level2_FOP1_20191223_003655.ar2v', datetime(2019, 12, 23, 0, 36, 55, 649000),
-                 16, 5, 7, 0),
-                ('KVWX_20050626_221551.gz', datetime(2005, 6, 26, 22, 15, 51), 11, 1, 3, 21),
-                ('KLTX20050329_100015.gz', datetime(2005, 3, 29, 10, 0, 15), 11, 1, 3, 21)]
+level2_files = [('KTLX20130520_201643_V06.gz',
+                 datetime(2013, 5, 20, 20, 16, 46, tzinfo=timezone.utc), 17, 4, 6, 0),
+                ('KTLX19990503_235621.gz',
+                 datetime(1999, 5, 3, 23, 56, 21, tzinfo=timezone.utc), 16, 1, 3, 0),
+                ('Level2_KFTG_20150430_1419.ar2v',
+                 datetime(2015, 4, 30, 14, 19, 11, tzinfo=timezone.utc), 12, 4, 6, 0),
+                ('KTLX20150530_000802_V06.bz2',
+                 datetime(2015, 5, 30, 0, 8, 3, tzinfo=timezone.utc), 14, 4, 6, 2),
+                ('KICX_20170712_1458',
+                 datetime(2017, 7, 12, 14, 58, 5, tzinfo=timezone.utc), 14, 4, 6, 1),
+                ('TDAL20191021021543V08.raw.gz',
+                 datetime(2019, 10, 21, 2, 15, 43, tzinfo=timezone.utc), 10, 1, 3, 0),
+                ('Level2_FOP1_20191223_003655.ar2v',
+                 datetime(2019, 12, 23, 0, 36, 55, 649000, tzinfo=timezone.utc), 16, 5, 7, 0),
+                ('KVWX_20050626_221551.gz',
+                 datetime(2005, 6, 26, 22, 15, 51, tzinfo=timezone.utc), 11, 1, 3, 21),
+                ('KLTX20050329_100015.gz',
+                 datetime(2005, 3, 29, 10, 0, 15, tzinfo=timezone.utc), 11, 1, 3, 21)]


 # ids here fixes how things are presented in pycharm
@@ -99,7 +105,8 @@ def test_msg15():
     f = Level2File(get_test_data('KTLX20130520_201643_V06.gz', as_file_obj=False))
     data = f.clutter_filter_map['data']
     assert isinstance(data[0][0], list)
-    assert f.clutter_filter_map['datetime'] == datetime(2013, 5, 19, 5, 15, 0, 0)
+    assert f.clutter_filter_map['datetime'] == datetime(2013, 5, 19, 5, 15, 0, 0,
+                                                        tzinfo=timezone.utc)


 def test_single_chunk(caplog):
@@ -154,9 +161,12 @@ def test_level3_files(fname):
 def test_basic():
     """Test reading one specific NEXRAD NIDS file based on the filename."""
     f = Level3File(get_test_data('nids/Level3_FFC_N0Q_20140407_1805.nids', as_file_obj=False))
-    assert f.metadata['prod_time'].replace(second=0) == datetime(2014, 4, 7, 18, 5)
-    assert f.metadata['vol_time'].replace(second=0) == datetime(2014, 4, 7, 18, 5)
-    assert f.metadata['msg_time'].replace(second=0) == datetime(2014, 4, 7, 18, 6)
+    assert f.metadata['prod_time'].replace(second=0) == datetime(2014, 4, 7, 18, 5,
+                                                                 tzinfo=timezone.utc)
+    assert f.metadata['vol_time'].replace(second=0) == datetime(2014, 4, 7, 18, 5,
+                                                                tzinfo=timezone.utc)
+    assert f.metadata['msg_time'].replace(second=0) == datetime(2014, 4, 7, 18, 6,
+                                                                tzinfo=timezone.utc)
     assert f.filename == get_test_data('nids/Level3_FFC_N0Q_20140407_1805.nids',
                                        as_file_obj=False)

@@ -203,7 +213,7 @@ def test_tdwr():
 def test_dhr():
     """Test reading a time field for DHR product."""
     f = Level3File(get_test_data('nids/KOUN_SDUS54_DHRTLX_201305202016'))
-    assert f.metadata['avg_time'] == datetime(2013, 5, 20, 20, 18)
+    assert f.metadata['avg_time'] == datetime(2013, 5, 20, 20, 18, tzinfo=timezone.utc)


 def test_fobj():
@@ -232,7 +242,8 @@ def test_power_removed_control():
     assert f.metadata['rpg_cut_num'] == 1
     assert f.metadata['cmd_generated'] == 0
     assert f.metadata['el_angle'] == -0.2
-    assert f.metadata['clutter_filter_map_dt'] == datetime(2020, 8, 17, 4, 16)
+    assert f.metadata['clutter_filter_map_dt'] == datetime(2020, 8, 17, 4, 16,
+                                                           tzinfo=timezone.utc)
     assert f.metadata['compression'] == 1
     assert f.sym_block[0][0]

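The expected values in these tests gain an explicit tzinfo because aware and naive datetimes never compare equal, so the old naive expectations would fail once the parsers return aware values. A quick illustration of that standard-library behavior (not test code from this commit):

    from datetime import datetime, timezone

    aware = datetime(2013, 5, 20, 20, 16, 46, tzinfo=timezone.utc)
    naive = datetime(2013, 5, 20, 20, 16, 46)

    print(aware == naive)  # False: equality between aware and naive datetimes is always False
    # Ordering them (e.g. aware < naive) raises TypeError instead.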
6 changes: 3 additions & 3 deletions tests/io/test_text.py
@@ -2,7 +2,7 @@
 # Distributed under the terms of the BSD 3-Clause License.
 # SPDX-License-Identifier: BSD-3-Clause
 """Test text handling functions."""
-from datetime import datetime
+from datetime import datetime, timezone

 import numpy as np

@@ -34,7 +34,7 @@ def test_parse_wpc_surface_bulletin_highres():
     assert df.geometry[47] == sgeom.LineString([[-100.5, 32.4], [-101.0, 31.9],
                                                 [-101.9, 31.5], [-102.9, 31.2]])

-    assert all(df.valid == datetime(2021, 6, 28, 18, 0, 0))
+    assert all(df.valid == datetime(2021, 6, 28, 18, 0, 0, tzinfo=timezone.utc))


 @needs_module('shapely')
@@ -60,4 +60,4 @@ def test_parse_wpc_surface_bulletin():
     assert df.geometry[47] == sgeom.LineString([[-100, 32], [-101, 32],
                                                 [-102, 32], [-103, 31]])

-    assert all(df.valid == datetime(2021, 6, 28, 18, 0, 0))
+    assert all(df.valid == datetime(2021, 6, 28, 18, 0, 0, tzinfo=timezone.utc))
