Skip to content

Commit

Permalink
Merge pull request #2391 from sfinkens/fix-warning-stacklevel
Browse files Browse the repository at this point in the history
  • Loading branch information
mraspaud committed Feb 15, 2023
2 parents 4236df9 + 2a2fb18 commit 5f2e3a1
Show file tree
Hide file tree
Showing 23 changed files with 225 additions and 85 deletions.
12 changes: 10 additions & 2 deletions satpy/composites/__init__.py
Expand Up @@ -425,7 +425,11 @@ def _get_sensors(self, projectables):
def __call__(self, projectables, nonprojectables=None, **attrs):
"""Build the composite."""
if 'deprecation_warning' in self.attrs:
warnings.warn(self.attrs['deprecation_warning'], UserWarning)
warnings.warn(
self.attrs['deprecation_warning'],
UserWarning,
stacklevel=2
)
self.attrs.pop('deprecation_warning', None)
num = len(projectables)
mode = attrs.get('mode')
Expand Down Expand Up @@ -513,7 +517,11 @@ class RGBCompositor(GenericCompositor):

def __call__(self, projectables, nonprojectables=None, **info):
    """Generate the composite.

    Deprecated: use :class:`GenericCompositor` instead.

    Args:
        projectables: Sequence of exactly three datasets (R, G, B).
        nonprojectables: Unused; kept for interface compatibility.
        **info: Extra attributes forwarded to the parent compositor.

    Raises:
        ValueError: If ``projectables`` does not contain exactly 3 datasets.
    """
    # Emit the deprecation warning once; stacklevel=2 points it at the caller.
    warnings.warn(
        "RGBCompositor is deprecated, use GenericCompositor instead.",
        DeprecationWarning,
        stacklevel=2
    )
    if len(projectables) != 3:
        raise ValueError("Expected 3 datasets, got %d" % (len(projectables),))
    return super(RGBCompositor, self).__call__(projectables, **info)
Expand Down
9 changes: 6 additions & 3 deletions satpy/composites/config_loader.py
Expand Up @@ -131,9 +131,12 @@ def _get_modifier_loader_from_config(modifier_name, modifier_info):
loader = modifier_info.pop('modifier', None)
if loader is None:
loader = modifier_info.pop('compositor')
warnings.warn("Modifier '{}' uses deprecated 'compositor' "
"key to point to Python class, replace "
"with 'modifier'.".format(modifier_name))
warnings.warn(
"Modifier '{}' uses deprecated 'compositor' "
"key to point to Python class, replace "
"with 'modifier'.".format(modifier_name),
stacklevel=5
)
except KeyError:
raise ValueError("'modifier' key missing or empty for '{}'. Option keys = {}".format(
modifier_name, str(modifier_info.keys())))
Expand Down
5 changes: 4 additions & 1 deletion satpy/composites/spectral.py
Expand Up @@ -176,5 +176,8 @@ def __init__(self, *args, fractions=(0.85, 0.15), **kwargs):
"""Set default keyword argument values."""
warnings.warn(
"'GreenCorrector' is deprecated, use 'SpectralBlender' instead, or 'HybridGreen' for hybrid green"
" correction following Miller et al. (2016).", UserWarning)
" correction following Miller et al. (2016).",
UserWarning,
stacklevel=2
)
super().__init__(fractions=fractions, *args, **kwargs)
6 changes: 5 additions & 1 deletion satpy/enhancements/__init__.py
Expand Up @@ -129,7 +129,11 @@ def wrapper(data, **kwargs):
def crefl_scaling(img, **kwargs):
    """Apply non-linear stretch used by CREFL-based RGBs.

    Deprecated: use ``piecewise_linear_stretch`` instead.

    Args:
        img: Image object whose underlying data is scaled in place
            (divided by 100) before stretching.
        **kwargs: Must contain ``idx`` (x breakpoints) and ``sc``
            (y breakpoints) for the piecewise linear stretch.

    Returns:
        The result of ``piecewise_linear_stretch`` applied to *img*.
    """
    LOG.debug("Applying the crefl_scaling")
    # Single deprecation warning; stacklevel=2 attributes it to the caller.
    warnings.warn(
        "'crefl_scaling' is deprecated, use 'piecewise_linear_stretch' instead.",
        DeprecationWarning,
        stacklevel=2
    )
    img.data.data = img.data.data / 100
    return piecewise_linear_stretch(img, xp=kwargs['idx'], fp=kwargs['sc'], reference_scale_factor=255)

Expand Down
8 changes: 6 additions & 2 deletions satpy/modifiers/_crefl.py
Expand Up @@ -53,8 +53,12 @@ def __init__(self, *args, dem_filename=None, dem_sds="averaged elevation",
"""
if dem_filename is not None:
warnings.warn("'dem_filename' for 'ReflectanceCorrector' is "
"deprecated. Use 'url' instead.", DeprecationWarning)
warnings.warn(
"'dem_filename' for 'ReflectanceCorrector' is "
"deprecated. Use 'url' instead.",
DeprecationWarning,
stacklevel=2
)

super(ReflectanceCorrector, self).__init__(*args, **kwargs)
self.dem_sds = dem_sds
Expand Down
4 changes: 3 additions & 1 deletion satpy/modifiers/angles.py
Expand Up @@ -185,7 +185,9 @@ def _warn_if_irregular_input_chunks(args, modified_args):
"has been rechunked for caching, but this is not optimal for "
"future calculations. "
f"Original chunks: {arg_chunks}; New chunks: {new_chunks}",
PerformanceWarning)
PerformanceWarning,
stacklevel=3
)

def _cache_results(self, res, zarr_format):
os.makedirs(os.path.dirname(zarr_format), exist_ok=True)
Expand Down
6 changes: 4 additions & 2 deletions satpy/modifiers/parallax.py
Expand Up @@ -388,8 +388,10 @@ def _check_overlap(self, cth_dataset):
by ``cth_dataset``, raise a `MissingHeightError`.
"""
warnings.warn(
"Overlap checking not impelemented. Waiting for "
"fix for https://github.com/pytroll/pyresample/issues/329")
"Overlap checking not implemented. Waiting for "
"fix for https://github.com/pytroll/pyresample/issues/329",
stacklevel=3
)

def _get_corrected_lon_lat(self, base_lon, base_lat, shifted_area):
"""Calculate the corrected lon/lat based from the shifted area.
Expand Down
9 changes: 7 additions & 2 deletions satpy/multiscene.py
Expand Up @@ -300,7 +300,9 @@ def from_files(cls, files_to_sort, reader=None,
warnings.warn(
"Argument ensure_all_readers is deprecated. Use "
"missing='skip' instead.",
DeprecationWarning)
DeprecationWarning,
stacklevel=2
)
file_groups = [fg for fg in file_groups if all(fg.values())]
scenes = (Scene(filenames=fg, **scene_kwargs) for fg in file_groups)
return cls(scenes)
Expand Down Expand Up @@ -639,7 +641,10 @@ def load_data(frame_gen, q):
load_thread.join(10)
if load_thread.is_alive():
import warnings
warnings.warn("Background thread still alive after failing to die gracefully")
warnings.warn(
"Background thread still alive after failing to die gracefully",
stacklevel=3
)
else:
log.debug("Child thread died successfully")

Expand Down
19 changes: 12 additions & 7 deletions satpy/readers/__init__.py
Expand Up @@ -180,10 +180,12 @@ def _get_file_keys_for_reader_files(reader_files, group_keys=None):
group_key = tuple(file_info.get(k) for k in group_keys)
if all(g is None for g in group_key):
warnings.warn(
f"Found matching file {f:s} for reader "
"{reader_name:s}, but none of group keys found. "
"Group keys requested: " + ", ".join(group_keys),
UserWarning)
f"Found matching file {f:s} for reader "
"{reader_name:s}, but none of group keys found. "
"Group keys requested: " + ", ".join(group_keys),
UserWarning,
stacklevel=3
)
file_keys[reader_name].append((group_key, f))
return file_keys

Expand Down Expand Up @@ -356,9 +358,12 @@ def get_valid_reader_names(reader):

if reader_name in PENDING_OLD_READER_NAMES:
new_name = PENDING_OLD_READER_NAMES[reader_name]
warnings.warn("Reader name '{}' is being deprecated and will be removed soon."
"Please use '{}' instead.".format(reader_name, new_name),
FutureWarning)
warnings.warn(
"Reader name '{}' is being deprecated and will be removed soon."
"Please use '{}' instead.".format(reader_name, new_name),
FutureWarning,
stacklevel=2
)
new_readers.append(new_name)
else:
new_readers.append(reader_name)
Expand Down
10 changes: 8 additions & 2 deletions satpy/readers/ahi_hsd.py
Expand Up @@ -458,7 +458,10 @@ def _modify_observation_time_for_nominal(self, observation_time):
"""
timeline = "{:04d}".format(self.basic_info['observation_timeline'][0])
if not self._is_valid_timeline(timeline):
warnings.warn("Observation timeline is fill value, not rounding observation time.")
warnings.warn(
"Observation timeline is fill value, not rounding observation time.",
stacklevel=3
)
return observation_time

if self.observation_area == 'FLDK':
Expand Down Expand Up @@ -512,7 +515,10 @@ def _get_area_def(self):
def _check_fpos(self, fp_, fpos, offset, block):
    """Check that the file position matches the expected block size.

    Warns (rather than raising) when the actual ``block`` header size
    read from ``fp_`` does not match the expected position ``fpos``,
    so reading can continue best-effort.

    Args:
        fp_: Open file object being read.
        fpos: Expected absolute file position after this block.
        offset: Additional offset to add to the current position.
        block: Name of the header block, used in the warning message.
    """
    if fp_.tell() + offset != fpos:
        # stacklevel=3 points the warning at the code driving the read.
        warnings.warn(
            f"Actual {block} header size does not match expected",
            stacklevel=3
        )
    return

def _read_header(self, fp_):
Expand Down
3 changes: 2 additions & 1 deletion satpy/readers/mviri_l1b_fiduceo_nc.py
Expand Up @@ -433,7 +433,8 @@ def check(self):
warnings.warn(
'All pixels of the VIS channel are flagged as "use with '
'caution". Use datasets "quality_pixel_bitmask" and '
'"data_quality_bitmask" to find out why.'
'"data_quality_bitmask" to find out why.',
stacklevel=2
)

def mask(self, ds):
Expand Down
3 changes: 2 additions & 1 deletion satpy/readers/seviri_base.py
Expand Up @@ -781,7 +781,8 @@ def get_orbit_polynomial(self, time, max_delta=6):
except ValueError:
warnings.warn(
'No orbit polynomial valid for {}. Using closest '
'match.'.format(time)
'match.'.format(time),
stacklevel=2
)
match = self._get_closest_interval_within(time, max_delta)
return OrbitPolynomial(
Expand Down
7 changes: 6 additions & 1 deletion satpy/readers/seviri_l1b_native.py
Expand Up @@ -297,7 +297,12 @@ def _read_header(self):
self.mda['hrv_number_of_columns'] = cols_hrv

if self.header['15_MAIN_PRODUCT_HEADER']['QQOV']['Value'] == 'NOK':
warnings.warn("The quality flag for this file indicates not OK. Use this data with caution!", UserWarning)
warnings.warn(
"The quality flag for this file indicates not OK. "
"Use this data with caution!",
UserWarning,
stacklevel=2
)

def _read_trailer(self):

Expand Down
7 changes: 5 additions & 2 deletions satpy/readers/slstr_l1b.py
Expand Up @@ -170,8 +170,11 @@ def _apply_radiance_adjustment(self, radiances):
if chan_name in CHANCALIB_FACTORS:
adjust_fac = CHANCALIB_FACTORS[chan_name]
else:
warnings.warn("Warning: No radiance adjustment supplied " +
"for channel " + chan_name)
warnings.warn(
"Warning: No radiance adjustment supplied " +
"for channel " + chan_name,
stacklevel=3
)
return radiances
return radiances * adjust_fac

Expand Down
8 changes: 5 additions & 3 deletions satpy/readers/utils.py
Expand Up @@ -358,9 +358,11 @@ def get_user_calibration_factors(band_name, correction_dict):
"supply 'slope' and 'offset' keys.")
else:
# If coefficients not present, warn user and use slope=1, offset=0
warnings.warn("WARNING: You have selected radiance correction but "
" have not supplied coefficients for channel " +
band_name)
warnings.warn(
"WARNING: You have selected radiance correction but "
" have not supplied coefficients for channel " + band_name,
stacklevel=2
)
return 1., 0.

return slope, offset
Expand Down
4 changes: 2 additions & 2 deletions satpy/readers/yaml_reader.py
Expand Up @@ -504,10 +504,10 @@ def _new_filehandler_instances(self, filetype_info, filename_items, fh_kwargs=No
except KeyError as req:
msg = "No handler for reading requirement {} for {}".format(
req, filename)
warnings.warn(msg)
warnings.warn(msg, stacklevel=4)
continue
except RuntimeError as err:
warnings.warn(str(err) + ' for {}'.format(filename))
warnings.warn(str(err) + ' for {}'.format(filename), stacklevel=4)
continue

yield filetype_cls(filename, filename_info, filetype_info, *req_fh, **fh_kwargs)
Expand Down
64 changes: 46 additions & 18 deletions satpy/resample.py
Expand Up @@ -523,7 +523,10 @@ def precompute(self, mask=None, radius_of_influence=None, epsilon=0,

def _adjust_radius_of_influence(self, radius_of_influence):
"""Adjust radius of influence."""
warnings.warn("Upgrade 'pyresample' for a more accurate default 'radius_of_influence'.")
warnings.warn(
"Upgrade 'pyresample' for a more accurate default 'radius_of_influence'.",
stacklevel=3
)
try:
radius_of_influence = self.source_geo_def.lons.resolution * 3
except AttributeError:
Expand Down Expand Up @@ -561,8 +564,10 @@ def _check_numpy_cache(self, cache_dir, mask=None,
LOG.debug("Check if %s exists", fname_np)
if os.path.exists(fname_np) and not os.path.exists(fname_zarr):
import warnings
warnings.warn("Using Numpy files as resampling cache is "
"deprecated.")
warnings.warn(
"Using Numpy files as resampling cache is deprecated.",
stacklevel=3
)
LOG.warning("Converting resampling LUT from .npz to .zarr")
zarr_out = xr.Dataset()
with np.load(fname_np, 'r') as fid:
Expand Down Expand Up @@ -684,9 +689,12 @@ class _LegacySatpyEWAResampler(BaseResampler):

def __init__(self, source_geo_def, target_geo_def):
    """Init _LegacySatpyEWAResampler.

    Warns once that upgrading pyresample provides the newer 'ewa' and
    'ewa_legacy' resamplers, then initializes the base resampler and
    an empty precompute cache.

    Args:
        source_geo_def: Geometry definition of the input data.
        target_geo_def: Geometry definition to resample onto.
    """
    warnings.warn(
        "A new version of pyresample is available. Please "
        "upgrade to get access to a newer 'ewa' and "
        "'ewa_legacy' resampler.",
        stacklevel=2
    )
    super(_LegacySatpyEWAResampler, self).__init__(source_geo_def, target_geo_def)
    # Cache of precomputed resampling LUTs, keyed per target geometry.
    self.cache = {}

Expand Down Expand Up @@ -897,8 +905,11 @@ def load_bil_info(self, cache_dir, **kwargs):
try:
self.resampler.load_resampling_info(filename)
except AttributeError:
warnings.warn("Bilinear resampler can't handle caching, "
"please upgrade Pyresample to 0.17.0 or newer.")
warnings.warn(
"Bilinear resampler can't handle caching, "
"please upgrade Pyresample to 0.17.0 or newer.",
stacklevel=2
)
raise IOError
else:
raise IOError
Expand All @@ -916,8 +927,11 @@ def save_bil_info(self, cache_dir, **kwargs):
try:
self.resampler.save_resampling_info(filename)
except AttributeError:
warnings.warn("Bilinear resampler can't handle caching, "
"please upgrade Pyresample to 0.17.0 or newer.")
warnings.warn(
"Bilinear resampler can't handle caching, "
"please upgrade Pyresample to 0.17.0 or newer.",
stacklevel=2
)

def compute(self, data, fill_value=None, **kwargs):
"""Resample the given data using bilinear interpolation."""
Expand Down Expand Up @@ -1075,8 +1089,12 @@ def _rechunk_if_nonfactor_chunks(dask_arr, y_size, x_size):
new_dim_chunk = lcm(chunk_size, agg_size)
new_chunks[dim_idx] = new_dim_chunk
if need_rechunk:
warnings.warn("Array chunk size is not divisible by aggregation factor. "
"Re-chunking to continue native resampling.", PerformanceWarning)
warnings.warn(
"Array chunk size is not divisible by aggregation factor. "
"Re-chunking to continue native resampling.",
PerformanceWarning,
stacklevel=5
)
dask_arr = dask_arr.rechunk(tuple(new_chunks))
return dask_arr

Expand Down Expand Up @@ -1107,14 +1125,21 @@ def _get_arg_to_pass_for_skipna_handling(**kwargs):

if PR_USE_SKIPNA:
if 'mask_all_nan' in kwargs:
warnings.warn('Argument mask_all_nan is deprecated. Please use skipna for missing values handling. '
'Continuing with default skipna=True, if not provided differently.', DeprecationWarning)
warnings.warn(
'Argument mask_all_nan is deprecated. Please use skipna for missing values handling. '
'Continuing with default skipna=True, if not provided differently.',
DeprecationWarning,
stacklevel=3
)
kwargs.pop('mask_all_nan')
else:
if 'mask_all_nan' in kwargs:
warnings.warn('Argument mask_all_nan is deprecated.'
'Please update Pyresample and use skipna for missing values handling.',
DeprecationWarning)
warnings.warn(
'Argument mask_all_nan is deprecated.'
'Please update Pyresample and use skipna for missing values handling.',
DeprecationWarning,
stacklevel=3
)
kwargs.setdefault('mask_all_nan', False)
kwargs.pop('skipna')

Expand Down Expand Up @@ -1362,7 +1387,10 @@ def prepare_resampler(source_area, destination_area, resampler=None, **resample_
resampler_class = RESAMPLERS.get(resampler, None)
if resampler_class is None:
if resampler == "gradient_search":
warnings.warn('Gradient search resampler not available. Maybe missing `shapely`?')
warnings.warn(
'Gradient search resampler not available. Maybe missing `shapely`?',
stacklevel=2
)
raise KeyError("Resampler '%s' not available" % resampler)
else:
resampler_class = resampler
Expand Down

0 comments on commit 5f2e3a1

Please sign in to comment.