Skip to content

Commit

Permalink
formatted code using black formatter: https://black.readthedocs.io/en/stable/
Browse files Browse the repository at this point in the history
  • Loading branch information
monocongo committed Nov 11, 2018
1 parent 5976104 commit 1d48835
Show file tree
Hide file tree
Showing 6 changed files with 81 additions and 77 deletions.
14 changes: 10 additions & 4 deletions climate_indices/compute.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
from enum import Enum
import logging

# from dask.array import pad
# from dask_image.ndfilters import convolve
from lmoments3 import distr
import numba
import numpy as np
Expand Down Expand Up @@ -83,6 +85,14 @@ def sum_to_scale(values, scale):
# pad the first (n - 1) elements of the array with NaN values
return np.hstack(([np.NaN] * (scale - 1), sliding_sums))

# BELOW FOR dask/xarray DataArray integration
# # pad the values array with (scale - 1) NaNs
# values = pad(values, pad_width=(scale - 1, 0), mode='constant', constant_values=np.NaN)
#
# start = 1
# end = -(scale - 2)
# return convolve(values, np.ones(scale), mode='reflect', cval=0.0, origin=0)[start: end]


# -----------------------------------------------------------------------------------------------------------------------
@numba.jit
Expand Down Expand Up @@ -408,10 +418,6 @@ def transform_fitted_gamma(
if (calibration_start_year < data_start_year) or (
calibration_end_year > data_end_year
):
# _logger.info('Insufficient data for the specified calibration period ({0}-{1}),'.format(calibration_start_year,
# calibration_end_year) +
# ' instead using the full period of record ({0}-{1})'.format(data_start_year,
# data_end_year))
calibration_start_year = data_start_year
calibration_end_year = data_end_year

Expand Down
2 changes: 1 addition & 1 deletion scripts/process_divisions.py
Original file line number Diff line number Diff line change
Expand Up @@ -211,7 +211,7 @@ def _compute_and_write_division(self, div_index):
if not np.isnan(precip_time_series).all():

# put precipitation into inches if not already
mm_to_inches_multiplier = 0.0393701
mm_to_inches_multiplier = 0.039_370_1
possible_mm_units = ["millimeters", "millimeter", "mm"]
if input_divisions[self.var_name_precip].units in possible_mm_units:
precip_time_series = precip_time_series * mm_to_inches_multiplier
Expand Down
40 changes: 15 additions & 25 deletions scripts/process_grid_ufunc.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,9 @@

from climate_indices import compute, indices

# the number of worker processes we'll use for process pools
_NUMBER_OF_WORKER_PROCESSES = multiprocessing.cpu_count()

# ----------------------------------------------------------------------------------------------------------------------
# set up a basic, global _logger which will write to the console as standard error
logging.basicConfig(
Expand Down Expand Up @@ -285,9 +288,9 @@ def compute_write_spi(kwrgs):
data_start_year = int(str(dataset["time"].values[0])[0:4])

# get the scale increment for use in later log messages
if kwrgs["periodicity"] is compute.Periodicity.daily:
if kwrgs["periodicity"] == compute.Periodicity.daily:
scale_increment = "day"
elif kwrgs["periodicity"] is compute.Periodicity.monthly:
elif kwrgs["periodicity"] == compute.Periodicity.monthly:
scale_increment = "month"
else:
raise ValueError(
Expand Down Expand Up @@ -321,7 +324,9 @@ def compute_write_spi(kwrgs):
}

# apply the SPI function to the data array
da_spi = xr.apply_ufunc(indices.spi, da_precip_groupby, kwargs=args_dict)
da_spi = xr.apply_ufunc(
indices.spi, da_precip_groupby, dask="allowed", kwargs=args_dict
)

# unstack the array back into original dimensions
da_spi = da_spi.unstack("point")
Expand Down Expand Up @@ -366,9 +371,9 @@ def compute_write_pnp(kwrgs):
data_start_year = int(str(dataset["time"].values[0])[0:4])

# get the scale increment for use in later log messages
if kwrgs["periodicity"] is compute.Periodicity.daily:
if kwrgs["periodicity"] == compute.Periodicity.daily:
scale_increment = "day"
elif kwrgs["periodicity"] is compute.Periodicity.monthly:
elif kwrgs["periodicity"] == compute.Periodicity.monthly:
scale_increment = "month"
else:
raise ValueError(
Expand Down Expand Up @@ -445,9 +450,9 @@ def compute_write_spei(kwrgs):
data_start_year = int(str(dataset["time"].values[0])[0:4])

# get the scale increment for use in later log messages
if kwrgs["periodicity"] is compute.Periodicity.daily:
if kwrgs["periodicity"] == compute.Periodicity.daily:
scale_increment = "day"
elif kwrgs["periodicity"] is compute.Periodicity.monthly:
elif kwrgs["periodicity"] == compute.Periodicity.monthly:
scale_increment = "month"
else:
raise ValueError(
Expand Down Expand Up @@ -751,13 +756,8 @@ def run_multi_pnp(
output_file_base,
):

# the number of worker processes we'll use in our process pool
number_of_workers = (
multiprocessing.cpu_count()
) # NOTE use 1 here when debugging for less butt hurt

# create a process Pool for worker processes which will compute indices
pool = multiprocessing.Pool(processes=number_of_workers)
pool = multiprocessing.Pool(processes=_NUMBER_OF_WORKER_PROCESSES)

# create an iterable of arguments specific to the function that we'll call within each worker process
args = []
Expand Down Expand Up @@ -797,13 +797,8 @@ def run_multi_spi(
output_file_base,
):

# the number of worker processes we'll use in our process pool
number_of_workers = (
multiprocessing.cpu_count()
) # NOTE use 1 here when debugging for less butt hurt

# create a process Pool for worker processes which will compute indices
pool = multiprocessing.Pool(processes=number_of_workers)
pool = multiprocessing.Pool(processes=_NUMBER_OF_WORKER_PROCESSES)

# create an iterable of arguments specific to the function that we'll call within each worker process
args = []
Expand Down Expand Up @@ -848,13 +843,8 @@ def run_multi_spei(
output_file_base,
):

# the number of worker processes we'll use in our process pool
number_of_workers = (
multiprocessing.cpu_count()
) # NOTE use 1 here when debugging for less butt hurt

# create a process Pool for worker processes which will compute indices
pool = multiprocessing.Pool(processes=number_of_workers)
pool = multiprocessing.Pool(processes=_NUMBER_OF_WORKER_PROCESSES)

# create an iterable of arguments specific to the function that we'll call within each worker process
args = []
Expand Down
81 changes: 44 additions & 37 deletions tests/test_compute.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,46 +173,46 @@ def test_pearson3_fitting_values(self):
[
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[
48.539987664499996,
53.9852487665,
44.284745065842102,
62.583727384894736,
125.72157689160528,
182.03053042784214,
159.00575657926319,
170.92269736865791,
189.8925781252895,
155.13420024692104,
72.953125000026319,
43.31532689144737,
48.539_987_664_499_996,
53.985_248_766_5,
44.284_745_065_842_102,
62.583_727_384_894_736,
125.721_576_891_605_28,
182.030_530_427_842_14,
159.005_756_579_263_19,
170.922_697_368_657_91,
189.892_578_125_289_5,
155.134_200_246_921_04,
72.953_125_000_026_319,
43.315_326_891_447_37,
],
[
33.781507724523095,
43.572151699968387,
40.368173442404107,
44.05329691434887,
60.10621716019174,
59.343178125457186,
49.228795303727473,
66.775653341386999,
65.362977393206421,
94.467597091088265,
72.63706898364299,
34.250906049301463,
33.781_507_724_523_095,
43.572_151_699_968_387,
40.368_173_442_404_107,
44.053_296_914_348_87,
60.106_217_160_191_74,
59.343_178_125_457_186,
49.228_795_303_727_473,
66.775_653_341_386_999,
65.362_977_393_206_421,
94.467_597_091_088_265,
72.637_068_983_642_99,
34.250_906_049_301_463,
],
[
0.76530966976335302,
1.2461447518219784,
2.275517179222323,
0.8069305098698194,
-0.6783037020197018,
1.022194696224529,
0.40876120732817578,
1.2372551346168916,
0.73881116931924118,
0.91911763257003465,
2.3846715887263725,
1.4700559294571962,
0.765_309_669_763_353_02,
1.246_144_751_821_978_4,
2.275_517_179_222_323,
0.806_930_509_869_819_4,
-0.678_303_702_019_701_8,
1.022_194_696_224_529,
0.408_761_207_328_175_78,
1.237_255_134_616_891_6,
0.738_811_169_319_241_18,
0.919_117_632_570_034_65,
2.384_671_588_726_372_5,
1.470_055_929_457_196_2,
],
]
)
Expand Down Expand Up @@ -312,14 +312,21 @@ def test_sum_to_scale():
"""

# test an input array with no missing values
values = np.array([3, 4, 6, 2, 1, 3, 5, 8, 5])
values = np.array([3.0, 4, 6, 2, 1, 3, 5, 8, 5])
computed_values = compute.sum_to_scale(values, 3)
expected_values = np.array([np.NaN, np.NaN, 13, 12, 9, 6, 9, 16, 18])
np.testing.assert_allclose(
computed_values,
expected_values,
err_msg="Sliding sums not computed as expected",
)
computed_values = compute.sum_to_scale(values, 4)
expected_values = np.array([np.NaN, np.NaN, np.NaN, 15, 13, 12, 11, 17, 21])
np.testing.assert_allclose(
computed_values,
expected_values,
err_msg="Sliding sums not computed as expected",
)

# test an input array with missing values on the end
values = np.array([3, 4, 6, 2, 1, 3, 5, 8, 5, np.NaN, np.NaN, np.NaN])
Expand Down
6 changes: 3 additions & 3 deletions tests/test_eto.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ def test_sunset_hour_angle(self):
err_msg="Sunset hour angle not computed as expected",
)

expected_value = 1.6018925422201806
expected_value = 1.601_892_542_220_180_6
computed_value = eto._sunset_hour_angle(np.deg2rad(10.0), np.deg2rad(10.0))
np.testing.assert_equal(
computed_value,
Expand All @@ -137,7 +137,7 @@ def test_solar_declination(self):
self.assertRaises(ValueError, eto._solar_declination, 5000)
self.assertRaises(ValueError, eto._solar_declination, np.NaN)

expected_value = -0.313551072399921
expected_value = -0.313_551_072_399_921
computed_value = eto._solar_declination(30)
np.testing.assert_equal(
computed_value,
Expand All @@ -153,7 +153,7 @@ def test_daylight_hours(self):
self.assertRaises(ValueError, eto._daylight_hours, -1.0)
self.assertRaises(ValueError, eto._daylight_hours, np.NaN)

expected_value = 7.999999999999999
expected_value = 7.999_999_999_999_999
computed_value = eto._daylight_hours(math.pi / 3)
np.testing.assert_equal(
computed_value,
Expand Down
15 changes: 8 additions & 7 deletions tests/test_indices.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,10 +142,11 @@ def test_pnp(self):
self.fixture_calibration_year_end_monthly,
compute.Periodicity.monthly,
)
np.testing.assert_equal(
computed_pnp,
all_nan_precips,
"All-NaN input array does not result in the expected all-NaN result",
np.testing.assert_allclose(
computed_pnp.flatten(),
all_nan_precips.flatten(),
equal_nan=True,
err_msg="All-NaN input array does not result in the expected all-NaN result",
)

# compute PNP from the daily precipitation array
Expand All @@ -160,9 +161,9 @@ def test_pnp(self):

# confirm PNP is being computed as expected
np.testing.assert_allclose(
self.fixture_pnp_6month,
computed_pnp_6month,
atol=0.001,
self.fixture_pnp_6month.flatten(),
computed_pnp_6month.flatten(),
atol=0.01,
equal_nan=True,
err_msg="PNP values not computed as expected",
)
Expand Down

0 comments on commit 1d48835

Please sign in to comment.