update(contour_array): add tri_mask kwarg to parameters (#2078)
* move `tri_mask` kwarg to parameters (see the usage sketch below)
* update docstrings for `tri_mask` kwarg
* re-lint with new version of black (v24.1.1)
* pin pytest < 8.0.0 (temporarily)
jlarsen-usgs committed Feb 1, 2024
1 parent f8eac0f commit 92853a9
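The headline change moves `tri_mask` from an undocumented `**kwargs` entry into the documented parameter list of the `contour_array` plotting method. Below is a minimal usage sketch; the grid, the array, and the assumption that `tri_mask` is a boolean toggle for masking the contouring triangulation are illustrative, not taken from this commit.

import numpy as np
import flopy
from flopy.discretization import StructuredGrid

# a small structured grid and a random array to contour (purely illustrative)
grid = StructuredGrid(delr=np.ones(10), delc=np.ones(10))
arr = np.random.random((10, 10))

pmv = flopy.plot.PlotMapView(modelgrid=grid)
# tri_mask is now an explicit, documented parameter rather than a value
# pulled out of **kwargs (assumed here to behave as a boolean switch)
contour_set = pmv.contour_array(arr, levels=np.linspace(0, 1, 11), tri_mask=True)

Because the keyword is part of the signature, it is also covered by the updated docstrings noted in the second bullet.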
Showing 81 changed files with 182 additions and 98 deletions.
1 change: 1 addition & 0 deletions flopy/__init__.py
@@ -19,6 +19,7 @@
contribute.
"""

# See CITATION.cff for authors
__author__ = "FloPy Team"

6 changes: 3 additions & 3 deletions flopy/export/netcdf.py
@@ -249,9 +249,9 @@ def __init__(
}
for n, v in spatial_attribs.items():
self.global_attributes["flopy_sr_" + n] = v
self.global_attributes[
"start_datetime"
] = self.model_time.start_datetime
self.global_attributes["start_datetime"] = (
self.model_time.start_datetime
)

self.fillvalue = FILLVALUE

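The netcdf.py hunk above is representative of the black v24.1.1 re-lint named in the commit message: where the old style split a subscripted assignment target across lines, the 2024 style keeps the target on one line and parenthesizes the right-hand side. The same rewrite repeats through most of the hunks below (mfdatastorage.py, mffileaccess.py, mfstructure.py, mfmodel.py, mfpackage.py, model_splitter.py). A small runnable sketch of the before/after pattern, using generic names rather than the repository's:

# stand-ins for the attribute dict and value seen in the hunk above
global_attributes = {}
start_datetime = "1/1/1970"

# black < 24 formatted a long subscripted assignment by splitting the target:
global_attributes[
    "start_datetime"
] = start_datetime

# black >= 24.1 keeps the subscript intact and wraps the value instead:
global_attributes["start_datetime"] = (
    start_datetime
)

Both forms are identical at runtime; only the formatting changes.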
1 change: 1 addition & 0 deletions flopy/export/shapefile_utils.py
@@ -2,6 +2,7 @@
Module for exporting and importing flopy model attributes
"""

import copy
import json
import os
1 change: 1 addition & 0 deletions flopy/mbase.py
@@ -4,6 +4,7 @@
all of the other models inherit from.
"""

import abc
import copy
import os
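The one-line additions in otherwise-untouched modules (flopy/__init__.py, shapefile_utils.py, mbase.py, and the mfusg files further down) come from another part of the same re-lint: black v24 normalizes the spacing around module docstrings, inserting a blank line where one was missing and trimming extras (mfusgwel.py loses a line for that reason, and output_util.py loses the blank line that sat between the class statement and its docstring). A minimal illustration, not a flopy module:

"""Example module docstring (illustrative, not a flopy module)."""

import os  # black >= 24.1 normalizes to a single blank line before this import

print(os.name)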
34 changes: 16 additions & 18 deletions flopy/mf6/data/mfdatastorage.py
@@ -1372,16 +1372,16 @@ def store_internal(
# convert numbers to be multiplied by the original factor
data = data * adjustment
if const:
self.layer_storage[
layer
].data_storage_type = DataStorageType.internal_constant
self.layer_storage[layer].data_storage_type = (
DataStorageType.internal_constant
)
self.layer_storage[layer].data_const_value = [
mfdatautil.get_first_val(data)
]
else:
self.layer_storage[
layer
].data_storage_type = DataStorageType.internal_array
self.layer_storage[layer].data_storage_type = (
DataStorageType.internal_array
)
try:
self.layer_storage[layer].internal_data = np.reshape(
data, dimensions
@@ -1410,12 +1410,10 @@ def store_internal(
data_type = self.data_dimensions.structure.get_datum_type(True)
dt = self.layer_storage[layer].internal_data.dtype
if dt != data_type:
self.layer_storage[
layer
].internal_data = self.layer_storage[
layer
].internal_data.astype(
data_type
self.layer_storage[layer].internal_data = (
self.layer_storage[layer].internal_data.astype(
data_type
)
)
if not preserve_record:
self.layer_storage[layer].factor = multiplier
@@ -1804,9 +1802,9 @@ def store_external(
if self._calc_data_size(data, 2) == 1 and data_size > 1:
# constant data, need to expand
self.layer_storage[layer_new].data_const_value = data
self.layer_storage[
layer_new
].data_storage_type = DataStorageType.internal_constant
self.layer_storage[layer_new].data_storage_type = (
DataStorageType.internal_constant
)
data = self._fill_const_layer(layer)
elif isinstance(data, list):
data = self._to_ndarray(data, layer)
@@ -1863,9 +1861,9 @@ def set_ext_file_attributes(self, layer, file_path, print_format, binary):
self.layer_storage[layer].fname = file_path
self.layer_storage[layer].iprn = print_format
self.layer_storage[layer].binary = binary
self.layer_storage[
layer
].data_storage_type = DataStorageType.external_file
self.layer_storage[layer].data_storage_type = (
DataStorageType.external_file
)

def point_to_existing_external_file(self, arr_line, layer):
(
6 changes: 3 additions & 3 deletions flopy/mf6/data/mffileaccess.py
@@ -1802,9 +1802,9 @@ def load_list_line(
keyword_data_item.type = (
DatumType.string
)
self._temp_dict[
data_item.name
] = keyword_data_item
self._temp_dict[data_item.name] = (
keyword_data_item
)
(
data_index,
more_data_expected,
19 changes: 10 additions & 9 deletions flopy/mf6/data/mfstructure.py
@@ -3,6 +3,7 @@
"""

import ast
import keyword
import os
@@ -275,9 +276,9 @@ def get_block_structure_dict(self, path, common, model_file, block_parent):
current_block.add_dataset(block_dataset_struct)
else:
new_data_item_struct.block_type = block_type
dataset_items_in_block[
new_data_item_struct.name
] = new_data_item_struct
dataset_items_in_block[new_data_item_struct.name] = (
new_data_item_struct
)

# if data item belongs to existing dataset(s)
item_location_found = False
@@ -585,9 +586,9 @@ def get_block_structure_dict(self, path, common, model_file, block_parent):
current_block.add_dataset(block_dataset_struct)
else:
new_data_item_struct.block_type = block_type
dataset_items_in_block[
new_data_item_struct.name
] = new_data_item_struct
dataset_items_in_block[new_data_item_struct.name] = (
new_data_item_struct
)

# if data item belongs to existing dataset(s)
item_location_found = False
@@ -651,9 +652,9 @@ def get_block_structure_dict(self, path, common, model_file, block_parent):
key,
val,
) in new_data_item_struct.keystring_dict.items():
keystring_items_needed_dict[
key
] = new_data_item_struct
keystring_items_needed_dict[key] = (
new_data_item_struct
)

# if data set does not exist
if not item_location_found:
1 change: 1 addition & 0 deletions flopy/mf6/mfbase.py
@@ -1,4 +1,5 @@
""" Base classes for Modflow 6 """

import copy
import inspect
import os
6 changes: 3 additions & 3 deletions flopy/mf6/mfmodel.py
@@ -907,9 +907,9 @@ def inspect_cells(
# call the package's "inspect_cells" method
package_output = pp.inspect_cells(cell_list, stress_period)
if len(package_output) > 0:
output_by_package[
f"{pp.package_name} package"
] = package_output
output_by_package[f"{pp.package_name} package"] = (
package_output
)
# get dependent variable
if inspect_dependent_var:
try:
18 changes: 9 additions & 9 deletions flopy/mf6/mfpackage.py
@@ -109,9 +109,9 @@ def __init__(
"blk_post_comment",
)
if self.blk_trailing_comment_path not in simulation_data.mfdata:
simulation_data.mfdata[
self.blk_trailing_comment_path
] = MFComment("", "", simulation_data, 0)
simulation_data.mfdata[self.blk_trailing_comment_path] = (
MFComment("", "", simulation_data, 0)
)
if self.blk_post_comment_path not in simulation_data.mfdata:
simulation_data.mfdata[self.blk_post_comment_path] = MFComment(
"\n", "", simulation_data, 0
@@ -856,9 +856,9 @@ def load(self, block_header, fd, strict=True):
aux_vars = self._container_package.auxiliary.get_data()
if aux_vars is not None:
for var_name in list(aux_vars[0])[1:]:
self.datasets_keyword[
(var_name,)
] = self._container_package.aux.structure
self.datasets_keyword[(var_name,)] = (
self._container_package.aux.structure
)

comments = []

@@ -2838,9 +2838,9 @@ def is_valid(self):

def _load_blocks(self, fd_input_file, strict=True, max_blocks=sys.maxsize):
# init
self._simulation_data.mfdata[
self.path + ("pkg_hdr_comments",)
] = MFComment("", self.path, self._simulation_data)
self._simulation_data.mfdata[self.path + ("pkg_hdr_comments",)] = (
MFComment("", self.path, self._simulation_data)
)
self.post_block_comments = MFComment(
"", self.path, self._simulation_data
)
1 change: 1 addition & 0 deletions flopy/mf6/utils/createpackages.py
@@ -80,6 +80,7 @@
files, the package classes, and updated init.py that createpackages.py created.
"""

import datetime
import os
import textwrap
46 changes: 28 additions & 18 deletions flopy/mf6/utils/model_splitter.py
@@ -2299,9 +2299,11 @@ def _remap_obs(self, package, mapped_data, remapper, pkg_type=None):
remaps = remapper[idt]
idx = np.where(idtype == idt)
new_cellid1[idx] = [
remaps[i][-1] + 1
if isinstance(i, int)
else i
(
remaps[i][-1] + 1
if isinstance(i, int)
else i
)
for i in obsid[idx]
]
new_model1[idx] = [
@@ -2311,9 +2313,11 @@ def _remap_obs(self, package, mapped_data, remapper, pkg_type=None):
else:
new_cellid1 = np.array(
[
remapper[i][-1] + 1
if isinstance(i, int)
else i
(
remapper[i][-1] + 1
if isinstance(i, int)
else i
)
for i in obsid
],
dtype=object,
@@ -2409,9 +2413,11 @@ def _remap_obs(self, package, mapped_data, remapper, pkg_type=None):
]
if self._modelgrid.grid_type == "structured":
cellid2 = [
(0, cid[1], cid[2])
if cid is not None
else None
(
(0, cid[1], cid[2])
if cid is not None
else None
)
for cid in cellid2
]
else:
@@ -2497,17 +2503,21 @@ def _remap_obs(self, package, mapped_data, remapper, pkg_type=None):
remaps = remapper[idt]
idx = np.where(idtype == idt)
new_cellid2[idx] = [
remaps[i][-1] + 1
if isinstance(i, int)
else i
(
remaps[i][-1] + 1
if isinstance(i, int)
else i
)
for i in obsid[idx]
]
else:
new_cellid2 = np.array(
[
remapper[i][-1] + 1
if isinstance(i, int)
else i
(
remapper[i][-1] + 1
if isinstance(i, int)
else i
)
for i in obsid
],
dtype=object,
@@ -2886,9 +2896,9 @@ def _remap_package(self, package, ismvr=False):
)
else:
for mkey in self._model_dict.keys():
mapped_data[mkey][
item
] = self._ivert_vert_remap[mkey][item]
mapped_data[mkey][item] = (
self._ivert_vert_remap[mkey][item]
)
mapped_data[mkey]["nvert"] = len(
self._ivert_vert_remap[mkey][item]
)
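The model_splitter.py hunks above show one more facet of the re-lint: when a conditional expression is the element of a multi-line comprehension, black v24.1 now wraps it in its own parentheses. A runnable sketch of the pattern with stand-in data:

# stand-ins for the remapping structures used in the hunks above
remapper = {0: (0, 4), 1: (0, 7)}
obsid = [0, "named_obs", 1]

# black >= 24.1 parenthesizes the conditional expression inside the
# comprehension once the element spans multiple lines:
new_cellid = [
    (
        remapper[i][-1] + 1
        if isinstance(i, int)
        else i
    )
    for i in obsid
]
print(new_cellid)  # [5, 'named_obs', 8]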
1 change: 0 additions & 1 deletion flopy/mf6/utils/output_util.py
@@ -14,7 +14,6 @@


class MF6Output:

"""
A class that uses meta programming to get output
1 change: 1 addition & 0 deletions flopy/mf6/utils/reference.py
@@ -2,6 +2,7 @@
Module spatial referencing for flopy model objects
"""

import numpy as np


1 change: 1 addition & 0 deletions flopy/mfusg/__init__.py
@@ -1,4 +1,5 @@
"""Initialize MfUsg."""

from .cln_dtypes import MfUsgClnDtypes
from .mfusg import MfUsg
from .mfusgbcf import MfUsgBcf
1 change: 1 addition & 0 deletions flopy/mfusg/cln_dtypes.py
@@ -1,4 +1,5 @@
"""Mfusg CLN dtype class."""

import numpy as np


1 change: 1 addition & 0 deletions flopy/mfusg/mfusg.py
@@ -1,4 +1,5 @@
"""Mfusg module."""

import os
from inspect import getfullargspec
from typing import Union
1 change: 1 addition & 0 deletions flopy/mfusg/mfusgbcf.py
@@ -4,6 +4,7 @@
Contains the MfUsgBcf class. Note that the user can
access the MfUsgBcf class as `flopy.mfusg.MfUsgBcf`.
"""

import numpy as np

from ..modflow import ModflowBcf
1 change: 1 addition & 0 deletions flopy/mfusg/mfusgdisu.py
@@ -2,6 +2,7 @@
mfdisu module. Contains the MfUsgDisU class. Note that the user can access
the MfUsgDisU class as `flopy.mfusg.MfUsgDisU`.
"""

import numpy as np

from ..discretization.unstructuredgrid import UnstructuredGrid
1 change: 1 addition & 0 deletions flopy/mfusg/mfusggnc.py
@@ -5,6 +5,7 @@
Contains the MfUsgGnc class. Note that the user can access
the MfUsgGnc class as `flopy.mfusg.MfUsgGnc`.
"""

import numpy as np

from ..modflow.mfparbc import ModflowParBc as mfparbc
1 change: 1 addition & 0 deletions flopy/mfusg/mfusglpf.py
@@ -8,6 +8,7 @@
MODFLOW Guide
<https://water.usgs.gov/ogw/modflow/MODFLOW-2005-Guide/lpf.html>`_.
"""

import numpy as np

from ..modflow.mflpf import ModflowLpf
1 change: 0 additions & 1 deletion flopy/mfusg/mfusgwel.py
@@ -9,7 +9,6 @@
<https://water.usgs.gov/ogw/modflow/MODFLOW-2005-Guide/wel.html>`_.
"""


from ..modflow.mfwel import ModflowWel
from ..utils import MfList
from .mfusg import MfUsg
