You signed in with another tab or window. Reload to refresh your session. You signed out in another tab or window. Reload to refresh your session. You switched accounts on another tab or window. Reload to refresh your session. Dismiss alert
# Fallback method to build sparse matrix
sw = lat2SW(*shape, criterion)
if "nodatavals" in da.attrs and da.attrs["nodatavals"]:
> sw = sw[mask]
E TypeError: 'dia_matrix' object is not subscriptable
libpysal/weights/raster.py:252: TypeError
Perhaps this is related to #419, based on the test names.
Platform information: posix linux posix.uname_result(sysname='Linux', release='5.12.11-300.fc34.x86_64', version='#1 SMP Wed Jun 16 15:47:58 UTC 2021', machine='x86_64')
Python version: 3.10.0b3 (default, Jun 17 2021, 00:00:00) [GCC 11.1.1 20210617 (Red Hat 11.1.1-5)]
SciPy version: 1.7.0
NumPy version: 1.20.1
Full pytest traceback
_____________________________ Testraster.test_da2W _____________________________
da = <xarray.DataArray (band: 1, y: 4, x: 4)>
array([[[-32768, -32768, -32768, 129],
[-32768, -32768, -32768, -3...float64 90.030.0-30.0-90.0
* x (x) float64 -180.0 -60.0 60.0 180.0
Attributes:
nodatavals: (-32768.0,)
criterion = 'queen', z_value = None
coords_labels = {'x_label': 'x', 'y_label': 'y', 'z_label': 'band'}, k = 2
include_nodata = False, n_jobs = -1
def da2WSP(
da,
criterion="queen",
z_value=None,
coords_labels={},
k=1,
include_nodata=False,
n_jobs=1,
):
"""
Create a WSP object from xarray.DataArray with an additional
attribute index containing coordinate values of the raster
in the form of Pandas.Index/MultiIndex.
Parameters
----------
da : xarray.DataArray
Input 2D or 3D DataArray with shape=(z, y, x)
criterion : {"rook", "queen"}
Type of contiguity. Default is queen.
z_value : int/string/float
Select the z_value of 3D DataArray with multiple layers.
coords_labels : dictionary
Pass dimension labels for coordinates and layers if they do not
belong to default dimensions, which are (band/time, y/lat, x/lon)
e.g. coords_labels = {"y_label": "latitude", "x_label": "longitude", "z_label": "year"}
Default is {} empty dictionary.
k : int
Order of contiguity, this will select all neighbors upto kth order.
Default is 1.
include_nodata : boolean
If True, missing values will be assumed as non-missing when
selecting higher_order neighbors. Default is False
n_jobs : int
Number of cores to be used in the sparse weight construction. If -1,
all available cores are used. Default is 1.
Returns
-------
wsp : libpysal.weights.WSP
instance of spatial weights class WSP with an index attribute
Notes
-----
1. Lower order contiguities are also selected.
2. Returned object contains `index` attribute that includes a
`Pandas.MultiIndex` object from the DataArray.
Examples
--------
>>> from libpysal.weights.raster import da2WSP, testDataArray
>>> da = testDataArray().rename(
{'band': 'layer', 'x': 'longitude', 'y': 'latitude'})
>>> da.dims
('layer', 'latitude', 'longitude')
>>> da.shape
(3, 4, 4)
>>> da.coords
Coordinates:
* layer      (layer) int64 1 2 3
* latitude   (latitude) float64 90.0 30.0 -30.0 -90.0
* longitude  (longitude) float64 -180.0 -60.0 60.0 180.0
>>> da.attrs
{'nodatavals': (-32768.0,)}
>>> coords_labels = {
"z_label": "layer",
"y_label": "latitude",
"x_label": "longitude"
}
>>> wsp = da2WSP(da, z_value=2, coords_labels=coords_labels)
>>> wsp.n
10
>>> pct_sp = wsp.sparse.nnz * 1. / wsp.n**2
>>> "%.3f" % pct_sp
'0.300'
>>> print(wsp.sparse[4].todense())
[[0 0 1 0 0 1 1 1 0 0]]
>>> wsp.index[:2]
MultiIndex([(2, 90.0, 60.0),
(2, 90.0, 180.0)],
names=['layer', 'latitude', 'longitude'])
See Also
--------
:class:`libpysal.weights.weights.WSP`"""
z_id, coords_labels = _da_checker(da, z_value, coords_labels)
shape = da.shape
if z_id:
slice_dict = {}
slice_dict[coords_labels["z_label"]] = 0
shape = da[slice_dict].shape
slice_dict[coords_labels["z_label"]] = slice(z_id - 1, z_id)
da = da[slice_dict]
ser = da.to_series()
dtype = np.int32 if (shape[0] * shape[1]) < 46340**2 else np.int64
if "nodatavals" in da.attrs and da.attrs["nodatavals"]:
mask = (ser != da.attrs["nodatavals"][0]).to_numpy()
ids = np.where(mask)[0]
id_map = _idmap(ids, mask, dtype)
ser = ser[ser != da.attrs["nodatavals"][0]]
else:
ids = np.arange(len(ser), dtype=dtype)
id_map = ids.copy()
n = len(ids)
try:
> import numba
E ModuleNotFoundError: No module named 'numba'
libpysal/weights/raster.py:241: ModuleNotFoundError
During handling of the above exception, another exception occurred:
self = <libpysal.weights.tests.test_raster.Testraster testMethod=test_da2W>
def test_da2W(self):
> w1 = raster.da2W(self.da1, "queen", k=2, n_jobs=-1)
libpysal/weights/tests/test_raster.py:20:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
libpysal/weights/raster.py:124: in da2W
wsp = da2WSP(da, criterion, z_value, coords_labels, k, include_nodata, n_jobs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
da = <xarray.DataArray (band: 1, y: 4, x: 4)>
array([[[-32768, -32768, -32768, 129],
[-32768, -32768, -32768, -3...float64 90.030.0-30.0-90.0
* x (x) float64 -180.0 -60.0 60.0 180.0
Attributes:
nodatavals: (-32768.0,)
criterion = 'queen', z_value = None
coords_labels = {'x_label': 'x', 'y_label': 'y', 'z_label': 'band'}, k = 2
include_nodata = False, n_jobs = -1
defda2WSP(
da,
criterion="queen",
z_value=None,
coords_labels={},
k=1,
include_nodata=False,
n_jobs=1,
):
"""
Create a WSPobjectfrom xarray.DataArray with an additional
attribute index containing coordinate values of the raster
in the form of Pandas.Index/MultiIndex.
Parameters
----------
da : xarray.DataArray
Input 2Dor3D DataArray with shape=(z, y, x)
criterion : {"rook", "queen"}
Type of contiguity. Default is queen.
z_value : int/string/float
Select the z_value of 3D DataArray with multiple layers.
coords_labels : dictionary
Pass dimension labels for coordinates and layers if they do not
belong to default dimensions, which are (band/time, y/lat, x/lon)
e.g. coords_labels = {"y_label": "latitude", "x_label": "longitude", "z_label": "year"}
Default is {} empty dictionary.
k : int
Order of contiguity, this will select all neighbors upto kth order.
Default is1.
include_nodata : boolean
If True, missing values will be assumed as non-missing when
selecting higher_order neighbors, Default isFalse
n_jobs : int
Number of cores to be used in the sparse weight construction. If -1,
all available cores are used. Default is1.
Returns
-------
wsp : libpysal.weights.WSP
instance of spatial weights classWSPwith an index attribute
Notes
-----1. Lower order contiguities are also selected.
2. Returned object contains `index` attribute that includes a
`Pandas.MultiIndex`objectfrom the DataArray.
Examples
-------->>>from libpysal.weights.raster import da2WSP, testDataArray
>>> da = testDataArray().rename(
{'band': 'layer', 'x': 'longitude', 'y': 'latitude'})
>>> da.dims
('layer', 'latitude', 'longitude')
>>> da.shape
(3, 4, 4)
>>> da.coords
Coordinates:
* layer (layer) int64 123* latitude (latitude) float64 90.030.0-30.0-90.0* longitude (longitude) float64 -180.0-60.060.0180.0>>> da.attrs
{'nodatavals': (-32768.0,)}
>>> coords_labels = {
"z_label": "layer",
"y_label": "latitude",
"x_label": "longitude"
}
>>> wsp = da2WSP(da, z_value=2, coords_labels=coords_labels)
>>> wsp.n
10>>> pct_sp = wsp.sparse.nnz *1. / wsp.n**2>>>"%.3f"%pct_sp
'0.300'>>>print(wsp.sparse[4].todense())
[[0010011100]]
>>> wsp.index[:2]
MultiIndex([(2, 90.0, 60.0),
(2, 90.0, 180.0)],
names=['layer', 'latitude', 'longitude'])
See Also
--------
:class:`libpysal.weights.weights.WSP`"""
z_id, coords_labels = _da_checker(da, z_value, coords_labels)
shape = da.shape
if z_id:
slice_dict = {}
slice_dict[coords_labels["z_label"]] =0
shape = da[slice_dict].shape
slice_dict[coords_labels["z_label"]] =slice(z_id -1, z_id)
da = da[slice_dict]
ser = da.to_series()
dtype = np.int32 if (shape[0] * shape[1]) <46340**2else np.int64
if"nodatavals"in da.attrs and da.attrs["nodatavals"]:
mask = (ser != da.attrs["nodatavals"][0]).to_numpy()
ids = np.where(mask)[0]
id_map = _idmap(ids, mask, dtype)
ser = ser[ser != da.attrs["nodatavals"][0]]
else:
ids = np.arange(len(ser), dtype=dtype)
id_map = ids.copy()
n =len(ids)
try:
import numba
except (ModuleNotFoundError, ImportError):
warn(
    "numba cannot be imported, parallel processing "
    "and include_nodata functionality will be disabled. "
    "falling back to slower method"
)
include_nodata = False  # Fallback method to build sparse matrix
sw = lat2SW(*shape, criterion)
if "nodatavals" in da.attrs and da.attrs["nodatavals"]:
> sw = sw[mask]
E TypeError: 'dia_matrix' object is not subscriptable
libpysal/weights/raster.py:252: TypeError
____________________________ Testraster.test_da2WSP ____________________________
da = <xarray.DataArray (band: 1, y: 4, x: 4)>
array([[[-32768, -32768, -32768, 129],
[-32768, -32768, -32768, -3...float64 90.030.0-30.0-90.0
* x (x) float64 -180.0 -60.0 60.0 180.0
Attributes:
nodatavals: (-32768.0,)
criterion = 'rook', z_value = None
coords_labels = {'x_label': 'x', 'y_label': 'y', 'z_label': 'band'}, k = 1
include_nodata = False, n_jobs = -1
defda2WSP(
da,
criterion="queen",
z_value=None,
coords_labels={},
k=1,
include_nodata=False,
n_jobs=1,
):
"""
Create a WSPobjectfrom xarray.DataArray with an additional
attribute index containing coordinate values of the raster
in the form of Pandas.Index/MultiIndex.
Parameters
----------
da : xarray.DataArray
Input 2Dor3D DataArray with shape=(z, y, x)
criterion : {"rook", "queen"}
Type of contiguity. Default is queen.
z_value : int/string/float
Select the z_value of 3D DataArray with multiple layers.
coords_labels : dictionary
Pass dimension labels for coordinates and layers if they do not
belong to default dimensions, which are (band/time, y/lat, x/lon)
e.g. coords_labels = {"y_label": "latitude", "x_label": "longitude", "z_label": "year"}
Default is {} empty dictionary.
k : int
Order of contiguity, this will select all neighbors upto kth order.
Default is1.
include_nodata : boolean
If True, missing values will be assumed as non-missing when
selecting higher_order neighbors, Default isFalse
n_jobs : int
Number of cores to be used in the sparse weight construction. If -1,
all available cores are used. Default is1.
Returns
-------
wsp : libpysal.weights.WSP
instance of spatial weights classWSPwith an index attribute
Notes
-----1. Lower order contiguities are also selected.
2. Returned object contains `index` attribute that includes a
`Pandas.MultiIndex`objectfrom the DataArray.
Examples
-------->>>from libpysal.weights.raster import da2WSP, testDataArray
>>> da = testDataArray().rename(
{'band': 'layer', 'x': 'longitude', 'y': 'latitude'})
>>> da.dims
('layer', 'latitude', 'longitude')
>>> da.shape
(3, 4, 4)
>>> da.coords
Coordinates:
* layer (layer) int64 123* latitude (latitude) float64 90.030.0-30.0-90.0* longitude (longitude) float64 -180.0-60.060.0180.0>>> da.attrs
{'nodatavals': (-32768.0,)}
>>> coords_labels = {
"z_label": "layer",
"y_label": "latitude",
"x_label": "longitude"
}
>>> wsp = da2WSP(da, z_value=2, coords_labels=coords_labels)
>>> wsp.n
10>>> pct_sp = wsp.sparse.nnz *1. / wsp.n**2>>>"%.3f"%pct_sp
'0.300'>>>print(wsp.sparse[4].todense())
[[0010011100]]
>>> wsp.index[:2]
MultiIndex([(2, 90.0, 60.0),
(2, 90.0, 180.0)],
names=['layer', 'latitude', 'longitude'])
See Also
--------
:class:`libpysal.weights.weights.WSP`"""
z_id, coords_labels = _da_checker(da, z_value, coords_labels)
shape = da.shape
if z_id:
slice_dict = {}
slice_dict[coords_labels["z_label"]] =0
shape = da[slice_dict].shape
slice_dict[coords_labels["z_label"]] =slice(z_id -1, z_id)
da = da[slice_dict]
ser = da.to_series()
dtype = np.int32 if (shape[0] * shape[1]) <46340**2else np.int64
if"nodatavals"in da.attrs and da.attrs["nodatavals"]:
mask = (ser != da.attrs["nodatavals"][0]).to_numpy()
ids = np.where(mask)[0]
id_map = _idmap(ids, mask, dtype)
ser = ser[ser != da.attrs["nodatavals"][0]]
else:
ids = np.arange(len(ser), dtype=dtype)
id_map = ids.copy()
n =len(ids)
try:
> import numba
E ModuleNotFoundError: No module named 'numba'
libpysal/weights/raster.py:241: ModuleNotFoundError
During handling of the above exception, another exception occurred:
self = <libpysal.weights.tests.test_raster.Testraster testMethod=test_da2WSP>
def test_da2WSP(self):
> w1 = raster.da2WSP(self.da1, "rook", n_jobs=-1)
libpysal/weights/tests/test_raster.py:54:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
da = <xarray.DataArray (band: 1, y: 4, x: 4)>
array([[[-32768, -32768, -32768, 129],
[-32768, -32768, -32768, -3...float64 90.030.0-30.0-90.0
* x (x) float64 -180.0 -60.0 60.0 180.0
Attributes:
nodatavals: (-32768.0,)
criterion = 'rook', z_value = None
coords_labels = {'x_label': 'x', 'y_label': 'y', 'z_label': 'band'}, k = 1
include_nodata = False, n_jobs = -1
defda2WSP(
da,
criterion="queen",
z_value=None,
coords_labels={},
k=1,
include_nodata=False,
n_jobs=1,
):
"""
Create a WSPobjectfrom xarray.DataArray with an additional
attribute index containing coordinate values of the raster
in the form of Pandas.Index/MultiIndex.
Parameters
----------
da : xarray.DataArray
Input 2Dor3D DataArray with shape=(z, y, x)
criterion : {"rook", "queen"}
Type of contiguity. Default is queen.
z_value : int/string/float
Select the z_value of 3D DataArray with multiple layers.
coords_labels : dictionary
Pass dimension labels for coordinates and layers if they do not
belong to default dimensions, which are (band/time, y/lat, x/lon)
e.g. coords_labels = {"y_label": "latitude", "x_label": "longitude", "z_label": "year"}
Default is {} empty dictionary.
k : int
Order of contiguity, this will select all neighbors upto kth order.
Default is1.
include_nodata : boolean
If True, missing values will be assumed as non-missing when
selecting higher_order neighbors, Default isFalse
n_jobs : int
Number of cores to be used in the sparse weight construction. If -1,
all available cores are used. Default is1.
Returns
-------
wsp : libpysal.weights.WSP
instance of spatial weights classWSPwith an index attribute
Notes
-----1. Lower order contiguities are also selected.
2. Returned object contains `index` attribute that includes a
`Pandas.MultiIndex`objectfrom the DataArray.
Examples
-------->>>from libpysal.weights.raster import da2WSP, testDataArray
>>> da = testDataArray().rename(
{'band': 'layer', 'x': 'longitude', 'y': 'latitude'})
>>> da.dims
('layer', 'latitude', 'longitude')
>>> da.shape
(3, 4, 4)
>>> da.coords
Coordinates:
* layer (layer) int64 123* latitude (latitude) float64 90.030.0-30.0-90.0* longitude (longitude) float64 -180.0-60.060.0180.0>>> da.attrs
{'nodatavals': (-32768.0,)}
>>> coords_labels = {
"z_label": "layer",
"y_label": "latitude",
"x_label": "longitude"
}
>>> wsp = da2WSP(da, z_value=2, coords_labels=coords_labels)
>>> wsp.n
10>>> pct_sp = wsp.sparse.nnz *1. / wsp.n**2>>>"%.3f"%pct_sp
'0.300'>>>print(wsp.sparse[4].todense())
[[0010011100]]
>>> wsp.index[:2]
MultiIndex([(2, 90.0, 60.0),
(2, 90.0, 180.0)],
names=['layer', 'latitude', 'longitude'])
See Also
--------
:class:`libpysal.weights.weights.WSP`"""
z_id, coords_labels = _da_checker(da, z_value, coords_labels)
shape = da.shape
if z_id:
slice_dict = {}
slice_dict[coords_labels["z_label"]] =0
shape = da[slice_dict].shape
slice_dict[coords_labels["z_label"]] =slice(z_id -1, z_id)
da = da[slice_dict]
ser = da.to_series()
dtype = np.int32 if (shape[0] * shape[1]) <46340**2else np.int64
if"nodatavals"in da.attrs and da.attrs["nodatavals"]:
mask = (ser != da.attrs["nodatavals"][0]).to_numpy()
ids = np.where(mask)[0]
id_map = _idmap(ids, mask, dtype)
ser = ser[ser != da.attrs["nodatavals"][0]]
else:
ids = np.arange(len(ser), dtype=dtype)
id_map = ids.copy()
n =len(ids)
try:
import numba
except (ModuleNotFoundError, ImportError):
warn(
"numba cannot be imported, parallel processing ""and include_nodata functionality will be disabled. ""falling back to slower method"
)
include_nodata =False# Fallback method to build sparse matrix
sw = lat2SW(*shape, criterion)
if"nodatavals"in da.attrs and da.attrs["nodatavals"]:
> sw = sw[mask]
E TypeError: 'dia_matrix' object is not subscriptable
libpysal/weights/raster.py:252: TypeError
____________________________ Testraster.test_wsp2da ____________________________
da = <xarray.DataArray (band: 1, y: 4, x: 4)>
array([[[-32768, -32768, -32768, 129],
[-32768, -32768, -32768, -3...float64 90.030.0-30.0-90.0
* x (x) float64 -180.0 -60.0 60.0 180.0
Attributes:
nodatavals: (-32768.0,)
criterion = 'queen', z_value = None
coords_labels = {'x_label': 'x', 'y_label': 'y', 'z_label': 'band'}, k = 1
include_nodata = False, n_jobs = 1
defda2WSP(
da,
criterion="queen",
z_value=None,
coords_labels={},
k=1,
include_nodata=False,
n_jobs=1,
):
"""
Create a WSPobjectfrom xarray.DataArray with an additional
attribute index containing coordinate values of the raster
in the form of Pandas.Index/MultiIndex.
Parameters
----------
da : xarray.DataArray
Input 2Dor3D DataArray with shape=(z, y, x)
criterion : {"rook", "queen"}
Type of contiguity. Default is queen.
z_value : int/string/float
Select the z_value of 3D DataArray with multiple layers.
coords_labels : dictionary
Pass dimension labels for coordinates and layers if they do not
belong to default dimensions, which are (band/time, y/lat, x/lon)
e.g. coords_labels = {"y_label": "latitude", "x_label": "longitude", "z_label": "year"}
Default is {} empty dictionary.
k : int
Order of contiguity, this will select all neighbors upto kth order.
Default is1.
include_nodata : boolean
If True, missing values will be assumed as non-missing when
selecting higher_order neighbors, Default isFalse
n_jobs : int
Number of cores to be used in the sparse weight construction. If -1,
all available cores are used. Default is1.
Returns
-------
wsp : libpysal.weights.WSP
instance of spatial weights classWSPwith an index attribute
Notes
-----1. Lower order contiguities are also selected.
2. Returned object contains `index` attribute that includes a
`Pandas.MultiIndex`objectfrom the DataArray.
Examples
-------->>>from libpysal.weights.raster import da2WSP, testDataArray
>>> da = testDataArray().rename(
{'band': 'layer', 'x': 'longitude', 'y': 'latitude'})
>>> da.dims
('layer', 'latitude', 'longitude')
>>> da.shape
(3, 4, 4)
>>> da.coords
Coordinates:
* layer (layer) int64 123* latitude (latitude) float64 90.030.0-30.0-90.0* longitude (longitude) float64 -180.0-60.060.0180.0>>> da.attrs
{'nodatavals': (-32768.0,)}
>>> coords_labels = {
"z_label": "layer",
"y_label": "latitude",
"x_label": "longitude"
}
>>> wsp = da2WSP(da, z_value=2, coords_labels=coords_labels)
>>> wsp.n
10>>> pct_sp = wsp.sparse.nnz *1. / wsp.n**2>>>"%.3f"%pct_sp
'0.300'>>>print(wsp.sparse[4].todense())
[[0010011100]]
>>> wsp.index[:2]
MultiIndex([(2, 90.0, 60.0),
(2, 90.0, 180.0)],
names=['layer', 'latitude', 'longitude'])
See Also
--------
:class:`libpysal.weights.weights.WSP`"""
z_id, coords_labels = _da_checker(da, z_value, coords_labels)
shape = da.shape
if z_id:
slice_dict = {}
slice_dict[coords_labels["z_label"]] =0
shape = da[slice_dict].shape
slice_dict[coords_labels["z_label"]] =slice(z_id -1, z_id)
da = da[slice_dict]
ser = da.to_series()
dtype = np.int32 if (shape[0] * shape[1]) <46340**2else np.int64
if"nodatavals"in da.attrs and da.attrs["nodatavals"]:
mask = (ser != da.attrs["nodatavals"][0]).to_numpy()
ids = np.where(mask)[0]
id_map = _idmap(ids, mask, dtype)
ser = ser[ser != da.attrs["nodatavals"][0]]
else:
ids = np.arange(len(ser), dtype=dtype)
id_map = ids.copy()
n =len(ids)
try:
> import numba
E ModuleNotFoundError: No module named 'numba'
libpysal/weights/raster.py:241: ModuleNotFoundError
During handling of the above exception, another exception occurred:
self = <libpysal.weights.tests.test_raster.Testraster testMethod=test_wsp2da>
def test_wsp2da(self):
> wsp1 = raster.da2WSP(self.da1, "queen")
libpysal/weights/tests/test_raster.py:89:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
da = <xarray.DataArray (band: 1, y: 4, x: 4)>
array([[[-32768, -32768, -32768, 129],
[-32768, -32768, -32768, -3...float64 90.030.0-30.0-90.0
* x (x) float64 -180.0 -60.0 60.0 180.0
Attributes:
nodatavals: (-32768.0,)
criterion = 'queen', z_value = None
coords_labels = {'x_label': 'x', 'y_label': 'y', 'z_label': 'band'}, k = 1
include_nodata = False, n_jobs = 1
defda2WSP(
da,
criterion="queen",
z_value=None,
coords_labels={},
k=1,
include_nodata=False,
n_jobs=1,
):
"""
Create a WSPobjectfrom xarray.DataArray with an additional
attribute index containing coordinate values of the raster
in the form of Pandas.Index/MultiIndex.
Parameters
----------
da : xarray.DataArray
Input 2Dor3D DataArray with shape=(z, y, x)
criterion : {"rook", "queen"}
Type of contiguity. Default is queen.
z_value : int/string/float
Select the z_value of 3D DataArray with multiple layers.
coords_labels : dictionary
Pass dimension labels for coordinates and layers if they do not
belong to default dimensions, which are (band/time, y/lat, x/lon)
e.g. coords_labels = {"y_label": "latitude", "x_label": "longitude", "z_label": "year"}
Default is {} empty dictionary.
k : int
Order of contiguity, this will select all neighbors upto kth order.
Default is1.
include_nodata : boolean
If True, missing values will be assumed as non-missing when
selecting higher_order neighbors, Default isFalse
n_jobs : int
Number of cores to be used in the sparse weight construction. If -1,
all available cores are used. Default is1.
Returns
-------
wsp : libpysal.weights.WSP
instance of spatial weights classWSPwith an index attribute
Notes
-----1. Lower order contiguities are also selected.
2. Returned object contains `index` attribute that includes a
`Pandas.MultiIndex`objectfrom the DataArray.
Examples
-------->>>from libpysal.weights.raster import da2WSP, testDataArray
>>> da = testDataArray().rename(
{'band': 'layer', 'x': 'longitude', 'y': 'latitude'})
>>> da.dims
('layer', 'latitude', 'longitude')
>>> da.shape
(3, 4, 4)
>>> da.coords
Coordinates:
* layer (layer) int64 123* latitude (latitude) float64 90.030.0-30.0-90.0* longitude (longitude) float64 -180.0-60.060.0180.0>>> da.attrs
{'nodatavals': (-32768.0,)}
>>> coords_labels = {
"z_label": "layer",
"y_label": "latitude",
"x_label": "longitude"
}
>>> wsp = da2WSP(da, z_value=2, coords_labels=coords_labels)
>>> wsp.n
10>>> pct_sp = wsp.sparse.nnz *1. / wsp.n**2>>>"%.3f"%pct_sp
'0.300'>>>print(wsp.sparse[4].todense())
[[0010011100]]
>>> wsp.index[:2]
MultiIndex([(2, 90.0, 60.0),
(2, 90.0, 180.0)],
names=['layer', 'latitude', 'longitude'])
See Also
--------
:class:`libpysal.weights.weights.WSP`"""
z_id, coords_labels = _da_checker(da, z_value, coords_labels)
shape = da.shape
if z_id:
slice_dict = {}
slice_dict[coords_labels["z_label"]] =0
shape = da[slice_dict].shape
slice_dict[coords_labels["z_label"]] =slice(z_id -1, z_id)
da = da[slice_dict]
ser = da.to_series()
dtype = np.int32 if (shape[0] * shape[1]) <46340**2else np.int64
if"nodatavals"in da.attrs and da.attrs["nodatavals"]:
mask = (ser != da.attrs["nodatavals"][0]).to_numpy()
ids = np.where(mask)[0]
id_map = _idmap(ids, mask, dtype)
ser = ser[ser != da.attrs["nodatavals"][0]]
else:
ids = np.arange(len(ser), dtype=dtype)
id_map = ids.copy()
n =len(ids)
try:
import numba
except (ModuleNotFoundError, ImportError):
warn(
"numba cannot be imported, parallel processing ""and include_nodata functionality will be disabled. ""falling back to slower method"
)
include_nodata =False# Fallback method to build sparse matrix
sw = lat2SW(*shape, criterion)
if"nodatavals"in da.attrs and da.attrs["nodatavals"]:
> sw = sw[mask]
E TypeError: 'dia_matrix' object is not subscriptable
libpysal/weights/raster.py:252: TypeError
The text was updated successfully, but these errors were encountered:
I am updating the Fedora package to 4.5.1, but seeing issues with 3 tests:
libpysal/weights/tests/test_raster.py::Testraster::test_da2W
libpysal/weights/tests/test_raster.py::Testraster::test_da2WSP
libpysal/weights/tests/test_raster.py::Testraster::test_wsp2da
All fail in the fallback to non-numba code at:

Perhaps this is related to #419, based on the test names.
posix linux posix.uname_result(sysname='Linux', release='5.12.11-300.fc34.x86_64', version='#1 SMP Wed Jun 16 15:47:58 UTC 2021', machine='x86_64')
3.10.0b3 (default, Jun 17 2021, 00:00:00) [GCC 11.1.1 20210617 (Red Hat 11.1.1-5)]
Full pytest traceback
The text was updated successfully, but these errors were encountered: