
Commit

Change how we create objects! For example, Vector(float, size=2).
I think the changes here make things cleaner.
Next commit will deprecate and change e.g. `Matrix.new`.
eriknw committed Apr 11, 2022
1 parent 8e9acb9 commit 2107279
Showing 10 changed files with 134 additions and 153 deletions.
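
The commit title shows the intended call style. A rough illustration of the new API (the `Vector` signature is not among the hunks below, so its keyword name is taken from the title; the `Matrix` keywords come from the `__new__` added in `grblas/matrix.py`):

```python
import grblas as gb

# New style: construct an empty object directly from a dtype and dimensions...
A = gb.Matrix(float, nrows=3, ncols=4, name="A")
v = gb.Vector(float, size=2)

# ...instead of going through the classmethods, which the next commit is slated
# to deprecate (Matrix.new now simply delegates to the constructor; see below).
B = gb.Matrix.new(float, 3, 4)

print(A.shape, v.size, B.shape)  # (3, 4) 2 (3, 4)
```
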
42 changes: 11 additions & 31 deletions grblas/_ss/matrix.py
@@ -12,7 +12,7 @@
from ..base import call, record_raw
from ..dtypes import _INDEX, INT64, lookup_dtype
from ..exceptions import check_status, check_status_carg
from ..scalar import Scalar, _as_scalar
from ..scalar import Scalar, _as_scalar, _scalar_index
from ..utils import (
_CArray,
_Pointer,
@@ -266,7 +266,6 @@ def normalize_chunks(chunks, shape):
def _concat_mn(tiles, *, is_matrix=None):
"""Argument checking for `Matrix.ss.concat` and returns number of tiles in each dimension"""
from ..matrix import Matrix, TransposedMatrix
from ..scalar import Scalar
from ..vector import Vector

valid_types = (Matrix, TransposedMatrix, Vector, Scalar)
@@ -495,9 +494,7 @@ def split(self, chunks, *, name=None):
# Copy to a new handle so we can free `tiles`
new_matrix = ffi.new("GrB_Matrix*")
new_matrix[0] = tiles[index]
tile = Matrix(new_matrix, dtype, name=f"{name}_{i}x{j}")
tile._nrows = nrows
tile._ncols = ncols
tile = Matrix._from_obj(new_matrix, dtype, nrows, ncols, name=f"{name}_{i}x{j}")
cur.append(tile)
index += 1
rv.append(cur)
@@ -804,8 +801,7 @@ def _export(self, format=None, *, sort=False, give_ownership=False, raw=False, m
nvals = parent._nvals
rows = _CArray(size=nvals, name="&rows_array")
columns = _CArray(size=nvals, name="&columns_array")
n = ffi_new("GrB_Index*")
scalar = Scalar(n, _INDEX, name="s_nvals", is_cscalar=True, empty=True)
scalar = _scalar_index("s_nvals")
scalar.value = nvals
call(
f"GrB_Matrix_extractTuples_{parent.dtype.name}",
@@ -1346,9 +1342,7 @@ def _import_csr(
"Matrix",
mhandle[0],
)
matrix = gb.Matrix(mhandle, dtype, name=name)
matrix._nrows = nrows
matrix._ncols = ncols
matrix = gb.Matrix._from_obj(mhandle, dtype, nrows, ncols, name=name)
else:
check_status(status, matrix)
unclaim_buffer(indptr)
@@ -1521,9 +1515,7 @@ def _import_csc(
"Matrix",
mhandle[0],
)
matrix = gb.Matrix(mhandle, dtype, name=name)
matrix._nrows = nrows
matrix._ncols = ncols
matrix = gb.Matrix._from_obj(mhandle, dtype, nrows, ncols, name=name)
else:
check_status(status, matrix)
unclaim_buffer(indptr)
@@ -1717,9 +1709,7 @@ def _import_hypercsr(
"Matrix",
mhandle[0],
)
matrix = gb.Matrix(mhandle, dtype, name=name)
matrix._nrows = nrows
matrix._ncols = ncols
matrix = gb.Matrix._from_obj(mhandle, dtype, nrows, ncols, name=name)
else:
check_status(status, matrix)
unclaim_buffer(indptr)
@@ -1913,9 +1903,7 @@ def _import_hypercsc(
"Matrix",
mhandle[0],
)
matrix = gb.Matrix(mhandle, dtype, name=name)
matrix._nrows = nrows
matrix._ncols = ncols
matrix = gb.Matrix._from_obj(mhandle, dtype, nrows, ncols, name=name)
else:
check_status(status, matrix)
unclaim_buffer(indptr)
@@ -2093,9 +2081,7 @@ def _import_bitmapr(
"Matrix",
mhandle[0],
)
matrix = gb.Matrix(mhandle, dtype, name=name)
matrix._nrows = nrows
matrix._ncols = ncols
matrix = gb.Matrix._from_obj(mhandle, dtype, nrows, ncols, name=name)
else:
check_status(status, matrix)
unclaim_buffer(bitmap)
@@ -2271,9 +2257,7 @@ def _import_bitmapc(
"Matrix",
mhandle[0],
)
matrix = gb.Matrix(mhandle, dtype, name=name)
matrix._nrows = nrows
matrix._ncols = ncols
matrix = gb.Matrix._from_obj(mhandle, dtype, nrows, ncols, name=name)
else:
check_status(status, matrix)
unclaim_buffer(bitmap)
@@ -2421,9 +2405,7 @@ def _import_fullr(
"Matrix",
mhandle[0],
)
matrix = gb.Matrix(mhandle, dtype, name=name)
matrix._nrows = nrows
matrix._ncols = ncols
matrix = gb.Matrix._from_obj(mhandle, dtype, nrows, ncols, name=name)
else:
check_status(status, matrix)
unclaim_buffer(values)
@@ -2569,9 +2551,7 @@ def _import_fullc(
"Matrix",
mhandle[0],
)
matrix = gb.Matrix(mhandle, dtype, name=name)
matrix._nrows = nrows
matrix._ncols = ncols
matrix = gb.Matrix._from_obj(mhandle, dtype, nrows, ncols, name=name)
else:
check_status(status, matrix)
unclaim_buffer(values)
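
Several hunks in this file (and in `grblas/matrix.py` below) replace the two-line pattern of `ffi_new("GrB_Index*")` plus an empty `Scalar(..., is_cscalar=True, empty=True)` with a single `_scalar_index(name)` call. The helper itself lives in `grblas/scalar.py`, which is not among the hunks shown here; from its call sites it evidently returns an empty C-backed scalar of the `_INDEX` dtype whose `gb_obj` is handed to GraphBLAS and read back. A usage sketch mirroring the new `Matrix.nvals` body further down in this commit, assuming the commit is applied:

```python
import grblas as gb
from grblas.base import call
from grblas.scalar import _scalar_index
from grblas.utils import _Pointer

A = gb.Matrix(int, nrows=2, ncols=2)

# GrB_Matrix_nvals fills the scalar's GrB_Index* in place; read the result back out.
scalar = _scalar_index("s_nvals")
call("GrB_Matrix_nvals", [_Pointer(scalar), A])
print(scalar.gb_obj[0])  # 0 -- the matrix is empty
```
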
12 changes: 4 additions & 8 deletions grblas/_ss/vector.py
@@ -214,8 +214,7 @@ def split(self, chunks, *, name=None):
# Copy to a new handle so we can free `tiles`
new_vector = ffi.new("GrB_Vector*")
new_vector[0] = ffi.cast("GrB_Vector", tiles[i])
tile = Vector(new_vector, dtype, name=f"{name}_{i}")
tile._size = size
tile = Vector._from_obj(new_vector, dtype, size, name=f"{name}_{i}")
rv.append(tile)
return rv

@@ -825,8 +824,7 @@ def _import_sparse(
"Vector",
vhandle[0],
)
vector = gb.Vector(vhandle, dtype, name=name)
vector._size = size
vector = gb.Vector._from_obj(vhandle, dtype, size, name=name)
else:
check_status(status, vector)
unclaim_buffer(indices)
@@ -995,8 +993,7 @@ def _import_bitmap(
"Vector",
vhandle[0],
)
vector = gb.Vector(vhandle, dtype, name=name)
vector._size = size
vector = gb.Vector._from_obj(vhandle, dtype, size, name=name)
else:
check_status(status, vector)
unclaim_buffer(bitmap)
@@ -1136,8 +1133,7 @@ def _import_full(
"Vector",
vhandle[0],
)
vector = gb.Vector(vhandle, dtype, name=name)
vector._size = size
vector = gb.Vector._from_obj(vhandle, dtype, size, name=name)
else:
check_status(status, vector)
unclaim_buffer(values)
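
`split` and all three `_import_*` paths above now funnel through `Vector._from_obj` (defined in `grblas/vector.py`, outside this excerpt), which caches the size on the new wrapper instead of patching `_size` on afterwards. A quick round-trip through the touched `split` path, assuming the commit is applied and that an integer chunk size is accepted as before:

```python
import grblas as gb

v = gb.Vector.from_values([0, 1, 3], [10, 20, 30], size=4)
halves = v.ss.split(2)                 # two tiles of size 2, via the split() hunk above
print([part.size for part in halves])  # [2, 2]
print(halves[1].to_values())           # the entry at original index 3, now at local index 1
```
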
8 changes: 0 additions & 8 deletions grblas/base.py
@@ -3,7 +3,6 @@
from . import config, ffi
from . import replace as replace_singleton
from .descriptor import lookup as descriptor_lookup
from .dtypes import lookup_dtype
from .exceptions import check_status
from .expr import AmbiguousAssignOrExtract, Updater
from .mask import Mask
@@ -186,13 +185,6 @@ class BaseType:
# Flag for operations which depend on scalar vs vector/matrix
_is_scalar = False

def __init__(self, gb_obj, dtype, name):
if not isinstance(gb_obj, CData):
raise TypeError("Object passed to __init__ must be CData type")
self.gb_obj = gb_obj
self.dtype = lookup_dtype(dtype)
self.name = name

def __call__(
self, *optional_mask_accum_replace, mask=None, accum=None, replace=False, input_mask=None
):
3 changes: 3 additions & 0 deletions grblas/dtypes.py
@@ -228,6 +228,9 @@ def lookup_dtype(key, value=None):
pass
if value is not None and hasattr(value, "dtype") and value.dtype in _registry:
return _registry[value.dtype]
# np.dtype(x) accepts some weird values; we may want to guard against some
if key is None:
raise TypeError("Bad dtype: None. A valid dtype must be provided.")
try:
return lookup_dtype(np.dtype(key))
except Exception:
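
The motivation for the new guard: `np.dtype(None)` resolves to `float64`, so before this change `lookup_dtype(None)` would most likely have returned `FP64` silently instead of failing. A quick check of the new behavior:

```python
import numpy as np
from grblas.dtypes import lookup_dtype

print(np.dtype(None))   # float64 -- one of the "weird values" the new comment warns about
try:
    lookup_dtype(None)
except TypeError as exc:
    print(exc)          # Bad dtype: None. A valid dtype must be provided.
```
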
69 changes: 35 additions & 34 deletions grblas/matrix.py
@@ -5,12 +5,12 @@
from . import _automethods, backend, binary, ffi, lib, monoid, semiring, utils
from ._ss.matrix import ss
from .base import BaseExpression, BaseType, call
from .dtypes import _INDEX, lookup_dtype, unify
from .dtypes import _INDEX, FP64, lookup_dtype, unify
from .exceptions import DimensionMismatch, NoValue, check_status
from .expr import AmbiguousAssignOrExtract, IndexerResolver, Updater
from .mask import StructuralMask, ValueMask
from .operator import get_semiring, get_typed_op
from .scalar import _MATERIALIZE, Scalar, ScalarExpression, _as_scalar
from .scalar import _MATERIALIZE, Scalar, ScalarExpression, _as_scalar, _scalar_index
from .utils import (
_CArray,
_Pointer,
@@ -36,15 +36,31 @@ class Matrix(BaseType):
_is_transposed = False
_name_counter = itertools.count()

def __init__(self, gb_obj, dtype, *, parent=None, name=None):
if name is None:
name = f"M_{next(Matrix._name_counter)}"
self._nrows = None
self._ncols = None
super().__init__(gb_obj, dtype, name)
# Add ss extension methods
def __new__(cls, dtype=FP64, nrows=0, ncols=0, *, name=None):
self = object.__new__(cls)
self.dtype = lookup_dtype(dtype)
nrows = _as_scalar(nrows, _INDEX, is_cscalar=True)
ncols = _as_scalar(ncols, _INDEX, is_cscalar=True)
self.name = f"M_{next(Matrix._name_counter)}" if name is None else name
self.gb_obj = ffi_new("GrB_Matrix*")
call("GrB_Matrix_new", [_Pointer(self), self.dtype, nrows, ncols])
self._nrows = nrows.value
self._ncols = ncols.value
self._parent = None
self.ss = ss(self)
return self

@classmethod
def _from_obj(cls, gb_obj, dtype, nrows, ncols, *, parent=None, name=None):
self = object.__new__(cls)
self.gb_obj = gb_obj
self.dtype = dtype
self.name = f"M_{next(Matrix._name_counter)}" if name is None else name
self._nrows = nrows
self._ncols = ncols
self._parent = parent
self.ss = ss(self)
return self

def __del__(self):
parent = getattr(self, "_parent", None)
@@ -132,8 +148,7 @@ def __iter__(self):

def __sizeof__(self):
size = ffi_new("size_t*")
scalar = Scalar(size, _INDEX, name="s_size", is_cscalar=True, empty=True)
call("GxB_Matrix_memoryUsage", [_Pointer(scalar), self])
check_status(lib.GxB_Matrix_memoryUsage(size, self.gb_obj[0]), self)
return size[0] + object.__sizeof__(self)

def isequal(self, other, *, check_dtype=False):
@@ -197,28 +212,25 @@ def isclose(self, other, *, rel_tol=1e-7, abs_tol=0.0, check_dtype=False):

@property
def nrows(self):
n = ffi_new("GrB_Index*")
scalar = Scalar(n, _INDEX, name="s_nrows", is_cscalar=True, empty=True)
scalar = _scalar_index("s_nrows")
call("GrB_Matrix_nrows", [_Pointer(scalar), self])
return n[0]
return scalar.gb_obj[0]

@property
def ncols(self):
n = ffi_new("GrB_Index*")
scalar = Scalar(n, _INDEX, name="s_ncols", is_cscalar=True, empty=True)
scalar = _scalar_index("s_ncols")
call("GrB_Matrix_ncols", [_Pointer(scalar), self])
return n[0]
return scalar.gb_obj[0]

@property
def shape(self):
return (self._nrows, self._ncols)

@property
def nvals(self):
n = ffi_new("GrB_Index*")
scalar = Scalar(n, _INDEX, name="s_nvals", is_cscalar=True, empty=True)
scalar = _scalar_index("s_nvals")
call("GrB_Matrix_nvals", [_Pointer(scalar), self])
return n[0]
return scalar.gb_obj[0]

@property
def _nvals(self):
@@ -250,8 +262,7 @@ def to_values(self, dtype=None):
rows = _CArray(size=nvals, name="&rows_array")
columns = _CArray(size=nvals, name="&columns_array")
values = _CArray(size=nvals, dtype=self.dtype, name="&values_array")
n = ffi_new("GrB_Index*")
scalar = Scalar(n, _INDEX, name="s_nvals", is_cscalar=True, empty=True)
scalar = _scalar_index("s_nvals")
scalar.value = nvals
dtype_name = "UDT" if self.dtype._is_udt else self.dtype.name
call(
@@ -328,10 +339,8 @@ def dup(self, dtype=None, *, mask=None, name=None):
rv(mask=mask)[...] = self
else:
new_mat = ffi_new("GrB_Matrix*")
rv = Matrix(new_mat, self.dtype, name=name)
rv = Matrix._from_obj(new_mat, self.dtype, self._nrows, self._ncols, name=name)
call("GrB_Matrix_dup", [_Pointer(rv), self])
rv._nrows = self._nrows
rv._ncols = self._ncols
return rv

def diag(self, k=0, dtype=None, *, name=None):
@@ -356,15 +365,7 @@ def new(cls, dtype, nrows=0, ncols=0, *, name=None):
GrB_Matrix_new
Create a new empty Matrix from the given type, number of rows, and number of columns
"""
new_matrix = ffi_new("GrB_Matrix*")
dtype = lookup_dtype(dtype)
rv = cls(new_matrix, dtype, name=name)
nrows = _as_scalar(nrows, _INDEX, is_cscalar=True)
ncols = _as_scalar(ncols, _INDEX, is_cscalar=True)
call("GrB_Matrix_new", [_Pointer(rv), dtype, nrows, ncols])
rv._nrows = nrows.value
rv._ncols = ncols.value
return rv
return Matrix(dtype, nrows, ncols, name=name)

@classmethod
def from_values(
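
With the constructor rework above, `Matrix.new` is now a thin wrapper around calling the class directly, so both forms produce the same empty matrix. A small equivalence check, assuming the commit is applied:

```python
import grblas as gb
from grblas.dtypes import FP64

A = gb.Matrix.new(FP64, 3, 4)   # classic form; now just delegates...
B = gb.Matrix(FP64, 3, 4)       # ...to the reworked constructor
assert A.shape == B.shape == (3, 4)
assert A.dtype == B.dtype == FP64
assert A.nvals == B.nvals == 0
```
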
