
Commit

replaced numpy with np in code and left numpy spelled out in docstrings and comments
NeuralNoble committed Jul 12, 2024
1 parent 1ab7ed9 commit 1ce672a
Showing 20 changed files with 68 additions and 67 deletions.
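In other words, the convention being enforced looks like the following hypothetical function (not from the ARMI codebase): code references the module through the np alias, while docstrings and comments spell out numpy.

```python
import numpy as np


def totalVolume(volumes):
    """Sum component volumes into a single float.

    Parameters
    ----------
    volumes : list of float
        Volumes to sum; converted to a numpy array internally.
    """
    # hypothetical example: code uses the np alias, prose says numpy
    return float(np.asarray(volumes).sum())
```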
2 changes: 1 addition & 1 deletion armi/bookkeeping/db/compareDB3.py
@@ -315,7 +315,7 @@ def _diffSpecialData(
Compare specially-formatted datasets.
This employs the pack/unpackSpecialData functions to reconstitute complicated
-datasets for comparison. These usually don't behave well as giant np arrays, so
+datasets for comparison. These usually don't behave well as giant numpy arrays, so
we go element-by-element to calculate the diffs, then concatenate them.
"""
name = refData.name
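A minimal sketch of the element-by-element pattern this docstring describes, assuming pairs of same-shaped numpy arrays and a simple relative difference; the helper name and diff metric are illustrative, not the ARMI implementation.

```python
import numpy as np


def diffSpecialDataSketch(refArrays, srcArrays):
    """Sketch: diff paired arrays one element at a time, then concatenate."""
    diffs = []
    for ref, src in zip(refArrays, srcArrays):
        # guard against divide-by-zero in the relative difference
        denom = np.where(ref == 0.0, 1.0, ref)
        diffs.append((np.abs(src - ref) / np.abs(denom)).ravel())
    return np.concatenate(diffs)
```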
28 changes: 14 additions & 14 deletions armi/bookkeeping/db/database3.py
@@ -942,7 +942,7 @@ def _getShape(arr: [np.ndarray, List, Tuple]):
data = data.astype("S")

if data.dtype.kind == "O":
-# Something was added to the data array that caused np to want to
+# Something was added to the data array that caused numpy to want to
# treat it as a general-purpose Object array. This usually happens
# because:
# - the data contain NoDefaults
@@ -1058,7 +1058,7 @@ def _readParams(h5group, compTypeName, comps, allowMissing=False):
if "linkedDims" in attrs:
linkedDims = np.char.decode(attrs["linkedDims"])

-# iterating of np is not fast...
+# iterating of numpy is not fast...
for c, val, linkedDim in itertools.zip_longest(
comps, data.tolist(), linkedDims, fillvalue=""
):
@@ -1169,7 +1169,7 @@ def getHistoriesByLocation(

for h5TimeNodeGroup in self.genTimeStepGroups(timeSteps):
if "layout" not in h5TimeNodeGroup:
-# layout hasnt been written for this time step, so we can't get anything
+# layout hasn't been written for this time step, so we can't get anything
# useful here. Perhaps the current value is of use, in which case the
# DatabaseInterface should be used.
continue
@@ -1194,7 +1194,7 @@ def getHistoriesByLocation(
]
)

-# This could also be way more efficient if lLocation were a np array
+# This could also be way more efficient if lLocation were a numpy array
objectLocationsInLayout = [lLocation[i] for i in objectIndicesInLayout]

objectIndicesInData = np.array(layout.indexInData)[
@@ -1409,7 +1409,7 @@ def getHistories(
len(reorderedComps),
)

-# iterating of np is not fast..
+# iterating of numpy is not fast..
for c, val in zip(reorderedComps, data.tolist()):
if paramName == "location":
val = tuple(val)
@@ -1510,17 +1510,17 @@ def packSpecialData(
arrayData: [np.ndarray, JaggedArray], paramName: str
) -> Tuple[Optional[np.ndarray], Dict[str, Any]]:
"""
-Reduce data that wouldn't otherwise play nicely with HDF5/np arrays to a format
+Reduce data that wouldn't otherwise play nicely with HDF5/numpy arrays to a format
that will.
This is the main entry point for conforming "strange" data into something that will
-both fit into a np array/HDF5 dataset, and be recoverable to its original-ish
+both fit into a numpy array/HDF5 dataset, and be recoverable to its original-ish
state when reading it back in. This is accomplished by detecting a handful of known
offenders and using various HDF5 attributes to store necessary auxiliary data. It is
important to keep in mind that the data that is passed in has already been converted
-to a np array, so the top dimension is always representing the collection of
+to a numpy array, so the top dimension is always representing the collection of
composites that are storing the parameters. For instance, if we are dealing with a
-Block parameter, the first index in the np array of data is the block index; so
+Block parameter, the first index in the numpy array of data is the block index; so
if each block has a parameter that is a dictionary, ``data`` would be a ndarray,
where each element is a dictionary. This routine supports a number of different
@@ -1556,7 +1556,7 @@ def packSpecialData(
if isinstance(arrayData, JaggedArray):
data = arrayData.flattenedArray
else:
-# Check to make sure that we even need to do this. If the np data type is
+# Check to make sure that we even need to do this. If the numpy data type is
# not "O", chances are we have nice, clean data.
if arrayData.dtype != "O":
return arrayData, {}
@@ -1602,7 +1602,7 @@ def packSpecialData(
# The data themselves are nasty. We could support this, but best to wait for
# a credible use case.
raise TypeError(
"Unable to coerce dictionary data into usable np array for "
"Unable to coerce dictionary data into usable numpy array for "
"{}".format(paramName)
)
attrs["keys"] = np.array(keys).astype("S")
@@ -1616,7 +1616,7 @@ def packSpecialData(
attrs["noneLocations"] = arrayData.nones
return data, attrs

-# conform non-np arrays to np
+# conform non-numpy arrays to numpy
for i, val in enumerate(data):
if isinstance(val, (list, tuple)):
data[i] = np.array(val)
@@ -1632,7 +1632,7 @@ def packSpecialData(

if len(nones) == 0:
raise TypeError(
"Cannot write {} to the database, it did not resolve to a np/HDF5 "
"Cannot write {} to the database, it did not resolve to a numpy/HDF5 "
"type.".format(paramName)
)

@@ -1642,7 +1642,7 @@ def packSpecialData(

def unpackSpecialData(data: np.ndarray, attrs, paramName: str) -> np.ndarray:
"""
-Extract data from a specially-formatted HDF5 dataset into a np array.
+Extract data from a specially-formatted HDF5 dataset into a numpy array.
This should invert the operations performed by :py:func:`packSpecialData`.
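As an illustration of the dictionary case described above — one dict per composite, keys hoisted into an attribute — here is a rough sketch; the helper and attribute names are assumptions, not the ARMI API.

```python
import numpy as np


def packDictParamSketch(dicts):
    """Sketch: pack one-dict-per-block data into a 2-D array plus attributes."""
    keys = sorted({key for d in dicts for key in d})
    # first axis is the block index, second runs over the union of keys
    data = np.array([[d.get(key, np.nan) for key in keys] for d in dicts])
    attrs = {"dict": True, "keys": np.array(keys).astype("S")}
    return data, attrs


packed, attrs = packDictParamSketch([{"a": 1.0, "b": 2.0}, {"a": 3.0}])
# packed.shape == (2, 2); the missing "b" for the second block becomes NaN
```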
18 changes: 9 additions & 9 deletions armi/bookkeeping/db/jaggedArray.py
@@ -26,15 +26,15 @@

class JaggedArray:
"""
-Take a list of np arrays or lists and flatten them into a single 1D array.
+Take a list of numpy arrays or lists and flatten them into a single 1D array.
-This implementation can preserve the structure of a multi-dimensional np array
-by storing the dimensions in self.shapes and then re-populating a np array of
+This implementation can preserve the structure of a multi-dimensional numpy array
+by storing the dimensions in self.shapes and then re-populating a numpy array of
that shape from the flattened 1D array. However, it can only preserve one layer of
jaggedness in a list of lists (or other iterables). For example, a list of tuples
with varying lengths can be flattened and reconstituted exactly. But, if a list of
lists of tuples is passed in, the tuples in that final layer of nesting will all be
-flattened to a single 1D np array after a round trip. No structure is retained
+flattened to a single 1D numpy array after a round trip. No structure is retained
from nested lists of jagged lists or tuples.
"""

@@ -45,7 +45,7 @@ def __init__(self, jaggedData, paramName):
Parameters
----------
jaggedData: list of np.ndarray
-A list of np arrays (or lists or tuples) to be flattened into a single array
+A list of numpy arrays (or lists or tuples) to be flattened into a single array
paramName: str
The name of the parameter represented by this data
"""
@@ -66,7 +66,7 @@ def __init__(self, jaggedData, paramName):
offset += npArray.size
flattenedArray.extend(npArray.flatten())
except: # noqa: E722
-# np might fail if it's jagged
+# numpy might fail if it's jagged
flattenedList = self.flatten(arr)
shapes.append(
len(flattenedList),
@@ -89,7 +89,7 @@ def __init__(self, jaggedData, paramName):
runLog.error(
"Error! It seems like ARMI may have tried to flatten a jagged array "
"where the elements have different numbers of dimensions. `shapes` "
"attribute of the JaggedArray for {} cannot be made into a np "
"attribute of the JaggedArray for {} cannot be made into a numpy "
"array; it might be jagged.".format(paramName)
)
runLog.error(shapes)
@@ -134,7 +134,7 @@ def fromH5(cls, data, offsets, shapes, nones, dtype, paramName):
Parameters
----------
data: np.ndarray
-A flattened 1D np array read in from an HDF5 file
+A flattened 1D numpy array read in from an HDF5 file
offsets: np.ndarray
Offset indices for the zeroth element of each constituent array
shapes: np.ndarray
@@ -170,7 +170,7 @@ def unpack(self):
Returns
-------
unpackedJaggedData: list of np.ndarray
-List of np arrays with varying dimensions (i.e., jagged arrays)
+List of numpy arrays with varying dimensions (i.e., jagged arrays)
"""
unpackedJaggedData: List[Optional[np.ndarray]] = []
numElements = len(self.offsets) + len(self.nones)
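A usage sketch based only on the signatures visible in this diff; treat it as illustrative rather than a tested example.

```python
import numpy as np

from armi.bookkeeping.db.jaggedArray import JaggedArray

# arrays of different shapes flatten into one 1-D array plus bookkeeping
jagged = [np.array([1.0, 2.0]), np.array([[3.0, 4.0], [5.0, 6.0]]), (7.0,)]
arr = JaggedArray(jagged, "power")
restored = arr.unpack()  # list of numpy arrays with the original shapes
```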
6 changes: 3 additions & 3 deletions armi/bookkeeping/db/layout.py
@@ -728,7 +728,7 @@ def replaceNonesWithNonsense(
Parameters
----------
data
-The np array containing ``None`` values that need to be replaced.
+The numpy array containing ``None`` values that need to be replaced.
paramName
The name of the parameter whose data we are treating. Only used for diagnostics.
@@ -742,8 +742,8 @@
Notes
-----
This only supports situations where the data is a straight-up ``None``, or a valid,
-database-storable np array (or easily convertable to one (e.g. tuples/lists with
-numerical values)). This does not support, for instance, a np ndarray with some
+database-storable numpy array (or easily convertable to one (e.g. tuples/lists with
+numerical values)). This does not support, for instance, a numpy ndarray with some
Nones in it.
For example, the following is supported::
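The supported case amounts to swapping whole-``None`` entries for a sentinel so the result packs into a regular numpy array; a hypothetical standalone version of the idea:

```python
import numpy as np


def replaceNonesSketch(data, nonsense=-9999.0):
    """Sketch: swap entries that are entirely None for a nonsense sentinel."""
    cleaned = [nonsense if entry is None else entry for entry in data.tolist()]
    return np.array(cleaned)


replaceNonesSketch(np.array([1.0, None, 3.0], dtype=object))
# -> a plain float array holding [1.0, -9999.0, 3.0]
```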
4 changes: 2 additions & 2 deletions armi/bookkeeping/db/tests/test_database3.py
@@ -222,9 +222,9 @@ def makeHistory(self):

def _compareArrays(self, ref, src):
"""
-Compare two np arrays.
+Compare two numpy arrays.
-Comparing np arrays that may have unsavory data (NaNs, Nones, jagged
+Comparing numpy arrays that may have unsavory data (NaNs, Nones, jagged
data, etc.) is really difficult. For now, convert to a list and compare
element-by-element.
"""
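A sketch of the element-by-element comparison idea, with the NaN wrinkle handled explicitly (hypothetical helper, not the test's code):

```python
import numpy as np


def elementsEqual(ref, src):
    """Hypothetical helper: compare nested list data, treating NaN == NaN."""
    if isinstance(ref, (list, tuple)) and isinstance(src, (list, tuple)):
        return len(ref) == len(src) and all(
            elementsEqual(r, s) for r, s in zip(ref, src)
        )
    if isinstance(ref, float) and isinstance(src, float):
        return ref == src or (np.isnan(ref) and np.isnan(src))
    return ref == src
```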
8 changes: 4 additions & 4 deletions armi/bookkeeping/db/tests/test_jaggedArray.py
@@ -82,19 +82,19 @@ def _compareRoundTrip(self, data, paramName):

def _compareArrays(self, ref, src):
"""
-Compare two np arrays.
+Compare two numpy arrays.
-Comparing np arrays that may have unsavory data (NaNs, Nones, jagged
+Comparing numpy arrays that may have unsavory data (NaNs, Nones, jagged
data, etc.) is really difficult. For now, convert to a list and compare
element-by-element.
Several types of data do not survive a round trip. The if-elif branch
here converts the initial data into the format expected to be produced
by the round trip. The conversions are:
- For scalar values (int, float, etc.), the data becomes a np
- For scalar values (int, float, etc.), the data becomes a numpy
array with a dimension of 1 after the round trip.
- Tuples and lists become np arrays
- Tuples and lists become numpy arrays
- Empty lists become `None`
"""
10 changes: 5 additions & 5 deletions armi/bookkeeping/visualization/utils.py
@@ -54,13 +54,13 @@ def __init__(self, vertices, connectivity, offsets, cellTypes):
"""
Parameters
----------
-vertices : np array
-An Nx3 np array with one row per (x,y,z) vertex
-connectivity : np array
+vertices : numpy array
+An Nx3 numpy array with one row per (x,y,z) vertex
+connectivity : numpy array
A 1-D array containing the vertex indices belonging to each cell
-offsets : np array
+offsets : numpy array
A 1-D array containing the index of the first vertex for the next cell
-cellTypes : np array
+cellTypes : numpy array
A 1-D array containing the cell type ID for each cell
"""
self.vertices = vertices
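For concreteness, a hand-built set of the four arrays for a mesh holding a single hexahedral cell; the VTK type ID 12 is the standard value for a hexahedron, and everything else follows the layout described above.

```python
import numpy as np

# illustrative one-cell mesh, not taken from the ARMI tests
vertices = np.array([
    [0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 1.0, 0.0], [0.0, 1.0, 0.0],
    [0.0, 0.0, 1.0], [1.0, 0.0, 1.0], [1.0, 1.0, 1.0], [0.0, 1.0, 1.0],
])  # one (x, y, z) row per vertex
connectivity = np.array([0, 1, 2, 3, 4, 5, 6, 7])  # vertex indices of cell 0
offsets = np.array([8])  # index of the first vertex of the *next* cell
cellTypes = np.array([12])  # 12 == VTK_HEXAHEDRON
```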
2 changes: 1 addition & 1 deletion armi/materials/material.py
@@ -431,7 +431,7 @@ def getTemperatureAtDensity(
"""Get the temperature at which the perturbed density occurs (in Celcius)."""
# 0 at tempertature of targetDensity
densFunc = lambda temp: self.density(Tc=temp) - targetDensity
-# is a np array if fsolve is called
+# is a numpy array if fsolve is called
tAtTargetDensity = float(fsolve(densFunc, tempGuessInC))
return tAtTargetDensity

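The same root-finding pattern in isolation, with a made-up linear density model standing in for self.density (the model and its numbers are illustrative only):

```python
from scipy.optimize import fsolve


def density(Tc):
    """Hypothetical density model (g/cc) that falls linearly with Celsius."""
    return 10.0 - 0.001 * Tc


targetDensity = 9.5
densFunc = lambda temp: density(temp) - targetDensity
# fsolve returns a 1-element numpy array; pull out the scalar
tAtTargetDensity = float(fsolve(densFunc, 100.0)[0])  # -> 500.0
```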
2 changes: 1 addition & 1 deletion armi/nuclearDataIO/cccc/cccc.py
@@ -715,7 +715,7 @@ def getBlockBandwidth(m, nintj, nblok):
This function computes JL and JU for these purposes. It also converts
JL and JU to zero based indices rather than 1 based ones, as is almost
-always wanted when dealing with python/np matrices.
+always wanted when dealing with python/numpy matrices.
The term *bandwidth* refers to a kind of sparse matrix representation.
Some rows only have columns JL to JH in them rather than 0 to JMAX.
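A small sketch of what zero-based JL/JU indices buy you: the stored band slots directly into a full row with ordinary slicing (the helper is hypothetical, not the CCCC code):

```python
import numpy as np


def expandBandedRow(bandValues, jl, ju, ncols):
    """Hypothetical helper: place a stored band (columns jl..ju, zero-based,
    inclusive) into a full row; everything outside the band stays zero."""
    row = np.zeros(ncols)
    row[jl : ju + 1] = bandValues
    return row


expandBandedRow(np.array([2.0, 5.0, 2.0]), jl=3, ju=5, ncols=8)
# -> array([0., 0., 0., 2., 5., 2., 0., 0.])
```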
2 changes: 1 addition & 1 deletion armi/nuclearDataIO/cccc/fixsrc.py
@@ -60,7 +60,7 @@ def __init__(self, fileName, fileMode, fixSrc):
If 'wb', this class writes a FIXSRC binary file.
If 'rb', this class reads a preexisting FIXSRC binary file.
-fixSrc : np array
+fixSrc : numpy array
Core-wide multigroup gamma fixed-source data.
"""
cccc.Stream.__init__(self, fileName, fileMode)
12 changes: 6 additions & 6 deletions armi/nuclearDataIO/xsCollections.py
@@ -99,7 +99,7 @@ class XSCollection:

_zeroes = {}
"""
-A dict of np arrays set to the size of XSLibrary.numGroups.
+A dict of numpy arrays set to the size of XSLibrary.numGroups.
This is used to initialize cross sections which may not exist for the specific nuclide.
Consequently, there should never be a situation where a cross section does not exist.
@@ -161,7 +161,7 @@ def __getitem__(self, key):
Notes
-----
These containers were originally
-dicts, but upgraded to objects with np values as specialization
+dicts, but upgraded to objects with numpy values as specialization
was needed. This access method could/should be phased out.
"""
return self.__dict__[key]
@@ -230,7 +230,7 @@ def clear(self):
"""Zero out all the cross sections; this is useful for creating dummy cross sections."""
for xsAttr in ALL_XS:
value = getattr(self, xsAttr)
-# it should either be a list, a np array, or a sparse matrix
+# it should either be a list, a numpy array, or a sparse matrix
if isinstance(value, list):
value = [0.0] * len(value)
elif isinstance(value, np.ndarray):
@@ -625,7 +625,7 @@ def computeNeutronEnergyDepositionConstants(numberDensities, lib, microSuffix):
Returns
-------
-energyDepositionConsts : np array
+energyDepositionConsts : numpy array
Neutron energy deposition group constants. (J/cm)
Notes
@@ -664,7 +664,7 @@ def computeGammaEnergyDepositionConstants(numberDensities, lib, microSuffix):
Returns
-------
-energyDepositionConsts : np array
+energyDepositionConsts : numpy array
gamma energy deposition group constants. (J/cm)
Notes
@@ -855,7 +855,7 @@ def computeMacroscopicGroupConstants(
Returns
-------
-macroGroupConstant : np array
+macroGroupConstant : numpy array
Macroscopic group constants for the requested reaction.
"""
skippedNuclides = []
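The standard recipe behind macroscopic group constants is Sigma_g = sum over nuclides i of N_i * sigma_i,g. A sketch with hypothetical dict inputs (not the ARMI signature, which takes number densities, a library, and a microscopic-XS suffix):

```python
import numpy as np


def macroGroupConstantsSketch(numberDensities, microXS):
    """Sketch: sum number density times microscopic XS over nuclides, per group."""
    numGroups = len(next(iter(microXS.values())))
    macro = np.zeros(numGroups)
    for nuclide, density in numberDensities.items():
        # atoms/b-cm times barns gives 1/cm
        macro += density * np.asarray(microXS[nuclide])
    return macro


macroGroupConstantsSketch({"U235": 0.02}, {"U235": [10.0, 50.0]})
# -> array([0.2, 1. ])
```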
4 changes: 2 additions & 2 deletions armi/reactor/composites.py
@@ -58,7 +58,7 @@ class FlagSerializer(parameters.Serializer):
This operates by converting each set of Flags (too large to fit in a uint64) into a
sequence of enough uint8 elements to represent all flags. These constitute a
-dimension of a 2-D np array containing all Flags for all objects provided to the
+dimension of a 2-D numpy array containing all Flags for all objects provided to the
``pack()`` function.
"""

@@ -67,7 +67,7 @@ class FlagSerializer(parameters.Serializer):
@staticmethod
def pack(data):
"""
-Flags are represented as a 2-D np array of uint8 (single-byte, unsigned
+Flags are represented as a 2-D numpy array of uint8 (single-byte, unsigned
integers), where each row contains the bytes representing a single Flags
instance. We also store the list of field names so that we can verify that the
reader and the writer can agree on the meaning of each bit.
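The byte-packing idea in miniature: each integer too wide for a uint64 becomes a row of uint8 values. The widths and helper below are made up for illustration, not the serializer's actual code.

```python
import numpy as np


def packWideIntsSketch(values, nBytes):
    """Sketch: pack arbitrarily wide Python ints into rows of little-endian uint8."""
    rows = [[(v >> (8 * i)) & 0xFF for i in range(nBytes)] for v in values]
    return np.array(rows, dtype=np.uint8)


packed = packWideIntsSketch([1 << 70, 3], nBytes=12)  # shape (2, 12)
```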
4 changes: 2 additions & 2 deletions armi/reactor/converters/axialExpansionChanger.py
@@ -709,9 +709,9 @@ def updateComponentTempsBy1DTempField(self, tempGrid, tempField):
Parameters
----------
-tempGrid : np array
+tempGrid : numpy array
1D axial temperature grid (i.e., physical locations where temp is stored)
-tempField : np array
+tempField : numpy array
temperature values along grid
Notes
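One plausible reading of a 1-D grid/field pair: temperatures at intermediate elevations come from interpolation. The numbers are illustrative, and np.interp stands in for whatever ARMI actually does.

```python
import numpy as np

tempGrid = np.array([0.0, 50.0, 100.0])      # axial positions (cm)
tempField = np.array([400.0, 450.0, 500.0])  # temperatures at those positions
tempAt25cm = np.interp(25.0, tempGrid, tempField)  # -> 425.0
```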
2 changes: 1 addition & 1 deletion armi/reactor/converters/uniformMesh.py
@@ -1402,7 +1402,7 @@ def paramGetter(self, block, paramNames):
paramVals = []
for paramName in paramNames:
val = block.p[paramName]
-# list-like should be treated as a np array
+# list-like should be treated as a numpy array
if isinstance(val, (tuple, list, np.ndarray)):
paramVals.append(np.array(val) if len(val) > 0 else None)
else:
2 changes: 1 addition & 1 deletion armi/reactor/grids/locations.py
@@ -246,7 +246,7 @@ def indices(self) -> np.ndarray:
This strips off the annoying ``grid`` tagalong which is there to ensure proper
equality (i.e. (0,0,0) in a storage rack is not equal to (0,0,0) in a core).
-It is a np array for two reasons:
+It is a numpy array for two reasons:
1. It can be added and subtracted for the recursive computations
through different coordinate systems
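Reason 1 in action: plain numpy indices chain across coordinate systems with ordinary array arithmetic (the offsets here are hypothetical):

```python
import numpy as np

localIndices = np.array([2, 3, 0])   # (i, j, k) within a sub-grid
parentOffset = np.array([10, 0, 0])  # sub-grid origin in the parent grid
globalIndices = localIndices + parentOffset  # -> array([12, 3, 0])
```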

