Skip to content
This repository has been archived by the owner on Sep 1, 2023. It is now read-only.

Commit

Permalink
NUP-2506: Add test to all Serializable subclasses and fix related issues (#3826)
Browse files Browse the repository at this point in the history

* NUP-2506: Add test to all Serializable subclasses making sure all the fields are initialized
* NUP-2506: Fix new serialization issues
* NUP-2506: Make sure cells4 are serialized when using BacktrackingTMCPP
* NUP-2506: Fix serializable test
  • Loading branch information
lscheinkman authored and rhyolight committed Apr 11, 2018
1 parent a7ab556 commit 682fd2e
Show file tree
Hide file tree
Showing 42 changed files with 831 additions and 200 deletions.
2 changes: 1 addition & 1 deletion requirements.txt
Expand Up @@ -18,5 +18,5 @@ prettytable==0.7.2

# When updating nupic.bindings, also update any shared dependencies to keep
# versions in sync.
nupic.bindings==1.0.3
nupic.bindings==1.0.4
numpy==1.12.1
@@ -1,6 +1,6 @@
@0x8602f38429407eb0;

struct AnomalyLikelihoodRegionProto {
struct AnomalyLikelihoodProto {
iteration @0 :UInt64;
historicalScores @1 :List(Score);
distribution @2 :Distribution;
Expand Down
49 changes: 30 additions & 19 deletions src/nupic/algorithms/anomaly_likelihood.py
Expand Up @@ -122,6 +122,12 @@
from nupic.serializable import Serializable
from nupic.utils import MovingAverage

# capnp is an optional dependency: when it is missing, `capnp` is set to None
# and the serialization schema below is simply not imported. Code that calls
# getSchema()/read()/write() will then fail on the undefined proto name.
try:
  import capnp
except ImportError:
  capnp = None
if capnp:
  from nupic.algorithms.anomaly_likelihood_capnp import AnomalyLikelihoodProto

class AnomalyLikelihood(Serializable):
"""
Expand Down Expand Up @@ -252,12 +258,16 @@ def _calcSkipRecords(numIngested, windowSize, learningPeriod):
return min(numIngested, max(0, learningPeriod - numShiftedOut))


@classmethod
def getSchema(cls):
  """
  Return the capnp schema class used to serialize :class:`AnomalyLikelihood`.

  :returns: ``AnomalyLikelihoodProto``. Only defined when the optional
            ``capnp`` dependency is installed (see the guarded import at the
            top of this module); otherwise this raises ``NameError``.
  """
  return AnomalyLikelihoodProto

@classmethod
def read(cls, proto):
""" capnp deserialization method for the anomaly likelihood object
:param proto: (Object) capnp proto object specified in
nupic.regions.AnomalyLikelihoodRegion.capnp
nupic.regions.anomaly_likelihood.capnp
:returns: (Object) the deserialized AnomalyLikelihood object
"""
Expand Down Expand Up @@ -303,7 +313,7 @@ def write(self, proto):
""" capnp serialization method for the anomaly likelihood object
:param proto: (Object) capnp proto object specified in
nupic.regions.AnomalyLikelihoodRegion.capnp
nupic.regions.anomaly_likelihood.capnp
"""

proto.iteration = self._iteration
Expand All @@ -315,27 +325,28 @@ def write(self, proto):
record.value = float(value)
record.anomalyScore = float(anomalyScore)

proto.distribution.name = self._distribution["distribution"]["name"]
proto.distribution.mean = float(self._distribution["distribution"]["mean"])
proto.distribution.variance = float(self._distribution["distribution"]["variance"])
proto.distribution.stdev = float(self._distribution["distribution"]["stdev"])
if self._distribution:
proto.distribution.name = self._distribution["distribution"]["name"]
proto.distribution.mean = float(self._distribution["distribution"]["mean"])
proto.distribution.variance = float(self._distribution["distribution"]["variance"])
proto.distribution.stdev = float(self._distribution["distribution"]["stdev"])

proto.distribution.movingAverage.windowSize = float(self._distribution["movingAverage"]["windowSize"])
proto.distribution.movingAverage.windowSize = float(self._distribution["movingAverage"]["windowSize"])

historicalValues = self._distribution["movingAverage"]["historicalValues"]
pHistValues = proto.distribution.movingAverage.init(
"historicalValues", len(historicalValues))
for i, value in enumerate(historicalValues):
pHistValues[i] = float(value)
historicalValues = self._distribution["movingAverage"]["historicalValues"]
pHistValues = proto.distribution.movingAverage.init(
"historicalValues", len(historicalValues))
for i, value in enumerate(historicalValues):
pHistValues[i] = float(value)

#proto.distribution.movingAverage.historicalValues = self._distribution["movingAverage"]["historicalValues"]
proto.distribution.movingAverage.total = float(self._distribution["movingAverage"]["total"])
#proto.distribution.movingAverage.historicalValues = self._distribution["movingAverage"]["historicalValues"]
proto.distribution.movingAverage.total = float(self._distribution["movingAverage"]["total"])

historicalLikelihoods = self._distribution["historicalLikelihoods"]
pHistLikelihoods = proto.distribution.init("historicalLikelihoods",
len(historicalLikelihoods))
for i, likelihood in enumerate(historicalLikelihoods):
pHistLikelihoods[i] = float(likelihood)
historicalLikelihoods = self._distribution["historicalLikelihoods"]
pHistLikelihoods = proto.distribution.init("historicalLikelihoods",
len(historicalLikelihoods))
for i, likelihood in enumerate(historicalLikelihoods):
pHistLikelihoods[i] = float(likelihood)

proto.probationaryPeriod = self._probationaryPeriod
proto.learningPeriod = self._learningPeriod
Expand Down
7 changes: 6 additions & 1 deletion src/nupic/algorithms/backtracking_tm.capnp
Expand Up @@ -32,6 +32,7 @@ struct SegmentUpdateProto {
weaklyPredicting @7 :Bool;
}

# Next ID: 61
struct BacktrackingTMProto {
version @0 :UInt16;
random @1 :RandomProto;
Expand Down Expand Up @@ -69,7 +70,11 @@ struct BacktrackingTMProto {
lrnIterationIdx @28 :UInt32;
iterationIdx @29 :UInt32;
segID @30 :UInt32;
currentOutput @31 :List(List(Bool));

currentOutput :union {
none @60 :Void;
list @31 :List(List(Bool));
}

pamCounter @32 :UInt32;
collectSequenceStats @33 :Bool;
Expand Down
12 changes: 9 additions & 3 deletions src/nupic/algorithms/backtracking_tm.py
Expand Up @@ -470,8 +470,10 @@ def write(self, proto):
proto.lrnIterationIdx = self.lrnIterationIdx
proto.iterationIdx = self.iterationIdx
proto.segID = self.segID
if self.currentOutput is not None:
proto.currentOutput = self.currentOutput.tolist()
if self.currentOutput is None:
proto.currentOutput.none = None
else:
proto.currentOutput.list = self.currentOutput.tolist()
proto.pamCounter = self.pamCounter
proto.collectSequenceStats = self.collectSequenceStats
proto.resetCalled = self.resetCalled
Expand Down Expand Up @@ -595,7 +597,11 @@ def read(cls, proto):
# Initialize various structures
obj._initEphemerals()

obj.currentOutput = numpy.array(proto.currentOutput, dtype='float32')
if proto.currentOutput.which() == "none":
obj.currentOutput = None
else:
obj.currentOutput = numpy.array(proto.currentOutput.list,
dtype='float32')

for pattern in proto.prevLrnPatterns:
obj.prevLrnPatterns.append([v for v in pattern])
Expand Down
98 changes: 39 additions & 59 deletions src/nupic/algorithms/backtracking_tm_cpp.py
Expand Up @@ -126,7 +126,7 @@ def __init__(self,
# If set to False, Cells4 will *not* be treated as an ephemeral member
# and full BacktrackingTMCPP pickling is possible. This is useful for testing
# pickle/unpickle without saving Cells4 to an external file
self.makeCells4Ephemeral = True
self.makeCells4Ephemeral = False

#---------------------------------------------------------------------------------
# Store the seed for constructing Cells4
Expand Down Expand Up @@ -162,6 +162,8 @@ def __init__(self,
outputType = outputType,
)

if not self.makeCells4Ephemeral:
self._initCells4()

@classmethod
def getSchema(cls):
Expand Down Expand Up @@ -211,41 +213,44 @@ def read(cls, proto):

return obj

def _initCells4(self):
  """
  Build the C++ ``Cells4`` instance from the parameters stored on ``self``
  and push the tunable runtime settings into it.

  This is the single place the C++ object is (re)constructed; it is invoked
  from ``__init__`` (when ``makeCells4Ephemeral`` is False), from
  ``__setstate__``, and from ``_initEphemerals`` so the object is always
  rebuilt consistently from the Python-side state.
  """
  self.cells4 = Cells4(self.numberOfCols,
                       self.cellsPerColumn,
                       self.activationThreshold,
                       self.minThreshold,
                       self.newSynapseCount,
                       self.segUpdateValidDuration,
                       self.initialPerm,
                       self.connectedPerm,
                       self.permanenceMax,
                       self.permanenceDec,
                       self.permanenceInc,
                       self.globalDecay,
                       self.doPooling,
                       self.seed,
                       self.allocateStatesInCPP,
                       self.checkSynapseConsistency)

  # Copy the runtime-tunable parameters into the freshly built C++ object.
  self.cells4.setVerbosity(self.verbosity)
  self.cells4.setPamLength(self.pamLength)
  self.cells4.setMaxAge(self.maxAge)
  self.cells4.setMaxInfBacktrack(self.maxInfBacktrack)
  self.cells4.setMaxLrnBacktrack(self.maxLrnBacktrack)
  self.cells4.setMaxSeqLength(self.maxSeqLength)
  self.cells4.setMaxSegmentsPerCell(self.maxSegmentsPerCell)
  # NOTE(review): setMaxSynapsesPerCell is fed maxSynapsesPerSegment — this
  # mirrors the original code, but the name mismatch is worth confirming.
  self.cells4.setMaxSynapsesPerCell(self.maxSynapsesPerSegment)

  # Reset internal C++ pointers to states
  self._setStatePointers()


def __setstate__(self, state):
"""
Set the state of ourself from a serialized state.
"""
super(BacktrackingTMCPP, self).__setstate__(state)
if self.makeCells4Ephemeral:
self.cells4 = Cells4(self.numberOfCols,
self.cellsPerColumn,
self.activationThreshold,
self.minThreshold,
self.newSynapseCount,
self.segUpdateValidDuration,
self.initialPerm,
self.connectedPerm,
self.permanenceMax,
self.permanenceDec,
self.permanenceInc,
self.globalDecay,
self.doPooling,
self.seed,
self.allocateStatesInCPP,
self.checkSynapseConsistency)

self.cells4.setVerbosity(self.verbosity)
self.cells4.setPamLength(self.pamLength)
self.cells4.setMaxAge(self.maxAge)
self.cells4.setMaxInfBacktrack(self.maxInfBacktrack)
self.cells4.setMaxLrnBacktrack(self.maxLrnBacktrack)
self.cells4.setMaxSeqLength(self.maxSeqLength)
self.cells4.setMaxSegmentsPerCell(self.maxSegmentsPerCell)
self.cells4.setMaxSynapsesPerCell(self.maxSynapsesPerSegment)

# Reset internal C++ pointers to states
self._setStatePointers()
self._initCells4()


def _getEphemeralMembers(self):
Expand Down Expand Up @@ -276,33 +281,7 @@ def _initEphemerals(self):
self.retrieveLearningStates = False

if self.makeCells4Ephemeral:
self.cells4 = Cells4(self.numberOfCols,
self.cellsPerColumn,
self.activationThreshold,
self.minThreshold,
self.newSynapseCount,
self.segUpdateValidDuration,
self.initialPerm,
self.connectedPerm,
self.permanenceMax,
self.permanenceDec,
self.permanenceInc,
self.globalDecay,
self.doPooling,
self.seed,
self.allocateStatesInCPP,
self.checkSynapseConsistency)

self.cells4.setVerbosity(self.verbosity)
self.cells4.setPamLength(self.pamLength)
self.cells4.setMaxAge(self.maxAge)
self.cells4.setMaxInfBacktrack(self.maxInfBacktrack)
self.cells4.setMaxLrnBacktrack(self.maxLrnBacktrack)
self.cells4.setMaxSeqLength(self.maxSeqLength)
self.cells4.setMaxSegmentsPerCell(self.maxSegmentsPerCell)
self.cells4.setMaxSynapsesPerCell(self.maxSynapsesPerSegment)

self._setStatePointers()
self._initCells4()

def saveToFile(self, filePath):
"""
Expand All @@ -315,6 +294,7 @@ def loadFromFile(self, filePath):
"""
Load Cells4 state from a file saved with :meth:`saveToFile`.
"""
self._setStatePointers()
self.cells4.loadFromFile(filePath)


Expand All @@ -336,7 +316,7 @@ def __getattr__(self, name):
"""

try:
return super(BacktrackingTM, self).__getattr__(name)
return super(BacktrackingTMCPP, self).__getattr__(name)
except AttributeError:
raise AttributeError("'TM' object has no attribute '%s'" % name)

Expand Down Expand Up @@ -428,8 +408,8 @@ def _copyAllocatedStates(self):
assert False
(activeT, activeT1, predT, predT1, colConfidenceT, colConfidenceT1, confidenceT,
confidenceT1) = self.cells4.getStates()
self.confidence['t-1'] = confidenceT1.reshape((self.numberOfCols, self.cellsPerColumn))
self.confidence['t'] = confidenceT.reshape((self.numberOfCols, self.cellsPerColumn))
self.cellConfidence['t'] = confidenceT.reshape((self.numberOfCols, self.cellsPerColumn))
self.cellConfidence['t-1'] = confidenceT1.reshape((self.numberOfCols, self.cellsPerColumn))
self.colConfidence['t'] = colConfidenceT.reshape(self.numberOfCols)
self.colConfidence['t-1'] = colConfidenceT1.reshape(self.numberOfCols)
self.infActiveState['t-1'] = activeT1.reshape((self.numberOfCols, self.cellsPerColumn))
Expand Down
39 changes: 38 additions & 1 deletion src/nupic/algorithms/backtracking_tm_shim.py
Expand Up @@ -33,7 +33,30 @@


class MonitoredTemporalMemory(TemporalMemoryMonitorMixin,
TemporalMemory): pass
TemporalMemory):
def __init__(self, *args, **kwargs):
  # Initialize both bases explicitly (rather than relying on a single
  # super() chain) so the monitor mixin's bookkeeping state is set up in
  # addition to the TemporalMemory state.
  # NOTE(review): both bases receive the same *args/**kwargs — confirm the
  # mixin's __init__ tolerates TemporalMemory's argument list.
  TemporalMemoryMonitorMixin.__init__(self, *args, **kwargs)
  TemporalMemory.__init__(self, *args, **kwargs)

@classmethod
def read(cls, proto):
  """
  Intercepts TemporalMemory deserialization request in order to initialize
  `TemporalMemoryMonitorMixin` state

  @param proto (DynamicStructBuilder) Proto object

  @return (TemporalMemory) TemporalMemory shim instance
  """
  # Deliberately skip the monitor mixin in the MRO so that
  # TemporalMemory.read builds the instance; the mixin state is filled in
  # manually below.
  tm = super(TemporalMemoryMonitorMixin, cls).read(proto)

  # initialize `TemporalMemoryMonitorMixin` attributes
  tm.mmName = None
  tm._mmTraces = None
  tm._mmData = None
  tm.mmClearHistory()
  tm._mmResetActive = True
  return tm



Expand Down Expand Up @@ -215,6 +238,20 @@ def __init__(self,

self.infActiveState = {"t": None}

@classmethod
def read(cls, proto):
  """
  Intercepts TemporalMemory deserialization request in order to initialize
  `self.infActiveState`

  @param proto (DynamicStructBuilder) Proto object

  @return (TemporalMemory) TemporalMemory shim instance
  """
  tm = super(MonitoredTMShim, cls).read(proto)
  # infActiveState is shim-only state not covered by the proto; restore the
  # same default that __init__ uses.
  tm.infActiveState = {"t": None}
  return tm


def compute(self, bottomUpInput, enableLearn, computeInfOutput=None):
"""
Expand Down
17 changes: 17 additions & 0 deletions src/nupic/algorithms/connections.py
Expand Up @@ -23,6 +23,12 @@
from collections import defaultdict

from nupic.serializable import Serializable
# capnp is an optional dependency: when it is missing, `capnp` is set to None
# and ConnectionsProto is not imported. getSchema() will then fail on the
# undefined proto name.
try:
  import capnp
except ImportError:
  capnp = None
if capnp:
  from nupic.proto.ConnectionsProto_capnp import ConnectionsProto

EPSILON = 0.00001 # constant error threshold to check equality of permanences to
# other floats
Expand Down Expand Up @@ -107,6 +113,12 @@ class CellData(object):
def __init__(self):
  # Segments owned by this cell; compared directly by __eq__/__ne__.
  self._segments = []

def __eq__(self, other):
  """
  Two CellData instances are equal when their segment lists are equal.

  Returns ``NotImplemented`` for objects of an unrelated type so Python can
  fall back to its default comparison instead of this method raising
  ``AttributeError`` on a missing ``_segments`` attribute.
  """
  if not isinstance(other, type(self)):
    return NotImplemented
  return self._segments == other._segments

def __ne__(self, other):
  """Inverse of ``__eq__`` (Python 2 does not derive ``__ne__`` automatically)."""
  return not (self == other)



def binSearch(arr, val):
Expand Down Expand Up @@ -443,6 +455,11 @@ def write(self, proto):
protoSynapses[k].permanence = synapse.permanence


@classmethod
def getSchema(cls):
  """
  Return the capnp schema class used to serialize :class:`Connections`.

  :returns: ``ConnectionsProto``. Only defined when the optional ``capnp``
            dependency is installed (see the guarded import at the top of
            this module); otherwise this raises ``NameError``.
  """
  return ConnectionsProto


@classmethod
def read(cls, proto):
"""
Expand Down

0 comments on commit 682fd2e

Please sign in to comment.