# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
This file tests that we can learn and predict the particularly vexing case of a
single constant signal!
"""
import numpy as np
import unittest2 as unittest
from nupic.research import fdrutilities as fdrutils
from nupic.research.TP import TP
from nupic.research.TP10X2 import TP10X2
from nupic.support.unittesthelpers.testcasebase import (TestCaseBase,
TestOptionParser)
def _printOneTrainingVector(x):
"Print a single vector succinctly."
print ''.join('1' if k != 0 else '.' for k in x)
def _getSimplePatterns(numOnes, numPatterns):
"""Very simple patterns. Each pattern has numOnes consecutive
bits on. There are numPatterns*numOnes bits in the vector. These patterns
are used as elements of sequences when building up a training set."""
numCols = numOnes * numPatterns
p = []
for i in xrange(numPatterns):
x = np.zeros(numCols, dtype='float32')
x[i*numOnes:(i + 1)*numOnes] = 1
p.append(x)
return p
def _createTps(numCols):
  """Create two instances of temporal poolers (TP.py and TP10X2.py) with
  identical parameter settings."""
  # Parameters shared verbatim by both implementations; keep these fixed.
  sharedParams = dict(
      numberOfCols=numCols,
      cellsPerColumn=1,
      initialPerm=0.3,
      connectedPerm=0.5,
      minThreshold=4,
      activationThreshold=5,
      newSynapseCount=7,
      permanenceInc=0.1,
      permanenceDec=0.05,
      globalDecay=0,
      burnIn=1,
      seed=SEED,
      verbosity=VERBOSITY,
      pamLength=1000)

  # The C++ implementation additionally self-checks its synapses.
  cppTp = TP10X2(checkSynapseConsistency=True, **sharedParams)
  # Ensure we are copying over learning states for TPDiff
  cppTp.retrieveLearningStates = True

  pyTp = TP(**sharedParams)

  return cppTp, pyTp
class TPConstantTest(TestCaseBase):
def setUp(self):
self.cppTp, self.pyTp = _createTps(100)
def _basicTest(self, tp=None):
"""Test creation, pickling, and basic run of learning and inference."""
trainingSet = _getSimplePatterns(10, 10)
# Learn on several constant sequences, with a reset in between
for _ in range(2):
for seq in trainingSet[0:5]:
for _ in range(10):
tp.learn(seq)
tp.reset()
print "Learning completed"
# Infer
print "Running inference"
tp.collectStats = True
for seq in trainingSet[0:5]:
tp.reset()
tp.resetStats()
for _ in range(10):
tp.infer(seq)
if VERBOSITY > 1 :
print
_printOneTrainingVector(seq)
tp.printStates(False, False)
print
print
if VERBOSITY > 1:
print tp.getStats()
# Ensure our predictions are accurate for each sequence
self.assertGreater(tp.getStats()['predictionScoreAvg2'], 0.8)
print ("tp.getStats()['predictionScoreAvg2'] = ",
tp.getStats()['predictionScoreAvg2'])
print "TPConstant basicTest ok"
def testCppTpBasic(self):
self._basicTest(self.cppTp)
def testPyTpBasic(self):
self._basicTest(self.pyTp)
def testIdenticalTps(self):
self.assertTrue(fdrutils.tpDiff2(self.cppTp, self.pyTp))
if __name__=="__main__":
  # Parse the standard test options and publish them as the module globals
  # SEED / VERBOSITY that _createTps() and _basicTest() read. NOTE(review):
  # these globals only exist when run as a script; importing this module and
  # running the tests directly would hit a NameError — confirm intended usage.
  parser = TestOptionParser()
  options, _ = parser.parse_args()
  SEED = options.seed
  VERBOSITY = options.verbosity
  # Seed numpy so runs are reproducible for a given --seed value.
  np.random.seed(SEED)
  unittest.main()