You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
I trained one of the example models as shown below but I had the error:
TypeError: _standardize_user_data() got an unexpected keyword argument 'check_batch_axis'
from __future__ import print_function
import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.optimizers import RMSprop
from importance_sampling.training import ImportanceTraining
batch_size = 128
num_classes = 10
epochs = 3
# the data, shuffled and split between train and test sets
/content/.local/lib/python3.6/site-packages/importance_sampling/training.py in fit_dataset(self, dataset, steps_per_epoch, batch_size, epochs, verbose, callbacks)
208
209 # Importance sampling is done here
--> 210 idxs, (x, y), w = sampler.sample(batch_size)
211 # Train on the sampled data
212 loss, metrics, scores = self.model.train_batch(x, y, w)
/content/.local/lib/python3.6/site-packages/importance_sampling/samplers.py in sample(self, batch_size)
60 def sample(self, batch_size):
61 # Get the importance scores of some samples
---> 62 idxs1, scores, xy = self._get_samples_with_scores(batch_size)
63
64 # Sample from the available ones
/content/.local/lib/python3.6/site-packages/importance_sampling/samplers.py in _get_samples_with_scores(self, batch_size)
126 idxs = np.random.choice(self.N, self.large_batch)
127 x, y = self.dataset.train_data[idxs]
--> 128 scores = self.model.score(x, y, batch_size=self.forward_batch_size)
129
130 return (
/content/.local/lib/python3.6/site-packages/importance_sampling/model_wrappers.py in score(self, x, y, batch_size)
93 result = np.hstack([
94 self.score_batch(xi, yi).T
---> 95 for xi, yi in self._iterate_batches(x, y, batch_size)
96 ]).T
97
/content/.local/lib/python3.6/site-packages/importance_sampling/model_wrappers.py in (.0)
93 result = np.hstack([
94 self.score_batch(xi, yi).T
---> 95 for xi, yi in self._iterate_batches(x, y, batch_size)
96 ]).T
97
I am preparing a new release that changes quite a lot of things and fixes this bug as well. I will be pushing to the repository today but the release will probably be published after Easter because I have to update the documentation as well.
I trained one of the example models as shown below but I had the error:
TypeError: _standardize_user_data() got an unexpected keyword argument 'check_batch_axis'
# Train a simple MLP on MNIST using importance sampling.
# Restored from the markdown-mangled issue text: `from __future__` had its
# underscores stripped (rendered as bold) and the `#` comment markers were
# dropped (rendered as headers) — both made the quoted script a SyntaxError.
from __future__ import print_function
import keras
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.optimizers import RMSprop
from importance_sampling.training import ImportanceTraining

batch_size = 128
num_classes = 10
epochs = 3

# the data, shuffled and split between train and test sets
(x_train, y_train), (x_test, y_test) = mnist.load_data()
# Flatten 28x28 images to 784-dim float vectors scaled to [0, 1].
x_train = x_train.reshape(60000, 784)
x_test = x_test.reshape(10000, 784)
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')

# convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)

# Two hidden ReLU layers with dropout, softmax output over 10 digits.
model = Sequential()
model.add(Dense(512, activation='relu', input_shape=(784,)))
model.add(Dropout(0.2))
model.add(Dense(512, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(10, activation='softmax'))
model.summary()
model.compile(loss='categorical_crossentropy',
              optimizer=RMSprop(),
              metrics=['accuracy'])

# Wrap the compiled model so each batch is drawn by importance sampling;
# forward_batch_size controls how many candidates are scored per step.
history = ImportanceTraining(model, forward_batch_size=1024).fit(
    x_train, y_train,
    batch_size=batch_size,
    epochs=epochs,
    verbose=1
)

score = model.evaluate(x_test, y_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
Had this error:
TypeError Traceback (most recent call last)
in ()
47 batch_size=batch_size,
48 epochs=epochs,
---> 49 verbose=1
50 )
51 score = model.evaluate(x_test, y_test, verbose=0)
/content/.local/lib/python3.6/site-packages/importance_sampling/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, steps_per_epoch)
97 steps_per_epoch=steps_per_epoch,
98 verbose=verbose,
---> 99 callbacks=callbacks
100 )
101
/content/.local/lib/python3.6/site-packages/importance_sampling/training.py in fit_dataset(self, dataset, steps_per_epoch, batch_size, epochs, verbose, callbacks)
208
209 # Importance sampling is done here
--> 210 idxs, (x, y), w = sampler.sample(batch_size)
211 # Train on the sampled data
212 loss, metrics, scores = self.model.train_batch(x, y, w)
/content/.local/lib/python3.6/site-packages/importance_sampling/samplers.py in sample(self, batch_size)
60 def sample(self, batch_size):
61 # Get the importance scores of some samples
---> 62 idxs1, scores, xy = self._get_samples_with_scores(batch_size)
63
64 # Sample from the available ones
/content/.local/lib/python3.6/site-packages/importance_sampling/samplers.py in _get_samples_with_scores(self, batch_size)
126 idxs = np.random.choice(self.N, self.large_batch)
127 x, y = self.dataset.train_data[idxs]
--> 128 scores = self.model.score(x, y, batch_size=self.forward_batch_size)
129
130 return (
/content/.local/lib/python3.6/site-packages/importance_sampling/model_wrappers.py in score(self, x, y, batch_size)
93 result = np.hstack([
94 self.score_batch(xi, yi).T
---> 95 for xi, yi in self._iterate_batches(x, y, batch_size)
96 ]).T
97
/content/.local/lib/python3.6/site-packages/importance_sampling/model_wrappers.py in (.0)
93 result = np.hstack([
94 self.score_batch(xi, yi).T
---> 95 for xi, yi in self._iterate_batches(x, y, batch_size)
96 ]).T
97
/content/.local/lib/python3.6/site-packages/importance_sampling/model_wrappers.py in score_batch(self, x, y)
228 dummy_target = np.zeros((y.shape[0], 1))
229 inputs = _tolist(x) + [y, dummy_weights]
--> 230 outputs = self.model.test_on_batch(inputs, dummy_target)
231
232 return outputs[self.SCORE].ravel()
/content/.local/lib/python3.6/site-packages/transparent_keras/transparent_model.py in test_on_batch(self, x, y, sample_weight)
187 x, y,
188 sample_weight=sample_weight,
--> 189 check_batch_axis=True
190 )
191
TypeError: _standardize_user_data() got an unexpected keyword argument 'check_batch_axis'
The text was updated successfully, but these errors were encountered: