ValueError: Trying to share variable rnn/attention_cell_wrapper/multi_rnn_cell/cell_0/gru_cell/gates/kernel, but specified shape (200, 200) and found shape (2428, 200).
#7
Training the RNN

with tf.Graph().as_default():
    model = RNNModel()
    input_ = train[0]
    target = train[1]
    with tf.Session() as sess:
        init = tf.initialize_all_variables()
        sess.run([init])
        loss = 2000
        for e in range(NUM_EPOCHS):
            state = sess.run(model.zero_state)
            epoch_loss = 0
            for batch in range(0, NUM_TRAIN_BATCHES):
                start = batch * BATCH_SIZE
                end = start + BATCH_SIZE
                feed = {
                    model.input_data: input_[start:end],
                    model.target_data: target[start:end],
                    model.dropout_prob: 0.5,
                    model.start_state: state
                }
                _, loss, acc, state = sess.run(
                    [
                        model.train_op,
                        model.loss,
                        model.accuracy,
                        model.end_state
                    ],
                    feed_dict=feed
                )
                epoch_loss += loss
            print('step - {0} loss - {1} acc - {2}'.format(e, epoch_loss, acc))
        final_preds = np.array([])
        for batch in range(0, NUM_VAL_BATCHES):
            start = batch * BATCH_SIZE
            end = start + BATCH_SIZE
            feed = {
                model.input_data: val[0][start:end],
                model.target_data: val[1][start:end],
                model.dropout_prob: 1,
                model.start_state: state
            }
            acc, preds, state = sess.run(
                [
                    model.accuracy,
                    model.predictions,
                    model.end_state
                ],
                feed_dict=feed
            )
            print(acc)
            assert len(preds) == BATCH_SIZE
            final_preds = np.concatenate((final_preds, preds), axis=0)
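Two incidental notes on the cell above, separate from the crash: tf.initialize_all_variables() is deprecated in favor of tf.global_variables_initializer(), and the validation loop starts from whatever state the last training batch ended in. If that carry-over is unintentional, a minimal fix is to fetch a fresh zero state first, the same way the training loop does:

    # before the validation loop: start from a fresh state instead of
    # the final training state
    state = sess.run(model.zero_state)

Running the cell produces the warning and traceback below.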
WARNING:tensorflow:<tensorflow.contrib.rnn.python.ops.rnn_cell.AttentionCellWrapper object at 0x0000021E0CFB02B0>: Using a concatenated state is slower and will soon be deprecated. Use state_is_tuple=True.
ValueError Traceback (most recent call last)
in <module>()
1 with tf.Graph().as_default():
----> 2 model = RNNModel()
3 input_ = train[0]
4 target = train[1]
5 with tf.Session() as sess:
in __init__(self)
44 scope.reuse_variables()
45
---> 46 output, state = self.gru_cell(inp, state)
47 states.append(state)
48 outputs.append(output)
c:\python35\lib\site-packages\tensorflow\python\ops\rnn_cell_impl.py in __call__(self, inputs, state, scope)
178 with vs.variable_scope(vs.get_variable_scope(),
179 custom_getter=self._rnn_get_variable):
--> 180 return super(RNNCell, self).__call__(inputs, state)
181
182 def _rnn_get_variable(self, getter, *args, **kwargs):
c:\python35\lib\site-packages\tensorflow\python\layers\base.py in __call__(self, inputs, *args, **kwargs)
439 # Check input assumptions set after layer building, e.g. input shape.
440 self._assert_input_compatibility(inputs)
--> 441 outputs = self.call(inputs, *args, **kwargs)
442
443 # Apply activity regularization.
c:\python35\lib\site-packages\tensorflow\contrib\rnn\python\ops\rnn_cell.py in call(self, inputs, state)
1111 input_size = inputs.get_shape().as_list()[1]
1112 inputs = _linear([inputs, attns], input_size, True)
-> 1113 lstm_output, new_state = self._cell(inputs, state)
1114 if self._state_is_tuple:
1115 new_state_cat = array_ops.concat(nest.flatten(new_state), 1)
c:\python35\lib\site-packages\tensorflow\python\ops\rnn_cell_impl.py in __call__(self, inputs, state, scope)
178 with vs.variable_scope(vs.get_variable_scope(),
179 custom_getter=self._rnn_get_variable):
--> 180 return super(RNNCell, self).__call__(inputs, state)
181
182 def _rnn_get_variable(self, getter, *args, **kwargs):
c:\python35\lib\site-packages\tensorflow\python\layers\base.py in __call__(self, inputs, *args, **kwargs)
439 # Check input assumptions set after layer building, e.g. input shape.
440 self._assert_input_compatibility(inputs)
--> 441 outputs = self.call(inputs, *args, **kwargs)
442
443 # Apply activity regularization.
c:\python35\lib\site-packages\tensorflow\python\ops\rnn_cell_impl.py in call(self, inputs, state)
914 [-1, cell.state_size])
915 cur_state_pos += cell.state_size
--> 916 cur_inp, new_state = cell(cur_inp, cur_state)
917 new_states.append(new_state)
918
c:\python35\lib\site-packages\tensorflow\python\ops\rnn_cell_impl.py in __call__(self, inputs, state, scope)
178 with vs.variable_scope(vs.get_variable_scope(),
179 custom_getter=self._rnn_get_variable):
--> 180 return super(RNNCell, self).__call__(inputs, state)
181
182 def _rnn_get_variable(self, getter, *args, **kwargs):
c:\python35\lib\site-packages\tensorflow\python\layers\base.py in __call__(self, inputs, *args, **kwargs)
439 # Check input assumptions set after layer building, e.g. input shape.
440 self._assert_input_compatibility(inputs)
--> 441 outputs = self.call(inputs, *args, **kwargs)
442
443 # Apply activity regularization.
c:\python35\lib\site-packages\tensorflow\python\ops\rnn_cell_impl.py in call(self, inputs, state)
293 value = math_ops.sigmoid(
294 _linear([inputs, state], 2 * self._num_units, True, bias_ones,
--> 295 self._kernel_initializer))
296 r, u = array_ops.split(value=value, num_or_size_splits=2, axis=1)
297 with vs.variable_scope("candidate"):
c:\python35\lib\site-packages\tensorflow\python\ops\rnn_cell_impl.py in _linear(args, output_size, bias, bias_initializer, kernel_initializer)
1015 _WEIGHTS_VARIABLE_NAME, [total_arg_size, output_size],
1016 dtype=dtype,
-> 1017 initializer=kernel_initializer)
1018 if len(args) == 1:
1019 res = math_ops.matmul(args[0], weights)
c:\python35\lib\site-packages\tensorflow\python\ops\variable_scope.py in get_variable(name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter)
1063 collections=collections, caching_device=caching_device,
1064 partitioner=partitioner, validate_shape=validate_shape,
-> 1065 use_resource=use_resource, custom_getter=custom_getter)
1066 get_variable_or_local_docstring = (
1067 """%s
c:\python35\lib\site-packages\tensorflow\python\ops\variable_scope.py in get_variable(self, var_store, name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter)
960 collections=collections, caching_device=caching_device,
961 partitioner=partitioner, validate_shape=validate_shape,
--> 962 use_resource=use_resource, custom_getter=custom_getter)
963
964 def _get_partitioned_variable(self,
c:\python35\lib\site-packages\tensorflow\python\ops\variable_scope.py in get_variable(self, name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter)
358 reuse=reuse, trainable=trainable, collections=collections,
359 caching_device=caching_device, partitioner=partitioner,
--> 360 validate_shape=validate_shape, use_resource=use_resource)
361 else:
362 return _true_getter(
c:\python35\lib\site-packages\tensorflow\python\ops\variable_scope.py in wrapped_custom_getter(getter, *args, **kwargs)
1403 return custom_getter(
1404 functools.partial(old_getter, getter),
-> 1405 *args, **kwargs)
1406 return wrapped_custom_getter
1407
c:\python35\lib\site-packages\tensorflow\python\ops\rnn_cell_impl.py in _rnn_get_variable(self, getter, *args, **kwargs)
181
182 def _rnn_get_variable(self, getter, *args, **kwargs):
--> 183 variable = getter(*args, **kwargs)
184 trainable = (variable in tf_variables.trainable_variables() or
185 (isinstance(variable, tf_variables.PartitionedVariable) and
c:\python35\lib\site-packages\tensorflow\python\ops\variable_scope.py in wrapped_custom_getter(getter, *args, **kwargs)
1403 return custom_getter(
1404 functools.partial(old_getter, getter),
-> 1405 *args, **kwargs)
1406 return wrapped_custom_getter
1407
c:\python35\lib\site-packages\tensorflow\python\ops\rnn_cell_impl.py in _rnn_get_variable(self, getter, *args, **kwargs)
181
182 def _rnn_get_variable(self, getter, *args, **kwargs):
--> 183 variable = getter(*args, **kwargs)
184 trainable = (variable in tf_variables.trainable_variables() or
185 (isinstance(variable, tf_variables.PartitionedVariable) and
c:\python35\lib\site-packages\tensorflow\python\ops\rnn_cell_impl.py in _rnn_get_variable(self, getter, *args, **kwargs)
181
182 def _rnn_get_variable(self, getter, *args, **kwargs):
--> 183 variable = getter(*args, **kwargs)
184 trainable = (variable in tf_variables.trainable_variables() or
185 (isinstance(variable, tf_variables.PartitionedVariable) and
c:\python35\lib\site-packages\tensorflow\python\ops\variable_scope.py in _true_getter(name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, use_resource)
350 trainable=trainable, collections=collections,
351 caching_device=caching_device, validate_shape=validate_shape,
--> 352 use_resource=use_resource)
353
354 if custom_getter is not None:
c:\python35\lib\site-packages\tensorflow\python\ops\variable_scope.py in _get_single_variable(self, name, shape, dtype, initializer, regularizer, partition_info, reuse, trainable, collections, caching_device, validate_shape, use_resource)
667 raise ValueError("Trying to share variable %s, but specified shape %s"
668 " and found shape %s." % (name, shape,
--> 669 found_var.get_shape()))
670 if not dtype.is_compatible_with(found_var.dtype):
671 dtype_str = dtype.name
ValueError: Trying to share variable rnn/attention_cell_wrapper/multi_rnn_cell/cell_0/gru_cell/gates/kernel, but specified shape (200, 200) and found shape (2428, 200).
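Reading the shapes in the message: the GRU gates kernel has shape [input_width + num_units, 2 * num_units], so 2 * num_units = 200 gives num_units = 100; the existing variable was created for an input of width 2428 - 100 = 2328, while the failing call wants width 200 - 100 = 100. The cell is being called step-by-step in __init__ (line 46, under scope.reuse_variables()), so the likely cause is that different timesteps feed the cell inputs of different widths, e.g. the raw 2328-wide input at the first step and a 100-wide tensor (such as the previous output) later. Every call to the cell must see the same input width. A minimal sketch of one way around this, letting tf.nn.dynamic_rnn drive the time loop; the sizes are inferred from the error message, and NUM_LAYERS / ATTN_LENGTH are placeholders, not values from the issue:

    import tensorflow as tf

    NUM_UNITS = 100     # inferred: 2 * num_units == 200 in the error
    INPUT_WIDTH = 2328  # inferred: 2428 - NUM_UNITS
    NUM_LAYERS = 2      # placeholder: the traceback only shows cell_0
    ATTN_LENGTH = 16    # placeholder: attention window not shown in the issue

    cell = tf.contrib.rnn.AttentionCellWrapper(
        tf.contrib.rnn.MultiRNNCell(
            [tf.contrib.rnn.GRUCell(NUM_UNITS) for _ in range(NUM_LAYERS)],
            state_is_tuple=True),
        attn_length=ATTN_LENGTH,
        state_is_tuple=True)  # tuple states also avoid the deprecation warning

    # inputs: [batch, time, width] -- every timestep has the same width, and
    # dynamic_rnn creates the kernel once and reuses it across timesteps.
    inputs = tf.placeholder(tf.float32, [None, None, INPUT_WIDTH])
    outputs, final_state = tf.nn.dynamic_rnn(cell, inputs, dtype=tf.float32)

If the step-by-step loop in RNNModel.__init__ is intentional, the equivalent fix there is to project every timestep's input to one fixed width before calling self.gru_cell.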