Description
{
"name": "ValueError",
"message": "Unrecognized keyword arguments passed to Embedding: {'batch_input_shape': [32, None]}",
"stack": "---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
/home/user/dev/introtodeeplearning/lab1/Part2_Music_Generation.ipynb Cell 29 line 28
24 return model
26 # Build a simple model with default hyperparameters. You will get the
27 # chance to change these later.
---> 28 model = build_model(len(vocab), embedding_dim=256, rnn_units=1024, batch_size=32)
/home/user/dev/introtodeeplearning/lab1/Part2_Music_Generation.ipynb Cell 29 line 8
4 def build_model(vocab_size, embedding_dim, rnn_units, batch_size):
5 model = tf.keras.Sequential([
6 # Layer 1: Embedding layer to transform indices into dense vectors
7 # of a fixed embedding size
----> 8 tf.keras.layers.Embedding(vocab_size, embedding_dim, batch_input_shape=[batch_size, None]),
9 # tf.keras.layers.Embedding(vocab_size, embedding_dim, input_shape=batch_size),
10 #tf.keras.layers.Embedding(vocab_size, embedding_dim, input_shape=(batch_size, None)),
11
12 # Layer 2: LSTM with `rnn_units` number of units.
13 # TODO: Call the LSTM function defined above to add this layer.
14 LSTM(rnn_units),
15 # LSTM('''TODO'''),
16
17 # Layer 3: Dense (fully-connected) layer that transforms the LSTM output
18 # into the vocabulary size.
19 # TODO: Add the Dense layer.
20 tf.keras.layers.Dense(vocab_size)
21 # '''TODO: DENSE LAYER HERE'''
22 ])
24 return model
File ~/dev/introtodeeplearning/.venv/lib64/python3.12/site-packages/keras/src/layers/core/embedding.py:89, in Embedding.__init__(self, input_dim, output_dim, embeddings_initializer, embeddings_regularizer, embeddings_constraint, mask_zero, lora_rank, **kwargs)
85 if input_length is not None:
86 warnings.warn(
87 "Argument input_length
is deprecated. Just remove it."
88 )
---> 89 super().__init__(**kwargs)
90 self.input_dim = input_dim
91 self.output_dim = output_dim
File ~/dev/introtodeeplearning/.venv/lib64/python3.12/site-packages/keras/src/layers/layer.py:263, in Layer.__init__(self, activity_regularizer, trainable, dtype, autocast, name, **kwargs)
261 self._input_shape_arg = input_shape_arg
262 if kwargs:
--> 263 raise ValueError(
264 "Unrecognized keyword arguments "
265 f"passed to {self.class.name}: {kwargs}"
266 )
268 self.built = False
269 self.autocast = autocast
ValueError: Unrecognized keyword arguments passed to Embedding: {'batch_input_shape': [32, None]}"
}
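With recent TensorFlow releases, `tf.keras` resolves to Keras 3, whose layers no longer accept the legacy `batch_input_shape` keyword; passing it to `Embedding` is exactly what raises the ValueError above. Below is a minimal sketch of a Keras 3 compatible `build_model`, assuming the notebook's `vocab` variable and an `LSTM` helper along the lines of the lab's (stateful, `return_sequences=True`); the batch shape is declared with `tf.keras.Input` instead of being passed to the Embedding layer.

```python
import tensorflow as tf

# Assumed stand-in for the LSTM helper defined earlier in the notebook.
def LSTM(rnn_units):
    return tf.keras.layers.LSTM(
        rnn_units,
        return_sequences=True,
        recurrent_initializer="glorot_uniform",
        stateful=True,
    )

def build_model(vocab_size, embedding_dim, rnn_units, batch_size):
    model = tf.keras.Sequential([
        # Keras 3: declare the (batch_size, sequence_length) input here
        # instead of passing batch_input_shape to the Embedding layer.
        tf.keras.Input(batch_shape=(batch_size, None)),

        # Layer 1: Embedding layer to transform indices into dense vectors.
        tf.keras.layers.Embedding(vocab_size, embedding_dim),

        # Layer 2: LSTM with `rnn_units` number of units.
        LSTM(rnn_units),

        # Layer 3: Dense layer mapping LSTM output to vocabulary logits.
        tf.keras.layers.Dense(vocab_size),
    ])
    return model

# `vocab` comes from the earlier notebook cells.
model = build_model(len(vocab), embedding_dim=256, rnn_units=1024, batch_size=32)
```

Moving the shape declaration to `tf.keras.Input(batch_shape=(batch_size, None))` keeps the fixed batch dimension that a stateful LSTM requires, while the Embedding call itself only receives the arguments Keras 3 recognizes.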