From 8bcb793cae04aab6eaaccdf7b4962348d4c6b9fa Mon Sep 17 00:00:00 2001
From: Matt Watson
Date: Thu, 19 May 2022 11:07:05 -0700
Subject: [PATCH] Remove incorrect embedding size limit

---
 keras_nlp/layers/token_and_position_embedding.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/keras_nlp/layers/token_and_position_embedding.py b/keras_nlp/layers/token_and_position_embedding.py
index 8fbf86aef7..e2607ee651 100644
--- a/keras_nlp/layers/token_and_position_embedding.py
+++ b/keras_nlp/layers/token_and_position_embedding.py
@@ -26,8 +26,7 @@ class TokenAndPositionEmbedding(keras.layers.Layer):
     to the sequence dimension.
 
     Args:
-        vocabulary_size: The size of the vocabulary (should be no larger
-            than 999)
+        vocabulary_size: The size of the vocabulary.
         sequence_length: The maximum length of input sequence
         embedding_dim: The output dimension of the embedding layer
         embeddings_initializer: The initializer to use for the Embedding
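
For context, below is a minimal usage sketch of the layer this patch touches, showing that a vocabulary well above the old documented limit of 999 works as expected. The parameter values (vocabulary size 30000, sequence length 128, embedding dim 64) are illustrative, and the sketch assumes keras_nlp and TensorFlow are installed:

    import keras_nlp
    import tensorflow as tf

    # The 999-entry limit this patch removes from the docstring was never
    # enforced by the layer; any positive vocabulary size is valid.
    embedding_layer = keras_nlp.layers.TokenAndPositionEmbedding(
        vocabulary_size=30000,
        sequence_length=128,
        embedding_dim=64,
    )

    # A batch of 2 sequences of 128 random token ids.
    token_ids = tf.random.uniform(
        shape=(2, 128), maxval=30000, dtype=tf.int32
    )

    # Output sums the token embedding and the position embedding,
    # giving shape (batch, sequence_length, embedding_dim).
    outputs = embedding_layer(token_ids)
    print(outputs.shape)  # (2, 128, 64)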