Skip to content

Commit

Permalink
chore: remove repetitive words (#30174)
Browse files Browse the repository at this point in the history
Signed-off-by: hugehope <cmm7@sina.cn>
  • Loading branch information
hugehope committed Apr 11, 2024
1 parent e50be9a commit 58b170c
Show file tree
Hide file tree
Showing 4 changed files with 4 additions and 4 deletions.
2 changes: 1 addition & 1 deletion src/transformers/models/canine/modeling_canine.py
Original file line number Diff line number Diff line change
Expand Up @@ -608,7 +608,7 @@ def forward(
chunk_end = min(from_seq_length, chunk_start + self.attend_from_chunk_width)
from_chunks.append((chunk_start, chunk_end))

# Determine the chunks (windows) that will will attend *to*.
# Determine the chunks (windows) that will attend *to*.
to_chunks = []
if self.first_position_attends_to_all:
to_chunks.append((0, to_seq_length))
Expand Down
2 changes: 1 addition & 1 deletion src/transformers/models/mamba/configuration_mamba.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ class MambaConfig(PretrainedConfig):
residual_in_fp32 (`bool`, *optional*, defaults to `True`):
Whether or not residuals should be in `float32`. If set to `False` residuals will keep the same `dtype` as the rest of the model
time_step_rank (`Union[int,str]`, *optional*, defaults to `"auto"`):
Rank of the the discretization projection matrix. `"auto"` means that it will default to `math.ceil(self.hidden_size / 16)`
Rank of the discretization projection matrix. `"auto"` means that it will default to `math.ceil(self.hidden_size / 16)`
time_step_scale (`float`, *optional*, defaults to 1.0):
Scale used to scale `dt_proj.bias`.
time_step_min (`float`, *optional*, defaults to 0.001):
Expand Down
2 changes: 1 addition & 1 deletion src/transformers/models/rwkv/configuration_rwkv.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ class RwkvConfig(PretrainedConfig):
Vocabulary size of the RWKV model. Defines the number of different tokens that can be represented by the
`inputs_ids` passed when calling [`RwkvModel`].
context_length (`int`, *optional*, defaults to 1024):
The maximum sequence length that this model can be be used with in a single forward (using it in RNN mode
The maximum sequence length that this model can be used with in a single forward (using it in RNN mode
lets use any sequence length).
hidden_size (`int`, *optional*, defaults to 4096):
Dimensionality of the embeddings and hidden states.
Expand Down
2 changes: 1 addition & 1 deletion src/transformers/optimization.py
Original file line number Diff line number Diff line change
Expand Up @@ -273,7 +273,7 @@ def get_polynomial_decay_schedule_with_warmup(

lr_init = optimizer.defaults["lr"]
if not (lr_init > lr_end):
raise ValueError(f"lr_end ({lr_end}) must be be smaller than initial lr ({lr_init})")
raise ValueError(f"lr_end ({lr_end}) must be smaller than initial lr ({lr_init})")

lr_lambda = partial(
_get_polynomial_decay_schedule_with_warmup_lr_lambda,
Expand Down

0 comments on commit 58b170c

Please sign in to comment.