move device statements outside if statements (#11292)
e-yi committed Apr 19, 2021
1 parent d9c6204 commit 5a34d8d
Showing 3 changed files with 8 additions and 5 deletions.
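
All three files receive the same fix: `device` is resolved once near the top of forward(), before the `past_key_values` / `position_ids` branching, so it is in scope for every tensor created later (the position ids, the causal mask, the position embeddings) rather than only inside the `position_ids is None` branch. A minimal runnable sketch of the pattern (forward_sketch below is a simplified illustration, not the actual transformers code):

import torch

def forward_sketch(input_ids=None, inputs_embeds=None, position_ids=None):
    if input_ids is None and inputs_embeds is None:
        raise ValueError("You have to specify either input_ids or inputs_embeds")

    # The hoisted statement: resolve the target device once, up front.
    device = input_ids.device if input_ids is not None else inputs_embeds.device

    seq_len = input_ids.shape[-1] if input_ids is not None else inputs_embeds.shape[-2]
    if position_ids is None:
        # Before the fix, `device` was assigned only inside this branch.
        position_ids = torch.arange(seq_len, dtype=torch.long, device=device)

    # With `device` unconditionally in scope, tensors created later (e.g. the
    # causal mask) no longer need to reach for `inputs_embeds.device`.
    mask = torch.triu(torch.ones(seq_len, seq_len, device=device), 1)
    return position_ids, mask

For example, forward_sketch(input_ids=torch.tensor([[1, 2, 3]])) builds both tensors on the CPU, while an input already on a CUDA device would place them there instead.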
7 changes: 4 additions & 3 deletions src/transformers/models/ctrl/modeling_ctrl.py
@@ -394,13 +394,14 @@ def forward(
         else:
             raise ValueError("You have to specify either input_ids or inputs_embeds")
 
+        device = input_ids.device if input_ids is not None else inputs_embeds.device
+
         if past_key_values is None:
             past_length = 0
             past_key_values = tuple([None] * len(self.h))
         else:
             past_length = past_key_values[0][0].size(-2)
         if position_ids is None:
-            device = input_ids.device if input_ids is not None else inputs_embeds.device
             position_ids = torch.arange(past_length, input_shape[-1] + past_length, dtype=torch.long, device=device)
             position_ids = position_ids.unsqueeze(0).view(-1, input_shape[-1])

@@ -438,11 +439,11 @@ def forward(
             inputs_embeds = self.w(input_ids)
         # inputs_embeds = embedded.unsqueeze(0) if len(input_ids.shape)<2 else embedded
         seq_len = input_shape[-1]
-        mask = torch.triu(torch.ones(seq_len + past_length, seq_len + past_length), 1).to(inputs_embeds.device)
+        mask = torch.triu(torch.ones(seq_len + past_length, seq_len + past_length), 1).to(device)
 
         inputs_embeds *= np.sqrt(self.d_model_size)
 
-        pos_embeds = self.pos_encoding[position_ids, :].to(inputs_embeds.device)
+        pos_embeds = self.pos_encoding[position_ids, :].to(device)
 
         hidden_states = inputs_embeds + pos_embeds + token_type_embeds
 
3 changes: 2 additions & 1 deletion src/transformers/models/gpt2/modeling_gpt2.py
@@ -675,6 +675,8 @@ def forward(
         else:
             raise ValueError("You have to specify either input_ids or inputs_embeds")
 
+        device = input_ids.device if input_ids is not None else inputs_embeds.device
+
         if token_type_ids is not None:
             token_type_ids = token_type_ids.view(-1, input_shape[-1])
         if position_ids is not None:
@@ -686,7 +688,6 @@ def forward(
         else:
             past_length = past_key_values[0][0].size(-2)
         if position_ids is None:
-            device = input_ids.device if input_ids is not None else inputs_embeds.device
             position_ids = torch.arange(past_length, input_shape[-1] + past_length, dtype=torch.long, device=device)
             position_ids = position_ids.unsqueeze(0).view(-1, input_shape[-1])

3 changes: 2 additions & 1 deletion src/transformers/models/gpt_neo/modeling_gpt_neo.py
@@ -755,6 +755,8 @@ def forward(
         else:
             raise ValueError("You have to specify either input_ids or inputs_embeds")
 
+        device = input_ids.device if input_ids is not None else inputs_embeds.device
+
         if token_type_ids is not None:
             token_type_ids = token_type_ids.view(-1, input_shape[-1])
         if position_ids is not None:
@@ -766,7 +768,6 @@ def forward(
         else:
             past_length = past_key_values[0][0].size(-2)
         if position_ids is None:
-            device = input_ids.device if input_ids is not None else inputs_embeds.device
             position_ids = torch.arange(past_length, input_shape[-1] + past_length, dtype=torch.long, device=device)
             position_ids = position_ids.unsqueeze(0).view(-1, input_shape[-1])

