tests/models/test_modeling_common.py (18 changes: 15 additions & 3 deletions)
@@ -691,6 +691,9 @@ def test_deprecated_kwargs(self):
     def test_cpu_offload(self):
         config, inputs_dict = self.prepare_init_args_and_inputs_for_common()
         model = self.model_class(**config).eval()
+        if model._no_split_modules is None:
+            return
+
         model = model.to(torch_device)
 
         torch.manual_seed(0)
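
For context, a minimal sketch (not part of this PR) of the attribute the new guard checks. `_no_split_modules` is the class-level list that accelerate consults when building a device map: it names the blocks that must stay on a single device. A model that leaves it as None cannot be auto-sharded, which is why the offload and parallelism tests now return early for such models. The class names and block name below are illustrative, assuming the diffusers `ModelMixin` API.

from diffusers import ModelMixin  # assumption: these common tests target diffusers models

class ExampleTransformerModel(ModelMixin):  # hypothetical model, for illustration only
    # Blocks listed here are never split across devices by device_map="auto".
    _no_split_modules = ["BasicTransformerBlock"]

class TinyModel(ModelMixin):  # hypothetical model with no sharding support
    _no_split_modules = None  # the guarded tests return early for models like this
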
@@ -718,6 +721,9 @@ def test_cpu_offload(self):
     def test_disk_offload_without_safetensors(self):
         config, inputs_dict = self.prepare_init_args_and_inputs_for_common()
         model = self.model_class(**config).eval()
+        if model._no_split_modules is None:
+            return
+
         model = model.to(torch_device)
 
         torch.manual_seed(0)
@@ -728,12 +734,12 @@ def test_disk_offload_without_safetensors(self):
             model.cpu().save_pretrained(tmp_dir, safe_serialization=False)
 
             with self.assertRaises(ValueError):
-                max_size = int(self.model_split_percents[1] * model_size)
+                max_size = int(self.model_split_percents[0] * model_size)
                 max_memory = {0: max_size, "cpu": max_size}
                 # This errors out because it's missing an offload folder
                 new_model = self.model_class.from_pretrained(tmp_dir, device_map="auto", max_memory=max_memory)
 
-            max_size = int(self.model_split_percents[1] * model_size)
+            max_size = int(self.model_split_percents[0] * model_size)
             max_memory = {0: max_size, "cpu": max_size}
             new_model = self.model_class.from_pretrained(
                 tmp_dir, device_map="auto", max_memory=max_memory, offload_folder=tmp_dir
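
A hedged sketch of the max_memory arithmetic in the hunk above, with made-up numbers. `model_split_percents` is a per-test-class list of fractions of the full model size (in the real test, `model_size` presumably comes from accelerate's `compute_module_sizes`); switching from index 1 to index 0 picks a tighter cap, presumably so that GPU 0 and CPU together cannot hold all the weights and accelerate must spill the remainder to disk, which is what `offload_folder` (and the ValueError without it) exercises. The sizes and percentages below are hypothetical.

# Hypothetical numbers standing in for compute_module_sizes(model)[""] and the
# per-test-class split percentages; they are not taken from this PR.
model_size = 200 * 1024**2          # pretend the checkpoint weighs 200 MiB
model_split_percents = [0.3, 0.6]

# Index 0 is the tighter cap: 60 MiB on GPU 0 plus 60 MiB on CPU cannot hold the
# 200 MiB model, so accelerate has to push the remainder to disk. That is what
# makes offload_folder mandatory (ValueError without it) in the hunk above.
max_size = int(model_split_percents[0] * model_size)
max_memory = {0: max_size, "cpu": max_size}
print(max_size)  # 62914560 bytes, i.e. 60 MiB per device
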
@@ -749,6 +755,9 @@ def test_disk_offload_without_safetensors(self):
     def test_disk_offload_with_safetensors(self):
         config, inputs_dict = self.prepare_init_args_and_inputs_for_common()
         model = self.model_class(**config).eval()
+        if model._no_split_modules is None:
+            return
+
         model = model.to(torch_device)
 
         torch.manual_seed(0)
@@ -758,7 +767,7 @@ def test_disk_offload_with_safetensors(self):
         with tempfile.TemporaryDirectory() as tmp_dir:
             model.cpu().save_pretrained(tmp_dir)
 
-            max_size = int(self.model_split_percents[1] * model_size)
+            max_size = int(self.model_split_percents[0] * model_size)
             max_memory = {0: max_size, "cpu": max_size}
             new_model = self.model_class.from_pretrained(
                 tmp_dir, device_map="auto", offload_folder=tmp_dir, max_memory=max_memory
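
The knobs the test passes to from_pretrained are the same ones an end user would use to load a model with accelerate-backed disk offload. A hedged usage sketch, assuming the diffusers `ModelMixin.from_pretrained` signature; the checkpoint id and folder below are hypothetical.

from diffusers import UNet2DConditionModel

model = UNet2DConditionModel.from_pretrained(
    "someuser/some-unet-checkpoint",        # hypothetical Hub repo id
    device_map="auto",                      # let accelerate place the _no_split_modules blocks
    max_memory={0: "2GiB", "cpu": "2GiB"},  # cap GPU 0 and CPU RAM
    offload_folder="./offload",             # weights that fit nowhere else go here
)
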
@@ -774,6 +783,9 @@ def test_disk_offload_with_safetensors(self):
     def test_model_parallelism(self):
         config, inputs_dict = self.prepare_init_args_and_inputs_for_common()
         model = self.model_class(**config).eval()
+        if model._no_split_modules is None:
+            return
+
         model = model.to(torch_device)
 
         torch.manual_seed(0)