Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 18 additions & 11 deletions gptqmodel/looper/named_module.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,17 +2,19 @@
# SPDX-FileCopyrightText: 2024-2025 qubitium@modelcloud.ai
# SPDX-License-Identifier: Apache-2.0
# Contact: qubitium@modelcloud.ai, x.com/qubitium

import threading
from typing import Any, Optional

import torch
import transformers
from torch import nn, Tensor
from torch import Tensor, nn
from torch.nn import Parameter
from torch.nn.modules.conv import _ConvNd


class NamedModule(torch.nn.Module):
_lock = threading.Lock()

def __init__(self, module: torch.nn.Module, name: str, full_name:str, layer_index: int) -> None:
super().__init__()

Expand Down Expand Up @@ -59,14 +61,17 @@ def __init__(self, module: torch.nn.Module, name: str, full_name:str, layer_inde
def register_buffer(
    self, name: str, tensor: Optional[Tensor], persistent: bool = True
) -> None:
    """Register ``tensor`` as a buffer on the wrapped inner module.

    Delegates to ``self.module.register_buffer`` (not ``super()``) so the
    buffer lives on the real module, and takes the shared class-level lock
    so concurrent register/unregister calls from worker threads don't race.

    Args:
        name: buffer name on the inner module.
        tensor: buffer value; ``None`` is allowed by ``nn.Module``.
        persistent: whether the buffer is saved in the state dict.
    """
    # NOTE: _lock is a non-reentrant class-level threading.Lock shared by
    # all NamedModule instances — do not call back into locked methods here.
    with self._lock:
        return self.module.register_buffer(name, tensor, persistent)

def unregister_buffer(self, name: str):
    """Remove a previously registered buffer from the wrapped inner module.

    Raises:
        KeyError: if no buffer named ``name`` exists on the inner module.
    """
    with self._lock:
        # Delete from _buffers first; KeyError for a missing name is the
        # documented contract (preserved from the original implementation).
        del self.module._buffers[name]
        # nn.Module.__delattr__ only recognises names still present in
        # _buffers/_parameters/_modules; once the _buffers entry is gone,
        # an unconditional delattr would raise AttributeError for a pure
        # buffer. Only delattr if the name also shadows an instance attr.
        if name in self.module.__dict__:
            delattr(self.module, name)

def register_parameter(self, name: str, param: Optional[Parameter]) -> None:
    """Register ``param`` as a parameter on the wrapped inner module.

    Delegates to ``self.module.register_parameter`` under the shared
    class-level lock so concurrent registrations don't race.

    Args:
        name: parameter name on the inner module.
        param: parameter value; ``None`` is allowed by ``nn.Module``.
    """
    with self._lock:
        return self.module.register_parameter(name, param)

# return stats for mo
# def stats(self) -> Dict[str, float]:
Expand All @@ -80,11 +85,13 @@ def register_parameter(self, name: str, param: Optional[Parameter]) -> None:

# getattr is only called if python cannot find attr for `self`
def __getattr__(self, name: str):
    """Forward attribute lookups that miss on the wrapper to the inner module.

    Python only invokes ``__getattr__`` after normal lookup on ``self``
    fails, so wrapper-owned attributes (``module``, ``name``, ...) are
    served from ``self.__dict__`` without reaching this method.
    """
    # NOTE(review): _lock is non-reentrant and shared class-wide; if
    # getattr on the inner module ever re-enters a locked NamedModule
    # method this would deadlock — confirm no such path exists.
    with self._lock:
        return getattr(self.module, name)

# setattr is always called by python even if attr exists in `self`
def __setattr__(self, name: str, value: Any) -> None:
    """Route attribute writes: wrapper-owned names stay on the wrapper,
    everything else is written onto the wrapped inner module.

    Writes go through ``__dict__`` directly, deliberately bypassing
    ``nn.Module.__setattr__`` bookkeeping on both objects.
    """
    # Names the NamedModule wrapper keeps for itself; all other attributes
    # are transparently stored on the inner module.
    _OWN_ATTRS = (
        "module", "module_dtype", "name", "full_name", "layer_index",
        "state", "target_device", "register_buffer", "unregister_buffer",
        "register_parameter",
    )
    with self._lock:
        if name in _OWN_ATTRS:
            self.__dict__[name] = value
        else:
            self.module.__dict__[name] = value