
Commit

Update tp_layer.py
zhangsheng377 committed Jul 15, 2024
1 parent 1e9375f commit 68fb4ce
Showing 1 changed file with 2 additions and 1 deletion.
src/peft/tuners/lora/tp_layer.py (3 changes: 2 additions & 1 deletion)
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from __future__ import annotations
 
 import importlib
 import math
@@ -116,7 +117,7 @@ def update_layer(
         else:
             lora_dropout_layer = nn.Identity()
 
-        self.lora_dropout.update(nn.ModuleDict({adapter_name: lora_dropout_layer}))
+        self.lora_dropout[adapter_name] = lora_dropout_layer
 
         megatron_config = parallel_linear_kwargs["megatron_config"]
         # lora needs to be forced to upgrade to 32-bit precision, otherwise it will overflow
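For context, the functional change swaps a ModuleDict.update call for plain item assignment. Both register the dropout layer as a submodule of self.lora_dropout under the adapter's name; the assignment form simply avoids building a throwaway nn.ModuleDict. A minimal sketch of the equivalence (the adapter name and dropout probability below are illustrative, not taken from the commit):

from __future__ import annotations  # also added by this commit; defers annotation evaluation (PEP 563)

import torch.nn as nn

lora_dropout = nn.ModuleDict()
adapter_name = "default"                 # illustrative adapter name
lora_dropout_layer = nn.Dropout(p=0.1)   # nn.Identity() when dropout is 0

# Old form: wrap the layer in a temporary ModuleDict and merge it in.
lora_dropout.update(nn.ModuleDict({adapter_name: lora_dropout_layer}))

# New form: direct assignment; ModuleDict.__setitem__ registers the module too.
lora_dropout[adapter_name] = lora_dropout_layer

print(list(lora_dropout.keys()))  # ['default'] either way

Either way the dropout module ends up registered on the layer and appears in the model's state dict; the assignment form is just the more direct idiom.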
