Skip to content

Commit

Permalink
address #7: derive seq_len from num_slots when only num_slots is passed to SoftMoE
Browse files Browse the repository at this point in the history
  • Loading branch information
lucidrains committed Apr 24, 2024
1 parent e36ce95 commit a0d8a4e
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 2 deletions.
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
setup(
name = 'soft-moe-pytorch',
packages = find_packages(exclude=[]),
version = '0.1.7',
version = '0.1.8',
license='MIT',
description = 'Soft MoE - Pytorch',
author = 'Phil Wang',
Expand Down
5 changes: 4 additions & 1 deletion soft_moe_pytorch/soft_moe.py
Original file line number Diff line number Diff line change
Expand Up @@ -289,7 +289,10 @@ def __init__(
super().__init__()
assert exists(seq_len) ^ exists(num_slots), 'either seq_len, or num_slots must be passed into SoftMoE'

num_slots = default(num_slots, seq_len // num_experts)
if exists(seq_len):
num_slots = default(num_slots, seq_len // num_experts)
elif exists(num_slots):
seq_len = num_slots * num_experts

norm_klass = LayerNorm if use_layernorm else RMSNorm
self.norm = norm_klass(dim)
Expand Down

0 comments on commit a0d8a4e

Please sign in to comment.