
Commit f12d2de

causal flag

lucidrains committed Jan 10, 2022
1 parent 39c59a7

Showing 2 changed files with 2 additions and 2 deletions.
rela_transformer/rela_transformer.py (1 addition, 1 deletion)

@@ -95,7 +95,7 @@ def __init__(
         self.layers = nn.ModuleList([])
         for _ in range(depth):
             self.layers.append(nn.ModuleList([
-                ReLA(dim = dim, heads = heads, dim_head = dim_head, num_memory_kv = num_memory_kv),
+                ReLA(dim = dim, heads = heads, dim_head = dim_head, num_memory_kv = num_memory_kv, causal = causal),
                 FeedForward(dim = dim, mult = ff_mult) if not no_ff else None
             ]))
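
For context, ReLA (rectified linear attention, from "Sparse Attention with Linear Units") replaces the softmax over attention scores with a ReLU; the new flag lets each layer additionally mask future positions. The sketch below is a hypothetical illustration of how causal masking typically combines with ReLU attention, not the repository's actual ReLA internals:

import torch

def causal_relu_attention(q, k, v):
    # attention scores between every query/key pair
    sim = torch.einsum('b h i d, b h j d -> b h i j', q, k)
    i, j = sim.shape[-2:]
    # True above the diagonal, i.e. at future (not-yet-seen) positions
    causal_mask = torch.ones(i, j, dtype = torch.bool, device = sim.device).triu(j - i + 1)
    # with ReLU attention, zeroing suffices: ReLU(0) = 0, so masked
    # positions receive exactly zero weight (softmax would need -inf)
    attn = torch.relu(sim.masked_fill(causal_mask, 0.))
    return torch.einsum('b h i j, b h j d -> b h i d', attn, v)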
setup.py (1 addition, 1 deletion)

@@ -3,7 +3,7 @@
 setup(
   name = 'rela-transformer',
   packages = find_packages(exclude=[]),
-  version = '0.0.2',
+  version = '0.0.3',
   license='MIT',
   description = 'ReLA Transformer',
   author = 'Phil Wang',
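
Taken together, the commit threads a causal flag from the transformer constructor into every ReLA attention layer and bumps the package version. A minimal usage sketch, assuming the class is the package's ReLATransformer and that num_tokens and max_seq_len exist as constructor arguments (neither appears in this diff):

import torch
from rela_transformer import ReLATransformer  # import path assumed from the package name

model = ReLATransformer(
    num_tokens = 20000,    # assumed vocabulary-size argument
    dim = 512,
    max_seq_len = 1024,    # assumed context-length argument
    depth = 8,
    heads = 8,
    dim_head = 64,
    causal = True          # the flag this commit threads through to each ReLA layer
)

tokens = torch.randint(0, 20000, (1, 1024))
logits = model(tokens)    # (1, 1024, 20000) if the model projects back to the vocabulary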
