From 931466e487e1be87d1182b17ed4ecfac9e70948d Mon Sep 17 00:00:00 2001
From: Phil Wang <lucidrains@gmail.com>
Date: Tue, 5 Jul 2022 11:57:56 -0700
Subject: [PATCH] unnecessary, pytorch native softmax is numerically stable

---
 alphafold2_pytorch/alphafold2.py | 1 -
 setup.py                         | 3 ++-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/alphafold2_pytorch/alphafold2.py b/alphafold2_pytorch/alphafold2.py
index 8a10aa3..b183640 100644
--- a/alphafold2_pytorch/alphafold2.py
+++ b/alphafold2_pytorch/alphafold2.py
@@ -168,7 +168,6 @@ def forward(self, x, mask = None, attn_bias = None, context = None, context_mask

         # attention

-        dots = dots - dots.max(dim = -1, keepdims = True).values
         attn = dots.softmax(dim = -1)
         attn = self.dropout(attn)

diff --git a/setup.py b/setup.py
index f31e166..84b65b4 100644
--- a/setup.py
+++ b/setup.py
@@ -3,9 +3,10 @@
 setup(
   name = 'alphafold2-pytorch',
   packages = find_packages(),
-  version = '0.4.31',
+  version = '0.4.32',
   license='MIT',
   description = 'AlphaFold2 - Pytorch',
+  long_description_content_type = 'text/markdown',
   author = 'Phil Wang, Eric Alcaide',
   author_email = 'lucidrains@gmail.com, ericalcaide1@gmail.com',
   url = 'https://github.com/lucidrains/alphafold2',
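
Note on the change (not part of the patch itself): the removed line implemented the classic max-subtraction trick, computing exp(x - max(x)) instead of exp(x) so that large logits do not overflow. Since softmax is shift-invariant, the result is unchanged, and PyTorch's native softmax already performs this stabilization internally, which is the commit's rationale. A minimal sketch checking the equivalence, assuming a recent PyTorch:

    import torch

    # logits large enough that a naive exp() would overflow to inf
    dots = torch.tensor([[1000., 1001., 1002.]])

    # what the removed line did: manual max-subtraction before softmax
    stable = (dots - dots.max(dim = -1, keepdim = True).values).softmax(dim = -1)

    # pytorch's native softmax subtracts the row max internally
    native = dots.softmax(dim = -1)

    print(torch.allclose(stable, native))  # True: the manual step is redundant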