From 87aae848e7f6c79ee0a92272319e82488e598736 Mon Sep 17 00:00:00 2001
From: Larry Neal
Date: Sun, 28 Jan 2018 23:55:28 -0800
Subject: [PATCH 1/2] Fix UserWarning

This fixes the following warning in mnist/main.py:

    src/torch_mnist.py:68: UserWarning: Implicit dimension choice for log_softmax has been deprecated. Change the call to include dim=X as an argument.

Performance is unaffected.
---
 mnist/main.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mnist/main.py b/mnist/main.py
index 3f71e3cdbe..61a747d9cc 100644
--- a/mnist/main.py
+++ b/mnist/main.py
@@ -65,7 +65,7 @@ def forward(self, x):
         x = F.relu(self.fc1(x))
         x = F.dropout(x, training=self.training)
         x = self.fc2(x)
-        return F.log_softmax(x)
+        return F.log_softmax(x, dim=1)
 
 model = Net()
 if args.cuda:

From b359a8f8047242a7b8744bceb6fb2eb1e043e6ba Mon Sep 17 00:00:00 2001
From: Larry Neal
Date: Mon, 29 Jan 2018 00:03:34 -0800
Subject: [PATCH 2/2] Fix UserWarning in mnist_hogwild

In this case, dim=1 because the input tensor x has ndim=2. See _get_softmax_dim in
https://github.com/pytorch/pytorch/blob/master/torch/nn/functional.py
---
 mnist_hogwild/main.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mnist_hogwild/main.py b/mnist_hogwild/main.py
index 2ad9217ff8..be80047996 100644
--- a/mnist_hogwild/main.py
+++ b/mnist_hogwild/main.py
@@ -42,7 +42,7 @@ def forward(self, x):
         x = F.relu(self.fc1(x))
         x = F.dropout(x, training=self.training)
         x = self.fc2(x)
-        return F.log_softmax(x)
+        return F.log_softmax(x, dim=1)
 
 if __name__ == '__main__':
     args = parser.parse_args()
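
Note (not part of the patch): a minimal sketch of why dim=1 is the right axis here. The fc2 output is a 2D (batch, classes) tensor of logits, so normalization should run over the class dimension. The shapes below are illustrative, not taken from the examples.

    import torch
    import torch.nn.functional as F

    x = torch.randn(4, 10)               # batch of 4 samples, 10 class logits each
    log_probs = F.log_softmax(x, dim=1)  # normalize over the class dimension
    print(log_probs.exp().sum(dim=1))    # each row sums to ~1.0

With dim=1 made explicit, the deprecation warning goes away and the result matches what _get_softmax_dim would have inferred for a 2D input, so behavior is unchanged.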