diff --git a/docs/tutorials/tutorial_6.rst b/docs/tutorials/tutorial_6.rst index be5fe314..5ee4848a 100644 --- a/docs/tutorials/tutorial_6.rst +++ b/docs/tutorials/tutorial_6.rst @@ -265,22 +265,16 @@ The convolutional network architecture to be used is: mem2 = self.lif2.init_leaky() mem3 = self.lif3.init_leaky() - # Record the final layer - spk3_rec = [] - mem3_rec = [] - - for step in range(num_steps): - cur1 = F.max_pool2d(self.conv1(x), 2) - spk1, mem1 = self.lif1(cur1, mem1) - cur2 = F.max_pool2d(self.conv2(spk1), 2) - spk2, mem2 = self.lif2(cur2, mem2) - cur3 = self.fc1(spk2.view(batch_size, -1)) - spk3, mem3 = self.lif3(cur3, mem3) - - spk3_rec.append(spk3) - mem3_rec.append(mem3) + cur1 = F.max_pool2d(self.conv1(x), 2) + spk1, mem1 = self.lif1(cur1, mem1) + + cur2 = F.max_pool2d(self.conv2(spk1), 2) + spk2, mem2 = self.lif2(cur2, mem2) + + cur3 = self.fc1(spk2.view(batch_size, -1)) + spk3, mem3 = self.lif3(cur3, mem3) - return torch.stack(spk3_rec), torch.stack(mem3_rec) + return spk3, mem3 In the previous tutorial, the network was wrapped inside of a class, as shown above. With increasing network complexity, this adds a @@ -559,6 +553,8 @@ be able to start running your own experiments. `In the next tutorial <https://snntorch.readthedocs.io/en/latest/tutorials/index.html>`__, we will train a network using a neuromorphic dataset. +A special thanks to `Gianfrancesco Angelini <https://github.com/gianfa>`__ for providing valuable feedback on the tutorial. + If you like this project, please consider starring ⭐ the repo on GitHub as it is the easiest and best way to support it. 
Additional Resources diff --git a/examples/tutorial_6_CNN.ipynb b/examples/tutorial_6_CNN.ipynb index 67a885ee..434a03ad 100644 --- a/examples/tutorial_6_CNN.ipynb +++ b/examples/tutorial_6_CNN.ipynb @@ -349,25 +349,18 @@ "\n", " # Initialize hidden states and outputs at t=0\n", " mem1 = self.lif1.init_leaky()\n", - " mem2 = self.lif2.init_leaky() \n", + " mem2 = self.lif2.init_leaky()\n", " mem3 = self.lif3.init_leaky()\n", "\n", - " # Record the final layer\n", - " spk3_rec = []\n", - " mem3_rec = []\n", + " cur1 = F.max_pool2d(self.conv1(x), 2)\n", + " spk1, mem1 = self.lif1(cur1, mem1)\n", "\n", - " for step in range(num_steps):\n", - " cur1 = F.max_pool2d(self.conv1(x), 2)\n", - " spk1, mem1 = self.lif1(cur1, mem1)\n", - " cur2 = F.max_pool2d(self.conv2(spk1), 2)\n", - " spk2, mem2 = self.lif2(cur2, mem2)\n", - " cur3 = self.fc1(spk2.view(batch_size, -1))\n", - " spk3, mem3 = self.lif3(cur3, mem3)\n", + " cur2 = F.max_pool2d(self.conv2(spk1), 2)\n", + " spk2, mem2 = self.lif2(cur2, mem2)\n", "\n", - " spk3_rec.append(spk3)\n", - " mem3_rec.append(mem3)\n", - "\n", - " return torch.stack(spk3_rec), torch.stack(mem3_rec)" + " cur3 = self.fc1(spk2.view(batch_size, -1))\n", + " spk3, mem3 = self.lif3(cur3, mem3)\n", + " return spk3, mem3" ] }, { @@ -418,6 +411,11 @@ "This enables the final layer to return both the spike and membrane potential response of the neuron." ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [] + }, { "cell_type": "markdown", "metadata": { @@ -786,6 +784,7 @@ ] }, { + "attachments": {}, "cell_type": "markdown", "metadata": { "id": "s0dAgWUt2o6E" @@ -794,6 +793,8 @@ "# Conclusion\n", "You should now have a grasp of the basic features of snnTorch and be able to start running your own experiments. 
[In the next tutorial](https://snntorch.readthedocs.io/en/latest/tutorials/index.html), we will train a network using a neuromorphic dataset.\n", "\n", + "A special thanks to [Gianfrancesco Angelini](https://github.com/gianfa) for providing valuable feedback on the tutorial.\n", + "\n", "If you like this project, please consider starring ⭐ the repo on GitHub as it is the easiest and best way to support it." ] }, @@ -822,7 +823,7 @@ "language_info": { "codemirror_mode": { "name": "ipython", - "version": 2 + "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python",