From a287a1ac122d7e4ff5459153ca718ca1cd1af569 Mon Sep 17 00:00:00 2001
From: gianfa
Date: Wed, 7 Dec 2022 21:16:32 +0100
Subject: [PATCH 1/3] enhance(tutorial6): net definition and link

* make the Net class `forward` method single step
* insert link to tutorial 5 where cited

---
examples/tutorial_6_CNN.ipynb | 35 +++++++++++++++++------------------
1 file changed, 17 insertions(+), 18 deletions(-)

diff --git a/examples/tutorial_6_CNN.ipynb b/examples/tutorial_6_CNN.ipynb
index 44a03682..60d7d4be 100644
--- a/examples/tutorial_6_CNN.ipynb
+++ b/examples/tutorial_6_CNN.ipynb
@@ -349,25 +349,19 @@
"\n",
" # Initialize hidden states and outputs at t=0\n",
" mem1 = self.lif1.init_leaky()\n",
- " mem2 = self.lif2.init_leaky() \n",
+ " mem2 = self.lif2.init_leaky()\n",
" mem3 = self.lif3.init_leaky()\n",
"\n",
" # Record the final layer\n",
- " spk3_rec = []\n",
- " mem3_rec = []\n",
+ " cur1 = F.max_pool2d(self.conv1(x), 2)\n",
+ " spk1, mem1 = self.lif1(cur1, mem1)\n",
"\n",
- " for step in range(num_steps):\n",
- " cur1 = F.max_pool2d(self.conv1(x), 2)\n",
- " spk1, mem1 = self.lif1(cur1, mem1)\n",
- " cur2 = F.max_pool2d(self.conv2(spk1), 2)\n",
- " spk2, mem2 = self.lif2(cur2, mem2)\n",
- " cur3 = self.fc1(spk2.view(batch_size, -1))\n",
- " spk3, mem3 = self.lif3(cur3, mem3)\n",
+ " cur2 = F.max_pool2d(self.conv2(spk1), 2)\n",
+ " spk2, mem2 = self.lif2(cur2, mem2)\n",
"\n",
- " spk3_rec.append(spk3)\n",
- " mem3_rec.append(mem3)\n",
- "\n",
- " return torch.stack(spk3_rec), torch.stack(mem3_rec)"
+ " cur3 = self.fc1(spk2.view(batch_size, -1))\n",
+ " spk3, mem3 = self.lif3(cur3, mem3)\n",
+ " return spk3, mem3"
]
},
{
@@ -376,7 +370,7 @@
"id": "HVn3aYAUnWqH"
},
"source": [
- "In the previous tutorial, the network was wrapped inside of a class, as shown above. \n",
+ "In the [previous tutorial](https://snntorch.readthedocs.io/en/latest/tutorials/tutorial_5.html#define-the-network), the network was wrapped inside of a class, as shown above. \n",
"With increasing network complexity, this adds a lot of boilerplate code that we might wish to avoid.
Alternatively, the `nn.Sequential` method can be used instead:"
]
},
@@ -819,14 +813,19 @@
"language_info": {
"codemirror_mode": {
"name": "ipython",
- "version": 2
+ "version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
- "pygments_lexer": "ipython2",
- "version": "2.7.6"
+ "pygments_lexer": "ipython3",
+ "version": "3.9.12"
+ },
+ "vscode": {
+ "interpreter": {
+ "hash": "eda5fd9936a48666dafc89750ff76ddb1cf4d2280a6b0442eddf55dce32bd13c"
+ }
}
},
"nbformat": 4,

From 7eea3ed63069ce2536de84c8b14a6d7b4488899d Mon Sep 17 00:00:00 2001
From: gianfa
Date: Thu, 8 Dec 2022 08:01:47 +0100
Subject: [PATCH 2/3] chores(tutorial6): solve conflicts in ipynb metad

---
examples/tutorial_6_CNN.ipynb | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/examples/tutorial_6_CNN.ipynb b/examples/tutorial_6_CNN.ipynb
index 60d7d4be..925006c1 100644
--- a/examples/tutorial_6_CNN.ipynb
+++ b/examples/tutorial_6_CNN.ipynb
@@ -819,12 +819,12 @@
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.9.12"
+ "pygments_lexer": "ipython2",
+ "version": "3.7.0"
},
"vscode": {
"interpreter": {
- "hash": "eda5fd9936a48666dafc89750ff76ddb1cf4d2280a6b0442eddf55dce32bd13c"
+ "hash": "cda51994da7a97c2e92f9e8832b8fe412980cf29d1c6c8cbe827a5abbabd9c57"
}
}
},

From 2b70e0a9ef2d4c73b2aee9936294d208680427f6 Mon Sep 17 00:00:00 2001
From: jeshraghian
Date: Wed, 14 Dec 2022 08:10:34 -0800
Subject: [PATCH 3/3] tutorial-6 static html update

---
docs/tutorials/tutorial_6.rst | 26 +++++++++++---------------
examples/tutorial_6_CNN.ipynb | 13 ++++++++++---
2 files changed, 21 insertions(+), 18 deletions(-)

diff --git a/docs/tutorials/tutorial_6.rst b/docs/tutorials/tutorial_6.rst
index 7e408b09..a818a447 100644
--- a/docs/tutorials/tutorial_6.rst
+++ b/docs/tutorials/tutorial_6.rst
@@ -265,22 +265,16 @@ The convolutional network architecture to be used is:
mem2 = self.lif2.init_leaky()
mem3 = self.lif3.init_leaky()

- # Record the final layer
- spk3_rec = []
- mem3_rec = []
-
- for step in range(num_steps):
- cur1 = F.max_pool2d(self.conv1(x), 2)
- spk1, mem1 = self.lif1(cur1, mem1)
- cur2 = F.max_pool2d(self.conv2(spk1), 2)
- spk2, mem2 = self.lif2(cur2, mem2)
- cur3 = self.fc1(spk2.view(batch_size, -1))
- spk3, mem3 = self.lif3(cur3, mem3)
-
- spk3_rec.append(spk3)
- mem3_rec.append(mem3)
+ cur1 = F.max_pool2d(self.conv1(x), 2)
+ spk1, mem1 = self.lif1(cur1, mem1)
+
+ cur2 = F.max_pool2d(self.conv2(spk1), 2)
+ spk2, mem2 = self.lif2(cur2, mem2)
+
+ cur3 = self.fc1(spk2.view(batch_size, -1))
+ spk3, mem3 = self.lif3(cur3, mem3)
- return torch.stack(spk3_rec), torch.stack(mem3_rec)
+ return spk3, mem3

In the previous tutorial, the network was wrapped inside of a class, as shown above. With increasing network complexity, this adds a
@@ -556,6 +550,8 @@ be able to start running your own experiments.
`In the next tutorial `__, we will train a network using a neuromorphic dataset.
+A special thanks to `Gianfresco Angelini `__ for providing valuable feedback on the tutorial.
+
If you like this project, please consider starring ⭐ the repo on GitHub as it is the easiest and best way to support it.
Additional Resources
diff --git a/examples/tutorial_6_CNN.ipynb b/examples/tutorial_6_CNN.ipynb
index 925006c1..21628af3 100644
--- a/examples/tutorial_6_CNN.ipynb
+++ b/examples/tutorial_6_CNN.ipynb
@@ -352,7 +352,6 @@
" mem2 = self.lif2.init_leaky()\n",
" mem3 = self.lif3.init_leaky()\n",
"\n",
- " # Record the final layer\n",
" cur1 = F.max_pool2d(self.conv1(x), 2)\n",
" spk1, mem1 = self.lif1(cur1, mem1)\n",
"\n",
@@ -409,6 +408,11 @@
"This enables the final layer to return both the spike and membrane potential response of the neuron."
]
},
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": []
+ },
{
"cell_type": "markdown",
"metadata": {
@@ -777,6 +781,7 @@
]
},
{
+ "attachments": {},
"cell_type": "markdown",
"metadata": {
"id": "s0dAgWUt2o6E"
},
"source": [
"# Conclusion\n",
"You should now have a grasp of the basic features of snnTorch and be able to start running your own experiments. [In the next tutorial](https://snntorch.readthedocs.io/en/latest/tutorials/index.html), we will train a network using a neuromorphic dataset.\n",
"\n",
+ "A special thanks to [Gianfresco Angelini](https://github.com/gianfa) for providing valuable feedback on the tutorial.\n",
+ "\n",
"If you like this project, please consider starring ⭐ the repo on GitHub as it is the easiest and best way to support it."
]
},
@@ -820,11 +827,11 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
- "version": "3.7.0"
+ "version": "3.11.1 (v3.11.1:a7a450f84a, Dec 6 2022, 15:24:06) [Clang 13.0.0 (clang-1300.0.29.30)]"
},
"vscode": {
"interpreter": {
- "hash": "cda51994da7a97c2e92f9e8832b8fe412980cf29d1c6c8cbe827a5abbabd9c57"
+ "hash": "aee8b7b246df8f9039afb4144a1f6fd8d2ca17a180786b69acc140d282b71a49"
}
}
},
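
The change these patches converge on is a `forward` method that models a single time step, with the simulation loop moved out of the network. A minimal sketch of the `nn.Sequential` alternative mentioned in the notebook text is given below, using snnTorch's `init_hidden=True` so each `snn.Leaky` layer keeps its own membrane state; the layer sizes (12 and 64 channels with 5x5 kernels on 28x28 inputs, 10 output classes), the `forward_pass` helper, and the hyperparameter values are illustrative assumptions rather than code taken from the patch::

    # Sketch of a single-step spiking CNN driven by an external time loop.
    # Layer sizes, beta, and num_steps are assumed for illustration.
    import torch
    import torch.nn as nn
    import snntorch as snn
    from snntorch import surrogate, utils

    beta = 0.5
    spike_grad = surrogate.fast_sigmoid(slope=25)

    # init_hidden=True lets each Leaky layer store its own membrane potential,
    # so the container only needs to describe one simulation step.
    net = nn.Sequential(
        nn.Conv2d(1, 12, 5),
        nn.MaxPool2d(2),
        snn.Leaky(beta=beta, spike_grad=spike_grad, init_hidden=True),
        nn.Conv2d(12, 64, 5),
        nn.MaxPool2d(2),
        snn.Leaky(beta=beta, spike_grad=spike_grad, init_hidden=True),
        nn.Flatten(),
        nn.Linear(64 * 4 * 4, 10),
        snn.Leaky(beta=beta, spike_grad=spike_grad, init_hidden=True, output=True),
    )

    def forward_pass(net, data, num_steps=50):
        """Drive the single-step network for num_steps and stack the outputs."""
        spk_rec, mem_rec = [], []
        utils.reset(net)  # clear hidden states of all init_hidden=True neurons
        for _ in range(num_steps):
            spk_out, mem_out = net(data)  # one simulation step per call
            spk_rec.append(spk_out)
            mem_rec.append(mem_out)
        return torch.stack(spk_rec), torch.stack(mem_rec)

    spk_rec, mem_rec = forward_pass(net, torch.rand(8, 1, 28, 28))
    print(spk_rec.shape)  # torch.Size([50, 8, 10])

Keeping the time loop and the recording lists in the calling code is what lets the class-based `forward` in the first patch shrink to a single pass that returns `spk3, mem3` directly.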