AlexNet seems to be fully implemented
jmaczan committed Feb 21, 2024
1 parent 815456b commit c307d38
Showing 1 changed file with 6 additions and 4 deletions.
10 changes: 6 additions & 4 deletions src/models/components/alexnet.py
@@ -1,4 +1,3 @@
-import torch
 from torch import nn
 
 
@@ -20,10 +19,12 @@ def __init__(self):
                 padding=0,
             ),
             nn.ReLU(),
+            nn.LocalResponseNorm(size=5, alpha=1e-4, beta=0.75, k=2),
             nn.MaxPool2d(kernel_size=(3, 3), stride=2),
             # 2nd conv layer
             nn.Conv2d(in_channels=96, out_channels=256, kernel_size=(5, 5), padding=2),
             nn.ReLU(),
+            nn.LocalResponseNorm(size=5, alpha=1e-4, beta=0.75, k=2),
             nn.MaxPool2d(kernel_size=(3, 3), stride=2),
             # 3rd conv layer
             nn.Conv2d(in_channels=256, out_channels=384, kernel_size=(3, 3)),
@@ -34,14 +35,15 @@ def __init__(self):
             # 5th conv layer
             nn.Conv2d(in_channels=256, out_channels=256, kernel_size=(3, 3)),
             nn.ReLU(),
+            nn.MaxPool2d(kernel_size=(3, 3), stride=2),
             # 1st fc layer with dropout
-            nn.Linear(in_features=256, out_features=4096),
+            nn.Linear(in_features=9216, out_features=4096),
             nn.Dropout(p=0.5),
             nn.ReLU(),
             # 2nd fc layer with dropout
-            nn.Linear(in_features=4096, out_features=2048),
+            nn.Linear(in_features=4096, out_features=4096),
             nn.Dropout(p=0.5),
             nn.ReLU(),
             # 3rd fc layer
-            nn.Linear(in_features=2048, out_features=1000),
+            nn.Linear(in_features=4096, out_features=1000),
         )
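For context, here is a minimal sketch of how the complete module might look after this commit. Everything the diff does not show is an assumption: the first convolution's 11x11 kernel with stride 4 (and 227x227 inputs), padding=1 on the 3x3 convolutions (required for the 256 x 6 x 6 = 9216 features the new first nn.Linear expects, even though the diff lines themselves show no explicit padding argument), the 4th conv layer, the nn.Flatten() between the convolutional stack and the classifier, and the class and attribute names.

import torch
from torch import nn


class AlexNet(nn.Module):
    """Sketch of the post-commit module; parameters not visible in the
    diff are filled in from the original AlexNet paper and are assumptions."""

    def __init__(self, num_classes: int = 1000):
        super().__init__()
        self.net = nn.Sequential(
            # 1st conv layer: 227x227x3 -> 55x55x96 (11x11, stride 4 assumed)
            nn.Conv2d(in_channels=3, out_channels=96, kernel_size=(11, 11), stride=4, padding=0),
            nn.ReLU(),
            nn.LocalResponseNorm(size=5, alpha=1e-4, beta=0.75, k=2),
            nn.MaxPool2d(kernel_size=(3, 3), stride=2),  # -> 27x27x96
            # 2nd conv layer: -> 27x27x256, pooled to 13x13x256
            nn.Conv2d(in_channels=96, out_channels=256, kernel_size=(5, 5), padding=2),
            nn.ReLU(),
            nn.LocalResponseNorm(size=5, alpha=1e-4, beta=0.75, k=2),
            nn.MaxPool2d(kernel_size=(3, 3), stride=2),
            # 3rd conv layer (padding=1 assumed to keep 13x13)
            nn.Conv2d(in_channels=256, out_channels=384, kernel_size=(3, 3), padding=1),
            nn.ReLU(),
            # 4th conv layer (not visible in the diff; assumed)
            nn.Conv2d(in_channels=384, out_channels=256, kernel_size=(3, 3), padding=1),
            nn.ReLU(),
            # 5th conv layer, pooled to 6x6x256
            nn.Conv2d(in_channels=256, out_channels=256, kernel_size=(3, 3), padding=1),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=(3, 3), stride=2),
            # flatten 256 * 6 * 6 = 9216 features for the classifier (assumed)
            nn.Flatten(),
            # 1st fc layer with dropout
            nn.Linear(in_features=9216, out_features=4096),
            nn.Dropout(p=0.5),
            nn.ReLU(),
            # 2nd fc layer with dropout
            nn.Linear(in_features=4096, out_features=4096),
            nn.Dropout(p=0.5),
            nn.ReLU(),
            # 3rd fc layer
            nn.Linear(in_features=4096, out_features=num_classes),
        )

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.net(x)


if __name__ == "__main__":
    # Shape smoke test: a 227x227 input reaches the classifier
    # as 256 * 6 * 6 = 9216 features.
    model = AlexNet()
    out = model(torch.randn(1, 3, 227, 227))
    print(out.shape)  # torch.Size([1, 1000])

The smoke test at the bottom is a quick way to confirm the fully connected dimensions: under the assumed padding, the feature map entering the classifier is exactly the 9216 features that this commit changes the first nn.Linear to accept.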
