import torch
import torch.nn as nn
import torch.nn.functional as F


class Net(nn.Module):
    """Fully connected classifier for 28x28 grayscale inputs (e.g. MNIST)."""

    def __init__(self):
        super().__init__()
        self.fc1 = nn.Linear(28 * 28, 128)
        self.fc2 = nn.Linear(128, 128)
        self.fc3 = nn.Linear(128, 64)
        self.fc4 = nn.Linear(64, 10)

    def forward(self, x):
        # Flatten (N, 1, 28, 28) -> (N, 784), keeping the batch dimension.
        x = x.view(x.shape[0], -1)
        x = torch.relu(self.fc1(x))
        x = torch.relu(self.fc2(x))
        x = torch.relu(self.fc3(x))
        # Raw logits; suitable for nn.CrossEntropyLoss.
        return self.fc4(x)


class NetConv(nn.Module):
    """Convolutional classifier: two conv/pool stages, then two linear layers."""

    def __init__(self):
        super().__init__()
        self.conv1 = nn.Conv2d(1, 32, 3)
        self.conv2 = nn.Conv2d(32, 64, 3)
        # For 28x28 inputs: conv1 -> 26x26, pool -> 13x13,
        # conv2 -> 11x11, pool -> 5x5, hence 64 * 5 * 5 features.
        self.fc1 = nn.Linear(64 * 5 * 5, 128)
        self.fc2 = nn.Linear(128, 10)

    def forward(self, x):
        x = F.max_pool2d(F.relu(self.conv1(x)), (2, 2))
        x = F.max_pool2d(F.relu(self.conv2(x)), 2)
        # Flatten every dimension except the batch dimension.
        x = x.view(-1, self.num_flat_features(x))
        x = F.relu(self.fc1(x))
        x = self.fc2(x)
        # Log-probabilities; suitable for nn.NLLLoss.
        return F.log_softmax(x, dim=1)

    def num_flat_features(self, x):
        # Product of all dimensions except the batch dimension.
        size = x.size()[1:]
        num_features = 1
        for s in size:
            num_features *= s
        return num_features
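

# Minimal smoke test, assuming MNIST-shaped input (batches of 1x28x28
# grayscale images); the expected shapes follow from the layer sizes above.
if __name__ == "__main__":
    dummy = torch.randn(4, 1, 28, 28)  # hypothetical batch of 4 images
    print(Net()(dummy).shape)      # torch.Size([4, 10]) -- raw logits
    print(NetConv()(dummy).shape)  # torch.Size([4, 10]) -- log-probabilities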