from __future__ import print_function

import os
import sys

import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torchvision
from torchvision import datasets, transforms
import torchvision.models as models
import matplotlib.pyplot as plt
import numpy as np

# Make the shared helper code importable regardless of the working directory.
CurrentPath = os.path.split(os.path.realpath(__file__))[0] + "/"
sys.path.append(CurrentPath + '../tools')
sys.path.append(CurrentPath + '../')
from tools import UniModule


class Net535(UniModule.ModuleBase):
    """Three-conv net (5-3-5 kernels) for 1-channel inputs, e.g. 28x28 MNIST digits."""

    def __init__(self):
        super(Net535, self).__init__()
        layers = []
        # 1x28x28 -> 8x24x24 -> 8x12x12
        layers += [nn.Conv2d(1, 8, kernel_size=5, bias=False), nn.MaxPool2d(kernel_size=2, stride=2), nn.Sigmoid()]
        # 8x12x12 -> 8x10x10 -> 8x5x5
        layers += [nn.Conv2d(8, 8, kernel_size=3, bias=False), nn.MaxPool2d(kernel_size=2, stride=2), nn.Sigmoid()]
        # 8x5x5 -> 10x1x1: one score per class
        layers += [nn.Conv2d(8, 10, kernel_size=5, bias=False)]
        self.features = nn.Sequential(*layers)

    def forward(self, x):
        x = self.features(x)
        x = x.view(-1, 10)
        return F.log_softmax(x, dim=1)


class Net5Grad35(UniModule.ModuleBase):
    """Same architecture as Net535, but with the first conv layer's weights frozen."""

    def __init__(self):
        super(Net5Grad35, self).__init__()
        layers = []
        layers += [nn.Conv2d(1, 8, kernel_size=5, bias=False), nn.MaxPool2d(kernel_size=2, stride=2), nn.Sigmoid()]
        layers += [nn.Conv2d(8, 8, kernel_size=3, bias=False), nn.MaxPool2d(kernel_size=2, stride=2), nn.Sigmoid()]
        layers += [nn.Conv2d(8, 10, kernel_size=5, bias=False)]
        self.features = nn.Sequential(*layers)
        # Freeze the first convolution (helper provided by UniModule.ModuleBase).
        self.SetConvRequiresGrad(0, False)

    def forward(self, x):
        x = self.features(x)
        x = x.view(-1, 10)
        return F.log_softmax(x, dim=1)


class Net3335(UniModule.ModuleBase):
    """Four-conv net (3-3-3-5 kernels); with a 32x32 input the final 5x5 conv yields a 1x1 map."""

    def __init__(self):
        super(Net3335, self).__init__()
        layers = []
        # 1x32x32 -> 8x32x32 -> 8x16x16
        layers += [nn.Conv2d(1, 8, kernel_size=3, bias=False, padding=1), nn.MaxPool2d(kernel_size=2, stride=2), nn.Sigmoid()]
        # 8x16x16 -> 8x14x14 -> 8x7x7
        layers += [nn.Conv2d(8, 8, kernel_size=3, bias=False), nn.MaxPool2d(kernel_size=2, stride=2), nn.Sigmoid()]
        # 8x7x7 -> 8x5x5
        layers += [nn.Conv2d(8, 8, kernel_size=3, bias=False), nn.Sigmoid()]
        # 8x5x5 -> 10x1x1: one score per class
        layers += [nn.Conv2d(8, 10, kernel_size=5, bias=False)]
        self.features = nn.Sequential(*layers)

    def forward(self, x):
        x = self.features(x)
        x = x.view(-1, 10)
        return F.log_softmax(x, dim=1)


class Net3335BN(UniModule.ModuleBase):
    """Net3335 with a BatchNorm2d layer inserted after each sigmoid activation."""

    def __init__(self):
        super(Net3335BN, self).__init__()
        layers = []
        layers += [nn.Conv2d(1, 8, kernel_size=3, bias=False, padding=1), nn.MaxPool2d(kernel_size=2, stride=2), nn.Sigmoid()]
        layers += [nn.BatchNorm2d(8)]
        layers += [nn.Conv2d(8, 8, kernel_size=3, bias=False), nn.MaxPool2d(kernel_size=2, stride=2), nn.Sigmoid()]
        layers += [nn.BatchNorm2d(8)]
        layers += [nn.Conv2d(8, 8, kernel_size=3, bias=False), nn.Sigmoid()]
        layers += [nn.BatchNorm2d(8)]
        layers += [nn.Conv2d(8, 10, kernel_size=5, bias=False)]
        self.features = nn.Sequential(*layers)

    def forward(self, x):
        x = self.features(x)
        x = x.view(-1, 10)
        return F.log_softmax(x, dim=1)


class Net3Grad335(UniModule.ModuleBase):
    """Same architecture as Net3335, but with the first conv layer's weights frozen."""

    def __init__(self):
        super(Net3Grad335, self).__init__()
        layers = []
        layers += [nn.Conv2d(1, 8, kernel_size=3, bias=False, padding=1), nn.MaxPool2d(kernel_size=2, stride=2), nn.Sigmoid()]
        layers += [nn.Conv2d(8, 8, kernel_size=3, bias=False), nn.MaxPool2d(kernel_size=2, stride=2), nn.Sigmoid()]
        layers += [nn.Conv2d(8, 8, kernel_size=3, bias=False), nn.Sigmoid()]
        layers += [nn.Conv2d(8, 10, kernel_size=5, bias=False)]
        self.features = nn.Sequential(*layers)
        # Freeze the first convolution (helper provided by UniModule.ModuleBase).
        self.SetConvRequiresGrad(0, False)

    def forward(self, x):
        x = self.features(x)
        x = x.view(-1, 10)
        return F.log_softmax(x, dim=1)


class Net31535(UniModule.ModuleBase):
    """Four-conv net with a 1x3 front end (1x3-5-3-5 kernels); with a 28x28 input the final 5x5 conv yields a 1x1 map."""

    def __init__(self):
        super(Net31535, self).__init__()
        layers = []
        # 1x28x28 -> 8x28x28 (1x3 kernel, padded along the width only)
        layers += [nn.Conv2d(1, 8, kernel_size=[1, 3], bias=False, padding=[0, 1]), nn.Sigmoid()]
        # 8x28x28 -> 8x24x24 -> 8x12x12
        layers += [nn.Conv2d(8, 8, kernel_size=5, bias=False), nn.MaxPool2d(kernel_size=2, stride=2), nn.Sigmoid()]
        # 8x12x12 -> 8x10x10 -> 8x5x5
        layers += [nn.Conv2d(8, 8, kernel_size=3, bias=False), nn.MaxPool2d(kernel_size=2, stride=2), nn.Sigmoid()]
        # 8x5x5 -> 10x1x1: one score per class
        layers += [nn.Conv2d(8, 10, kernel_size=5, bias=False)]
        self.features = nn.Sequential(*layers)

    def forward(self, x):
        x = self.features(x)
        x = x.view(-1, 10)
        return F.log_softmax(x, dim=1)
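

# ---------------------------------------------------------------------------
# Minimal smoke test (sketch, not part of the original module). It assumes
# UniModule.ModuleBase behaves like a regular nn.Module, and that the 5-3-5
# style nets take 28x28 inputs while the 3-3-3-5 style nets take 32x32 inputs
# (see the shape comments above). Each network should return one length-10
# log-probability vector per sample.
if __name__ == "__main__":
    batch = 4
    for net, size in [(Net535(), 28), (Net5Grad35(), 28), (Net31535(), 28),
                      (Net3335(), 32), (Net3335BN(), 32), (Net3Grad335(), 32)]:
        x = torch.randn(batch, 1, size, size)
        out = net(x)
        print(type(net).__name__, tuple(out.shape))  # expected: (4, 10)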