Test 7x7 images of MNIST.

c 2019-12-09 22:23:29 +08:00
parent a368c4eae5
commit fa7a066b87
8 changed files with 70 additions and 22 deletions

View File

@@ -35,7 +35,8 @@ layer = 0
 # model = utils.SetDevice(Model.Net5Grad35())
 # model = utils.SetDevice(Model.Net31535())
 # model = utils.SetDevice(Model.Net3Grad335())
-model = utils.SetDevice(Model.Net3Grad334())
+# model = utils.SetDevice(Model.Net3Grad334())
+model = utils.SetDevice(Model.Net3Grad33())
 # model = utils.SetDevice(Model.Net3())
 model.PrintLayer()
@@ -43,7 +44,8 @@ model.PrintLayer()
-traindata, testdata = Loader.MNIST(batchsize)
+# traindata, testdata = Loader.MNIST(batchsize)
+traindata, testdata = Loader.MNIST(batchsize, resize=7)
 # traindata, testdata = Loader.RandomMnist(batchsize, style="Vertical")
 # traindata, testdata = Loader.RandomMnist(batchsize, style="Horizontal")
 # traindata, testdata = Loader.RandomMnist(batchsize, style="VerticalOneLine")
@@ -57,12 +59,12 @@ traindata, testdata = Loader.MNIST(batchsize)
 # weight,active = EvaluatorUnsuper.UnsuperLearnSearchWeight(model, layer, traindata, NumSearch=500000, SearchChannelRatio=32, Interation=5)
 # np.save("WeightSearch.npy", weight)
-# weight = np.load(CurrentPath+"WeightSearch.npy")
-# utils.NumpyToImage(weight, CurrentPath+"image",title="SearchWeight")
-# weight = np.load(CurrentPath+"WeightSearch.npy")
+weight = np.load(CurrentPath+"WeightSearch.npy")
+utils.NumpyToImage(weight, CurrentPath+"image",title="SearchWeight")
+weight = np.load(CurrentPath+"WeightSearch.npy")
 # weight = weight[0:256]
-# bestweight,index = EvaluatorUnsuper.UnsuperLearnFindBestWeight(model,layer,weight,traindata,32,4000000)
-# np.save(CurrentPath+"bestweightSearch.npy", bestweight)
+bestweight,index = EvaluatorUnsuper.UnsuperLearnFindBestWeight(model,layer,weight,traindata,32,4000000)
+np.save(CurrentPath+"bestweightSearch.npy", bestweight)
 bestweight = np.load(CurrentPath+"bestweightSearch.npy")
 utils.NumpyToImage(bestweight, CurrentPath+"image",title="SearchWerightBest")
 EvaluatorUnsuper.SetModelConvWeight(model,layer,bestweight)
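The switch to Net3Grad33 here goes together with loading MNIST at resize=7: with no padding, each 3x3 convolution trims 2 pixels, so a 7x7 input shrinks 7 -> 5 -> 3 -> 1 across the three conv layers, and the final 10-channel 1x1 map flattens straight into the class logits. A minimal standalone sketch of that size arithmetic (plain PyTorch, mirroring the Net333/Net3Grad33 stack added in the model definitions below, not the repo's utils/Model wrappers):

import torch
import torch.nn as nn

# Three unpadded 3x3 convs, as in the new Net333/Net3Grad33 stack.
convs = nn.Sequential(
    nn.Conv2d(1, 8, kernel_size=3, bias=False), nn.Sigmoid(),
    nn.Conv2d(8, 8, kernel_size=3, bias=False), nn.Sigmoid(),
    nn.Conv2d(8, 10, kernel_size=3, bias=False),
)
out = convs(torch.zeros(1, 1, 7, 7))
print(out.shape)  # torch.Size([1, 10, 1, 1]) -> view(-1, 1*10) gives the 10 logits

The uncommented lines in the last hunk re-run the unsupervised weight pipeline for this setup: load the searched kernels from WeightSearch.npy, dump them as images, pick the best set with UnsuperLearnFindBestWeight, save it to bestweightSearch.npy, and write it into the model's conv layer via SetModelConvWeight.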

View File

@@ -60,6 +60,33 @@ class Net3335(UniModule.ModuleBase):
         x = x.view(-1, 1*10)
         return F.log_softmax(x, dim=1)
+class Net333(UniModule.ModuleBase):
+    def __init__(self):
+        super(Net333, self).__init__()
+        layers = []
+        layers += [nn.Conv2d(1, 8, kernel_size=3,bias=False),nn.Sigmoid()]
+        layers += [nn.Conv2d(8, 8, kernel_size=3,bias=False),nn.Sigmoid()]
+        layers += [nn.Conv2d(8, 10, kernel_size=3,bias=False)]
+        self.features = nn.Sequential(*layers)
+    def forward(self, x):
+        x = self.features(x)
+        x = x.view(-1, 1*10)
+        return F.log_softmax(x, dim=1)
+
+class Net3Grad33(UniModule.ModuleBase):
+    def __init__(self):
+        super(Net3Grad33, self).__init__()
+        layers = []
+        layers += [nn.Conv2d(1, 8, kernel_size=3,bias=False),nn.Sigmoid()]
+        layers += [nn.Conv2d(8, 8, kernel_size=3,bias=False),nn.Sigmoid()]
+        layers += [nn.Conv2d(8, 10, kernel_size=3,bias=False)]
+        self.features = nn.Sequential(*layers)
+        self.SetConvRequiresGrad(0,False)
+    def forward(self, x):
+        x = self.features(x)
+        x = x.view(-1, 1*10)
+        return F.log_softmax(x, dim=1)
+
 class Net3334(UniModule.ModuleBase):
     def __init__(self):
         super(Net3334, self).__init__()
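Net333 and Net3Grad33 share the same three-conv stack; the only difference is the SetConvRequiresGrad(0, False) call, which presumably freezes the first convolution so the searched/pretrained kernels stay fixed while the rest of the network trains. SetConvRequiresGrad itself lives in UniModule.ModuleBase and is not shown in this commit; a hedged plain-PyTorch sketch of what such a helper typically does:

import torch.nn as nn

def set_conv_requires_grad(model, index, requires_grad):
    # Hypothetical equivalent of UniModule.ModuleBase.SetConvRequiresGrad (an assumption,
    # not the repo's code): toggle gradients for the index-th Conv2d in the module tree.
    convs = [m for m in model.modules() if isinstance(m, nn.Conv2d)]
    for p in convs[index].parameters():
        p.requires_grad = requires_grad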

View File

@@ -39,7 +39,8 @@ batchsize = 128
 # model = utils.LoadModel(model, CurrentPath+"/checkpointSearch.pkl")
-traindata, testdata = Loader.MNIST(batchsize, num_workers=4)
+# traindata, testdata = Loader.MNIST(batchsize, num_workers=4, trainsize=5000)
+traindata, testdata = Loader.MNIST(batchsize, resize=7, trainsize=5000)
 # traindata, testdata = Loader.RandomMnist(batchsize, num_workers=4, style="Vertical")
 # traindata, testdata = Loader.RandomMnist(batchsize, num_workers=4, style="Horizontal")
 # traindata, testdata = Loader.RandomMnist(batchsize, num_workers=4, style="VerticalOneLine")
@@ -48,7 +49,6 @@ traindata, testdata = Loader.MNIST(batchsize, num_workers=4)
 # traindata, testdata = Loader.Cifar10Mono(batchsize, num_workers=4,shuffle=True,trainsize=0)
-
 WebVisual.InitVisdom()
 window = WebVisual.LineWin()
 lineNoPre = WebVisual.Line(window, "NoPre")
@@ -67,22 +67,33 @@ linePretrainTrain = WebVisual.Line(window, "PretrainTrain")
 #model = utils.SetDevice(Model.Net3335BN())
+## model = utils.LoadModel(model, CurrentPath+"/checkpointTrain.pkl")
 #optimizer = optim.SGD(model.parameters(), lr=0.1)
 #Train.TrainEpochs(model,traindata,optimizer,testdata,3000,15,lineNoPreBN)
-model = utils.SetDevice(Model.Net3334())
-# model = utils.LoadModel(model, CurrentPath+"/checkpointTrain.pkl")
+# model = utils.SetDevice(Model.Net3334())
+# optimizer = optim.SGD(model.parameters(), lr=0.1)
+# Train.TrainEpochs(model,traindata,optimizer,testdata,3000,15,lineNoPre)
+# model = utils.SetDevice(Model.Net3Grad334())
+# model = utils.LoadModel(model, CurrentPath+"/checkpointSearch.pkl")
+# optimizer = optim.SGD(model.parameters(), lr=0.1)
+# Train.TrainEpochs(model,traindata,optimizer,testdata,3000,15,linePretrainSearch)
+model = utils.SetDevice(Model.Net333())
 optimizer = optim.SGD(model.parameters(), lr=0.1)
-Train.TrainEpochs(model,traindata,optimizer,testdata,3000,15,lineNoPre)
-model = utils.SetDevice(Model.Net3Grad334())
+Train.TrainEpochs(model,traindata,optimizer,testdata,1000,15,lineNoPre)
+model = utils.SetDevice(Model.Net3Grad33())
 model = utils.LoadModel(model, CurrentPath+"/checkpointSearch.pkl")
 optimizer = optim.SGD(model.parameters(), lr=0.1)
-Train.TrainEpochs(model,traindata,optimizer,testdata,3000,15,linePretrainSearch)
+Train.TrainEpochs(model,traindata,optimizer,testdata,1000,15,linePretrainSearch)
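The rewritten block sets up the 7x7 comparison: Net333 trained from scratch against Net3Grad33, which first loads checkpointSearch.pkl and keeps its first conv frozen; both are plotted on the lineNoPre and linePretrainSearch Visdom curves, and the 3000 in each TrainEpochs call drops to 1000. A quick hedged sanity check (not part of the repo) to confirm the frozen layer really sits outside training after utils.LoadModel:

# Hypothetical check after building Net3Grad33 and loading checkpointSearch.pkl.
trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)
frozen = sum(p.numel() for p in model.parameters() if not p.requires_grad)
print("trainable:", trainable, "frozen:", frozen)  # frozen should be the first conv's 1*8*3*3 = 72 weights

The script still hands all model.parameters() to SGD; that is harmless because the frozen weights never receive gradients and the optimizer skips parameters whose grad is None, though passing only the trainable parameters would work equally well.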

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -8,14 +8,21 @@ from torch.utils.data import Dataset, DataLoader, Subset
 import random
-def MNIST(batchsize=8, num_workers=0, shuffle=False, trainsize=0):
+def MNIST(batchsize=8, num_workers=0, shuffle=False, trainsize=0, resize=0):
     CurrentPath = os.path.split(os.path.realpath(__file__))[0]+"/"
+    if resize == 0:
+        resize = 28
+    if shuffle:
+        trans = transforms.Compose([transforms.ColorJitter(0.2, 0.2),
+                                    transforms.RandomRotation(30),
+                                    transforms.RandomResizedCrop(28),
+                                    transforms.Resize(resize),
+                                    transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))])
+    else:
+        trans = transforms.Compose(
+            [transforms.Resize(resize), transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))])
     traindata = datasets.MNIST(root=CurrentPath+'../Dataset/', train=True, download=True,
-                               transform=transforms.Compose([
-                                   transforms.ColorJitter(0.2, 0.2),
-                                   transforms.RandomRotation(30),
-                                   transforms.RandomResizedCrop(28),
-                                   transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))]))
+                               transform=trans)
     if trainsize == 0:
         trainsize = traindata.data.shape[0]
     train_loader = torch.utils.data.DataLoader(
@@ -23,6 +30,7 @@ def MNIST(batchsize=8, num_workers=0, shuffle=False, trainsize=0):
         batch_size=batchsize, shuffle=shuffle, num_workers=num_workers, drop_last=True)
     test_loader = torch.utils.data.DataLoader(
         datasets.MNIST(root=CurrentPath+'../Dataset/', train=False, transform=transforms.Compose([
+            transforms.Resize(resize),
             transforms.ToTensor(),
             transforms.Normalize((0.1307,), (0.3081,))
         ])), batch_size=batchsize, shuffle=shuffle, num_workers=num_workers, drop_last=True)
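With the new resize argument, both the train and test pipelines insert transforms.Resize(resize) ahead of ToTensor/Normalize, and resize=0 keeps the original 28x28. A hedged usage sketch (assumes the ../Dataset/ layout the loader expects and that MNIST has already been downloaded):

import Loader

# Load MNIST scaled to 7x7, as the updated scripts in this commit do.
traindata, testdata = Loader.MNIST(batchsize=128, resize=7)
images, labels = next(iter(traindata))
print(images.shape)  # expected: torch.Size([128, 1, 7, 7])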