# Export the pretrained torchvision ResNet-50 weights to a flat float32 binary
# (ResNet50Weight.bin) plus a C index file (ResNet50Weight.cc) that records,
# for every exported tensor, the first and last element offset in the binary.
from __future__ import print_function
import os
import struct

import torch
import torchvision.models as models

CurrentPath = os.path.split(os.path.realpath(__file__))[0] + "/"

# Fetch the pretrained model, round-trip it through params.pth and print it.
resnet50 = models.resnet50(pretrained=True)
torch.save(resnet50, CurrentPath + 'params.pth')
resnet50 = torch.load(CurrentPath + 'params.pth')

print("===========================")
print("===========================")
print("===========================")
print(resnet50)
print("===========================")
print("===========================")
print("===========================")

# ss = resnet50.conv1.weight.cpu().detach().numpy().reshape(-1)
# ss = ss.tolist()
# strs = ''
# # for s in ss:
# #     strs += str(s) + ","
# bs = struct.pack("f", 1.0)
# f = open('data.hex', 'wb')
# f.write(bs)
# f.close()
# print(strs)
# ssa = ss.array()


def bottleneck(downsample=False):
    """One Bottleneck block of torchvision's ResNet-50; the first block of
    every layer additionally carries a downsample branch."""
    block = {
        "conv1": "Conv2d", "bn1": "BatchNorm2d",
        "conv2": "Conv2d", "bn2": "BatchNorm2d",
        "conv3": "Conv2d", "bn3": "BatchNorm2d",
        "relu": "ReLU",
    }
    if downsample:
        block["downsample"] = {"_modules": {"0": "Conv2d", "1": "BatchNorm2d"}}
    return block


# Module layout of resnet50, mirroring the printed architecture at the bottom
# of this file: layer1 has 3 Bottleneck blocks, layer2 has 4, layer3 has 6 and
# layer4 has 3; block "0" of each layer is the one with a downsample branch.
ResNet50 = {
    "conv1": "Conv2d",
    "bn1": "BatchNorm2d",
    "relu": "ReLU",
    "maxpool": "MaxPool2d",
    "layer1": {"_modules": {str(i): bottleneck(downsample=(i == 0)) for i in range(3)}},
    "layer2": {"_modules": {str(i): bottleneck(downsample=(i == 0)) for i in range(4)}},
    "layer3": {"_modules": {str(i): bottleneck(downsample=(i == 0)) for i in range(6)}},
    "layer4": {"_modules": {str(i): bottleneck(downsample=(i == 0)) for i in range(3)}},
    "avgpool": "AdaptiveAvgPool2d",
    "fc": "Linear",
}
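# Optional sanity check: the ResNet50 dict above is walked in parallel with the
# real module tree, so a nested path such as layer1 -> "_modules" -> "0" ->
# conv1 resolves to resnet50.layer1._modules["0"].conv1.  Per the printed
# architecture at the bottom of this file, that first 1x1 convolution of
# layer1 has a 64x64x1x1 kernel.
assert resnet50.layer1._modules["0"].conv1.weight.shape == (64, 64, 1, 1)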
"conv3": "Conv2d", "bn3": "BatchNorm2d", "relu": "ReLU", } } }, "avgpool": "AdaptiveAvgPool2d", "fc": "Linear" } weightfile = open(CurrentPath+'ResNet50Weight.cc', 'w') binaryfile = open(CurrentPath+'ResNet50Weight.bin', 'wb') currentbyte = 0 def printDick(d, head, obj): global currentbyte global binaryfile strg = "" for item in d: if type(d[item]).__name__ == 'dict': objsub = getattr(obj, item, '') if objsub == '': objsub = obj[item] strg = strg + printDick(d[item], head+"_"+item, objsub) else: objsub = getattr(obj, item, '') if objsub == '': objsub = obj[item] if d[item] == "Conv2d": strg = strg + "int "+head+"_"+item+"_weight[]={" array = objsub.weight.cpu().detach().numpy().reshape(-1) strg += str(currentbyte) + "," for a in array: bs = struct.pack("f", a) binaryfile.write(bs) currentbyte = currentbyte+1 strg += str(currentbyte-1) + "," strg = strg + "};\n" if d[item] == "BatchNorm2d": strg = strg + "int "+head+"_"+item+"_running_mean[]={" array = objsub.running_mean.cpu().detach().numpy().reshape(-1) strg += str(currentbyte) + "," for a in array: bs = struct.pack("f", a) binaryfile.write(bs) currentbyte = currentbyte+1 strg += str(currentbyte-1) + "," strg = strg + "};\n" strg = strg + "int "+head+"_"+item+"_running_var[]={" array = objsub.running_var.cpu().detach().numpy().reshape(-1) strg += str(currentbyte) + "," for a in array: bs = struct.pack("f", a) binaryfile.write(bs) currentbyte = currentbyte+1 strg += str(currentbyte-1) + "," strg = strg + "};\n" if d[item] == "Linear": strg = strg + "int "+head+"_"+item+"_weight[]={" array = objsub.weight.cpu().detach().numpy().reshape(-1) strg += str(currentbyte) + "," for a in array: bs = struct.pack("f", a) binaryfile.write(bs) currentbyte = currentbyte+1 strg += str(currentbyte-1) + "," strg = strg + "};\n" strg = strg + "int "+head+"_"+item+"_bias[]={" array = objsub.bias.cpu().detach().numpy().reshape(-1) strg += str(currentbyte) + "," for a in array: bs = struct.pack("f", a) binaryfile.write(bs) currentbyte = currentbyte+1 strg += str(currentbyte-1) + "," strg = strg + "};\n" return strg ss = printDick(ResNet50, "RN50", resnet50) weightfile.write(ss) binaryfile.close() weightfile.close() print(ss) print("===========================") print("===========================") print("===========================") # ResNet( # (conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False) # (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) # (relu): ReLU(inplace=True) # (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False) # (layer1): Sequential( # (0): Bottleneck( # (conv1): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False) # (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) # (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) # (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) # (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) # (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) # (relu): ReLU(inplace=True) # (downsample): Sequential( # (0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) # (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) # ) # ) # (1): Bottleneck( # (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False) # (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, 
# ResNet(
#   (conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)
#   (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#   (relu): ReLU(inplace=True)
#   (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)
#   (layer1): Sequential(
#     (0): Bottleneck(
#       (conv1): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
#       (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (relu): ReLU(inplace=True)
#       (downsample): Sequential(
#         (0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
#         (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       )
#     )
#     (1): Bottleneck(
#       (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
#       (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (relu): ReLU(inplace=True)
#     )
#     (2): Bottleneck(
#       (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
#       (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (relu): ReLU(inplace=True)
#     )
#   )
#   (layer2): Sequential(
#     (0): Bottleneck(
#       (conv1): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
#       (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (relu): ReLU(inplace=True)
#       (downsample): Sequential(
#         (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)
#         (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       )
#     )
#     (1): Bottleneck(
#       (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
#       (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (relu): ReLU(inplace=True)
#     )
#     (2): Bottleneck(
#       (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
#       (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (relu): ReLU(inplace=True)
#     )
#     (3): Bottleneck(
#       (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
#       (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (relu): ReLU(inplace=True)
#     )
#   )
#   (layer3): Sequential(
#     (0): Bottleneck(
#       (conv1): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
#       (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (relu): ReLU(inplace=True)
#       (downsample): Sequential(
#         (0): Conv2d(512, 1024, kernel_size=(1, 1), stride=(2, 2), bias=False)
#         (1): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       )
#     )
#     (1): Bottleneck(
#       (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
#       (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (relu): ReLU(inplace=True)
#     )
#     (2): Bottleneck(
#       (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
#       (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (relu): ReLU(inplace=True)
#     )
#     (3): Bottleneck(
#       (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
#       (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (relu): ReLU(inplace=True)
#     )
#     (4): Bottleneck(
#       (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
#       (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (relu): ReLU(inplace=True)
#     )
#     (5): Bottleneck(
#       (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
#       (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (relu): ReLU(inplace=True)
#     )
#   )
#   (layer4): Sequential(
#     (0): Bottleneck(
#       (conv1): Conv2d(1024, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)
#       (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (relu): ReLU(inplace=True)
#       (downsample): Sequential(
#         (0): Conv2d(1024, 2048, kernel_size=(1, 1), stride=(2, 2), bias=False)
#         (1): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       )
#     )
#     (1): Bottleneck(
#       (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
#       (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (relu): ReLU(inplace=True)
#     )
#     (2): Bottleneck(
#       (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
#       (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)
#       (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
#       (relu): ReLU(inplace=True)
#     )
#   )
#   (avgpool): AdaptiveAvgPool2d(output_size=(1, 1))
#   (fc): Linear(in_features=2048, out_features=1000, bias=True)
# )
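# ---------------------------------------------------------------------------
# Added sketch: every line of the generated ResNet50Weight.cc has the form
# "int <name>[]={<first element>,<last element>,};".  load_weight_index() is a
# hypothetical helper (not produced by the exporter itself) that parses those
# lines back into a {name: (first, last)} map, which is convenient for
# locating a tensor's float32 data inside ResNet50Weight.bin from Python or
# for double-checking the generated offsets.
import re


def load_weight_index(cc_path):
    """Parse the generated .cc file into {array name: (first, last) offsets}."""
    index = {}
    pattern = re.compile(r'int (\w+)\[\]=\{(\d+),(\d+),\};')
    with open(cc_path) as f:
        for line in f:
            m = pattern.match(line.strip())
            if m:
                index[m.group(1)] = (int(m.group(2)), int(m.group(3)))
    return index


weight_index = load_weight_index(CurrentPath + 'ResNet50Weight.cc')
print("RN50_conv1_weight occupies elements", weight_index["RN50_conv1_weight"])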