add layer1 inner print

This commit is contained in:
colin 2020-07-27 16:30:54 +08:00
parent 6309c2f066
commit e2e2b01b54
3 changed files with 149 additions and 48 deletions

Binary file not shown.

View File

@ -265,34 +265,74 @@ int RN50_layer4__modules_2_bn3_weight[] = { 94228224,94236415 };
int RN50_layer4__modules_2_bn3_bias[] = { 94236416,94244607 }; int RN50_layer4__modules_2_bn3_bias[] = { 94236416,94244607 };
int RN50_fc_weight[] = { 94244608,102436607 }; int RN50_fc_weight[] = { 94244608,102436607 };
int RN50_fc_bias[] = { 102436608,102440607 }; int RN50_fc_bias[] = { 102436608,102440607 };
int verify_input[] = { 102440608,103042719 }; int input_0[] = { 102440608,103042719 };
int verify_conv1[] = { 103042720,106253983 }; int output_0[] = { 103042720,103046719 };
int verify_bn1[] = { 106253984,109465247 }; int input_1[] = { 103046720,103648831 };
int verify_relu[] = { 109465248,112676511 }; int output_1[] = { 103648832,103652831 };
int verify_maxpool[] = { 112676512,113479327 }; int input_2[] = { 103652832,104254943 };
int verify_layer1[] = { 113479328,116690591 }; int output_2[] = { 104254944,104258943 };
int verify_layer2[] = { 116690592,118296223 }; int input_3[] = { 104258944,104861055 };
int verify_layer3[] = { 118296224,119099039 }; int output_3[] = { 104861056,104865055 };
int verify_layer4[] = { 119099040,119500447 }; int input_4[] = { 104865056,105467167 };
int verify_avgpool[] = { 119500448,119508639 }; int output_4[] = { 105467168,105471167 };
int verify_fc[] = { 119508640,119512639 }; int input_5[] = { 105471168,106073279 };
int input_0[] = { 119512640,120114751 }; int output_5[] = { 106073280,106077279 };
int output_0[] = { 120114752,120118751 }; int input_6[] = { 106077280,106679391 };
int input_1[] = { 120118752,120720863 }; int output_6[] = { 106679392,106683391 };
int output_1[] = { 120720864,120724863 }; int input_7[] = { 106683392,107285503 };
int input_2[] = { 120724864,121326975 }; int output_7[] = { 107285504,107289503 };
int output_2[] = { 121326976,121330975 }; int input_8[] = { 107289504,107891615 };
int input_3[] = { 121330976,121933087 }; int output_8[] = { 107891616,107895615 };
int output_3[] = { 121933088,121937087 }; int input_9[] = { 107895616,108497727 };
int input_4[] = { 121937088,122539199 }; int output_9[] = { 108497728,108501727 };
int output_4[] = { 122539200,122543199 }; int verify_input[] = { 108501728,109103839 };
int input_5[] = { 122543200,123145311 }; int verify_conv1[] = { 109103840,112315103 };
int output_5[] = { 123145312,123149311 }; int verify_bn1[] = { 112315104,115526367 };
int input_6[] = { 123149312,123751423 }; int verify_relu[] = { 115526368,118737631 };
int output_6[] = { 123751424,123755423 }; int verify_maxpool[] = { 118737632,119540447 };
int input_7[] = { 123755424,124357535 }; int verify_layer1[] = { 119540448,122751711 };
int output_7[] = { 124357536,124361535 }; int verify_layer2[] = { 122751712,124357343 };
int input_8[] = { 124361536,124963647 }; int verify_layer3[] = { 124357344,125160159 };
int output_8[] = { 124963648,124967647 }; int verify_layer4[] = { 125160160,125561567 };
int input_9[] = { 124967648,125569759 }; int verify_avgpool[] = { 125561568,125569759 };
int output_9[] = { 125569760,125573759 }; int verify_fc[] = { 125569760,125573759 };
/* Per-sublayer activation dump ranges for ResNet50 layer1.
   Each pair is { start, end } — presumably inclusive byte offsets into
   ResNet50Weight.bin (ranges are consecutive; confirm against the
   generator's genData/currentbyte logic).  Three bottleneck blocks;
   block 0 additionally has a downsample (conv + bn) shortcut path. */
int layer1_block0_conv1_input[] = { 125573760,126376575 };
int layer1_block0_conv1_output[] = { 126376576,127179391 };
int layer1_block0_bn1_input[] = { 127179392,127982207 };
int layer1_block0_bn1_output[] = { 127982208,128785023 };
int layer1_block0_conv2_input[] = { 128785024,129587839 };
int layer1_block0_conv2_output[] = { 129587840,130390655 };
int layer1_block0_bn2_input[] = { 130390656,131193471 };
int layer1_block0_bn2_output[] = { 131193472,131996287 };
int layer1_block0_conv3_input[] = { 131996288,132799103 };
int layer1_block0_conv3_output[] = { 132799104,136010367 };
int layer1_block0_bn3_input[] = { 136010368,139221631 };
int layer1_block0_bn3_output[] = { 139221632,142432895 };
int layer1_block0_downsample_conv_input[] = { 142432896,143235711 };
int layer1_block0_downsample_conv_output[] = { 143235712,146446975 };
int layer1_block0_downsample_bn_input[] = { 146446976,149658239 };
int layer1_block0_downsample_bn_output[] = { 149658240,152869503 };
int layer1_block1_conv1_input[] = { 152869504,156080767 };
int layer1_block1_conv1_output[] = { 156080768,156883583 };
int layer1_block1_bn1_input[] = { 156883584,157686399 };
int layer1_block1_bn1_output[] = { 157686400,158489215 };
int layer1_block1_conv2_input[] = { 158489216,159292031 };
int layer1_block1_conv2_output[] = { 159292032,160094847 };
int layer1_block1_bn2_input[] = { 160094848,160897663 };
int layer1_block1_bn2_output[] = { 160897664,161700479 };
int layer1_block1_conv3_input[] = { 161700480,162503295 };
int layer1_block1_conv3_output[] = { 162503296,165714559 };
int layer1_block1_bn3_input[] = { 165714560,168925823 };
int layer1_block1_bn3_output[] = { 168925824,172137087 };
int layer1_block2_conv1_input[] = { 172137088,175348351 };
int layer1_block2_conv1_output[] = { 175348352,176151167 };
int layer1_block2_bn1_input[] = { 176151168,176953983 };
int layer1_block2_bn1_output[] = { 176953984,177756799 };
int layer1_block2_conv2_input[] = { 177756800,178559615 };
int layer1_block2_conv2_output[] = { 178559616,179362431 };
int layer1_block2_bn2_input[] = { 179362432,180165247 };
int layer1_block2_bn2_output[] = { 180165248,180968063 };
int layer1_block2_conv3_input[] = { 180968064,181770879 };
int layer1_block2_conv3_output[] = { 181770880,184982143 };
int layer1_block2_bn3_input[] = { 184982144,188193407 };
int layer1_block2_bn3_output[] = { 188193408,191404671 };

View File

@ -31,21 +31,19 @@ from struct import Struct
CurrentPath = os.path.split(os.path.realpath(__file__))[0]+"/" CurrentPath = os.path.split(os.path.realpath(__file__))[0]+"/"
resnet50 = models.resnet50(pretrained=True) # resnet50 = models.resnet50(pretrained=True)
# torch.save(resnet50, CurrentPath+'params.pth') # torch.save(resnet50, CurrentPath+'params.pth')
resnet50 = torch.load(CurrentPath+'params.pth') resnet50 = torch.load(CurrentPath+'params.pth')
resnet50.eval() resnet50.eval()
print("===========================") # print("===========================")
print("===========================") # print("===========================")
print("===========================") # print("===========================")
print(resnet50) # print(resnet50)
print("===========================") # print("===========================")
print("===========================") # print("===========================")
print("===========================") # print("===========================")
ResNet50 = { ResNet50 = {
@ -245,7 +243,7 @@ ResNet50 = {
weightfile = open(CurrentPath+'ResNet50Weight.cc', 'w') weightfile = open(CurrentPath+'ResNet50Weight.cc', 'w')
binaryfile = open(CurrentPath+'ResNet50Weight.bin', 'wb') binaryfile = open(CurrentPath+'ResNet50Weight.bin', 'wb')
currentbyte = 0 currentbyte = 0
strg = ''
def genData(name, data, currentbyte, binaryfile, strg): def genData(name, data, currentbyte, binaryfile, strg):
strg = strg + "int "+name+"[] = { " strg = strg + "int "+name+"[] = { "
@ -260,6 +258,37 @@ def genData(name, data, currentbyte, binaryfile, strg):
return (currentbyte,binaryfile,strg) return (currentbyte,binaryfile,strg)
def hook_fn(m, i, o):
    """Debug hook: print the module, then the shape of every element of
    its input tuple and its output tuple; elements without a ``.shape``
    attribute are reported as missing."""
    print(m)
    for banner, tensors in (
        ("------------Input Grad------------", i),
        ("------------Output Grad------------", o),
    ):
        print(banner)
        for grad in tensors:
            try:
                print(grad.shape)
            except AttributeError:
                print("None found for Gradient")
    print("\n")
def hook_print(name, m, i, o):
    """Forward hook: serialize a module's first input tensor and first
    output tensor through genData, tagged ``<name>_input`` /
    ``<name>_output``, threading the module-level dump state."""
    global currentbyte
    global binaryfile
    global strg
    for suffix, tensors in (("_input", i), ("_output", o)):
        currentbyte, binaryfile, strg = genData(
            name + suffix, tensors[0], currentbyte, binaryfile, strg)
def printDick(d, head, obj): def printDick(d, head, obj):
global currentbyte global currentbyte
global binaryfile global binaryfile
@ -296,8 +325,6 @@ def printDick(d, head, obj):
return strg return strg
strg = ''
strg = printDick(ResNet50, "RN50", resnet50) strg = printDick(ResNet50, "RN50", resnet50)
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
@ -314,6 +341,16 @@ val_loader = torch.utils.data.DataLoader(
num_workers=1, pin_memory=True) num_workers=1, pin_memory=True)
# Dump every validation batch: the raw input tensor and the network's
# final prediction, serialized as input_<k> / output_<k>.
for idx, (images, labels) in enumerate(val_loader):
    currentbyte, binaryfile, strg = genData(
        "input_" + str(idx), images, currentbyte, binaryfile, strg)
    logits = resnet50(images)
    currentbyte, binaryfile, strg = genData(
        "output_" + str(idx), logits, currentbyte, binaryfile, strg)
for batch_idx, (data, target) in enumerate(val_loader): for batch_idx, (data, target) in enumerate(val_loader):
currentbyte,binaryfile,strg = genData("verify_input", data, currentbyte, binaryfile, strg) currentbyte,binaryfile,strg = genData("verify_input", data, currentbyte, binaryfile, strg)
x = resnet50.conv1(data) x = resnet50.conv1(data)
@ -340,11 +377,35 @@ for batch_idx, (data, target) in enumerate(val_loader):
break break
# Register forward hooks on every conv/bn sub-layer inside layer1 so
# hook_print records each sub-layer's input/output activations.
# Replaces twenty hand-written register_forward_hook(...) lines; hook
# firing order is determined by the forward pass, not registration
# order, so the refactor is behavior-preserving.  The ``name=...``
# default argument binds the tag at lambda-definition time (avoids the
# late-binding-closure pitfall inside the loops).
for _block_idx, _block in resnet50.layer1._modules.items():
    _prefix = "layer1_block" + _block_idx
    for _sub in ("bn1", "bn2", "bn3", "conv1", "conv2", "conv3"):
        getattr(_block, _sub).register_forward_hook(
            lambda m, i, o, name=_prefix + "_" + _sub: hook_print(name, m, i, o))
    # Only block 0 carries a downsample shortcut (conv + bn); guard so
    # blocks 1 and 2 (downsample is None) are skipped, matching the
    # original explicit registrations.
    if getattr(_block, "downsample", None) is not None:
        _block.downsample._modules['0'].register_forward_hook(
            lambda m, i, o, name=_prefix + "_downsample_conv": hook_print(name, m, i, o))
        _block.downsample._modules['1'].register_forward_hook(
            lambda m, i, o, name=_prefix + "_downsample_bn": hook_print(name, m, i, o))
for batch_idx, (data, target) in enumerate(val_loader): for batch_idx, (data, target) in enumerate(val_loader):
currentbyte, binaryfile,strg = genData("input_"+str(batch_idx), data, currentbyte, binaryfile, strg)
out = resnet50(data) out = resnet50(data)
currentbyte, binaryfile, strg = genData( break
"output_"+str(batch_idx), out, currentbyte, binaryfile, strg)
weightfile.write(strg) weightfile.write(strg)
@ -352,7 +413,7 @@ binaryfile.close()
weightfile.close() weightfile.close()
print(strg) # print(strg)
print("===========================") print("===========================")
print("===========================") print("===========================")