change name

Dun Liang 2020-04-20 17:14:07 +08:00
parent 2c8124d144
commit 9b51b565f7
5 changed files with 75 additions and 69 deletions

View File

@@ -1,9 +1,18 @@
 from . import resnet
+from .resnet import *
 from . import vgg
+from .vgg import *
 from . import alexnet
+from .alexnet import *
 from . import squeezenet
+from .squeezenet import *
 from . import inception
+from .inception import *
 from . import googlenet
+from .googlenet import *
 from . import mobilenet
+from .mobilenet import *
 from . import mnasnet
-from . import shufflenetv2
+from .mnasnet import *
+from . import shufflenetv2
+from .shufflenetv2 import *
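
Note: the new star re-exports are what make the lowercase constructor names visible at the package level. A minimal usage sketch (assuming the package imports as jittor.models):

import jittor.models as jtmodels

# With "from .resnet import *" in place, the lowercase constructor is a
# package-level attribute; the submodule path keeps working as before.
model_a = jtmodels.resnet18()
model_b = jtmodels.resnet.resnet18()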

View File

@@ -11,7 +11,8 @@
 import jittor as jt
 from jittor import nn
-__all__ = ['ResNet', 'Resnet18', 'Resnet34', 'Resnet50', 'Resnet101', 'Resnet152', 'Resnext50_32x4d', 'Resnext101_32x8d', 'Wide_resnet50_2', 'Wide_resnet101_2']
+__all__ = ['ResNet', 'Resnet18', 'Resnet34', 'Resnet50', 'Resnet101', 'Resnet152', 'Resnext50_32x4d', 'Resnext101_32x8d', 'Wide_resnet50_2', 'Wide_resnet101_2',
+'resnet18', 'resnet34', 'resnet50', 'resnet101', 'resnet152', 'resnext50_32x4d', 'resnext101_32x8d', 'wide_resnet50_2', 'wide_resnet101_2']
 def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1):
     return nn.Conv(in_planes, out_planes, kernel_size=3, stride=stride, padding=dilation, groups=groups, bias=False, dilation=dilation)
@@ -150,33 +151,42 @@ def _resnet(block, layers, **kwargs):
 def Resnet18(**kwargs):
     return _resnet(BasicBlock, [2, 2, 2, 2], **kwargs)
+resnet18 = Resnet18
 def Resnet34(**kwargs):
     return _resnet( BasicBlock, [3, 4, 6, 3], **kwargs)
+resnet34 = Resnet34
 def Resnet50(**kwargs):
     return _resnet(Bottleneck, [3, 4, 6, 3], **kwargs)
+resnet50 = Resnet50
 def Resnet101(**kwargs):
     return _resnet(Bottleneck, [3, 4, 23, 3], **kwargs)
+resnet101 = Resnet101
 def Resnet152(**kwargs):
     return _resnet(Bottleneck, [3, 8, 36, 3], **kwargs)
+resnet152 = Resnet152
 def Resnext50_32x4d(**kwargs):
     kwargs['groups'] = 32
     kwargs['width_per_group'] = 4
     return _resnet(Bottleneck, [3, 4, 6, 3], **kwargs)
+resnext50_32x4d = Resnext50_32x4d
 def Resnext101_32x8d(**kwargs):
     kwargs['groups'] = 32
     kwargs['width_per_group'] = 8
     return _resnet(Bottleneck, [3, 4, 23, 3], **kwargs)
+resnext101_32x8d = Resnext101_32x8d
 def Wide_resnet50_2(**kwargs):
     kwargs['width_per_group'] = (64 * 2)
     return _resnet(Bottleneck, [3, 4, 6, 3], **kwargs)
+wide_resnet50_2 = Wide_resnet50_2
 def Wide_resnet101_2(**kwargs):
     kwargs['width_per_group'] = (64 * 2)
     return _resnet(Bottleneck, [3, 4, 23, 3], **kwargs)
+wide_resnet101_2 = Wide_resnet101_2
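
For resnet the rename is additive: each capitalized constructor gains a lowercase alias bound to the same function, so existing call sites are unaffected. A small sketch of the equivalence (hypothetical usage):

from jittor.models import resnet

# The alias assignment binds both spellings to the same function object.
assert resnet.resnet50 is resnet.Resnet50
model = resnet.resnet50()  # same network as resnet.Resnet50()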

View File

@@ -11,8 +11,8 @@ import jittor as jt
 from jittor import nn
 __all__ = [
-    'VGG', 'VGG11', 'VGG11_bn', 'VGG13', 'VGG13_bn', 'VGG16', 'VGG16_bn',
-    'VGG19_bn', 'VGG19',
+    'VGG', 'vgg11', 'vgg11_bn', 'vgg13', 'vgg13_bn', 'vgg16', 'vgg16_bn',
+    'vgg19_bn', 'vgg19',
 ]
 class VGG(nn.Module):
class VGG(nn.Module):
@@ -67,33 +67,33 @@ def _vgg(arch, cfg, batch_norm, **kwargs):
     return model
-def VGG11(**kwargs):
+def vgg11(**kwargs):
     return _vgg('vgg11', 'A', False, **kwargs)
-def VGG11_bn(**kwargs):
+def vgg11_bn(**kwargs):
     return _vgg('vgg11_bn', 'A', True, **kwargs)
-def VGG13(**kwargs):
+def vgg13(**kwargs):
     return _vgg('vgg13', 'B', False, **kwargs)
-def VGG13_bn(**kwargs):
+def vgg13_bn(**kwargs):
     return _vgg('vgg13_bn', 'B', True, **kwargs)
-def VGG16(**kwargs):
+def vgg16(**kwargs):
     return _vgg('vgg16', 'D', False, **kwargs)
-def VGG16_bn(**kwargs):
+def vgg16_bn(**kwargs):
     return _vgg('vgg16_bn', 'D', True, **kwargs)
-def VGG19(**kwargs):
+def vgg19(**kwargs):
     return _vgg('vgg19', 'E', False, **kwargs)
-def VGG19_bn(**kwargs):
+def vgg19_bn(**kwargs):
     return _vgg('vgg19_bn', 'E', True, **kwargs)
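
Unlike resnet, the VGG constructors are renamed outright rather than aliased, so the capitalized spellings no longer exist and callers must switch to the lowercase names. A hedged before/after sketch:

from jittor.models import vgg

# Pre-commit spelling (removed by this change): model = vgg.VGG16_bn()
# Post-commit spelling:
model = vgg.vgg16_bn()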

View File

@@ -28,74 +28,61 @@ class test_models(unittest.TestCase):
     @classmethod
     def setUpClass(self):
         self.models = [
-            ['inception_v3','inception_v3'],
-            ['squeezenet1_0','squeezenet1_0'],
-            ['squeezenet1_1','squeezenet1_1'],
-            ['alexnet','alexnet'],
-            ['resnet18','Resnet18'],
-            ['resnet34','Resnet34'],
-            ['resnet50','Resnet50'],
-            ['resnet101','Resnet101'],
-            ['resnet152','Resnet152'],
-            ['resnext50_32x4d','Resnext50_32x4d'],
-            ['resnext101_32x8d','Resnext101_32x8d'],
-            ['vgg11','VGG11'],
-            ['vgg11_bn','VGG11_bn'],
-            ['vgg13','VGG13'],
-            ['vgg13_bn','VGG13_bn'],
-            ['vgg16','VGG16'],
-            ['vgg16_bn','VGG16_bn'],
-            ['vgg19','VGG19'],
-            ['vgg19_bn','VGG19_bn'],
-            ['wide_resnet50_2','Wide_resnet50_2'],
-            ['wide_resnet101_2','Wide_resnet101_2'],
-            ['googlenet','googlenet'],
-            ['mobilenet_v2','mobilenet_v2'],
-            ['mnasnet0_5','mnasnet0_5'],
-            ['mnasnet0_75','mnasnet0_75'],
-            ['mnasnet1_0','mnasnet1_0'],
-            ['mnasnet1_3','mnasnet1_3'],
-            ['shufflenet_v2_x0_5','shufflenet_v2_x0_5'],
-            ['shufflenet_v2_x1_0','shufflenet_v2_x1_0'],
-            ['shufflenet_v2_x1_5','shufflenet_v2_x1_5'],
-            ['shufflenet_v2_x2_0','shufflenet_v2_x2_0']
+            'inception_v3',
+            'squeezenet1_0',
+            'squeezenet1_1',
+            'alexnet',
+            'resnet18',
+            'resnet34',
+            'resnet50',
+            'resnet101',
+            'resnet152',
+            'resnext50_32x4d',
+            'resnext101_32x8d',
+            'vgg11',
+            'vgg11_bn',
+            'vgg13',
+            'vgg13_bn',
+            'vgg16',
+            'vgg16_bn',
+            'vgg19',
+            'vgg19_bn',
+            'wide_resnet50_2',
+            'wide_resnet101_2',
+            'googlenet',
+            'mobilenet_v2',
+            'mnasnet0_5',
+            'mnasnet0_75',
+            'mnasnet1_0',
+            'mnasnet1_3',
+            'shufflenet_v2_x0_5',
+            'shufflenet_v2_x1_0',
+            'shufflenet_v2_x1_5',
+            'shufflenet_v2_x2_0',
         ]
     @unittest.skipIf(not jt.has_cuda, "Cuda not found")
-    @jt.flag_scope(use_cuda=1, use_stat_allocator=1)
+    @jt.flag_scope(use_cuda=1)
     def test_models(self):
+        def to_cuda(x):
+            if jt.has_cuda:
+                return x.cuda()
+            return x
         threshold = 1e-2
         # Define numpy input image
         bs = 1
         test_img = np.random.random((bs,3,224,224)).astype('float32')
         # Define pytorch & jittor input image
-        pytorch_test_img = torch.Tensor(test_img).cuda()
+        pytorch_test_img = to_cuda(torch.Tensor(test_img))
         jittor_test_img = jt.array(test_img)
         for test_model in self.models:
-            if test_model[0] == "inception_v3":
+            if test_model == "inception_v3":
                 test_img = np.random.random((bs,3,300,300)).astype('float32')
-                pytorch_test_img = torch.Tensor(test_img).cuda()
+                pytorch_test_img = to_cuda(torch.Tensor(test_img))
                 jittor_test_img = jt.array(test_img)
             # Define pytorch & jittor model
-            pytorch_model = tcmodels.__dict__[test_model[0]]().cuda()
-            if 'resne' in test_model[0]:
-                jittor_model = jtmodels.resnet.__dict__[test_model[1]]()
-            elif 'vgg' in test_model[0]:
-                jittor_model = jtmodels.vgg.__dict__[test_model[1]]()
-            elif 'alexnet' in test_model[0]:
-                jittor_model = jtmodels.alexnet.__dict__[test_model[1]]()
-            elif 'squeezenet' in test_model[0]:
-                jittor_model = jtmodels.squeezenet.__dict__[test_model[1]]()
-            elif 'inception' in test_model[0]:
-                jittor_model = jtmodels.inception.__dict__[test_model[1]]()
-            elif 'googlenet' in test_model[0]:
-                jittor_model = jtmodels.googlenet.__dict__[test_model[1]]()
-            elif 'mobilenet' in test_model[0]:
-                jittor_model = jtmodels.mobilenet.__dict__[test_model[1]]()
-            elif 'mnasnet' in test_model[0]:
-                jittor_model = jtmodels.mnasnet.__dict__[test_model[1]]()
-            elif 'shufflenet' in test_model[0]:
-                jittor_model = jtmodels.shufflenetv2.__dict__[test_model[1]]()
+            pytorch_model = to_cuda(tcmodels.__dict__[test_model]())
+            jittor_model = jtmodels.__dict__[test_model]()
             # Set eval to avoid dropout layer
             pytorch_model.eval()
             jittor_model.eval()
@@ -108,8 +95,8 @@ class test_models(unittest.TestCase):
             y = jittor_result.data + 1
             relative_error = abs(x - y) / abs(y)
             diff = relative_error.mean()
-            assert diff < threshold, f"[*] {test_model[1]} forward fails..., Relative Error: {diff}"
-            print(f"[*] {test_model[1]} forward passes with Relative Error {diff}")
+            assert diff < threshold, f"[*] {test_model} forward fails..., Relative Error: {diff}"
+            print(f"[*] {test_model} forward passes with Relative Error {diff}")
         print('all models pass test.')
 if __name__ == "__main__":
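
Since every entry in self.models is now a lowercase constructor name that exists in both torchvision.models and jittor.models, the per-family if/elif dispatch collapses into a single attribute lookup per framework. A compact sketch of that lookup pattern outside the test harness (the model name is illustrative):

import torchvision.models as tcmodels
import jittor.models as jtmodels

name = 'resnet18'                          # any entry from self.models
pytorch_model = tcmodels.__dict__[name]()  # torchvision already exposes lowercase names
jittor_model = jtmodels.__dict__[name]()   # works thanks to the new package-level re-exports
pytorch_model.eval()                       # eval mode: disable dropout before comparing outputs
jittor_model.eval()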

View File

@@ -24,7 +24,7 @@ skip_model_test = not model_test
 class MnistNet(Module):
     def __init__(self):
-        self.model = vgg.VGG16_bn()
+        self.model = vgg.vgg16_bn()
         self.layer = nn.Linear(1000,10)
     def execute(self, x):
         x = self.model(x)
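
The MNIST test only needs its call site updated to the renamed constructor. A hedged sketch of driving the updated MnistNet (the input shape is an assumption here; the surrounding test defines the real data pipeline):

net = MnistNet()                      # backbone is now built with vgg.vgg16_bn()
dummy = jt.random((1, 3, 224, 224))   # assumed 3x224x224 input, the shape VGG expects
logits = net(dummy)                   # (1, 10) after the Linear(1000, 10) head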