mirror of https://github.com/Jittor/Jittor
complete documents
This commit is contained in:
parent 626ca2c272
commit 6713e6bb89
@@ -6,8 +6,9 @@ jittor.models

 ```eval_rst
 .. automodule:: jittor.models
-   :members:
+   :members:
    :imported-members:
    :undoc-members:
+   :exclude-members: ResNet,ShuffleNetV2,SqueezeNet,VGG
 ```

@@ -22,7 +22,7 @@ struct MpiAllReduceOp : Op {
     Args:

     * x: variable to be all reduced.
-    * op: 'sum' or 'add' means sum all [x], 'mean' means average all [x].
+    * op: 'sum' or 'add' means sum all [x], 'mean' means average all [x]. Default: 'add'.
     */
     MpiAllReduceOp(Var* x, NanoString op=ns_add);
     void infer_shape() override;
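The hunk above only completes the op's docstring. For orientation, a minimal usage sketch follows; it assumes the compiled op is exposed as the `jittor.Var` method `mpi_all_reduce` (the binding path is an assumption, not shown in this diff) and that the script runs under `mpirun`:

```python
# Hypothetical usage sketch for MpiAllReduceOp; assumes the op is bound as the
# Var method `mpi_all_reduce`. Launch with: mpirun -np 4 python demo.py
import jittor as jt

if jt.in_mpi:  # True when the process was launched through MPI
    x = jt.array([1.0, 2.0, 3.0]) * (jt.rank + 1)  # a different value per rank
    y = x.mpi_all_reduce("add")   # element-wise sum over all ranks (the default)
    z = x.mpi_all_reduce("mean")  # element-wise average over all ranks
    print(jt.rank, y.numpy(), z.numpy())
```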
@@ -22,7 +22,7 @@ struct MpiBroadcastOp : Op {
     Args:

     * x: variable to be broadcasted.
-    * root: ID of MPI node to be broadcasted.
+    * root: ID of the MPI node to broadcast from. Default: 0.
     */
     MpiBroadcastOp(Var* x, int root=0);
     void infer_shape() override;
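As above, a hedged usage sketch, assuming the op is bound as the `jittor.Var` method `mpi_broadcast`:

```python
# Hypothetical usage sketch for MpiBroadcastOp; assumes the Var method
# `mpi_broadcast` and an mpirun launch, as in the previous sketch.
import jittor as jt

if jt.in_mpi:
    x = jt.array([float(jt.rank)])  # differs per rank before the broadcast
    y = x.mpi_broadcast(root=0)     # every rank receives rank 0's data
    print(jt.rank, y.numpy())       # prints [0.] on every rank
```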
@@ -23,8 +23,8 @@ struct MpiReduceOp : Op {
     Args:

     * x: variable to be reduced.
-    * op: 'sum' or 'add' means sum all [x], 'mean' means average all [x].
-    * root: ID of MPI node to output.
+    * op: 'sum' or 'add' means sum all [x], 'mean' means average all [x]. Default: 'add'.
+    * root: ID of the MPI node that receives the result. Default: 0.
     */
     MpiReduceOp(Var* x, NanoString op=ns_add, int root=0);
     void infer_shape() override;
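And the reduce counterpart, under the same binding assumption (`jittor.Var` method `mpi_reduce`); unlike all-reduce, only the root rank is guaranteed to hold the result:

```python
# Hypothetical usage sketch for MpiReduceOp; assumes the Var method `mpi_reduce`.
import jittor as jt

if jt.in_mpi:
    x = jt.array([1.0, 2.0]) * (jt.rank + 1)
    y = x.mpi_reduce(op="add", root=0)  # sums land on rank 0 only
    if jt.rank == 0:
        print(y.numpy())
```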
@@ -13,6 +13,19 @@ import jittor.nn as nn

 __all__ = ['AlexNet', 'alexnet']

 class AlexNet(nn.Module):
+    """ AlexNet model architecture.
+
+    Args:
+
+    * num_classes: Number of classes. Default: 1000.
+
+    Example::
+
+        model = jittor.models.AlexNet(500)
+        x = jittor.random([10, 3, 224, 224])
+        y = model(x) # [10, 500]
+
+    """
     def __init__(self, num_classes=1000):
         super(AlexNet, self).__init__()
@@ -16,6 +16,15 @@ def googlenet(**kwargs):
     return GoogLeNet(**kwargs)

 class GoogLeNet(nn.Module):
+    """ GoogLeNet model architecture.
+
+    Args:
+
+    * num_classes: Number of classes. Default: 1000.
+    * aux_logits: If True, add an auxiliary branch that can improve training. Default: True.
+    * init_weights: If True, initialize the model weights. Default: True.
+    * blocks: List of three blocks, [conv_block, inception_block, inception_aux_block]. If None, [BasicConv2d, Inception, InceptionAux] is used instead. Default: None.
+    """
     def __init__(self, num_classes=1000, aux_logits=True, init_weights=True, blocks=None):
         super(GoogLeNet, self).__init__()
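Unlike the AlexNet docstring, this one has no Example block. A construction sketch in the same style (the 500-class setting and `aux_logits=False` are illustrative choices, and it assumes `googlenet` is re-exported from `jittor.models`):

```python
# Construction sketch mirroring the AlexNet example above. aux_logits is
# disabled so the forward pass returns a single output even during training.
import jittor
model = jittor.models.googlenet(num_classes=500, aux_logits=False)
x = jittor.random([10, 3, 224, 224])
y = model(x)  # [10, 500]
```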
@@ -7,7 +7,16 @@ def inception_v3(pretrained=False, progress=True, **kwargs):
     return Inception3(**kwargs)

 class Inception3(nn.Module):
+    """ Inception v3 model architecture.
+
+    Args:
+
+    * num_classes: Number of classes. Default: 1000.
+    * aux_logits: If True, add an auxiliary branch that can improve training. Default: True.
+    * inception_blocks: List of seven blocks, [conv_block, inception_a, inception_b, inception_c, inception_d, inception_e, inception_aux]. If None, [BasicConv2d, InceptionA, InceptionB, InceptionC, InceptionD, InceptionE, InceptionAux] is used instead. Default: None.
+    * init_weights: If True, initialize the model weights. Default: True.
+    """
     def __init__(self, num_classes=1000, aux_logits=True, inception_blocks=None, init_weights=True):
         super(Inception3, self).__init__()
         if (inception_blocks is None):
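A matching construction sketch; the 299x299 input size is the conventional Inception v3 resolution, not something stated in this diff, and `inception_v3` is assumed to be re-exported from `jittor.models`:

```python
# Construction sketch in the style of the AlexNet example above.
import jittor
model = jittor.models.inception_v3(num_classes=500, aux_logits=False)
x = jittor.random([10, 3, 299, 299])
y = model(x)  # [10, 500]
```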
@@ -47,6 +47,14 @@ def _get_depths(alpha):
     return [_round_to_multiple_of((depth * alpha), 8) for depth in depths]

 class MNASNet(nn.Module):
+    """ MNASNet model architecture (version 2).
+
+    Args:
+
+    * alpha: Depth multiplier.
+    * num_classes: Number of classes. Default: 1000.
+    * dropout: Dropout probability of the dropout layer. Default: 0.2.
+    """
     _version = 2

     def __init__(self, alpha, num_classes=1000, dropout=0.2):
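Since `alpha` has no default, a construction sketch may help; `alpha=1.0` is the conventional full-width setting (illustrative only), and the sketch assumes `MNASNet` is re-exported from `jittor.models`:

```python
# Construction sketch; alpha (the depth multiplier) is required.
import jittor
model = jittor.models.MNASNet(alpha=1.0, num_classes=500, dropout=0.2)
x = jittor.random([10, 3, 224, 224])
y = model(x)  # [10, 500]
```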
@@ -48,6 +48,17 @@ class InvertedResidual(nn.Module):
         return self.conv(x)

 class MobileNetV2(nn.Module):
+    """ MobileNetV2 model architecture.
+
+    Args:
+
+    * num_classes: Number of classes. Default: 1000.
+    * width_mult: Width multiplier - adjusts the number of channels in each layer by this amount. Default: 1.0.
+    * inverted_residual_setting: Network structure. Default: None.
+    * round_nearest: Round the number of channels in each layer to be a multiple of this number. Set to 1 to turn off rounding. Default: 8.
+    * block: Module specifying the inverted residual building block. If None, InvertedResidual is used instead. Default: None.
+    """
     def __init__(self, num_classes=1000, width_mult=1.0, inverted_residual_setting=None, round_nearest=8, block=None):
         super(MobileNetV2, self).__init__()
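`width_mult` and `round_nearest` interact: each layer's channel count is scaled by `width_mult` and then rounded to a multiple of `round_nearest`. A sketch of the usual rounding rule follows; the helper name `_make_divisible` comes from the reference implementation and is an assumption here, since this diff does not show Jittor's helper:

```python
# Sketch of the channel rounding implied by width_mult/round_nearest.
# The helper name _make_divisible follows the reference implementation and
# is an assumption; this diff does not show Jittor's actual helper.
def _make_divisible(v, divisor, min_value=None):
    if min_value is None:
        min_value = divisor
    # Round to the nearest multiple of `divisor`, but never below `min_value`.
    new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
    # Never round down by more than 10% of the original value.
    if new_v < 0.9 * v:
        new_v += divisor
    return new_v

# With width_mult=0.75 and round_nearest=8, a 32-channel layer becomes 24 channels:
assert _make_divisible(32 * 0.75, 8) == 24
```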
@@ -162,6 +162,16 @@ def Resnet50(**kwargs):
 resnet50 = Resnet50

 def Resnet101(**kwargs):
+    """
+    ResNet-101 model architecture.
+
+    Example::
+
+        model = jittor.models.Resnet101()
+        x = jittor.random([10, 3, 224, 224])
+        y = model(x) # [10, 1000]
+
+    """
     return _resnet(Bottleneck, [3, 4, 23, 3], **kwargs)
 resnet101 = Resnet101