1、nn.Module
```python
import torch
from torch import nn
import torch.nn.functional as F

class Model_Seq(nn.Module):
    """Build the network by subclassing nn.Module, with every layer defined explicitly."""
    def __init__(self, in_dim, n_hidden_1, n_hidden_2, n_hidden_3, out_dim):
        super(Model_Seq, self).__init__()
        self.flatten = nn.Flatten()
        self.linear1 = nn.Linear(in_dim, n_hidden_1)
        self.bn1 = nn.BatchNorm1d(n_hidden_1)
        self.linear2 = nn.Linear(n_hidden_1, n_hidden_2)
        self.bn2 = nn.BatchNorm1d(n_hidden_2)  # BatchNorm1d: inputs here are 2-D (batch, features)
        self.linear3 = nn.Linear(n_hidden_2, n_hidden_3)
        self.bn3 = nn.BatchNorm1d(n_hidden_3)
        self.out = nn.Linear(n_hidden_3, out_dim)

    def forward(self, x):
        x = self.flatten(x)
        x = self.linear1(x)
        x = self.bn1(x)
        x = F.relu(x)
        x = self.linear2(x)
        x = self.bn2(x)
        x = F.relu(x)
        x = self.linear3(x)
        x = self.bn3(x)
        x = F.relu(x)
        x = self.out(x)
        x = F.softmax(x, dim=1)
        return x

in_dim = 1000
n_hidden_1 = 800
n_hidden_2 = 500
n_hidden_3 = 800
out_dim = 1000

model_seq = Model_Seq(in_dim, n_hidden_1, n_hidden_2, n_hidden_3, out_dim)
print(model_seq)
```
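As a quick check (a minimal sketch; the batch size of 4 and the random input are illustrative), feed a dummy batch through the model and confirm the output shape. Note that BatchNorm1d needs more than one sample per batch in training mode; eval() switches it to running statistics:

```python
x = torch.randn(4, in_dim)   # dummy batch of 4 samples
model_seq.eval()             # BatchNorm uses running stats in eval mode
with torch.no_grad():
    y = model_seq(x)
print(y.shape)               # torch.Size([4, 1000])
print(y.sum(dim=1))          # each row sums to 1 after softmax
```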
2、Sequential
```python
import torch
from torch import nn

in_dim = 1000
n_hidden_1 = 800
n_hidden_2 = 500
n_hidden_3 = 800
out_dim = 1000

Seq_arg = nn.Sequential(
    nn.Flatten(),
    nn.Linear(in_dim, n_hidden_1),
    nn.BatchNorm1d(n_hidden_1),
    nn.ReLU(),
    nn.Linear(n_hidden_1, n_hidden_2),
    nn.BatchNorm1d(n_hidden_2),
    nn.ReLU(),
    nn.Linear(n_hidden_2, n_hidden_3),
    nn.BatchNorm1d(n_hidden_3),
    nn.ReLU(),
    nn.Linear(n_hidden_3, out_dim),
    nn.Softmax(dim=1)
)
print(Seq_arg)
```
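Built this way, nn.Sequential numbers its children from 0, so individual layers can be retrieved by index, and the container is itself callable (an illustrative check):

```python
print(Seq_arg[1])            # the first Linear: Linear(in_features=1000, out_features=800, bias=True)
x = torch.randn(4, in_dim)
print(Seq_arg(x).shape)      # torch.Size([4, 1000])
```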
3、add_module
```python
import torch
from torch import nn
from torchsummary import summary

in_dim = 1000
n_hidden_1 = 800
n_hidden_2 = 500
n_hidden_3 = 800
out_dim = 1000

Seq_module = nn.Sequential()
Seq_module.add_module("flatten", nn.Flatten())
Seq_module.add_module("linear1", nn.Linear(in_dim, n_hidden_1))
Seq_module.add_module("bn1", nn.BatchNorm1d(n_hidden_1))
Seq_module.add_module("relu1", nn.ReLU())
Seq_module.add_module("linear2", nn.Linear(n_hidden_1, n_hidden_2))
Seq_module.add_module("bn2", nn.BatchNorm1d(n_hidden_2))
Seq_module.add_module("relu2", nn.ReLU())
Seq_module.add_module("linear3", nn.Linear(n_hidden_2, n_hidden_3))
Seq_module.add_module("bn3", nn.BatchNorm1d(n_hidden_3))
Seq_module.add_module("relu3", nn.ReLU())
Seq_module.add_module("out", nn.Linear(n_hidden_3, out_dim))
Seq_module.add_module("softmax", nn.Softmax(dim=1))
print(Seq_module)
```
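Since torchsummary is imported above, a per-layer report of output shapes and parameter counts can also be printed (assuming the torchsummary package is installed; passing device="cpu" avoids requiring a GPU):

```python
# Per-layer output shapes and parameter counts for a 1000-feature input.
summary(Seq_module, (1000,), device="cpu")
```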
4、OrderedDict
```python
import torch
from torch import nn
from collections import OrderedDict

in_dim = 1000
n_hidden_1 = 800
n_hidden_2 = 500
n_hidden_3 = 800
out_dim = 1000

Seq_dict = nn.Sequential(OrderedDict([
    ("flatten", nn.Flatten()),
    ("linear1", nn.Linear(in_dim, n_hidden_1)),
    ("bn1", nn.BatchNorm1d(n_hidden_1)),
    ("relu1", nn.ReLU()),
    ("linear2", nn.Linear(n_hidden_1, n_hidden_2)),
    ("bn2", nn.BatchNorm1d(n_hidden_2)),
    ("relu2", nn.ReLU()),
    ("linear3", nn.Linear(n_hidden_2, n_hidden_3)),
    ("bn3", nn.BatchNorm1d(n_hidden_3)),
    ("relu3", nn.ReLU()),
    ("out", nn.Linear(n_hidden_3, out_dim)),
    ("softmax", nn.Softmax(dim=1))
]))
print(Seq_dict)
```
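With OrderedDict, each chosen name becomes an attribute of the container, so layers are easy to look up or replace by name (an illustrative sketch; the LeakyReLU swap is just an example):

```python
print(Seq_dict.bn1)                  # look a layer up by its given name
Seq_dict.relu1 = nn.LeakyReLU(0.1)   # swap in a different activation in place
print(Seq_dict)
```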
5、Inheriting from the nn.Module base class
```python
import torch
from torch import nn
import torch.nn.functional as F

in_dim = 1000
n_hidden_1 = 800
n_hidden_2 = 500
n_hidden_3 = 800
out_dim = 1000

class Model_lay(nn.Module):
    """
    Build the network with nn.Sequential: Sequential groups the layers of each stage into one block.
    """
    def __init__(self, in_dim, n_hidden_1, n_hidden_2, n_hidden_3, out_dim):
        super(Model_lay, self).__init__()
        self.flatten = nn.Flatten()
        self.layer1 = nn.Sequential(nn.Linear(in_dim, n_hidden_1), nn.BatchNorm1d(n_hidden_1))
        self.layer2 = nn.Sequential(nn.Linear(n_hidden_1, n_hidden_2), nn.BatchNorm1d(n_hidden_2))
        self.layer3 = nn.Sequential(nn.Linear(n_hidden_2, n_hidden_3), nn.BatchNorm1d(n_hidden_3))
        self.out = nn.Sequential(nn.Linear(n_hidden_3, out_dim))

    def forward(self, x):
        x = self.flatten(x)
        x = F.relu(self.layer1(x))
        x = F.relu(self.layer2(x))
        x = F.relu(self.layer3(x))
        x = F.softmax(self.out(x), dim=1)
        return x

model_lay = Model_lay(in_dim, n_hidden_1, n_hidden_2, n_hidden_3, out_dim)
print(model_lay)
```
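Because the activations here are applied with torch.nn.functional inside forward, they hold no state and do not appear in print(model_lay); only the Sequential blocks carry parameters (an illustrative check):

```python
# Only the Linear/BatchNorm layers inside layer1..3 and out carry parameters.
for name, p in model_lay.named_parameters():
    print(name, tuple(p.shape))
```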
6、The nn.ModuleList model container
```python
import torch
from torch import nn
import torch.nn.functional as F

in_dim = 1000
n_hidden_1 = 800
n_hidden_2 = 500
n_hidden_3 = 800
out_dim = 1000

class Model_lst(nn.Module):
    def __init__(self, in_dim, n_hidden_1, n_hidden_2, n_hidden_3, out_dim):
        super(Model_lst, self).__init__()
        self.layers = nn.ModuleList([
            nn.Flatten(),
            nn.Linear(in_dim, n_hidden_1),
            nn.BatchNorm1d(n_hidden_1),
            nn.ReLU(),
            nn.Linear(n_hidden_1, n_hidden_2),
            nn.BatchNorm1d(n_hidden_2),
            nn.ReLU(),
            nn.Linear(n_hidden_2, n_hidden_3),
            nn.BatchNorm1d(n_hidden_3),
            nn.ReLU(),
            nn.Linear(n_hidden_3, out_dim),
            nn.Softmax(dim=1)])

    def forward(self, x):
        for layer in self.layers:
            x = layer(x)
        return x
model_lst = Model_lst(in_dim, n_hidden_1, n_hidden_2, n_hidden_3, out_dim)
print(model_lst)
```
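Unlike a plain Python list, nn.ModuleList registers every module it holds, so their parameters are visible to model_lst.parameters() and thus to an optimizer (an illustrative check):

```python
# Every Linear/BatchNorm parameter is registered through the ModuleList.
n_params = sum(p.numel() for p in model_lst.parameters())
print(n_params)   # nonzero; modules kept in a plain list would not be counted
```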
7、The nn.ModuleDict model container
```python
import torch
from torch import nn

in_dim = 1000
n_hidden_1 = 800
n_hidden_2 = 500
n_hidden_3 = 800
out_dim = 1000

class Model_dict(nn.Module):
    def __init__(self, in_dim, n_hidden_1, n_hidden_2, n_hidden_3, out_dim):
        super(Model_dict, self).__init__()
        self.layers_dict = nn.ModuleDict({
            "flatten": nn.Flatten(),
            "linear1": nn.Linear(in_dim, n_hidden_1),
            "bn1": nn.BatchNorm1d(n_hidden_1),
            "relu": nn.ReLU(),   # stateless, so one instance can be reused
            "linear2": nn.Linear(n_hidden_1, n_hidden_2),
            "bn2": nn.BatchNorm1d(n_hidden_2),
            "linear3": nn.Linear(n_hidden_2, n_hidden_3),
            "bn3": nn.BatchNorm1d(n_hidden_3),
            "out": nn.Linear(n_hidden_3, out_dim),
            "softmax": nn.Softmax(dim=1)
        })

    def forward(self, x):
        # ModuleDict stores the layers but not their order; this list defines the data flow.
        layers = ["flatten", "linear1", "bn1", "relu",
                  "linear2", "bn2", "relu",
                  "linear3", "bn3", "relu",
                  "out", "softmax"]
        for layer in layers:
            x = self.layers_dict[layer](x)
        return x

model_dict = Model_dict(in_dim, n_hidden_1, n_hidden_2, n_hidden_3, out_dim)
print(model_dict)
```
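Since nn.ModuleDict registers the layers but says nothing about execution order, the layers list in forward is what actually wires the network together. A quick shape check (illustrative):

```python
x = torch.randn(4, in_dim)
model_dict.eval()                # BatchNorm uses running stats in eval mode
print(model_dict(x).shape)       # torch.Size([4, 1000])
```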