Deep Learning Notes 2: Multiple Ways to Define a Neural Network

1、nn.Module

python
import torch
from torch import nn
import torch.nn.functional as F
class Model_Seq(nn.Module):
    """Subclass nn.Module and register each layer as an attribute."""
    def __init__(self, in_dim, n_hidden_1, n_hidden_2, n_hidden_3, out_dim):
        super(Model_Seq, self).__init__()
        self.flatten = nn.Flatten()

        self.linear1 = nn.Linear(in_dim, n_hidden_1)
        self.bn1 = nn.BatchNorm1d(n_hidden_1)

        self.linear2 = nn.Linear(n_hidden_1, n_hidden_2)
        # BatchNorm1d (not BatchNorm2d): the activations here are (batch, features)
        self.bn2 = nn.BatchNorm1d(n_hidden_2)

        self.linear3 = nn.Linear(n_hidden_2, n_hidden_3)
        # BatchNorm1d (not BatchNorm3d), for the same reason
        self.bn3 = nn.BatchNorm1d(n_hidden_3)

        self.out = nn.Linear(n_hidden_3, out_dim)


    def forward(self, x):
        x = self.flatten(x)

        x = self.linear1(x)
        x = self.bn1(x)
        x = F.relu(x)

        x = self.linear2(x)
        x = self.bn2(x)
        x = F.relu(x)

        x = self.linear3(x)
        x = self.bn3(x)
        x = F.relu(x)

        x = self.out(x)
        x = F.softmax(x, dim=1)
        return x

in_dim = 1000
n_hidden_1 = 800
n_hidden_2 = 500
n_hidden_3 = 800
out_dim = 1000
model_seq = Model_Seq(in_dim, n_hidden_1, n_hidden_2, n_hidden_3, out_dim)


print(model_seq)
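
As a quick sanity check (a minimal sketch; the batch size of 8 is an arbitrary choice, not from the original), you can push a dummy batch through the model and confirm the output shape:

python
x = torch.randn(8, in_dim)   # dummy batch of 8 samples
y = model_seq(x)
print(y.shape)               # torch.Size([8, 1000])
print(y.sum(dim=1))          # each softmax row sums to 1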

2、Sequential

python
import torch
from torch import nn

in_dim = 1000
n_hidden_1 = 800
n_hidden_2 = 500
n_hidden_3 = 800
out_dim = 1000

Seq_arg = nn.Sequential(
    nn.Flatten(),

    nn.Linear(in_dim, n_hidden_1),
    nn.BatchNorm1d(n_hidden_1),
    nn.ReLU(),

    nn.Linear(n_hidden_1, n_hidden_2),
    nn.BatchNorm1d(n_hidden_2),
    nn.ReLU(),

    nn.Linear(n_hidden_2, n_hidden_3),
    nn.BatchNorm1d(n_hidden_3),
    nn.ReLU(),

    nn.Linear(n_hidden_3, out_dim),
    nn.Softmax(dim=1)
)

print(Seq_arg)
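
Since the whole model is one nn.Sequential, it already has a forward(), and individual layers can be fetched by integer index (a small sketch, not in the original post):

python
print(Seq_arg[1])            # the first Linear layer: in_features=1000, out_features=800
x = torch.randn(8, in_dim)
print(Seq_arg(x).shape)      # torch.Size([8, 1000])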

3、add_module

python
import torch
from torch import nn


in_dim = 1000
n_hidden_1 = 800
n_hidden_2 = 500
n_hidden_3 = 800
out_dim = 1000

Seq_module = nn.Sequential()
Seq_module.add_module("flatten", nn.Flatten())

Seq_module.add_module("linear1", nn.Linear(in_dim, n_hidden_1))
Seq_module.add_module("bn1", nn.BatchNorm1d(n_hidden_1))
Seq_module.add_module("relu1", nn.ReLU())

Seq_module.add_module("linear2", nn.Linear(n_hidden_1, n_hidden_2))
Seq_module.add_module("bn2", nn.BatchNorm1d(n_hidden_2))
Seq_module.add_module("relu2", nn.ReLU())

Seq_module.add_module("linear3", nn.Linear(n_hidden_2, n_hidden_3))
Seq_module.add_module("bn3", nn.BatchNorm1d(n_hidden_3))
Seq_module.add_module("relu3", nn.ReLU())

Seq_module.add_module("out", nn.Linear(n_hidden_3, out_dim))
Seq_module.add_module("softmax", nn.Softmax(dim=1))

print(Seq_module)
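
Compared with the anonymous layers of section 2, modules registered via add_module are also reachable by the name you gave them (a minimal sketch, not from the original):

python
print(Seq_module.linear1)    # access a layer by its add_module name
print(Seq_module[1])         # integer indexing still works too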

4、OrderedDict

python
import torch
from torch import nn
from collections import OrderedDict

in_dim = 1000
n_hidden_1 = 800
n_hidden_2 = 500
n_hidden_3 = 800
out_dim = 1000

Seq_dict = nn.Sequential(OrderedDict([
    ("flatten", nn.Flatten()),
    ("linear1", nn.Linear(in_dim, n_hidden_1)),
    ("bn1", nn.BatchNorm1d(n_hidden_1)),
    ("relu1", nn.ReLU()),

    ("linear2", nn.Linear(n_hidden_1, n_hidden_2)),
    ("bn2", nn.BatchNorm1d(n_hidden_2)),
    ("relu2", nn.ReLU()),

    ("linear3", nn.Linear(n_hidden_2, n_hidden_3)),
    ("bn3", nn.BatchNorm1d(n_hidden_3)),
    ("relu3", nn.ReLU()),

    ("out", nn.Linear(n_hidden_3, out_dim)),
    ("softmax", nn.Softmax(dim=1)),
]))


print(Seq_dict)
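
The names in the OrderedDict also become prefixes in the model's state_dict, which makes checkpoints easier to read (a small illustrative sketch):

python
# parameter/buffer names are prefixed with the layer names chosen above
for name in list(Seq_dict.state_dict().keys())[:4]:
    print(name)              # linear1.weight, linear1.bias, bn1.weight, bn1.bias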

5、Inheriting from the nn.Module base class

python
import torch
from torch import nn
import torch.nn.functional as F

in_dim = 1000
n_hidden_1 = 800
n_hidden_2 = 500
n_hidden_3 = 800
out_dim = 1000

class Model_lay(nn.Module):
    """
    使用sequential构建网络,Sequential()函数的功能是将网络的层组合到一起
    """

    def __init__(self, in_dim, n_hidden_1, n_hidden_2, n_hidden_3,out_dim):
        super(Model_lay, self).__init__()
        self.flatten = nn.Flatten()
        self.layer1 = nn.Sequential(nn.Linear(in_dim, n_hidden_1), nn.BatchNorm1d(n_hidden_1))
        self.layer2 = nn.Sequential(nn.Linear(n_hidden_1, n_hidden_2), nn.BatchNorm1d(n_hidden_2))
        self.layer3 = nn.Sequential(nn.Linear(n_hidden_2, n_hidden_3), nn.BatchNorm1d(n_hidden_3))
        self.out = nn.Sequential(nn.Linear(n_hidden_3, out_dim))

    def forward(self, x):
        x = self.flatten(x)
        x = F.relu(self.layer1(x))
        x = F.relu(self.layer2(x))
        x = F.relu(self.layer3(x))
        x = F.softmax(self.out(x), dim=1)
        return x

model_lay = Model_lay(in_dim, n_hidden_1, n_hidden_2, n_hidden_3, out_dim)
print(model_lay)
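
Each block here is itself an nn.Sequential, so its internal layers are reachable by index (a minimal sketch, not in the original):

python
print(model_lay.layer1)      # Sequential(Linear(1000, 800), BatchNorm1d(800))
print(model_lay.layer1[0])   # the Linear inside the first block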

6、The nn.ModuleList container

python
import torch
from torch import nn
import torch.nn.functional as F

in_dim = 1000
n_hidden_1 = 800
n_hidden_2 = 500
n_hidden_3 = 800
out_dim = 1000

class Model_lst(nn.Module):

    def __init__(self, in_dim, n_hidden_1, n_hidden_2, n_hidden_3, out_dim):
        super(Model_lst, self).__init__()
        self.layers = nn.ModuleList([
            nn.Flatten(),
            nn.Linear(in_dim, n_hidden_1),
            nn.BatchNorm1d(n_hidden_1),
            nn.ReLU(),

            nn.Linear(n_hidden_1, n_hidden_2),
            nn.BatchNorm1d(n_hidden_2),
            nn.ReLU(),

            nn.Linear(n_hidden_2, n_hidden_3),
            nn.BatchNorm1d(n_hidden_3),
            nn.ReLU(),

            nn.Linear(n_hidden_3, out_dim),
            nn.Softmax(dim=1)])

    def forward(self, x):
        for layer in self.layers:
            x = layer(x)
        return x

model_lst = Model_lst(in_dim, n_hidden_1, n_hidden_2, n_hidden_3, out_dim)
print(model_lst)
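
Note that nn.ModuleList has no forward() of its own, which is why the loop in forward applies each layer in turn. Its job is parameter registration: a plain Python list would not register anything (a minimal sketch illustrating the difference, not from the original):

python
# with nn.ModuleList, every layer's parameters are visible to the optimizer
print(len(list(model_lst.parameters())))   # non-zero: Linear/BatchNorm weights and biases
# with a plain Python list they would be silently missed:
# self.layers = [nn.Linear(10, 10)]   ->   model.parameters() would not include these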

7、The nn.ModuleDict container

python
import torch
from torch import nn

in_dim = 1000
n_hidden_1 = 800
n_hidden_2 = 500
n_hidden_3 = 800
out_dim = 1000

class Model_dict(nn.Module):

    def __init__(self, in_dim, n_hidden_1, n_hidden_2, n_hidden_3, out_dim):
        super(Model_dict, self).__init__()
        self.layers_dict = nn.ModuleDict({"flatten": nn.Flatten(),
                                          "linear1": nn.Linear(in_dim, n_hidden_1),
                                          "bn1": nn.BatchNorm1d(n_hidden_1),
                                          "relu": nn.ReLU(),
                                          "linear2": nn.Linear(n_hidden_1, n_hidden_2),
                                          "bn2": nn.BatchNorm1d(n_hidden_2),
                                          "linear3": nn.Linear(n_hidden_2, n_hidden_3),
                                          # "bn3", not a second "bn2": a duplicate dict key
                                          # would silently overwrite the first entry
                                          "bn3": nn.BatchNorm1d(n_hidden_3),
                                          "out": nn.Linear(n_hidden_3, out_dim),
                                          "softmax": nn.Softmax(dim=1)
                                          })

    def forward(self, x):
        # the stateless "relu" entry can be reused between blocks;
        # the key sequence defines the forward path through the dict
        layers = ["flatten", "linear1", "bn1", "relu",
                  "linear2", "bn2", "relu",
                  "linear3", "bn3", "relu",
                  "out", "softmax"]
        for layer in layers:
            x = self.layers_dict[layer](x)
        return x

model_dict = Model_dict(in_dim, n_hidden_1, n_hidden_2, n_hidden_3, out_dim)
print(model_dict)
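
As a quick check of the corrected forward path, and of key-based access into the dict (a minimal sketch, not from the original):

python
x = torch.randn(8, in_dim)
print(model_dict(x).shape)                  # torch.Size([8, 1000])
print(model_dict.layers_dict["linear1"])    # layers are addressable by key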