python
# Logistic regression for binary classification
# Import PyTorch and the supporting libraries
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import matplotlib.pyplot as plt
x_data = torch.tensor([[1.0], [2.0], [3.0]])  # x_data is a tensor of inputs
# torch.Tensor is the tensor class (float32 by default); torch.tensor() is a
# factory function that infers its dtype from the data it is given
y_data = torch.Tensor([[0], [0], [1]])
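# Both x_data and y_data have shape (3, 1): nn.Linear expects a 2-D
# (batch, features) input, and nn.BCELoss expects a target with the same
# shape and floating-point dtype as the prediction.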
# Define the logistic regression model
class LogisticRegressionModel(nn.Module):
    def __init__(self):
        super(LogisticRegressionModel, self).__init__()  # equivalent to nn.Module.__init__(self)
        self.linear = nn.Linear(1, 1)  # both the input and the output are 1-dimensional

    def forward(self, x):  # forward() is required: it defines the computation graph
        # binary classification, so sigmoid is used as the activation
        y_pred = torch.sigmoid(self.linear(x))
        return y_pred
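# Written out, forward() computes, with the weight w and bias b stored in
# self.linear:
#   y_pred = sigmoid(w * x + b) = 1 / (1 + exp(-(w * x + b)))
# which maps the linear output into the interval (0, 1), i.e. a probability.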
model = LogisticRegressionModel()        # instantiate the model
criterion = nn.BCELoss(reduction='sum')  # binary cross-entropy loss, summed over the batch
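# With reduction='sum', BCELoss computes
#   loss = -sum_i [ y_i * log(y_pred_i) + (1 - y_i) * log(1 - y_pred_i) ]
# i.e. the binary cross-entropy summed over the three training samples.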
optimizer = optim.SGD(model.parameters(), lr=0.01)  # SGD optimizer; lr is the learning rate
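# Each optimizer.step() below applies the plain SGD update
#   theta <- theta - lr * d(loss)/d(theta)
# to every parameter theta returned by model.parameters().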
# Train the model
for epoch in range(100):  # train for 100 epochs
    y_pred = model(x_data)            # forward pass
    loss = criterion(y_pred, y_data)  # compute the loss
    print(epoch, loss.item())         # print the loss
    optimizer.zero_grad()             # clear the accumulated gradients
    loss.backward()                   # backward pass
    optimizer.step()                  # update the parameters
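# Optional check (a sketch, not in the original): nn.Linear stores its learned
# parameters as model.linear.weight and model.linear.bias, so the fitted line
# can be inspected after training.
print("w =", model.linear.weight.item(), "b =", model.linear.bias.item())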
# Test the model
x_test = torch.tensor([[4.0]])
y_test = model(x_test)
print("predict (after training)", y_test.data)  # predicted probability of passing
# Plot the predicted probability as a function of the input x
x = np.linspace(0, 10, 200)         # 200 evenly spaced points from 0 to 10
x_t = torch.Tensor(x).view(200, 1)  # reshape into a 200x1 float tensor, the input shape the model expects
y_t = model(x_t)                    # predict
y = y_t.data.numpy()                # convert back to a NumPy array for plotting
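# An equivalent sketch (an assumption, not in the original): since no gradients
# are needed at inference time, the same predictions can be computed inside a
# no_grad block, and the result converted without going through .data.
with torch.no_grad():
    y_alt = model(x_t).numpy()  # identical values to y above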
plt.plot(x, y)                        # plot the predicted probability against x
plt.plot([0, 10], [0.5, 0.5], c='r')  # draw the y = 0.5 decision threshold in red
plt.xlabel("Hours")                   # x-axis label
plt.ylabel("Probability of Pass")     # y-axis label
plt.grid()                            # draw the grid
plt.show()                            # show the figure
Results