python
import numpy as np

# Sigmoid (logistic) function
def sigmoid(z):
    return 1 / (1 + np.exp(-z))

# Logistic-regression cost: average cross-entropy over all m examples
def get_cost_logistic(X, y, w, b):
    m = X.shape[0]
    cost = 0.0
    for i in range(m):
        z_i = np.dot(X[i], w) + b
        f_wb_i = sigmoid(z_i)
        cost += -y[i] * np.log(f_wb_i) - (1 - y[i]) * np.log(1 - f_wb_i)
    cost = cost / m
    return cost
# Gradient of the cost with respect to w and b
def get_gradient(x, y, w, b):
    # number of examples (m) and number of features (n)
    m = x.shape[0]
    n = x.shape[1]
    dj_dw = np.zeros((n,))
    dj_db = 0.0
    for i in range(m):
        # prediction error for example i
        error = sigmoid(np.dot(x[i, :], w) + b) - y[i]
        dj_db += error
        for j in range(n):
            dj_dw[j] += error * x[i, j]
    dj_db = dj_db / m
    dj_dw = dj_dw / m
    return dj_dw, dj_db
# Batch gradient descent
def gradient_descent(x, y, w_in, b_in, alpha, iters):
    w = w_in
    b = b_in
    cost_his = []
    for i in range(iters):
        dj_dw, dj_db = get_gradient(x, y, w, b)
        w = w - dj_dw * alpha
        b = b - dj_db * alpha
        cost_his.append(get_cost_logistic(x, y, w, b))
        # print progress roughly 10 times over the run
        if i % max(1, iters // 10) == 0:
            print(f'iteration:{i},cost:{cost_his[i]},w:{w},b:{b}')
    print(f'final w:{w},b:{b}')
    return w, b, cost_his
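
# --- Minimal usage sketch (a hedged example, not from the original post) ---
# The toy dataset, learning rate, and iteration count below are assumptions
# chosen only to show how the functions above fit together.
X_train = np.array([[0.5, 1.5], [1.0, 1.0], [1.5, 0.5],
                    [3.0, 0.5], [2.0, 2.0], [1.0, 2.5]])
y_train = np.array([0, 0, 0, 1, 1, 1])
w_init = np.zeros(X_train.shape[1])
b_init = 0.0
w_out, b_out, cost_his = gradient_descent(X_train, y_train, w_init, b_init,
                                          alpha=0.1, iters=1000)
# predicted probability that a new example belongs to class 1
print(sigmoid(np.dot(np.array([2.5, 1.5]), w_out) + b_out))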