Code example (need the definition of linear regression? Just look it up on Baidu Baike)
Import the module
import numpy as np
Define the hypothesis function
def hyFunction(X, W):
    '''
    :param X: samples as row vectors (must be augmented: a constant 1 goes in the last column)
    :param W: weight column vector
    :return: predicted values X.dot(W)
    '''
    return X.dot(W)
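A quick sanity check (the values below are my own hypothetical example, not from the original post): because each row of X ends with a constant 1, the last entry of W acts as the intercept.

X_demo = np.array([[3.0, 0.0, 1.0],
                   [4.0, 1.0, 1.0]])
W_demo = np.array([2.0, 1.0, 3.0])   # weights 2 and 1 plus intercept 3
print(hyFunction(X_demo, W_demo))    # -> [ 9. 12.]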
Define the gradient function
def gradientFunction(X, W, y):
    # gradient of the squared-error loss 0.5 * ||X.dot(W) - y||^2 with respect to W
    return (X.dot(W) - y).dot(X)
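This is the gradient of the squared-error loss L(W) = 0.5 * ||X.dot(W) - y||^2, which works out to (X.dot(W) - y) multiplied back through X. A quick way to verify it is a central-difference check; the helper and test values below are a sketch of my own, not part of the original post.

def numericGradient(X, W, y, eps=1.0e-6):
    # approximate each partial derivative of the loss with central differences
    grad = np.zeros_like(W, dtype=float)
    for i in range(W.size):
        Wp = W.astype(float)
        Wm = W.astype(float)
        Wp[i] += eps
        Wm[i] -= eps
        lossP = 0.5 * np.sum((X.dot(Wp) - y) ** 2)
        lossM = 0.5 * np.sum((X.dot(Wm) - y) ** 2)
        grad[i] = (lossP - lossM) / (2 * eps)
    return grad

X_chk = np.array([[3.0, 0.0, 1.0],
                  [4.0, 1.0, 1.0]])
y_chk = np.array([9.0, 12.0])
W_chk = np.array([1.0, 1.0, 1.0])
print(gradientFunction(X_chk, W_chk, y_chk))   # analytic: [-39.  -6. -11.]
print(numericGradient(X_chk, W_chk, y_chk))    # should agree to ~1e-6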
Define the gradient descent function
def gradientDescent(X, w, y, hyFunc, gFunc, lamb=0.001, tolance=1.0e-8, times=2000000):
    '''
    :param X: sample matrix
    :param w: initial guess for the weights
    :param y: observed results
    :param hyFunc: hypothesis function
    :param gFunc: gradient function
    :param lamb: step size; inversely related to the iteration count, typically 0.1 to 0.001
    :param tolance: convergence threshold, typically between 1e-6 and 1e-8
    :param times: maximum number of iterations
    :return: the fitted weight vector
    '''
    t = 0
    result = hyFunc(X, w)
    g = gFunc(X, w, y)
    newW = w - lamb * g
    newResult = hyFunc(X, newW)
    # stop once the mean absolute change in predictions falls below tolance
    while np.sum(np.abs(result - newResult)) / X.shape[0] > tolance:
        w = newW
        result = newResult
        g = gFunc(X, w, y)
        newW = w - lamb * g
        newResult = hyFunc(X, newW)
        t += 1
        if t > times:
            break
    print(t)  # number of iterations actually used
    return w
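To make the update rule newW = w - lamb * g concrete, here is a single manual step on tiny hypothetical data (my own sketch, not part of the original):

X_step = np.array([[3.0, 0.0, 1.0],
                   [4.0, 1.0, 1.0]])
y_step = np.array([9.0, 12.0])
w_step = np.zeros(3)                               # start from all-zero weights
g_step = gradientFunction(X_step, w_step, y_step)
print(w_step - 0.001 * g_step)                     # weights after one update with lamb = 0.001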
Sample data and results
X = np.array([[3, 0],
              [4, 1],
              [5, 2],
              [7, 3]])
y = np.array([9, 12, 15, 20])
row = X.shape[0]
one = np.ones(row)
print(one)
one = one[:, np.newaxis]
print(one)
X = np.hstack((X, one))  # append the constant-1 column so the last weight is the intercept
print(X)
w = gradientDescent(X, np.array([100, 200, 20]), y, hyFunction, gradientFunction)
print(w)
[Result screenshot omitted]
The output is close to the guessed weights (2, 1, 3). Success!
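As an extra sanity check (my own addition, not in the original post), NumPy's closed-form least-squares solver should recover nearly the same weights:

w_exact, *_ = np.linalg.lstsq(X, y, rcond=None)   # closed-form least squares on the augmented X
print(w_exact)                                    # expected to be close to (2, 1, 3)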