Example implementations of 3 regression models in Python (Linear Regression, Lasso, Ridge)
Updated: 2020-10-15 10:43:27  Author: 農大魯迅
This article presents example implementations of 3 regression models (Linear Regression, Lasso, Ridge) in Python to help you get a better grip on machine learning. Interested readers can follow along.
Common abstract base class
import numpy as np
from abc import ABCMeta, abstractmethod
# StandardScaler is assumed to come from scikit-learn (the import is missing in the original snippet).
from sklearn.preprocessing import StandardScaler


class LinearModel(metaclass=ABCMeta):
    """
    Abstract base class of Linear Model.
    """

    def __init__(self):
        # Before fit or predict, transform the samples so each feature has mean 0 and variance 1.
        self.scaler = StandardScaler()

    @abstractmethod
    def fit(self, X, y):
        """fit func"""

    def predict(self, X):
        # `fit` must be called before `predict`.
        if not hasattr(self, 'coef_'):
            raise Exception('Please run `fit` before predict')
        X = self.scaler.transform(X)
        # Prepend a column of ones so that coef_[0] acts as the intercept.
        X = np.c_[np.ones(X.shape[0]), X]
        # `x @ y` == `np.dot(x, y)`
        return X @ self.coef_
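Both fit and predict prepend a column of ones to the standardized features, so the first entry of coef_ plays the role of the intercept. A minimal sketch of what np.c_ produces here (the sample values are made up for illustration):

import numpy as np

X = np.array([[2.0], [4.0], [6.0]])        # 3 samples, 1 feature (illustrative values)
X_design = np.c_[np.ones(X.shape[0]), X]   # prepend the bias column
print(X_design)
# [[1. 2.]
#  [1. 4.]
#  [1. 6.]]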
Linear Regression
class LinearRegression(LinearModel):
    """
    Linear Regression.
    """

    def __init__(self):
        super().__init__()

    def fit(self, X, y):
        """
        :param X: shape = (n_samples, n_features)
        :param y: shape = (n_samples,)
        :return: self
        """
        self.scaler.fit(X)
        X = self.scaler.transform(X)
        X = np.c_[np.ones(X.shape[0]), X]
        # Closed-form (normal equation) solution: coef_ = (X^T X)^-1 X^T y
        self.coef_ = np.linalg.inv(X.T @ X) @ X.T @ y
        return self
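The fit above solves ordinary least squares in closed form via the normal equation. As a quick sanity check (a sketch, not part of the original article; data and variable names are made up), the same coefficients can be recovered with np.linalg.lstsq on the design matrix:

import numpy as np

rng = np.random.default_rng(0)
X = rng.normal(size=(50, 2))                      # illustrative data
y = 3.0 * X[:, 0] - 2.0 * X[:, 1] + 1.0 + rng.normal(scale=0.1, size=50)

X_design = np.c_[np.ones(X.shape[0]), X]
coef_normal_eq = np.linalg.inv(X_design.T @ X_design) @ X_design.T @ y
coef_lstsq, *_ = np.linalg.lstsq(X_design, y, rcond=None)

print(np.allclose(coef_normal_eq, coef_lstsq))    # True (up to numerical precision)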
Lasso
class Lasso(LinearModel):
    """
    Lasso Regression, trained by Coordinate Descent.
    cost = ||y - X @ coef_||^2 + alpha * ||coef_||_1
    """

    def __init__(self, alpha=1.0, n_iter=1000, e=0.1):
        self.alpha = alpha
        self.n_iter = n_iter
        self.e = e
        super().__init__()

    def fit(self, X, y):
        self.scaler.fit(X)
        X = self.scaler.transform(X)
        X = np.c_[np.ones(X.shape[0]), X]
        self.coef_ = np.zeros(X.shape[1])
        for _ in range(self.n_iter):
            z = np.sum(X * X, axis=0)
            tmp = np.zeros(X.shape[1])
            for k in range(X.shape[1]):
                # Correlation of feature k with the residual, with coordinate k excluded.
                wk = self.coef_[k]
                self.coef_[k] = 0
                p_k = X[:, k] @ (y - X @ self.coef_)
                # Soft-thresholding update for coordinate k.
                if p_k < -self.alpha / 2:
                    w_k = (p_k + self.alpha / 2) / z[k]
                elif p_k > self.alpha / 2:
                    w_k = (p_k - self.alpha / 2) / z[k]
                else:
                    w_k = 0
                tmp[k] = w_k
                self.coef_[k] = wk
            # Stop once the coefficients change by less than the tolerance `e`.
            if np.linalg.norm(self.coef_ - tmp) < self.e:
                break
            self.coef_ = tmp
        return self
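The branch on p_k above is the soft-thresholding rule that coordinate descent applies to each coefficient: small correlations are zeroed out, larger ones are shrunk toward zero. A minimal stand-alone sketch of the same rule (the function name and the values are illustrative, not from the original code):

def soft_threshold(p_k, z_k, alpha):
    """Coordinate-descent update used above: shrink p_k toward zero by alpha / 2."""
    if p_k < -alpha / 2:
        return (p_k + alpha / 2) / z_k
    elif p_k > alpha / 2:
        return (p_k - alpha / 2) / z_k
    return 0.0

print(soft_threshold(0.3, 1.0, 1.0))   # 0.0  (|p_k| <= alpha / 2, coefficient is zeroed)
print(soft_threshold(2.0, 1.0, 1.0))   # 1.5  (shrunk toward zero by alpha / 2)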
Ridge
class Ridge(LinearModel):
    """
    Ridge Regression.
    """

    def __init__(self, alpha=1.0):
        self.alpha = alpha
        super().__init__()

    def fit(self, X, y):
        """
        :param X: shape = (n_samples, n_features)
        :param y: shape = (n_samples,)
        :return: self
        """
        self.scaler.fit(X)
        X = self.scaler.transform(X)
        X = np.c_[np.ones(X.shape[0]), X]
        # Closed-form solution: coef_ = (X^T X + alpha * I)^-1 X^T y
        self.coef_ = np.linalg.inv(
            X.T @ X + self.alpha * np.eye(X.shape[1])) @ X.T @ y
        return self
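Compared with LinearRegression, the only change is the alpha * I term added before inverting, which shrinks the coefficients. A small sketch (illustrative data, not from the article) showing how larger alpha pulls the solution toward zero:

import numpy as np

rng = np.random.default_rng(1)
X = rng.normal(size=(30, 2))
y = X @ np.array([5.0, -3.0]) + rng.normal(scale=0.1, size=30)
X_design = np.c_[np.ones(X.shape[0]), X]

for alpha in (0.0, 1.0, 100.0):
    coef = np.linalg.inv(
        X_design.T @ X_design + alpha * np.eye(X_design.shape[1])) @ X_design.T @ y
    print(alpha, np.round(coef, 3))
# As alpha grows, the fitted coefficients are pulled toward zero.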
Test code
import matplotlib.pyplot as plt
import numpy as np


def gen_reg_data():
    X = np.arange(0, 45, 0.1)
    X = X + np.random.random(size=X.shape[0]) * 20
    y = 2 * X + np.random.random(size=X.shape[0]) * 20 + 10
    # StandardScaler expects 2-D input, so reshape the single feature into a column.
    return X.reshape(-1, 1), y


def test_linear_regression():
    clf = LinearRegression()
    X, y = gen_reg_data()
    clf.fit(X, y)
    plt.plot(X, y, '.')
    X_axis = np.arange(-5, 75, 0.1).reshape(-1, 1)
    plt.plot(X_axis, clf.predict(X_axis))
    plt.title("Linear Regression")
    plt.show()


def test_lasso():
    clf = Lasso()
    X, y = gen_reg_data()
    clf.fit(X, y)
    plt.plot(X, y, '.')
    X_axis = np.arange(-5, 75, 0.1).reshape(-1, 1)
    plt.plot(X_axis, clf.predict(X_axis))
    plt.title("Lasso")
    plt.show()


def test_ridge():
    clf = Ridge()
    X, y = gen_reg_data()
    clf.fit(X, y)
    plt.plot(X, y, '.')
    X_axis = np.arange(-5, 75, 0.1).reshape(-1, 1)
    plt.plot(X_axis, clf.predict(X_axis))
    plt.title("Ridge")
    plt.show()
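To run all three tests in one go, a minimal entry point such as the following can be appended (this runner is not part of the original article):

if __name__ == '__main__':
    test_linear_regression()
    test_lasso()
    test_ridge()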
Test results
[Figures: scatter of the generated data with the fitted line, titled "Linear Regression", "Lasso", and "Ridge".]
For more machine learning code, visit https://github.com/WiseDoge/plume
This concludes the examples of implementing 3 regression models (Linear Regression, Lasso, Ridge) in Python. For more on implementing regression models in Python, please see the other related articles on 腳本之家.
相關(guān)文章
Python結(jié)合Sprak實現(xiàn)計算曲線與X軸上方的面積
這篇文章主要介紹了Python結(jié)合Sprak實現(xiàn)計算曲線與X軸上方的面積,文中通過示例代碼介紹的非常詳細,對大家的學習或者工作具有一定的參考學習價值,需要的朋友們下面隨著小編來一起學習吧2023-02-02
使用Python爬取Json數(shù)據(jù)的示例代碼
這篇文章主要介紹了使用Python爬取Json數(shù)據(jù)的示例代碼,本文給大家介紹的非常詳細,對大家的學習或工作具有一定的參考借鑒價值,需要的朋友可以參考下2020-12-12
注意import和from import 的區(qū)別及說明
這篇文章主要介紹了注意import和from import 的區(qū)別及說明,具有很好的參考價值,希望對大家有所幫助。如有錯誤或未考慮完全的地方,望不吝賜教2022-09-09

