I. Introduction

II. Programming Basics

1. Fitting a straight line to generated data

import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import make_regression
from sklearn.linear_model import LinearRegression

X, y = make_regression(n_samples=50, n_features=1, n_informative=1, noise=50, random_state=1)
reg = LinearRegression()
reg.fit(X, y)
z = np.linspace(-3, 3, 200).reshape(-1, 1)
plt.scatter(X, y, c='b', s=60)
plt.plot(z, reg.predict(z), c='k')
print('Slope of the line: {:.2f}'.format(reg.coef_[0]))
print('Intercept of the line: {:.2f}'.format(reg.intercept_))
plt.show()
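To make the link between the learned parameters and the plotted line explicit, here is a minimal sketch (an addition, not from the original post) that reproduces a prediction by hand from reg.coef_ and reg.intercept_; the input value 1.5 is arbitrary:

# Assumes X, y and the fitted `reg` from the snippet above.
x_new = np.array([[1.5]])  # an arbitrary example input
manual = reg.coef_[0] * x_new[0, 0] + reg.intercept_  # y = w * x + b
print('Manual prediction:    {:.2f}'.format(manual))
print('reg.predict() result: {:.2f}'.format(reg.predict(x_new)[0]))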

2. Linear regression

from sklearn.datasets import load_diabetes
from sklearn.model_selection import train_test_split

diabetes = load_diabetes()
X, y = diabetes.data, diabetes.target
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=8)
lr = LinearRegression().fit(X_train, y_train)
print("Training set score: {:.2f}".format(lr.score(X_train, y_train)))
print("Test set score: {:.2f}".format(lr.score(X_test, y_test)))
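The gap between the training and test scores gives a rough sense of how well the model generalizes. As a sketch (not part of the original post), cross-validation can give a more stable estimate on the same data:

from sklearn.linear_model import LinearRegression
from sklearn.model_selection import cross_val_score

# 5-fold cross-validation on the diabetes data (assumes X and y from above)
scores = cross_val_score(LinearRegression(), X, y, cv=5)
print("Cross-validation scores: {}".format(scores))
print("Mean R^2 score: {:.2f}".format(scores.mean()))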

3. A linear model with L2 regularization: ridge regression

from sklearn.linear_model import Ridge

ridge = Ridge(alpha=0.1).fit(X_train, y_train)
plt.plot(ridge.coef_, 's', label='Ridge alpha=0.1')
plt.plot(lr.coef_, 'o', label='linear regression')
plt.xlabel(“coefficient index”)
plt.ylabel(“coefficient magnitude”)
plt.hlines(0, 0, len(lr.coef_))
plt.legend()
print("Training set score: {:.2f}".format(ridge.score(X_train, y_train)))
print("Test set score: {:.2f}".format(ridge.score(X_test, y_test)))
plt.show()
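A single alpha value shows only one point on the regularization path. The following sketch (an addition, not from the original post) refits Ridge with a few alpha values to visualize how stronger L2 regularization shrinks the coefficients toward zero; the values 0.1, 1 and 10 are chosen purely for illustration:

from sklearn.linear_model import Ridge

# Assumes X_train, y_train and the fitted `lr` from the snippets above.
for alpha, marker in [(0.1, 's'), (1.0, '^'), (10.0, 'v')]:
    r = Ridge(alpha=alpha).fit(X_train, y_train)
    plt.plot(r.coef_, marker, label='Ridge alpha={}'.format(alpha))
plt.plot(lr.coef_, 'o', label='linear regression')
plt.xlabel("coefficient index")
plt.ylabel("coefficient magnitude")
plt.hlines(0, 0, len(lr.coef_))
plt.legend()
plt.show()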

 

Copyright notice: This is an original article by zhaop8078, released under the CC 4.0 BY-SA license. When reposting, please include a link to the original article and this notice.
Original article: https://www.cnblogs.com/zhaop8078/p/9781248.html