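# Compare linear regression, a perceptron, and polynomial ridge regression
# on scikit-learn's make_circles toy dataset, plotting the points and
# reporting MAE/MSE for each model.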
from matplotlib import pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn.linear_model import LinearRegression, Ridge, Perceptron
from sklearn.model_selection import train_test_split
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.datasets import make_circles
from sklearn import metrics

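# Dark/bright reds for the training points, orange shades for the test points.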
cm_bright = ListedColormap(['#8B0000', '#FF0000'])
cm_bright1 = ListedColormap(['#FF4500', '#FFA500'])


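# Generate the two-circles dataset, split it 80/20, and run each model on the same split.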
def create_circles():
    x, y = make_circles(noise=0.2, factor=0.5, random_state=0)
    X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=42)

    linear_regression(X_train, X_test, y_train, y_test)
    perceptron(X_train, X_test, y_train, y_test)
    ridge_regression(X_train, X_test, y_train, y_test)


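# Ordinary least squares treats the 0/1 class labels as continuous targets; it serves as a baseline.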
def linear_regression(x_train, x_test, y_train, y_test):
    model = LinearRegression().fit(x_train, y_train)
    # Use model.predict(); reconstructing intercept_ + coef_ * x by hand gave a
    # per-feature product instead of the actual prediction.
    y_predict = model.predict(x_test)
    plt.title('Linear regression')
    print('Linear regression')
    plt.scatter(x_train[:, 0], x_train[:, 1], c=y_train, cmap=cm_bright)
    plt.scatter(x_test[:, 0], x_test[:, 1], c=y_test, cmap=cm_bright1, alpha=0.7)
    # With two input features there is no single regression line to draw,
    # so the fit is assessed numerically instead.
    print('MAE', metrics.mean_absolute_error(y_test, y_predict))
    print('MSE', metrics.mean_squared_error(y_test, y_predict))
    plt.show()


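# A single perceptron is a linear classifier, so it cannot separate the concentric
# circles; the error metrics below make that visible.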
def perceptron(x_train, x_test, y_train, y_test):
    model = Perceptron()
    model.fit(x_train, y_train)
    y_predict = model.predict(x_test)
    plt.title('Perceptron')
    plt.scatter(x_train[:, 0], x_train[:, 1], c=y_train, cmap=cm_bright)
    plt.scatter(x_test[:, 0], x_test[:, 1], c=y_test, cmap=cm_bright1, alpha=0.8)
    plt.show()
    print('Perceptron')
    print('MAE', metrics.mean_absolute_error(y_test, y_predict))
    print('MSE', metrics.mean_squared_error(y_test, y_predict))


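# Degree-3 polynomial features give the ridge model the quadratic terms it needs
# to fit the circular boundary.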
def ridge_regression(X_train, X_test, y_train, y_test):
    model = Pipeline([('poly', PolynomialFeatures(degree=3)), ('ridge', Ridge(alpha=1.0))])
    model.fit(X_train, y_train)
    y_predict = model.predict(X_test)
    plt.title('Polynomial ridge regression')
    plt.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm_bright)
    plt.scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm_bright1, alpha=0.7)
    plt.show()
    print('Polynomial ridge regression')
    print('MAE', metrics.mean_absolute_error(y_test, y_predict))
    print('MSE', metrics.mean_squared_error(y_test, y_predict))


if __name__ == '__main__':
    create_circles()