import numpy as np
from matplotlib import pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn.model_selection import train_test_split
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.datasets import make_circles
from sklearn.linear_model import LinearRegression, Ridge

# Compare linear, polynomial and ridge-regularized polynomial regression on
# the two-class "circles" dataset, then visualize each model's prediction
# surface over a regular 2-D grid.

rs = np.random.RandomState(50)
X, y = make_circles(noise=0.2, factor=0.5, random_state=rs)  # generate data

# Hold out 40% of the samples for evaluation.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=.4, random_state=42)

# Build the models: plain linear regression and two degree-5 polynomial
# pipelines (unregularized vs. L2-regularized via Ridge).
linear_regression = LinearRegression()
polynomial_regression = make_pipeline(PolynomialFeatures(degree=5),
                                      LinearRegression())
ridge_polynomial_regression = make_pipeline(PolynomialFeatures(degree=5),
                                            Ridge(alpha=1.0))
models = [("Линейная регрессия", linear_regression),
          ("Полиномиальная регрессия", polynomial_regression),
          ("Гребневая полиномиальная регрессия", ridge_polynomial_regression)]

# Train each model and report its R^2 score.
# BUGFIX: the original scored on the training split (X_train, y_train), which
# overstates quality, while the test-set prediction it computed went unused.
# Evaluate on the held-out test split instead (and drop the dead variable).
for name, model in models:
    model.fit(X_train, y_train)           # fit on the training split
    score = model.score(X_test, y_test)   # R^2 on unseen data
    print(name + ': качество модели = ' + str(score))

# Plot each model's prediction surface plus the data points.
cm = plt.cm.RdBu
cm_bright = ListedColormap(['#FF0000', '#0000FF'])

# The evaluation grid depends only on X and the step h, so build it once
# instead of rebuilding it inside every subplot iteration.
h = .02  # step of the regular grid
x0_min, x0_max = X[:, 0].min() - .5, X[:, 0].max() + .5
x1_min, x1_max = X[:, 1].min() - .5, X[:, 1].max() + .5
xx0, xx1 = np.meshgrid(np.arange(x0_min, x0_max, h),
                       np.arange(x1_min, x1_max, h))
grid = np.c_[xx0.ravel(), xx1.ravel()]

for i, (name, model) in enumerate(models):
    current_subplot = plt.subplot(1, 3, i + 1)
    Z = model.predict(grid).reshape(xx0.shape)
    current_subplot.contourf(xx0, xx1, Z, cmap=cm, alpha=.8)
    # Training points opaque, test points translucent.
    current_subplot.scatter(X_train[:, 0], X_train[:, 1], c=y_train,
                            cmap=cm_bright)
    current_subplot.scatter(X_test[:, 0], X_test[:, 1], c=y_test,
                            cmap=cm_bright, alpha=0.4)
    plt.title(name)
plt.show()