IIS_2023_1/malkova_anastasia_lab_1/plots.py
2023-11-01 23:53:45 +04:00

72 lines
3.1 KiB
Python

import numpy as np
from matplotlib.colors import ListedColormap
from matplotlib.axes import Axes
from matplotlib import pyplot as plt
# Layout of the 3x3 figure, filled row by row:
#   subplots 0-2 -> training data, 3-5 -> test data,
#   subplots 6-8 -> decision surfaces of the three fitted models.
TRAIN_DATA_ROW_LENGTH = 3  # indices < 3: first row, training scatter plots
TEST_DATA_ROW_LENGTH = 6  # indices in [3, 6): second row, test scatter plots
LINEAR_REGRESSION_PLOT_INDEX = 6  # third row: linear model surface
PERCEPTRON_REGRESSION_PLOT_INDEX = 7  # third row: perceptron surface
RIDGE_POLY_REGRESSION_REGRESSION_PLOT_INDEX = 8  # third row: ridge+poly pipeline
def show_plot(x, x_train, x_test, y_train, y_test, my_linear_model, linear_model_score, my_perceptron_model, perceptron_model_score, pipeline, polynomial_model_score):
    """Render a 3x3 grid of subplots comparing three fitted classifiers.

    Row 1 (subplots 0-2) shows the training scatter, row 2 (subplots 3-5)
    the test scatter, and row 3 (subplots 6-8) each model's decision
    surface with the data overlaid and its score printed in the corner.

    Args:
        x: full 2-D feature array; only used to size the mesh grid.
        x_train, x_test: 2-D feature arrays for the scatter plots.
        y_train, y_test: class labels used to colour the points.
        my_linear_model, my_perceptron_model, pipeline: fitted estimators
            passed through to ``show_gradient``.
        linear_model_score, perceptron_model_score, polynomial_model_score:
            scores displayed on the corresponding surface plots.
    """
    h = .02  # step of the regular mesh grid
    # Pad the data range by 0.5 on every side so points don't sit on the edge.
    x0_min, x0_max = x[:, 0].min() - .5, x[:, 0].max() + .5
    x1_min, x1_max = x[:, 1].min() - .5, x[:, 1].max() + .5
    xx0, xx1 = np.meshgrid(np.arange(x0_min, x0_max, h),
                           np.arange(x1_min, x1_max, h))
    cm = plt.cm.RdBu
    cm_bright = ListedColormap(['#FF0000', '#0000FF'])
    for i in range(9):
        current_subplot = plt.subplot(3, 3, i + 1)
        if i < TRAIN_DATA_ROW_LENGTH:
            # Row 1: training points.
            current_subplot.scatter(
                x_train[:, 0], x_train[:, 1], c=y_train, cmap=cm_bright)
        elif i < TEST_DATA_ROW_LENGTH:
            # Row 2: test points, semi-transparent.
            current_subplot.scatter(
                x_test[:, 0], x_test[:, 1], c=y_test, cmap=cm_bright, alpha=0.6)
        else:
            # Row 3: one decision surface per model.  show_gradient sets the
            # subplot title itself, so no extra set_title call is needed here
            # (the original had a redundant one for the ridge+poly subplot).
            if i == LINEAR_REGRESSION_PLOT_INDEX:
                show_gradient(my_linear_model, current_subplot=current_subplot,
                              title='LinearRegression', score=linear_model_score, xx0=xx0, xx1=xx1, cm=cm)
            elif i == PERCEPTRON_REGRESSION_PLOT_INDEX:
                show_gradient(my_perceptron_model, current_subplot=current_subplot,
                              title='Perceptron', score=perceptron_model_score, xx0=xx0, xx1=xx1, cm=cm)
            elif i == RIDGE_POLY_REGRESSION_REGRESSION_PLOT_INDEX:
                show_gradient(pipeline, current_subplot=current_subplot,
                              title='RidgePolyRegression', score=polynomial_model_score, xx0=xx0, xx1=xx1, cm=cm)
            # Overlay both data sets on top of the decision surface.
            current_subplot.scatter(
                x_train[:, 0], x_train[:, 1], c=y_train, cmap=cm_bright)
            current_subplot.scatter(
                x_test[:, 0], x_test[:, 1], c=y_test, cmap=cm_bright, alpha=0.6)
    plt.show()
def show_gradient(model, current_subplot: Axes, title: str, score: float, xx0, xx1, cm):
    """Draw *model*'s decision surface over the mesh grid on a subplot.

    Evaluates the model at every grid node — preferring a continuous score
    (``decision_function``, then ``predict_proba``) over hard ``predict``
    labels — and renders a filled contour plot with the score annotated
    in the bottom-right corner.

    Args:
        model: fitted estimator exposing at least one of
            ``decision_function`` / ``predict_proba`` / ``predict``.
        current_subplot: target matplotlib Axes.
        title: subplot title.
        score: value printed on the plot (formatted to 2 decimals).
        xx0, xx1: mesh-grid coordinate arrays (same shape).
        cm: colormap for the contour fill.
    """
    current_subplot.set_title(title)
    grid = np.c_[xx0.ravel(), xx1.ravel()]  # build the grid once, not per branch
    if hasattr(model, "decision_function"):
        Z = model.decision_function(grid)
    elif hasattr(model, "predict_proba"):
        Z = model.predict_proba(grid)[:, 1]  # probability of the positive class
    elif hasattr(model, "predict"):
        Z = model.predict(grid)
    else:
        # Nothing callable to visualize; leave the subplot empty.
        return
    Z = Z.reshape(xx0.shape)
    current_subplot.contourf(xx0, xx1, Z, cmap=cm, alpha=.8)
    current_subplot.set_xlim(xx0.min(), xx0.max())
    # BUG FIX: the y-axis lower bound was xx0.min() (the x-grid minimum),
    # which mis-cropped the plot whenever the two ranges differ.
    current_subplot.set_ylim(xx1.min(), xx1.max())
    current_subplot.set_xticks(())
    current_subplot.set_yticks(())
    current_subplot.text(xx0.max() - .3, xx1.min() + .3, ('%.2f' % score),
                         size=15, horizontalalignment='left')