From efa81f50bf660d6ae7fa8f75702c36e466720187 Mon Sep 17 00:00:00 2001
From: Дмитрий Александров
Date: Thu, 21 Sep 2023 20:19:20 +0400
Subject: [PATCH] Finishing touches
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 alexandrov_dmitrii_lab_1/lab1.py   | 112 +++++++++--------------------
 alexandrov_dmitrii_lab_1/readme.md |   2 +-
 2 files changed, 36 insertions(+), 78 deletions(-)

diff --git a/alexandrov_dmitrii_lab_1/lab1.py b/alexandrov_dmitrii_lab_1/lab1.py
index e60c4ff..071ff62 100644
--- a/alexandrov_dmitrii_lab_1/lab1.py
+++ b/alexandrov_dmitrii_lab_1/lab1.py
@@ -7,92 +7,50 @@ from sklearn.model_selection import train_test_split
 from sklearn.preprocessing import PolynomialFeatures
 from sklearn.pipeline import Pipeline
 
+rs = random.randrange(50)
+
+X, y = make_moons(n_samples=250, noise=0.3, random_state=rs)
+X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4, random_state=42)
+
+figure = plt.figure(1, figsize=(16, 9))
+axis = figure.subplots(4, 3)
+cm = ListedColormap(['#FF0000', "#0000FF"])
+arr_res = list(range(len(y_test)))
+X_scale = list(range(len(y_test)))
+
+
+def test(col, model):
+    global axis
+    global arr_res
+    global X_test
+    global X_train
+    global y_train
+    global y_test
+
+    model.fit(X_train, y_train)
+    res_y = model.predict(X_test)
+    print(model.score(X_test, y_test))
+
+    axis[0, col].scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm)
+    axis[1, col].scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm)
+    axis[2, col].scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm)
+    axis[2, col].scatter(X_test[:, 0], X_test[:, 1], c=res_y, cmap=cm)
+    axis[3, col].plot([i for i in range(len(res_y))], y_test, c="g")
+    axis[3, col].plot([i for i in range(len(res_y))], res_y, c="r")
+
 
 def start():
-    rs = random.randrange(10)
-    rs = 5
-
-    X, y = make_moons(n_samples=250, noise=0.3, random_state=rs)
-    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4, random_state=42)
-
     lin = LinearRegression()
     poly = Pipeline([('poly', PolynomialFeatures(degree=3)),
                      ('linear', LinearRegression())])
     ridge = Pipeline([('poly', PolynomialFeatures(degree=3)),
-                     ('ridge', Ridge(alpha=1.0))])
+                      ('ridge', Ridge(alpha=1.0))])
 
-
-    figure = plt.figure(1, figsize=(16, 9))
-    axis = figure.subplots(4, 3)
-    cm = ListedColormap(['#FF0000', "#0000FF"])
-    arr_res = list(range(len(y_test)))
-    X_scale = list(range(len(y_test)))
-
-
-    lin.fit(X_train, y_train)
-    res_y = lin.predict(X_test)
-    print(lin.score(X_test, y_test))
-
-    axis[0, 0].scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm)
-    axis[1, 0].scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm)
-    axis[3, 0].plot([i for i in range(len(res_y))], y_test, c="g")
-    axis[3, 0].plot([i for i in range(len(res_y))], res_y, c="r")
-
-    for i in range(len(X_test)):
-        arr_res[i] = [X_test[i], res_y[i], y_test[i]]
-    arr_res = sorted(arr_res, key=lambda x: x[1])
-    for i in range(len(X_test)):
-        X_scale[i] = arr_res[i][0]
-        res_y[i] = arr_res[i][1]
-        arr_res[i] = arr_res[i][2]
-
-    axis[2, 0].plot(X_scale, arr_res, c="g")
-    axis[2, 0].plot(X_scale, res_y, c="r")
-
-
-    poly.fit(X_train, y_train)
-    res_y = poly.predict(X_test)
-    print(poly.score(X_test, y_test))
-
-    axis[0, 1].scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm)
-    axis[1, 1].scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm)
-    axis[3, 1].plot([i for i in range(len(res_y))], y_test, c="g")
-    axis[3, 1].plot([i for i in range(len(res_y))], res_y, c="r")
-
-    for i in range(len(X_test)):
-        arr_res[i] = [X_test[i], res_y[i], y_test[i]]
-    arr_res = sorted(arr_res, key=lambda x: x[1])
-    for i in range(len(X_test)):
-        X_scale[i] = arr_res[i][0]
-        res_y[i] = arr_res[i][1]
-        arr_res[i] = arr_res[i][2]
-
-    axis[2, 1].plot(X_scale, arr_res, c="g")
-    axis[2, 1].plot(X_scale, res_y, c="r")
-
-
-    ridge.fit(X_train, y_train)
-    res_y = ridge.predict(X_test)
-    print(ridge.score(X_test, y_test))
-
-    axis[0, 2].scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm)
-    axis[1, 2].scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm)
-    axis[3, 2].plot([i for i in range(len(res_y))], y_test, c="g")
-    axis[3, 2].plot([i for i in range(len(res_y))], res_y, c="r")
-
-    for i in range(len(X_test)):
-        arr_res[i] = [X_test[i], res_y[i], y_test[i]]
-    arr_res = sorted(arr_res, key=lambda x: x[1])
-    for i in range(len(X_test)):
-        X_scale[i] = arr_res[i][0]
-        res_y[i] = arr_res[i][1]
-        arr_res[i] = arr_res[i][2]
-
-    axis[2, 2].plot(X_scale, arr_res, c="g")
-    axis[2, 2].plot(X_scale, res_y, c="r")
+    test(0, lin)
+    test(1, poly)
+    test(2, ridge)
 
     plt.show()
 
 
 start()
-
diff --git a/alexandrov_dmitrii_lab_1/readme.md b/alexandrov_dmitrii_lab_1/readme.md
index fea4428..98bcbfe 100644
--- a/alexandrov_dmitrii_lab_1/readme.md
+++ b/alexandrov_dmitrii_lab_1/readme.md
@@ -11,7 +11,7 @@ The lab1.py file contains and runs the program; it ~~seemingly~~ requires no arguments or configuration,
 
 ###Program description
-Generates one of 10 datasets, shows a window with plots, and prints the score of each model trained for the assignment.
+Generates one of 50 datasets, shows a window with plots, and prints the score of each model trained for the assignment.
 
 Uses the matplotlib library to display the plots and sklearn to build and apply the models.
 
 ###Test results
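
For readers who would rather not apply the hunks mentally, the sketch below is a consolidated reading of lab1.py as this patch leaves it: the three copy-pasted fit/score/plot blocks are collapsed into one test(col, model) helper that is reused for the linear, polynomial, and ridge models. It is an illustration assembled from the patch, not the committed file: the original's global statements are dropped (the helper only reads the module-level data), and the enumerate loop at the bottom is an editorial shorthand for the three explicit test(...) calls in start().

    import random

    import matplotlib.pyplot as plt
    from matplotlib.colors import ListedColormap
    from sklearn.datasets import make_moons
    from sklearn.linear_model import LinearRegression, Ridge
    from sklearn.model_selection import train_test_split
    from sklearn.pipeline import Pipeline
    from sklearn.preprocessing import PolynomialFeatures

    # One of 50 possible datasets, as described in the patched readme.
    rs = random.randrange(50)
    X, y = make_moons(n_samples=250, noise=0.3, random_state=rs)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4, random_state=42)

    # 4 rows of subplots, one column per model.
    figure = plt.figure(1, figsize=(16, 9))
    axis = figure.subplots(4, 3)
    cm = ListedColormap(['#FF0000', '#0000FF'])


    def test(col, model):
        """Fit `model`, print its score, and fill column `col` of the grid."""
        model.fit(X_train, y_train)
        res_y = model.predict(X_test)
        print(model.score(X_test, y_test))

        # Row 0: training data; row 1: test data; row 2: test data overlaid
        # with predictions; row 3: true vs. predicted values by sample index.
        axis[0, col].scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm)
        axis[1, col].scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm)
        axis[2, col].scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm)
        axis[2, col].scatter(X_test[:, 0], X_test[:, 1], c=res_y, cmap=cm)
        axis[3, col].plot(range(len(res_y)), y_test, c="g")
        axis[3, col].plot(range(len(res_y)), res_y, c="r")


    lin = LinearRegression()
    poly = Pipeline([('poly', PolynomialFeatures(degree=3)),
                     ('linear', LinearRegression())])
    ridge = Pipeline([('poly', PolynomialFeatures(degree=3)),
                      ('ridge', Ridge(alpha=1.0))])

    # Editorial shorthand for test(0, lin); test(1, poly); test(2, ridge).
    for column, model in enumerate((lin, poly, ridge)):
        test(column, model)

    plt.show()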