IIS_2023_1/kondrashin_mikhail_lab_6/neuralNetwork.py

import statistics
import pandas as pd
from sklearn.model_selection import train_test_split
from funcClassifier import random_state_fit
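# Assumed contract of funcClassifier.random_state_fit (that module is not shown
# here): judging by the filename and the activation names used below, it most
# likely fits an sklearn MLPClassifier with the given activation and random_state
# and returns its accuracy on the held-out split. A minimal sketch, purely
# illustrative and not the original implementation:
#
#     from sklearn.neural_network import MLPClassifier
#
#     def random_state_fit(random_state, x_train, y_train, x_test, y_test, activation):
#         clf = MLPClassifier(activation=activation, random_state=random_state, max_iter=500)
#         clf.fit(x_train, y_train)
#         return clf.score(x_test, y_test)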
# Feature columns used as model inputs; the 'T' column is the prediction target.
names = ['D1', 'TI1', 'V1', 'RH', 'P']


def bring(data):
    # Split the dataframe into a feature matrix and an integer-encoded target.
    return data[names], data['T'].astype('int')


def worker():
    data = pd.read_csv('WindData.csv')
    x, y = bring(data)
    # Hold out 20% of the samples for evaluation, with a fixed seed for reproducibility.
    x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=100)
    # Candidate activation functions passed to the imported classifier helper.
    funcs = ['relu', 'identity', 'tanh', 'logistic']
    acc = []
    for i, func in enumerate(funcs):
        acc.append(random_state_fit(i, x_train, y_train, x_test, y_test, func))
    print('\n Results: ')
    print(f' Min is {min(acc)}, Median is {statistics.median(acc)}, Max is {max(acc)}')
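

# The listing above defines worker() but never calls it, so nothing runs when the
# module is executed; the file appears to be truncated here. A minimal assumed
# entry point (not part of the original listing):
if __name__ == '__main__':
    worker()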