Compare commits
100 Commits
basharin_s...degtyarev_
| Author | SHA1 | Date |
|---|---|---|
| | 744877cdd9 | |
| | a8c58683dd | |
| | b3e1e38eeb | |
| | 6de7179b7d | |
| | c0ead13d82 | |
| | 357f26d992 | |
| | f2f5d16974 | |
| | cab38b4f27 | |
| | c813e16f55 | |
| | 9142e612f8 | |
| | 7c92d143e0 | |
| | 52431a867c | |
| | 666a34b483 | |
| | 57bb7a90cd | |
| | da2b5dacb8 | |
| | 0acf59f77f | |
| | 40f7706378 | |
| | 2881070bf0 | |
| | 02422f4eff | |
| | 831912d692 | |
| | 70c0f7a0e1 | |
| | 8592ba88a4 | |
| | 4973adb1f2 | |
| | 388c9e64cf | |
| | 1f8bc49d17 | |
| | d4dbce9b09 | |
| | 931d8de854 | |
| | ec42e21a1d | |
| | 02147c3d5f | |
| | d388cd8442 | |
| | 7f45d87074 | |
| | fe77447993 | |
| | 9ce5af1aea | |
| | 278b85e66a | |
| | 2885277f6c | |
| | 58b1009367 | |
| | 9755697671 | |
| | d6bdc5893a | |
| | 28056f94bd | |
| | 1aef95a6d9 | |
| | 95519adc5a | |
| | 5746fc2084 | |
| | c92f833265 | |
| | 1d2c86f568 | |
| | b27537157a | |
| | ee70ec67ba | |
| | dde432a16b | |
| | def334a1f4 | |
| | f6a9dc6a74 | |
| | d8ea68139d | |
| | 37d75cda32 | |
| | 2383a997b1 | |
| | e8ff2392da | |
| | de79db46c0 | |
| | 82829a15a2 | |
| | c9fa1b2d60 | |
| | d5cd684a98 | |
| | a9af6c3c37 | |
| | e1bba9b13c | |
| | aa543e057e | |
| | 72b717d7ae | |
| | 3007207ade | |
| | 4838c6dbeb | |
| | 4949686542 | |
| | 4f16492ad7 | |
| | 565b4f171f | |
| | a87330830b | |
| | a8f3b6c692 | |
| | ce7cfa4365 | |
| | a492e2a6df | |
| | 462c0ea3e0 | |
| | 4eb8cfabd1 | |
| | e65543a5fc | |
| | f0e16a20d4 | |
| | 08ed6413b9 | |
| | 1f35af8f8f | |
| | 63198665cc | |
| | 10761e96bb | |
| | f61aea2ee2 | |
| | be664b513c | |
| | 5d250948b5 | |
| | c344eb7300 | |
| | 8a51aacfb2 | |
| | 017623e084 | |
| | 09b9bfc730 | |
| | fee881b4b4 | |
| | 5e0058b82e | |
| | faeeecf1ef | |
| | dab82f11ee | |
| | 55b79c339e | |
| | 0e5a5ad282 | |
| | a9e95110c1 | |
| | 0fa8db9c5d | |
| | e8a3914840 | |
| | 63c40e202e | |
| | b8af0044a0 | |
| | e36a729776 | |
| | bbd6aea496 | |
| | 16b36dce9b | |
| | 0d865a6160 | |
.gitignore (vendored, new file, 141 lines)
@@ -0,0 +1,141 @@
### Python template
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

.idea
abanin_daniil_lab_7/README.md (new file, 39 lines)
@@ -0,0 +1,39 @@
# Laboratory work No. 7

### A recurrent neural network and the text generation task

## ПИбд-41 Абанин Даниил

### How to run the lab:

* install python, numpy, keras, tensorflow
* run the project (entry point: lab7)

### Technologies used:

* The `Python` programming language; the numpy, keras and tensorflow libraries
* The `PyCharm` IDE

### What the lab does:

* A recurrent neural network is trained on selected literary texts to solve the text generation task.
* The architecture and parameters have to be tuned so that the result is as coherent as possible.

### Test

* The text is read from .txt files (eng_text.txt, rus_text.txt)
* The get_model_data function is called; it returns the input and output data (X, y), the vocabulary size and the tokenizer. The Tokenizer is used with char_level=True, which makes it possible to drop the Embedding layer later on (see the sketch after this list)
* A Sequential model (a sequential recurrent neural network) is created and two LSTM layers are added. LSTM (Long Short-Term Memory) is a kind of recurrent neural network that handles sequential data well, and stacking several layers gives more flexibility. Dropout is a regularisation technique for neural networks and deep-learning models that counters overfitting. A Dense layer with the softmax activation is used to predict the next character
* The model is compiled with the sparse_categorical_crossentropy loss (the targets are integer character indices, so one-hot encoding is not needed)
* The model is trained for 100 epochs (the option that worked best)
* Text generation
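To make the data-preparation step concrete, here is a minimal sketch of char-level tokenization using the same keras.preprocessing classes that lab7.py imports; the sample string and sequence length below are made up for illustration (the lab itself uses seq_length = 10 and the full corpus files):

```python
import numpy as np
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences

sample_text = "down, down, down."  # stand-in for the contents of eng_text.txt / rus_text.txt
seq_len = 5

tokenizer = Tokenizer(char_level=True)  # one token per character, so no Embedding layer is needed
tokenizer.fit_on_texts([sample_text])
tokens = tokenizer.texts_to_sequences([sample_text])[0]

# Sliding window: seq_len characters as input, the following character as the target
windows = [tokens[i:i + seq_len] for i in range(len(tokens) - seq_len)]
targets = [tokens[i + seq_len] for i in range(len(tokens) - seq_len)]

X = pad_sequences(windows, maxlen=seq_len)
y = np.array(targets)
vocab_size = len(tokenizer.word_index) + 1

print(X.shape, y.shape, vocab_size)  # (12, 5) (12,) 8 for this sample string
```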
Generated texts

* ENG: I must be getting somewhere near the centre of the earth. how funny it'll seem to come out among the people that walk with their heads downward! the antipathies, i think—' (for, you see, alice had learnt several things of this

* RUS: господин осматривал свою комнату, внесены были его пожитки: прежде всего чемодан из белой кожи, несколько поистасканный, показывавший, что был не в первый раз в дороге. чемодан внесли кучер селифан отправился на конюшню вози





As a result, the program is able to generate coherent text in both cases.
abanin_daniil_lab_7/eng_text.txt (new file, 7 lines)
@@ -0,0 +1,7 @@
Either the well was very deep, or she fell very slowly, for she had plenty of time as she went down to look about her and to wonder what was going to happen next. First, she tried to look down and make out what she was coming to, but it was too dark to see anything; then she looked at the sides of the well, and noticed that they were filled with cupboards and book-shelves; here and there she saw maps and pictures hung upon pegs. She took down a jar from one of the shelves as she passed; it was labelled 'ORANGE MARMALADE', but to her great disappointment it was empty: she did not like to drop the jar for fear of killing somebody, so managed to put it into one of the cupboards as she fell past it.
'Well!' thought Alice to herself, 'after such a fall as this, I shall think nothing of tumbling down stairs! How brave they'll all think me at home! Why, I wouldn't say anything about it, even if I fell off the top of the house!' (Which was very likely true.)
Down, down, down. Would the fall NEVER come to an end! 'I wonder how many miles I've fallen by this time?' she said aloud. 'I must be getting somewhere near the centre of the earth. Let me see: that would be four thousand miles down, I think—' (for, you see, Alice had learnt several things of this sort in her lessons in the schoolroom, and though this was not a VERY good opportunity for showing off her knowledge, as there was no one to listen to her, still it was good practice to say it over) '—yes, that's about the right distance—but then I wonder what Latitude or Longitude I've got to?' (Alice had no idea what Latitude was, or Longitude either, but thought they were nice grand words to say.)
Presently she began again. 'I wonder if I shall fall right THROUGH the earth! How funny it'll seem to come out among the people that walk with their heads downward! The Antipathies, I think—' (she was rather glad there WAS no one listening, this time, as it didn't sound at all the right word) '—but I shall have to ask them what the name of the country is, you know. Please, Ma'am, is this New Zealand or Australia?' (and she tried to curtsey as she spoke—fancy CURTSEYING as you're falling through the air!
abanin_daniil_lab_7/lab7.py (new file, 75 lines)
@@ -0,0 +1,75 @@
from keras import Sequential
from keras.layers import LSTM, Dense, Dropout
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
import numpy as np

with open('rus_text.txt', 'r', encoding='utf-8') as file:
    text = file.read()


def create_sequences(text, seq_len):
    # Sliding window over the token list: seq_len tokens as input, the next token as the target
    sequences = []
    next_chars = []
    for i in range(0, len(text) - seq_len):
        sequences.append(text[i:i + seq_len])
        next_chars.append(text[i + seq_len])
    return sequences, next_chars


def get_model_data(seq_length):
    tokenizer = Tokenizer(char_level=True)
    tokenizer.fit_on_texts([text])

    token_text = tokenizer.texts_to_sequences([text])[0]

    sequences, next_chars = create_sequences(token_text, seq_length)

    vocab_size = len(tokenizer.word_index) + 1
    x = pad_sequences(sequences, maxlen=seq_length)
    # Add a trailing feature dimension so x has shape (samples, seq_length, 1),
    # matching the input_shape declared on the first LSTM layer below
    x = np.expand_dims(x, axis=-1)
    y = np.array(next_chars)

    return x, y, vocab_size, tokenizer


def model_build(model, vocab_size):
    # Two stacked LSTM layers, Dropout for regularisation, softmax over the vocabulary
    model.add(LSTM(256, input_shape=(seq_length, 1), return_sequences=True))
    model.add(LSTM(128))
    model.add(Dropout(0.2))
    model.add(Dense(vocab_size, activation='softmax'))

    model.compile(loss='sparse_categorical_crossentropy', optimizer='adam', metrics=['accuracy'])


# Text generation function
def generate_text(seed_text, gen_length, tokenizer, model):
    generated_text = seed_text

    for _ in range(gen_length):
        sequence = tokenizer.texts_to_sequences([seed_text])[0]
        sequence = pad_sequences([sequence], maxlen=seq_length)
        sequence = np.expand_dims(sequence, axis=-1)  # same (1, seq_length, 1) shape as in training
        prediction = model.predict(sequence)[0]
        predicted_index = np.argmax(prediction)
        predicted_char = tokenizer.index_word[predicted_index]
        generated_text += predicted_char
        seed_text += predicted_char
        seed_text = seed_text[1:]

    return generated_text


seq_length = 10
seed_text = "господин осматривал свою"

# Build the training data (the Tokenizer is created and fitted on the text inside get_model_data)
X, y, vocab_size, tokenizer = get_model_data(seq_length)

model = Sequential()
model_build(model, vocab_size)

model.fit(X, y, epochs=100, verbose=1)

generated_text = generate_text(seed_text, 200, tokenizer, model)
print(generated_text)
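One note on the design of generate_text above: it always takes np.argmax, so generation is deterministic for a given seed. A common variation, not used in this lab, is to sample the next character from the softmax output with a temperature; a minimal sketch of such a helper (the name sample_index and the default temperature are illustrative):

```python
import numpy as np

def sample_index(prediction, temperature=0.8):
    """Sample an index from a 1-D softmax output instead of taking argmax."""
    logits = np.log(np.clip(prediction, 1e-8, 1.0)) / temperature
    probs = np.exp(logits) / np.sum(np.exp(logits))
    return int(np.random.choice(len(probs), p=probs))
```

Inside the generation loop, predicted_index = np.argmax(prediction) would then become predicted_index = sample_index(prediction).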
abanin_daniil_lab_7/result_eng.png (new binary file, 154 KiB)
abanin_daniil_lab_7/result_rus.png (new binary file, 85 KiB)
abanin_daniil_lab_7/rus_text.txt (new file, 3 lines)
@@ -0,0 +1,3 @@
В ворота гостиницы губернского города NN въехала довольно красивая рессорная небольшая бричка, в какой ездят холостяки: отставные подполковники, штабс-капитаны, помещики, имеющие около сотни душ крестьян, — словом, все те, которых называют господами средней руки. В бричке сидел господин, не красавец, но и не дурной наружности, ни слишком толст, ни слишком тонок; нельзя сказать, чтобы стар, однако ж и не так чтобы слишком молод. Въезд его не произвел в городе совершенно никакого шума и не был сопровожден ничем особенным; только два русские мужика, стоявшие у дверей кабака против гостиницы, сделали кое-какие замечания, относившиеся, впрочем, более к экипажу, чем к сидевшему в нем. «Вишь ты, — сказал один другому, — вон какое колесо! что ты думаешь, доедет то колесо, если б случилось, в Москву или не доедет?» — «Доедет», — отвечал другой. «А в Казань-то, я думаю, не доедет?» — «В Казань не доедет», — отвечал другой. Этим разговор и кончился. Да еще, когда бричка подъехала к гостинице, встретился молодой человек в белых канифасовых панталонах, весьма узких и коротких, во фраке с покушеньями на моду, из-под которого видна была манишка, застегнутая тульскою булавкою с бронзовым пистолетом. Молодой человек оборотился назад, посмотрел экипаж, придержал рукою картуз, чуть не слетевший от ветра, и пошел своей дорогой.
Когда экипаж въехал на двор, господин был встречен трактирным слугою, или половым, как их называют в русских трактирах, живым и вертлявым до такой степени, что даже нельзя было рассмотреть, какое у него было лицо. Он выбежал проворно, с салфеткой в руке, весь длинный и в длинном демикотонном сюртуке со спинкою чуть не на самом затылке, встряхнул волосами и повел проворно господина вверх по всей деревянной галдарее показывать ниспосланный ему Богом покой. Покой был известного рода, ибо гостиница была тоже известного рода, то есть именно такая, как бывают гостиницы в губернских городах, где за два рубля в сутки проезжающие получают покойную комнату с тараканами, выглядывающими, как чернослив, из всех углов, и дверью в соседнее помещение, всегда заставленную комодом, где устроивается сосед, молчаливый и спокойный человек, но чрезвычайно любопытный, интересующийся знать о всех подробностях проезжающего. Наружный фасад гостиницы отвечал ее внутренности: она была очень длинна, в два этажа; нижний не был выщекатурен и оставался в темно-красных кирпичиках, еще более потемневших от лихих погодных перемен и грязноватых уже самих по себе; верхний был выкрашен вечною желтою краскою; внизу были лавочки с хомутами, веревками и баранками. В угольной из этих лавочек, или, лучше, в окне, помещался сбитенщик с самоваром из красной меди и лицом так же красным, как самовар, так что издали можно бы подумать, что на окне стояло два самовара, если б один самовар не был с черною как смоль бородою.
Пока приезжий господин осматривал свою комнату, внесены были его пожитки: прежде всего чемодан из белой кожи, несколько поистасканный, показывавший, что был не в первый раз в дороге. Чемодан внесли кучер Селифан, низенький человек в тулупчике, и лакей Петрушка, малый лет тридцати, в просторном подержанном сюртуке, как видно с барского плеча, малый немного суровый на взгляд, с очень крупными губами и носом. Вслед за чемоданом внесен был небольшой ларчик красного дерева с штучными выкладками из карельской березы, сапожные колодки и завернутая в синюю бумагу жареная курица. Когда все это было внесено, кучер Селифан отправился на конюшню возиться около лошадей, а лакей Петрушка стал устраиваться в маленькой передней, очень темной конурке, куда уже успел притащить свою шинель и вместе с нею какой-то свой собственный запах, который был сообщен и принесенному вслед за тем мешку с разным лакейским туалетом. В этой конурке он приладил к стене узенькую трехногую кровать, накрыв ее небольшим подобием тюфяка, убитым и плоским, как блин, и, может быть, так же замаслившимся, как блин, который удалось ему вытребовать у хозяина гостиницы.
abanin_danill_lab_6/README.md (new file, 34 lines)
@@ -0,0 +1,34 @@
## Laboratory work No. 6

### MLPClassifier

## Student of group ПИбд-41, Абанин Даниил

### How to run the lab:

* install python, numpy, matplotlib, sklearn
* run the project (lab6)

### Technologies used:

* The `Python` programming language; the numpy, matplotlib and sklearn libraries
* The `PyCharm` IDE

### What the lab does:

* Using the "Eligibility Prediction for Loan" data, it solves a classification task whose goal is to assess the risk of issuing a loan. The following features are used as input data:
Credit_History - whether the credit history meets the bank's standards, ApplicantIncome - the applicant's income, LoanAmount - the loan amount, Self_Employed - self-employed (Yes/No), Education - whether the applicant has an education, Married - whether the applicant is married (Yes/No).

### Example runs:

#### Results:
* Several runs were made with different iteration counts (200, 400, 600, 800, 1000)




The mean accuracy lies in the 50-60% range, which is not sufficient. Increasing the number of iterations did not significantly improve the result; the maximum gain is about 10%.


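To put the 50-60% figure in context, it helps to compare it with the trivial majority-class baseline on the same data; a minimal sketch, assuming the loan.csv committed below with its Loan_Status column:

```python
import pandas as pd

# Accuracy of a constant model that always predicts the most frequent Loan_Status value
data = pd.read_csv('loan.csv')
baseline = data['Loan_Status'].value_counts(normalize=True).max()
print(f"Majority-class baseline accuracy: {baseline:.2f}")
```

A mean MLP accuracy close to this baseline means the model is barely doing better than always predicting the majority class.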
abanin_danill_lab_6/lab6.py (new file, 46 lines)
@@ -0,0 +1,46 @@
from matplotlib import pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
import pandas as pd
import numpy as np


def test_iter(iters_num, x_train, x_test, y_train, y_test):
    # Train and score the MLP 10 times for a given max_iter and return the mean accuracy
    print("Number of iterations: ", iters_num)
    scores = []

    for i in range(10):
        neuro = MLPClassifier(max_iter=iters_num)
        neuro.fit(x_train, y_train.values.ravel())
        score = neuro.score(x_test, y_test)
        print(f'Score #{i + 1} - {score}')
        scores.append(score)

    mean_value = np.mean(scores)

    print(f"Mean score - {mean_value}")

    return mean_value


def start():
    data = pd.read_csv('loan.csv')
    x = data[['ApplicantIncome', 'LoanAmount', 'Credit_History', 'Self_Employed', 'Education', 'Married']]
    y = data[['Loan_Status']]

    x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.1, random_state=42)

    iters = [200, 400, 600, 800, 1000]
    iters_means = []

    for i in range(len(iters)):
        mean_value = test_iter(iters[i], x_train, x_test, y_train, y_test)
        iters_means.append(mean_value)

    # Plot mean accuracy against the iteration count
    plt.figure(1, figsize=(16, 9))
    plt.plot(iters, iters_means, c='r')
    plt.show()


start()
abanin_danill_lab_6/loan.csv (new file, 615 lines)
@@ -0,0 +1,615 @@
Loan_ID,Gender,Married,Dependents,Education,Self_Employed,ApplicantIncome,CoapplicantIncome,LoanAmount,Loan_Amount_Term,Credit_History,Property_Area,Loan_Status
|
||||
LP001002,Male,0.0,0,1,0.0,5849,0.0,360.0,1.0,0,Y,0.0
|
||||
LP001003,Male,1.0,1,1,0.0,4583,1508.0,128.0,360.0,1,Rural,0.0
|
||||
LP001005,Male,1.0,0,1,1.0,3000,0.0,66.0,360.0,1,Urban,1.0
|
||||
LP001006,Male,1.0,0,0,0.0,2583,2358.0,120.0,360.0,1,Urban,1.0
|
||||
LP001008,Male,0.0,0,1,0.0,6000,0.0,141.0,360.0,1,Urban,1.0
|
||||
LP001011,Male,1.0,2,1,1.0,5417,4196.0,267.0,360.0,1,Urban,1.0
|
||||
LP001013,Male,1.0,0,0,0.0,2333,1516.0,95.0,360.0,1,Urban,1.0
|
||||
LP001014,Male,1.0,3+,1,0.0,3036,2504.0,158.0,360.0,0,Semiurban,0.0
|
||||
LP001018,Male,1.0,2,1,0.0,4006,1526.0,168.0,360.0,1,Urban,1.0
|
||||
LP001020,Male,1.0,1,1,0.0,12841,10968.0,349.0,360.0,1,Semiurban,0.0
|
||||
LP001024,Male,1.0,2,1,0.0,3200,700.0,70.0,360.0,1,Urban,1.0
|
||||
LP001027,Male,1.0,2,1,0.0,2500,1840.0,109.0,360.0,1,Urban,1.0
|
||||
LP001028,Male,1.0,2,1,0.0,3073,8106.0,200.0,360.0,1,Urban,1.0
|
||||
LP001029,Male,0.0,0,1,0.0,1853,2840.0,114.0,360.0,1,Rural,0.0
|
||||
LP001030,Male,1.0,2,1,0.0,1299,1086.0,17.0,120.0,1,Urban,1.0
|
||||
LP001032,Male,0.0,0,1,0.0,4950,0.0,125.0,360.0,1,Urban,1.0
|
||||
LP001034,Male,0.0,1,0,0.0,3596,0.0,100.0,240.0,0,Urban,1.0
|
||||
LP001036,Female,0.0,0,1,0.0,3510,0.0,76.0,360.0,0,Urban,0.0
|
||||
LP001038,Male,1.0,0,0,0.0,4887,0.0,133.0,360.0,1,Rural,0.0
|
||||
LP001041,Male,1.0,0,1,0.0,2600,3500.0,115.0,,1,Urban,1.0
|
||||
LP001043,Male,1.0,0,0,0.0,7660,0.0,104.0,360.0,0,Urban,0.0
|
||||
LP001046,Male,1.0,1,1,0.0,5955,5625.0,315.0,360.0,1,Urban,1.0
|
||||
LP001047,Male,1.0,0,0,0.0,2600,1911.0,116.0,360.0,0,Semiurban,0.0
|
||||
LP001050,,1.0,2,0,0.0,3365,1917.0,112.0,360.0,0,Rural,0.0
|
||||
LP001052,Male,1.0,1,1,0.0,3717,2925.0,151.0,360.0,0,Semiurban,0.0
|
||||
LP001066,Male,1.0,0,1,1.0,9560,0.0,191.0,360.0,1,Semiurban,1.0
|
||||
LP001068,Male,1.0,0,1,0.0,2799,2253.0,122.0,360.0,1,Semiurban,1.0
|
||||
LP001073,Male,1.0,2,0,0.0,4226,1040.0,110.0,360.0,1,Urban,1.0
|
||||
LP001086,Male,0.0,0,0,0.0,1442,0.0,35.0,360.0,1,Urban,0.0
|
||||
LP001087,Female,0.0,2,1,0.0,3750,2083.0,120.0,360.0,1,Semiurban,1.0
|
||||
LP001091,Male,1.0,1,1,0.0,4166,3369.0,201.0,360.0,0,Urban,0.0
|
||||
LP001095,Male,0.0,0,1,0.0,3167,0.0,74.0,360.0,1,Urban,0.0
|
||||
LP001097,Male,0.0,1,1,1.0,4692,0.0,106.0,360.0,1,Rural,0.0
|
||||
LP001098,Male,1.0,0,1,0.0,3500,1667.0,114.0,360.0,1,Semiurban,1.0
|
||||
LP001100,Male,0.0,3+,1,0.0,12500,3000.0,320.0,360.0,1,Rural,0.0
|
||||
LP001106,Male,1.0,0,1,0.0,2275,2067.0,0.0,360.0,1,Urban,1.0
|
||||
LP001109,Male,1.0,0,1,0.0,1828,1330.0,100.0,,0,Urban,0.0
|
||||
LP001112,Female,1.0,0,1,0.0,3667,1459.0,144.0,360.0,1,Semiurban,1.0
|
||||
LP001114,Male,0.0,0,1,0.0,4166,7210.0,184.0,360.0,1,Urban,1.0
|
||||
LP001116,Male,0.0,0,0,0.0,3748,1668.0,110.0,360.0,1,Semiurban,1.0
|
||||
LP001119,Male,0.0,0,1,0.0,3600,0.0,80.0,360.0,1,Urban,0.0
|
||||
LP001120,Male,0.0,0,1,0.0,1800,1213.0,47.0,360.0,1,Urban,1.0
|
||||
LP001123,Male,1.0,0,1,0.0,2400,0.0,75.0,360.0,0,Urban,1.0
|
||||
LP001131,Male,1.0,0,1,0.0,3941,2336.0,134.0,360.0,1,Semiurban,1.0
|
||||
LP001136,Male,1.0,0,0,1.0,4695,0.0,96.0,,1,Urban,1.0
|
||||
LP001137,Female,0.0,0,1,0.0,3410,0.0,88.0,,1,Urban,1.0
|
||||
LP001138,Male,1.0,1,1,0.0,5649,0.0,44.0,360.0,1,Urban,1.0
|
||||
LP001144,Male,1.0,0,1,0.0,5821,0.0,144.0,360.0,1,Urban,1.0
|
||||
LP001146,Female,1.0,0,1,0.0,2645,3440.0,120.0,360.0,0,Urban,0.0
|
||||
LP001151,Female,0.0,0,1,0.0,4000,2275.0,144.0,360.0,1,Semiurban,1.0
|
||||
LP001155,Female,1.0,0,0,0.0,1928,1644.0,100.0,360.0,1,Semiurban,1.0
|
||||
LP001157,Female,0.0,0,1,0.0,3086,0.0,120.0,360.0,1,Semiurban,1.0
|
||||
LP001164,Female,0.0,0,1,0.0,4230,0.0,112.0,360.0,1,Semiurban,0.0
|
||||
LP001179,Male,1.0,2,1,0.0,4616,0.0,134.0,360.0,1,Urban,0.0
|
||||
LP001186,Female,1.0,1,1,1.0,11500,0.0,286.0,360.0,0,Urban,0.0
|
||||
LP001194,Male,1.0,2,1,0.0,2708,1167.0,97.0,360.0,1,Semiurban,1.0
|
||||
LP001195,Male,1.0,0,1,0.0,2132,1591.0,96.0,360.0,1,Semiurban,1.0
|
||||
LP001197,Male,1.0,0,1,0.0,3366,2200.0,135.0,360.0,1,Rural,0.0
|
||||
LP001198,Male,1.0,1,1,0.0,8080,2250.0,180.0,360.0,1,Urban,1.0
|
||||
LP001199,Male,1.0,2,0,0.0,3357,2859.0,144.0,360.0,1,Urban,1.0
|
||||
LP001205,Male,1.0,0,1,0.0,2500,3796.0,120.0,360.0,1,Urban,1.0
|
||||
LP001206,Male,1.0,3+,1,0.0,3029,0.0,99.0,360.0,1,Urban,1.0
|
||||
LP001207,Male,1.0,0,0,1.0,2609,3449.0,165.0,180.0,0,Rural,0.0
|
||||
LP001213,Male,1.0,1,1,0.0,4945,0.0,0.0,360.0,0,Rural,0.0
|
||||
LP001222,Female,0.0,0,1,0.0,4166,0.0,116.0,360.0,0,Semiurban,0.0
|
||||
LP001225,Male,1.0,0,1,0.0,5726,4595.0,258.0,360.0,1,Semiurban,0.0
|
||||
LP001228,Male,0.0,0,0,0.0,3200,2254.0,126.0,180.0,0,Urban,0.0
|
||||
LP001233,Male,1.0,1,1,0.0,10750,0.0,312.0,360.0,1,Urban,1.0
|
||||
LP001238,Male,1.0,3+,0,1.0,7100,0.0,125.0,60.0,1,Urban,1.0
|
||||
LP001241,Female,0.0,0,1,0.0,4300,0.0,136.0,360.0,0,Semiurban,0.0
|
||||
LP001243,Male,1.0,0,1,0.0,3208,3066.0,172.0,360.0,1,Urban,1.0
|
||||
LP001245,Male,1.0,2,0,1.0,1875,1875.0,97.0,360.0,1,Semiurban,1.0
|
||||
LP001248,Male,0.0,0,1,0.0,3500,0.0,81.0,300.0,1,Semiurban,1.0
|
||||
LP001250,Male,1.0,3+,0,0.0,4755,0.0,95.0,,0,Semiurban,0.0
|
||||
LP001253,Male,1.0,3+,1,1.0,5266,1774.0,187.0,360.0,1,Semiurban,1.0
|
||||
LP001255,Male,0.0,0,1,0.0,3750,0.0,113.0,480.0,1,Urban,0.0
|
||||
LP001256,Male,0.0,0,1,0.0,3750,4750.0,176.0,360.0,1,Urban,0.0
|
||||
LP001259,Male,1.0,1,1,1.0,1000,3022.0,110.0,360.0,1,Urban,0.0
|
||||
LP001263,Male,1.0,3+,1,0.0,3167,4000.0,180.0,300.0,0,Semiurban,0.0
|
||||
LP001264,Male,1.0,3+,0,1.0,3333,2166.0,130.0,360.0,0,Semiurban,1.0
|
||||
LP001265,Female,0.0,0,1,0.0,3846,0.0,111.0,360.0,1,Semiurban,1.0
|
||||
LP001266,Male,1.0,1,1,1.0,2395,0.0,0.0,360.0,1,Semiurban,1.0
|
||||
LP001267,Female,1.0,2,1,0.0,1378,1881.0,167.0,360.0,1,Urban,0.0
|
||||
LP001273,Male,1.0,0,1,0.0,6000,2250.0,265.0,360.0,0,Semiurban,0.0
|
||||
LP001275,Male,1.0,1,1,0.0,3988,0.0,50.0,240.0,1,Urban,1.0
|
||||
LP001279,Male,0.0,0,1,0.0,2366,2531.0,136.0,360.0,1,Semiurban,1.0
|
||||
LP001280,Male,1.0,2,0,0.0,3333,2000.0,99.0,360.0,0,Semiurban,1.0
|
||||
LP001282,Male,1.0,0,1,0.0,2500,2118.0,104.0,360.0,1,Semiurban,1.0
|
||||
LP001289,Male,0.0,0,1,0.0,8566,0.0,210.0,360.0,1,Urban,1.0
|
||||
LP001310,Male,1.0,0,1,0.0,5695,4167.0,175.0,360.0,1,Semiurban,1.0
|
||||
LP001316,Male,1.0,0,1,0.0,2958,2900.0,131.0,360.0,1,Semiurban,1.0
|
||||
LP001318,Male,1.0,2,1,0.0,6250,5654.0,188.0,180.0,1,Semiurban,1.0
|
||||
LP001319,Male,1.0,2,0,0.0,3273,1820.0,81.0,360.0,1,Urban,1.0
|
||||
LP001322,Male,0.0,0,1,0.0,4133,0.0,122.0,360.0,1,Semiurban,1.0
|
||||
LP001325,Male,0.0,0,0,0.0,3620,0.0,25.0,120.0,1,Semiurban,1.0
|
||||
LP001326,Male,0.0,0,1,0.0,6782,0.0,0.0,360.0,0,Urban,0.0
|
||||
LP001327,Female,1.0,0,1,0.0,2484,2302.0,137.0,360.0,1,Semiurban,1.0
|
||||
LP001333,Male,1.0,0,1,0.0,1977,997.0,50.0,360.0,1,Semiurban,1.0
|
||||
LP001334,Male,1.0,0,0,0.0,4188,0.0,115.0,180.0,1,Semiurban,1.0
|
||||
LP001343,Male,1.0,0,1,0.0,1759,3541.0,131.0,360.0,1,Semiurban,1.0
|
||||
LP001345,Male,1.0,2,0,0.0,4288,3263.0,133.0,180.0,1,Urban,1.0
|
||||
LP001349,Male,0.0,0,1,0.0,4843,3806.0,151.0,360.0,1,Semiurban,1.0
|
||||
LP001350,Male,1.0,,1,0.0,13650,0.0,0.0,360.0,1,Urban,1.0
|
||||
LP001356,Male,1.0,0,1,0.0,4652,3583.0,0.0,360.0,1,Semiurban,1.0
|
||||
LP001357,Male,0.0,,1,0.0,3816,754.0,160.0,360.0,1,Urban,1.0
|
||||
LP001367,Male,1.0,1,1,0.0,3052,1030.0,100.0,360.0,1,Urban,1.0
|
||||
LP001369,Male,1.0,2,1,0.0,11417,1126.0,225.0,360.0,1,Urban,1.0
|
||||
LP001370,Male,0.0,0,0,0.0,7333,0.0,120.0,360.0,1,Rural,0.0
|
||||
LP001379,Male,1.0,2,1,0.0,3800,3600.0,216.0,360.0,0,Urban,0.0
|
||||
LP001384,Male,1.0,3+,0,0.0,2071,754.0,94.0,480.0,1,Semiurban,1.0
|
||||
LP001385,Male,0.0,0,1,0.0,5316,0.0,136.0,360.0,1,Urban,1.0
|
||||
LP001387,Female,1.0,0,1,0.0,2929,2333.0,139.0,360.0,1,Semiurban,1.0
|
||||
LP001391,Male,1.0,0,0,0.0,3572,4114.0,152.0,,0,Rural,0.0
|
||||
LP001392,Female,0.0,1,1,1.0,7451,0.0,0.0,360.0,1,Semiurban,1.0
|
||||
LP001398,Male,0.0,0,1,0.0,5050,0.0,118.0,360.0,1,Semiurban,1.0
|
||||
LP001401,Male,1.0,1,1,0.0,14583,0.0,185.0,180.0,1,Rural,1.0
|
||||
LP001404,Female,1.0,0,1,0.0,3167,2283.0,154.0,360.0,1,Semiurban,1.0
|
||||
LP001405,Male,1.0,1,1,0.0,2214,1398.0,85.0,360.0,0,Urban,1.0
|
||||
LP001421,Male,1.0,0,1,0.0,5568,2142.0,175.0,360.0,1,Rural,0.0
|
||||
LP001422,Female,0.0,0,1,0.0,10408,0.0,259.0,360.0,1,Urban,1.0
|
||||
LP001426,Male,1.0,,1,0.0,5667,2667.0,180.0,360.0,1,Rural,1.0
|
||||
LP001430,Female,0.0,0,1,0.0,4166,0.0,44.0,360.0,1,Semiurban,1.0
|
||||
LP001431,Female,0.0,0,1,0.0,2137,8980.0,137.0,360.0,0,Semiurban,1.0
|
||||
LP001432,Male,1.0,2,1,0.0,2957,0.0,81.0,360.0,1,Semiurban,1.0
|
||||
LP001439,Male,1.0,0,0,0.0,4300,2014.0,194.0,360.0,1,Rural,1.0
|
||||
LP001443,Female,0.0,0,1,0.0,3692,0.0,93.0,360.0,0,Rural,1.0
|
||||
LP001448,,1.0,3+,1,0.0,23803,0.0,370.0,360.0,1,Rural,1.0
|
||||
LP001449,Male,0.0,0,1,0.0,3865,1640.0,0.0,360.0,1,Rural,1.0
|
||||
LP001451,Male,1.0,1,1,1.0,10513,3850.0,160.0,180.0,0,Urban,0.0
|
||||
LP001465,Male,1.0,0,1,0.0,6080,2569.0,182.0,360.0,0,Rural,0.0
|
||||
LP001469,Male,0.0,0,1,1.0,20166,0.0,650.0,480.0,0,Urban,1.0
|
||||
LP001473,Male,0.0,0,1,0.0,2014,1929.0,74.0,360.0,1,Urban,1.0
|
||||
LP001478,Male,0.0,0,1,0.0,2718,0.0,70.0,360.0,1,Semiurban,1.0
|
||||
LP001482,Male,1.0,0,1,1.0,3459,0.0,25.0,120.0,1,Semiurban,1.0
|
||||
LP001487,Male,0.0,0,1,0.0,4895,0.0,102.0,360.0,1,Semiurban,1.0
|
||||
LP001488,Male,1.0,3+,1,0.0,4000,7750.0,290.0,360.0,1,Semiurban,0.0
|
||||
LP001489,Female,1.0,0,1,0.0,4583,0.0,84.0,360.0,1,Rural,0.0
|
||||
LP001491,Male,1.0,2,1,1.0,3316,3500.0,88.0,360.0,1,Urban,1.0
|
||||
LP001492,Male,0.0,0,1,0.0,14999,0.0,242.0,360.0,0,Semiurban,0.0
|
||||
LP001493,Male,1.0,2,0,0.0,4200,1430.0,129.0,360.0,1,Rural,0.0
|
||||
LP001497,Male,1.0,2,1,0.0,5042,2083.0,185.0,360.0,1,Rural,0.0
|
||||
LP001498,Male,0.0,0,1,0.0,5417,0.0,168.0,360.0,1,Urban,1.0
|
||||
LP001504,Male,0.0,0,1,1.0,6950,0.0,175.0,180.0,1,Semiurban,1.0
|
||||
LP001507,Male,1.0,0,1,0.0,2698,2034.0,122.0,360.0,1,Semiurban,1.0
|
||||
LP001508,Male,1.0,2,1,0.0,11757,0.0,187.0,180.0,1,Urban,1.0
|
||||
LP001514,Female,1.0,0,1,0.0,2330,4486.0,100.0,360.0,1,Semiurban,1.0
|
||||
LP001516,Female,1.0,2,1,0.0,14866,0.0,70.0,360.0,1,Urban,1.0
|
||||
LP001518,Male,1.0,1,1,0.0,1538,1425.0,30.0,360.0,1,Urban,1.0
|
||||
LP001519,Female,0.0,0,1,0.0,10000,1666.0,225.0,360.0,1,Rural,0.0
|
||||
LP001520,Male,1.0,0,1,0.0,4860,830.0,125.0,360.0,1,Semiurban,1.0
|
||||
LP001528,Male,0.0,0,1,0.0,6277,0.0,118.0,360.0,0,Rural,0.0
|
||||
LP001529,Male,1.0,0,1,1.0,2577,3750.0,152.0,360.0,1,Rural,1.0
|
||||
LP001531,Male,0.0,0,1,0.0,9166,0.0,244.0,360.0,1,Urban,0.0
|
||||
LP001532,Male,1.0,2,0,0.0,2281,0.0,113.0,360.0,1,Rural,0.0
|
||||
LP001535,Male,0.0,0,1,0.0,3254,0.0,50.0,360.0,1,Urban,1.0
|
||||
LP001536,Male,1.0,3+,1,0.0,39999,0.0,600.0,180.0,0,Semiurban,1.0
|
||||
LP001541,Male,1.0,1,1,0.0,6000,0.0,160.0,360.0,0,Rural,1.0
|
||||
LP001543,Male,1.0,1,1,0.0,9538,0.0,187.0,360.0,1,Urban,1.0
|
||||
LP001546,Male,0.0,0,1,0.0,2980,2083.0,120.0,360.0,1,Rural,1.0
|
||||
LP001552,Male,1.0,0,1,0.0,4583,5625.0,255.0,360.0,1,Semiurban,1.0
|
||||
LP001560,Male,1.0,0,0,0.0,1863,1041.0,98.0,360.0,1,Semiurban,1.0
|
||||
LP001562,Male,1.0,0,1,0.0,7933,0.0,275.0,360.0,1,Urban,0.0
|
||||
LP001565,Male,1.0,1,1,0.0,3089,1280.0,121.0,360.0,0,Semiurban,0.0
|
||||
LP001570,Male,1.0,2,1,0.0,4167,1447.0,158.0,360.0,1,Rural,1.0
|
||||
LP001572,Male,1.0,0,1,0.0,9323,0.0,75.0,180.0,1,Urban,1.0
|
||||
LP001574,Male,1.0,0,1,0.0,3707,3166.0,182.0,,1,Rural,1.0
|
||||
LP001577,Female,1.0,0,1,0.0,4583,0.0,112.0,360.0,1,Rural,0.0
|
||||
LP001578,Male,1.0,0,1,0.0,2439,3333.0,129.0,360.0,1,Rural,1.0
|
||||
LP001579,Male,0.0,0,1,0.0,2237,0.0,63.0,480.0,0,Semiurban,0.0
|
||||
LP001580,Male,1.0,2,1,0.0,8000,0.0,200.0,360.0,1,Semiurban,1.0
|
||||
LP001581,Male,1.0,0,0,0.0,1820,1769.0,95.0,360.0,1,Rural,1.0
|
||||
LP001585,,1.0,3+,1,0.0,51763,0.0,700.0,300.0,1,Urban,1.0
|
||||
LP001586,Male,1.0,3+,0,0.0,3522,0.0,81.0,180.0,1,Rural,0.0
|
||||
LP001594,Male,1.0,0,1,0.0,5708,5625.0,187.0,360.0,1,Semiurban,1.0
|
||||
LP001603,Male,1.0,0,0,1.0,4344,736.0,87.0,360.0,1,Semiurban,0.0
|
||||
LP001606,Male,1.0,0,1,0.0,3497,1964.0,116.0,360.0,1,Rural,1.0
|
||||
LP001608,Male,1.0,2,1,0.0,2045,1619.0,101.0,360.0,1,Rural,1.0
|
||||
LP001610,Male,1.0,3+,1,0.0,5516,11300.0,495.0,360.0,0,Semiurban,0.0
|
||||
LP001616,Male,1.0,1,1,0.0,3750,0.0,116.0,360.0,1,Semiurban,1.0
|
||||
LP001630,Male,0.0,0,0,0.0,2333,1451.0,102.0,480.0,0,Urban,0.0
|
||||
LP001633,Male,1.0,1,1,0.0,6400,7250.0,180.0,360.0,0,Urban,0.0
|
||||
LP001634,Male,0.0,0,1,0.0,1916,5063.0,67.0,360.0,0,Rural,0.0
|
||||
LP001636,Male,1.0,0,1,0.0,4600,0.0,73.0,180.0,1,Semiurban,1.0
|
||||
LP001637,Male,1.0,1,1,0.0,33846,0.0,260.0,360.0,1,Semiurban,0.0
|
||||
LP001639,Female,1.0,0,1,0.0,3625,0.0,108.0,360.0,1,Semiurban,1.0
|
||||
LP001640,Male,1.0,0,1,1.0,39147,4750.0,120.0,360.0,1,Semiurban,1.0
|
||||
LP001641,Male,1.0,1,1,1.0,2178,0.0,66.0,300.0,0,Rural,0.0
|
||||
LP001643,Male,1.0,0,1,0.0,2383,2138.0,58.0,360.0,0,Rural,1.0
|
||||
LP001644,,1.0,0,1,1.0,674,5296.0,168.0,360.0,1,Rural,1.0
|
||||
LP001647,Male,1.0,0,1,0.0,9328,0.0,188.0,180.0,1,Rural,1.0
|
||||
LP001653,Male,0.0,0,0,0.0,4885,0.0,48.0,360.0,1,Rural,1.0
|
||||
LP001656,Male,0.0,0,1,0.0,12000,0.0,164.0,360.0,1,Semiurban,0.0
|
||||
LP001657,Male,1.0,0,0,0.0,6033,0.0,160.0,360.0,1,Urban,0.0
|
||||
LP001658,Male,0.0,0,1,0.0,3858,0.0,76.0,360.0,1,Semiurban,1.0
|
||||
LP001664,Male,0.0,0,1,0.0,4191,0.0,120.0,360.0,1,Rural,1.0
|
||||
LP001665,Male,1.0,1,1,0.0,3125,2583.0,170.0,360.0,1,Semiurban,0.0
|
||||
LP001666,Male,0.0,0,1,0.0,8333,3750.0,187.0,360.0,1,Rural,1.0
|
||||
LP001669,Female,0.0,0,0,0.0,1907,2365.0,120.0,,1,Urban,1.0
|
||||
LP001671,Female,1.0,0,1,0.0,3416,2816.0,113.0,360.0,0,Semiurban,1.0
|
||||
LP001673,Male,0.0,0,1,1.0,11000,0.0,83.0,360.0,1,Urban,0.0
|
||||
LP001674,Male,1.0,1,0,0.0,2600,2500.0,90.0,360.0,1,Semiurban,1.0
|
||||
LP001677,Male,0.0,2,1,0.0,4923,0.0,166.0,360.0,0,Semiurban,1.0
|
||||
LP001682,Male,1.0,3+,0,0.0,3992,0.0,0.0,180.0,1,Urban,0.0
|
||||
LP001688,Male,1.0,1,0,0.0,3500,1083.0,135.0,360.0,1,Urban,1.0
|
||||
LP001691,Male,1.0,2,0,0.0,3917,0.0,124.0,360.0,1,Semiurban,1.0
|
||||
LP001692,Female,0.0,0,0,0.0,4408,0.0,120.0,360.0,1,Semiurban,1.0
|
||||
LP001693,Female,0.0,0,1,0.0,3244,0.0,80.0,360.0,1,Urban,1.0
|
||||
LP001698,Male,0.0,0,0,0.0,3975,2531.0,55.0,360.0,1,Rural,1.0
|
||||
LP001699,Male,0.0,0,1,0.0,2479,0.0,59.0,360.0,1,Urban,1.0
|
||||
LP001702,Male,0.0,0,1,0.0,3418,0.0,127.0,360.0,1,Semiurban,0.0
|
||||
LP001708,Female,0.0,0,1,0.0,10000,0.0,214.0,360.0,1,Semiurban,0.0
|
||||
LP001711,Male,1.0,3+,1,0.0,3430,1250.0,128.0,360.0,0,Semiurban,0.0
|
||||
LP001713,Male,1.0,1,1,1.0,7787,0.0,240.0,360.0,1,Urban,1.0
|
||||
LP001715,Male,1.0,3+,0,1.0,5703,0.0,130.0,360.0,1,Rural,1.0
|
||||
LP001716,Male,1.0,0,1,0.0,3173,3021.0,137.0,360.0,1,Urban,1.0
|
||||
LP001720,Male,1.0,3+,0,0.0,3850,983.0,100.0,360.0,1,Semiurban,1.0
|
||||
LP001722,Male,1.0,0,1,0.0,150,1800.0,135.0,360.0,1,Rural,0.0
|
||||
LP001726,Male,1.0,0,1,0.0,3727,1775.0,131.0,360.0,1,Semiurban,1.0
|
||||
LP001732,Male,1.0,2,1,0.0,5000,0.0,72.0,360.0,0,Semiurban,0.0
|
||||
LP001734,Female,1.0,2,1,0.0,4283,2383.0,127.0,360.0,0,Semiurban,1.0
|
||||
LP001736,Male,1.0,0,1,0.0,2221,0.0,60.0,360.0,0,Urban,0.0
|
||||
LP001743,Male,1.0,2,1,0.0,4009,1717.0,116.0,360.0,1,Semiurban,1.0
|
||||
LP001744,Male,0.0,0,1,0.0,2971,2791.0,144.0,360.0,1,Semiurban,1.0
|
||||
LP001749,Male,1.0,0,1,0.0,7578,1010.0,175.0,,1,Semiurban,1.0
|
||||
LP001750,Male,1.0,0,1,0.0,6250,0.0,128.0,360.0,1,Semiurban,1.0
|
||||
LP001751,Male,1.0,0,1,0.0,3250,0.0,170.0,360.0,1,Rural,0.0
|
||||
LP001754,Male,1.0,,0,1.0,4735,0.0,138.0,360.0,1,Urban,0.0
|
||||
LP001758,Male,1.0,2,1,0.0,6250,1695.0,210.0,360.0,1,Semiurban,1.0
|
||||
LP001760,Male,0.0,,1,0.0,4758,0.0,158.0,480.0,1,Semiurban,1.0
|
||||
LP001761,Male,0.0,0,1,1.0,6400,0.0,200.0,360.0,1,Rural,1.0
|
||||
LP001765,Male,1.0,1,1,0.0,2491,2054.0,104.0,360.0,1,Semiurban,1.0
|
||||
LP001768,Male,1.0,0,1,0.0,3716,0.0,42.0,180.0,1,Rural,1.0
|
||||
LP001770,Male,0.0,0,0,0.0,3189,2598.0,120.0,,1,Rural,1.0
|
||||
LP001776,Female,0.0,0,1,0.0,8333,0.0,280.0,360.0,1,Semiurban,1.0
|
||||
LP001778,Male,1.0,1,1,0.0,3155,1779.0,140.0,360.0,1,Semiurban,1.0
|
||||
LP001784,Male,1.0,1,1,0.0,5500,1260.0,170.0,360.0,1,Rural,1.0
|
||||
LP001786,Male,1.0,0,1,0.0,5746,0.0,255.0,360.0,0,Urban,0.0
|
||||
LP001788,Female,0.0,0,1,1.0,3463,0.0,122.0,360.0,0,Urban,1.0
|
||||
LP001790,Female,0.0,1,1,0.0,3812,0.0,112.0,360.0,1,Rural,1.0
|
||||
LP001792,Male,1.0,1,1,0.0,3315,0.0,96.0,360.0,1,Semiurban,1.0
|
||||
LP001798,Male,1.0,2,1,0.0,5819,5000.0,120.0,360.0,1,Rural,1.0
|
||||
LP001800,Male,1.0,1,0,0.0,2510,1983.0,140.0,180.0,1,Urban,0.0
|
||||
LP001806,Male,0.0,0,1,0.0,2965,5701.0,155.0,60.0,1,Urban,1.0
|
||||
LP001807,Male,1.0,2,1,1.0,6250,1300.0,108.0,360.0,1,Rural,1.0
|
||||
LP001811,Male,1.0,0,0,0.0,3406,4417.0,123.0,360.0,1,Semiurban,1.0
|
||||
LP001813,Male,0.0,0,1,1.0,6050,4333.0,120.0,180.0,1,Urban,0.0
|
||||
LP001814,Male,1.0,2,1,0.0,9703,0.0,112.0,360.0,1,Urban,1.0
|
||||
LP001819,Male,1.0,1,0,0.0,6608,0.0,137.0,180.0,1,Urban,1.0
|
||||
LP001824,Male,1.0,1,1,0.0,2882,1843.0,123.0,480.0,1,Semiurban,1.0
|
||||
LP001825,Male,1.0,0,1,0.0,1809,1868.0,90.0,360.0,1,Urban,1.0
|
||||
LP001835,Male,1.0,0,0,0.0,1668,3890.0,201.0,360.0,0,Semiurban,0.0
|
||||
LP001836,Female,0.0,2,1,0.0,3427,0.0,138.0,360.0,1,Urban,0.0
|
||||
LP001841,Male,0.0,0,0,1.0,2583,2167.0,104.0,360.0,1,Rural,1.0
|
||||
LP001843,Male,1.0,1,0,0.0,2661,7101.0,279.0,180.0,1,Semiurban,1.0
|
||||
LP001844,Male,0.0,0,1,1.0,16250,0.0,192.0,360.0,0,Urban,0.0
|
||||
LP001846,Female,0.0,3+,1,0.0,3083,0.0,255.0,360.0,1,Rural,1.0
|
||||
LP001849,Male,0.0,0,0,0.0,6045,0.0,115.0,360.0,0,Rural,0.0
|
||||
LP001854,Male,1.0,3+,1,0.0,5250,0.0,94.0,360.0,1,Urban,0.0
|
||||
LP001859,Male,1.0,0,1,0.0,14683,2100.0,304.0,360.0,1,Rural,0.0
|
||||
LP001864,Male,1.0,3+,0,0.0,4931,0.0,128.0,360.0,0,Semiurban,0.0
|
||||
LP001865,Male,1.0,1,1,0.0,6083,4250.0,330.0,360.0,0,Urban,1.0
|
||||
LP001868,Male,0.0,0,1,0.0,2060,2209.0,134.0,360.0,1,Semiurban,1.0
|
||||
LP001870,Female,0.0,1,1,0.0,3481,0.0,155.0,36.0,1,Semiurban,0.0
|
||||
LP001871,Female,0.0,0,1,0.0,7200,0.0,120.0,360.0,1,Rural,1.0
|
||||
LP001872,Male,0.0,0,1,1.0,5166,0.0,128.0,360.0,1,Semiurban,1.0
|
||||
LP001875,Male,0.0,0,1,0.0,4095,3447.0,151.0,360.0,1,Rural,1.0
|
||||
LP001877,Male,1.0,2,1,0.0,4708,1387.0,150.0,360.0,1,Semiurban,1.0
|
||||
LP001882,Male,1.0,3+,1,0.0,4333,1811.0,160.0,360.0,0,Urban,1.0
|
||||
LP001883,Female,0.0,0,1,0.0,3418,0.0,135.0,360.0,1,Rural,0.0
|
||||
LP001884,Female,0.0,1,1,0.0,2876,1560.0,90.0,360.0,1,Urban,1.0
|
||||
LP001888,Female,0.0,0,1,0.0,3237,0.0,30.0,360.0,1,Urban,1.0
|
||||
LP001891,Male,1.0,0,1,0.0,11146,0.0,136.0,360.0,1,Urban,1.0
|
||||
LP001892,Male,0.0,0,1,0.0,2833,1857.0,126.0,360.0,1,Rural,1.0
|
||||
LP001894,Male,1.0,0,1,0.0,2620,2223.0,150.0,360.0,1,Semiurban,1.0
|
||||
LP001896,Male,1.0,2,1,0.0,3900,0.0,90.0,360.0,1,Semiurban,1.0
|
||||
LP001900,Male,1.0,1,1,0.0,2750,1842.0,115.0,360.0,1,Semiurban,1.0
|
||||
LP001903,Male,1.0,0,1,0.0,3993,3274.0,207.0,360.0,1,Semiurban,1.0
|
||||
LP001904,Male,1.0,0,1,0.0,3103,1300.0,80.0,360.0,1,Urban,1.0
|
||||
LP001907,Male,1.0,0,1,0.0,14583,0.0,436.0,360.0,1,Semiurban,1.0
|
||||
LP001908,Female,1.0,0,0,0.0,4100,0.0,124.0,360.0,0,Rural,1.0
|
||||
LP001910,Male,0.0,1,0,1.0,4053,2426.0,158.0,360.0,0,Urban,0.0
|
||||
LP001914,Male,1.0,0,1,0.0,3927,800.0,112.0,360.0,1,Semiurban,1.0
|
||||
LP001915,Male,1.0,2,1,0.0,2301,985.7999878,78.0,180.0,1,Urban,1.0
|
||||
LP001917,Female,0.0,0,1,0.0,1811,1666.0,54.0,360.0,1,Urban,1.0
|
||||
LP001922,Male,1.0,0,1,0.0,20667,0.0,0.0,360.0,1,Rural,0.0
|
||||
LP001924,Male,0.0,0,1,0.0,3158,3053.0,89.0,360.0,1,Rural,1.0
|
||||
LP001925,Female,0.0,0,1,1.0,2600,1717.0,99.0,300.0,1,Semiurban,0.0
|
||||
LP001926,Male,1.0,0,1,0.0,3704,2000.0,120.0,360.0,1,Rural,1.0
|
||||
LP001931,Female,0.0,0,1,0.0,4124,0.0,115.0,360.0,1,Semiurban,1.0
|
||||
LP001935,Male,0.0,0,1,0.0,9508,0.0,187.0,360.0,1,Rural,1.0
|
||||
LP001936,Male,1.0,0,1,0.0,3075,2416.0,139.0,360.0,1,Rural,1.0
|
||||
LP001938,Male,1.0,2,1,0.0,4400,0.0,127.0,360.0,0,Semiurban,0.0
|
||||
LP001940,Male,1.0,2,1,0.0,3153,1560.0,134.0,360.0,1,Urban,1.0
|
||||
LP001945,Female,0.0,,1,0.0,5417,0.0,143.0,480.0,0,Urban,0.0
|
||||
LP001947,Male,1.0,0,1,0.0,2383,3334.0,172.0,360.0,1,Semiurban,1.0
|
||||
LP001949,Male,1.0,3+,1,0.0,4416,1250.0,110.0,360.0,1,Urban,1.0
|
||||
LP001953,Male,1.0,1,1,0.0,6875,0.0,200.0,360.0,1,Semiurban,1.0
|
||||
LP001954,Female,1.0,1,1,0.0,4666,0.0,135.0,360.0,1,Urban,1.0
|
||||
LP001955,Female,0.0,0,1,0.0,5000,2541.0,151.0,480.0,1,Rural,0.0
|
||||
LP001963,Male,1.0,1,1,0.0,2014,2925.0,113.0,360.0,1,Urban,0.0
|
||||
LP001964,Male,1.0,0,0,0.0,1800,2934.0,93.0,360.0,0,Urban,0.0
|
||||
LP001972,Male,1.0,,0,0.0,2875,1750.0,105.0,360.0,1,Semiurban,1.0
|
||||
LP001974,Female,0.0,0,1,0.0,5000,0.0,132.0,360.0,1,Rural,1.0
|
||||
LP001977,Male,1.0,1,1,0.0,1625,1803.0,96.0,360.0,1,Urban,1.0
|
||||
LP001978,Male,0.0,0,1,0.0,4000,2500.0,140.0,360.0,1,Rural,1.0
|
||||
LP001990,Male,0.0,0,0,0.0,2000,0.0,0.0,360.0,1,Urban,0.0
|
||||
LP001993,Female,0.0,0,1,0.0,3762,1666.0,135.0,360.0,1,Rural,1.0
|
||||
LP001994,Female,0.0,0,1,0.0,2400,1863.0,104.0,360.0,0,Urban,0.0
|
||||
LP001996,Male,0.0,0,1,0.0,20233,0.0,480.0,360.0,1,Rural,0.0
|
||||
LP001998,Male,1.0,2,0,0.0,7667,0.0,185.0,360.0,0,Rural,1.0
|
||||
LP002002,Female,0.0,0,1,0.0,2917,0.0,84.0,360.0,1,Semiurban,1.0
|
||||
LP002004,Male,0.0,0,0,0.0,2927,2405.0,111.0,360.0,1,Semiurban,1.0
|
||||
LP002006,Female,0.0,0,1,0.0,2507,0.0,56.0,360.0,1,Rural,1.0
|
||||
LP002008,Male,1.0,2,1,1.0,5746,0.0,144.0,84.0,0,Rural,1.0
|
||||
LP002024,,1.0,0,1,0.0,2473,1843.0,159.0,360.0,1,Rural,0.0
|
||||
LP002031,Male,1.0,1,0,0.0,3399,1640.0,111.0,180.0,1,Urban,1.0
|
||||
LP002035,Male,1.0,2,1,0.0,3717,0.0,120.0,360.0,1,Semiurban,1.0
|
||||
LP002036,Male,1.0,0,1,0.0,2058,2134.0,88.0,360.0,0,Urban,1.0
|
||||
LP002043,Female,0.0,1,1,0.0,3541,0.0,112.0,360.0,0,Semiurban,1.0
|
||||
LP002050,Male,1.0,1,1,1.0,10000,0.0,155.0,360.0,1,Rural,0.0
|
||||
LP002051,Male,1.0,0,1,0.0,2400,2167.0,115.0,360.0,1,Semiurban,1.0
|
||||
LP002053,Male,1.0,3+,1,0.0,4342,189.0,124.0,360.0,1,Semiurban,1.0
|
||||
LP002054,Male,1.0,2,0,0.0,3601,1590.0,0.0,360.0,1,Rural,1.0
|
||||
LP002055,Female,0.0,0,1,0.0,3166,2985.0,132.0,360.0,0,Rural,1.0
|
||||
LP002065,Male,1.0,3+,1,0.0,15000,0.0,300.0,360.0,1,Rural,1.0
|
||||
LP002067,Male,1.0,1,1,1.0,8666,4983.0,376.0,360.0,0,Rural,0.0
|
||||
LP002068,Male,0.0,0,1,0.0,4917,0.0,130.0,360.0,0,Rural,1.0
|
||||
LP002082,Male,1.0,0,1,1.0,5818,2160.0,184.0,360.0,1,Semiurban,1.0
|
||||
LP002086,Female,1.0,0,1,0.0,4333,2451.0,110.0,360.0,1,Urban,0.0
|
||||
LP002087,Female,0.0,0,1,0.0,2500,0.0,67.0,360.0,1,Urban,1.0
|
||||
LP002097,Male,0.0,1,1,0.0,4384,1793.0,117.0,360.0,1,Urban,1.0
|
||||
LP002098,Male,0.0,0,1,0.0,2935,0.0,98.0,360.0,1,Semiurban,1.0
|
||||
LP002100,Male,0.0,,1,0.0,2833,0.0,71.0,360.0,1,Urban,1.0
|
||||
LP002101,Male,1.0,0,1,0.0,63337,0.0,490.0,180.0,1,Urban,1.0
|
||||
LP002103,,1.0,1,1,1.0,9833,1833.0,182.0,180.0,1,Urban,1.0
|
||||
LP002106,Male,1.0,,1,1.0,5503,4490.0,70.0,,1,Semiurban,1.0
|
||||
LP002110,Male,1.0,1,1,0.0,5250,688.0,160.0,360.0,1,Rural,1.0
|
||||
LP002112,Male,1.0,2,1,1.0,2500,4600.0,176.0,360.0,1,Rural,1.0
|
||||
LP002113,Female,0.0,3+,0,0.0,1830,0.0,0.0,360.0,0,Urban,0.0
|
||||
LP002114,Female,0.0,0,1,0.0,4160,0.0,71.0,360.0,1,Semiurban,1.0
|
||||
LP002115,Male,1.0,3+,0,0.0,2647,1587.0,173.0,360.0,1,Rural,0.0
|
||||
LP002116,Female,0.0,0,1,0.0,2378,0.0,46.0,360.0,1,Rural,0.0
|
||||
LP002119,Male,1.0,1,0,0.0,4554,1229.0,158.0,360.0,1,Urban,1.0
|
||||
LP002126,Male,1.0,3+,0,0.0,3173,0.0,74.0,360.0,1,Semiurban,1.0
|
||||
LP002128,Male,1.0,2,1,0.0,2583,2330.0,125.0,360.0,1,Rural,1.0
|
||||
LP002129,Male,1.0,0,1,0.0,2499,2458.0,160.0,360.0,1,Semiurban,1.0
|
||||
LP002130,Male,1.0,,0,0.0,3523,3230.0,152.0,360.0,0,Rural,0.0
|
||||
LP002131,Male,1.0,2,0,0.0,3083,2168.0,126.0,360.0,1,Urban,1.0
|
||||
LP002137,Male,1.0,0,1,0.0,6333,4583.0,259.0,360.0,0,Semiurban,1.0
|
||||
LP002138,Male,1.0,0,1,0.0,2625,6250.0,187.0,360.0,1,Rural,1.0
|
||||
LP002139,Male,1.0,0,1,0.0,9083,0.0,228.0,360.0,1,Semiurban,1.0
|
||||
LP002140,Male,0.0,0,1,0.0,8750,4167.0,308.0,360.0,1,Rural,0.0
|
||||
LP002141,Male,1.0,3+,1,0.0,2666,2083.0,95.0,360.0,1,Rural,1.0
|
||||
LP002142,Female,1.0,0,1,1.0,5500,0.0,105.0,360.0,0,Rural,0.0
|
||||
LP002143,Female,1.0,0,1,0.0,2423,505.0,130.0,360.0,1,Semiurban,1.0
|
||||
LP002144,Female,0.0,,1,0.0,3813,0.0,116.0,180.0,1,Urban,1.0
|
||||
LP002149,Male,1.0,2,1,0.0,8333,3167.0,165.0,360.0,1,Rural,1.0
|
||||
LP002151,Male,1.0,1,1,0.0,3875,0.0,67.0,360.0,1,Urban,0.0
|
||||
LP002158,Male,1.0,0,0,0.0,3000,1666.0,100.0,480.0,0,Urban,0.0
|
||||
LP002160,Male,1.0,3+,1,0.0,5167,3167.0,200.0,360.0,1,Semiurban,1.0
|
||||
LP002161,Female,0.0,1,1,0.0,4723,0.0,81.0,360.0,1,Semiurban,0.0
|
||||
LP002170,Male,1.0,2,1,0.0,5000,3667.0,236.0,360.0,1,Semiurban,1.0
|
||||
LP002175,Male,1.0,0,1,0.0,4750,2333.0,130.0,360.0,1,Urban,1.0
|
||||
LP002178,Male,1.0,0,1,0.0,3013,3033.0,95.0,300.0,0,Urban,1.0
|
||||
LP002180,Male,0.0,0,1,1.0,6822,0.0,141.0,360.0,1,Rural,1.0
|
||||
LP002181,Male,0.0,0,0,0.0,6216,0.0,133.0,360.0,1,Rural,0.0
|
||||
LP002187,Male,0.0,0,1,0.0,2500,0.0,96.0,480.0,1,Semiurban,0.0
|
||||
LP002188,Male,0.0,0,1,0.0,5124,0.0,124.0,,0,Rural,0.0
|
||||
LP002190,Male,1.0,1,1,0.0,6325,0.0,175.0,360.0,1,Semiurban,1.0
|
||||
LP002191,Male,1.0,0,1,0.0,19730,5266.0,570.0,360.0,1,Rural,0.0
|
||||
LP002194,Female,0.0,0,1,1.0,15759,0.0,55.0,360.0,1,Semiurban,1.0
|
||||
LP002197,Male,1.0,2,1,0.0,5185,0.0,155.0,360.0,1,Semiurban,1.0
|
||||
LP002201,Male,1.0,2,1,1.0,9323,7873.0,380.0,300.0,1,Rural,1.0
|
||||
LP002205,Male,0.0,1,1,0.0,3062,1987.0,111.0,180.0,0,Urban,0.0
|
||||
LP002209,Female,0.0,0,1,0.0,2764,1459.0,110.0,360.0,1,Urban,1.0
|
||||
LP002211,Male,1.0,0,1,0.0,4817,923.0,120.0,180.0,1,Urban,1.0
|
||||
LP002219,Male,1.0,3+,1,0.0,8750,4996.0,130.0,360.0,1,Rural,1.0
|
||||
LP002223,Male,1.0,0,1,0.0,4310,0.0,130.0,360.0,0,Semiurban,1.0
|
||||
LP002224,Male,0.0,0,1,0.0,3069,0.0,71.0,480.0,1,Urban,0.0
|
||||
LP002225,Male,1.0,2,1,0.0,5391,0.0,130.0,360.0,1,Urban,1.0
|
||||
LP002226,Male,1.0,0,1,0.0,3333,2500.0,128.0,360.0,1,Semiurban,1.0
|
||||
LP002229,Male,0.0,0,1,0.0,5941,4232.0,296.0,360.0,1,Semiurban,1.0
|
||||
LP002231,Female,0.0,0,1,0.0,6000,0.0,156.0,360.0,1,Urban,1.0
|
||||
LP002234,Male,0.0,0,1,1.0,7167,0.0,128.0,360.0,1,Urban,1.0
|
||||
LP002236,Male,1.0,2,1,0.0,4566,0.0,100.0,360.0,1,Urban,0.0
|
||||
LP002237,Male,0.0,1,1,0.0,3667,0.0,113.0,180.0,1,Urban,1.0
|
||||
LP002239,Male,0.0,0,0,0.0,2346,1600.0,132.0,360.0,1,Semiurban,1.0
|
||||
LP002243,Male,1.0,0,0,0.0,3010,3136.0,0.0,360.0,0,Urban,0.0
|
||||
LP002244,Male,1.0,0,1,0.0,2333,2417.0,136.0,360.0,1,Urban,1.0
|
||||
LP002250,Male,1.0,0,1,0.0,5488,0.0,125.0,360.0,1,Rural,1.0
|
||||
LP002255,Male,0.0,3+,1,0.0,9167,0.0,185.0,360.0,1,Rural,1.0
|
||||
LP002262,Male,1.0,3+,1,0.0,9504,0.0,275.0,360.0,1,Rural,1.0
|
||||
LP002263,Male,1.0,0,1,0.0,2583,2115.0,120.0,360.0,0,Urban,1.0
|
||||
LP002265,Male,1.0,2,0,0.0,1993,1625.0,113.0,180.0,1,Semiurban,1.0
|
||||
LP002266,Male,1.0,2,1,0.0,3100,1400.0,113.0,360.0,1,Urban,1.0
|
||||
LP002272,Male,1.0,2,1,0.0,3276,484.0,135.0,360.0,0,Semiurban,1.0
|
||||
LP002277,Female,0.0,0,1,0.0,3180,0.0,71.0,360.0,0,Urban,0.0
|
||||
LP002281,Male,1.0,0,1,0.0,3033,1459.0,95.0,360.0,1,Urban,1.0
|
||||
LP002284,Male,0.0,0,0,0.0,3902,1666.0,109.0,360.0,1,Rural,1.0
|
||||
LP002287,Female,0.0,0,1,0.0,1500,1800.0,103.0,360.0,0,Semiurban,0.0
|
||||
LP002288,Male,1.0,2,0,0.0,2889,0.0,45.0,180.0,0,Urban,0.0
|
||||
LP002296,Male,0.0,0,0,0.0,2755,0.0,65.0,300.0,1,Rural,0.0
|
||||
LP002297,Male,0.0,0,1,0.0,2500,20000.0,103.0,360.0,1,Semiurban,1.0
|
||||
LP002300,Female,0.0,0,0,0.0,1963,0.0,53.0,360.0,1,Semiurban,1.0
|
||||
LP002301,Female,0.0,0,1,1.0,7441,0.0,194.0,360.0,1,Rural,0.0
|
||||
LP002305,Female,0.0,0,1,0.0,4547,0.0,115.0,360.0,1,Semiurban,1.0
|
||||
LP002308,Male,1.0,0,0,0.0,2167,2400.0,115.0,360.0,1,Urban,1.0
|
||||
LP002314,Female,0.0,0,0,0.0,2213,0.0,66.0,360.0,1,Rural,1.0
|
||||
LP002315,Male,1.0,1,1,0.0,8300,0.0,152.0,300.0,0,Semiurban,0.0
|
||||
LP002317,Male,1.0,3+,1,0.0,81000,0.0,360.0,360.0,0,Rural,0.0
|
||||
LP002318,Female,0.0,1,0,1.0,3867,0.0,62.0,360.0,1,Semiurban,0.0
|
||||
LP002319,Male,1.0,0,1,0.0,6256,0.0,160.0,360.0,0,Urban,1.0
|
||||
LP002328,Male,1.0,0,0,0.0,6096,0.0,218.0,360.0,0,Rural,0.0
|
||||
LP002332,Male,1.0,0,0,0.0,2253,2033.0,110.0,360.0,1,Rural,1.0
|
||||
LP002335,Female,1.0,0,0,0.0,2149,3237.0,178.0,360.0,0,Semiurban,0.0
|
||||
LP002337,Female,0.0,0,1,0.0,2995,0.0,60.0,360.0,1,Urban,1.0
|
||||
LP002341,Female,0.0,1,1,0.0,2600,0.0,160.0,360.0,1,Urban,0.0
|
||||
LP002342,Male,1.0,2,1,1.0,1600,20000.0,239.0,360.0,1,Urban,0.0
|
||||
LP002345,Male,1.0,0,1,0.0,1025,2773.0,112.0,360.0,1,Rural,1.0
|
||||
LP002347,Male,1.0,0,1,0.0,3246,1417.0,138.0,360.0,1,Semiurban,1.0
|
||||
LP002348,Male,1.0,0,1,0.0,5829,0.0,138.0,360.0,1,Rural,1.0
|
||||
LP002357,Female,0.0,0,0,0.0,2720,0.0,80.0,,0,Urban,0.0
|
||||
LP002361,Male,1.0,0,1,0.0,1820,1719.0,100.0,360.0,1,Urban,1.0
|
||||
LP002362,Male,1.0,1,1,0.0,7250,1667.0,110.0,,0,Urban,0.0
|
||||
LP002364,Male,1.0,0,1,0.0,14880,0.0,96.0,360.0,1,Semiurban,1.0
|
||||
LP002366,Male,1.0,0,1,0.0,2666,4300.0,121.0,360.0,1,Rural,1.0
|
||||
LP002367,Female,0.0,1,0,0.0,4606,0.0,81.0,360.0,1,Rural,0.0
|
||||
LP002368,Male,1.0,2,1,0.0,5935,0.0,133.0,360.0,1,Semiurban,1.0
|
||||
LP002369,Male,1.0,0,1,0.0,2920,16.12000084,87.0,360.0,1,Rural,1.0
|
||||
LP002370,Male,0.0,0,0,0.0,2717,0.0,60.0,180.0,1,Urban,1.0
|
||||
LP002377,Female,0.0,1,1,1.0,8624,0.0,150.0,360.0,1,Semiurban,1.0
|
||||
LP002379,Male,0.0,0,1,0.0,6500,0.0,105.0,360.0,0,Rural,0.0
|
||||
LP002386,Male,0.0,0,1,0.0,12876,0.0,405.0,360.0,1,Semiurban,1.0
|
||||
LP002387,Male,1.0,0,1,0.0,2425,2340.0,143.0,360.0,1,Semiurban,1.0
|
||||
LP002390,Male,0.0,0,1,0.0,3750,0.0,100.0,360.0,1,Urban,1.0
|
||||
LP002393,Female,0.0,,1,0.0,10047,0.0,0.0,240.0,1,Semiurban,1.0
|
||||
LP002398,Male,0.0,0,1,0.0,1926,1851.0,50.0,360.0,1,Semiurban,1.0
|
||||
LP002401,Male,1.0,0,1,0.0,2213,1125.0,0.0,360.0,1,Urban,1.0
|
||||
LP002403,Male,0.0,0,1,1.0,10416,0.0,187.0,360.0,0,Urban,0.0
|
||||
LP002407,Female,1.0,0,0,1.0,7142,0.0,138.0,360.0,1,Rural,1.0
|
||||
LP002408,Male,0.0,0,1,0.0,3660,5064.0,187.0,360.0,1,Semiurban,1.0
|
||||
LP002409,Male,1.0,0,1,0.0,7901,1833.0,180.0,360.0,1,Rural,1.0
|
||||
LP002418,Male,0.0,3+,0,0.0,4707,1993.0,148.0,360.0,1,Semiurban,1.0
|
||||
LP002422,Male,0.0,1,1,0.0,37719,0.0,152.0,360.0,1,Semiurban,1.0
|
||||
LP002424,Male,1.0,0,1,0.0,7333,8333.0,175.0,300.0,0,Rural,1.0
|
||||
LP002429,Male,1.0,1,1,1.0,3466,1210.0,130.0,360.0,1,Rural,1.0
|
||||
LP002434,Male,1.0,2,0,0.0,4652,0.0,110.0,360.0,1,Rural,1.0
|
||||
LP002435,Male,1.0,0,1,0.0,3539,1376.0,55.0,360.0,1,Rural,0.0
|
||||
LP002443,Male,1.0,2,1,0.0,3340,1710.0,150.0,360.0,0,Rural,0.0
|
||||
LP002444,Male,0.0,1,0,1.0,2769,1542.0,190.0,360.0,0,Semiurban,0.0
|
||||
LP002446,Male,1.0,2,0,0.0,2309,1255.0,125.0,360.0,0,Rural,0.0
|
||||
LP002447,Male,1.0,2,0,0.0,1958,1456.0,60.0,300.0,0,Urban,1.0
|
||||
LP002448,Male,1.0,0,1,0.0,3948,1733.0,149.0,360.0,0,Rural,0.0
|
||||
LP002449,Male,1.0,0,1,0.0,2483,2466.0,90.0,180.0,0,Rural,1.0
|
||||
LP002453,Male,0.0,0,1,1.0,7085,0.0,84.0,360.0,1,Semiurban,1.0
|
||||
LP002455,Male,1.0,2,1,0.0,3859,0.0,96.0,360.0,1,Semiurban,1.0
|
||||
LP002459,Male,1.0,0,1,0.0,4301,0.0,118.0,360.0,1,Urban,1.0
|
||||
LP002467,Male,1.0,0,1,0.0,3708,2569.0,173.0,360.0,1,Urban,0.0
|
||||
LP002472,Male,0.0,2,1,0.0,4354,0.0,136.0,360.0,1,Rural,1.0
|
||||
LP002473,Male,1.0,0,1,0.0,8334,0.0,160.0,360.0,1,Semiurban,0.0
|
||||
LP002478,,1.0,0,1,1.0,2083,4083.0,160.0,360.0,0,Semiurban,1.0
|
||||
LP002484,Male,1.0,3+,1,0.0,7740,0.0,128.0,180.0,1,Urban,1.0
|
||||
LP002487,Male,1.0,0,1,0.0,3015,2188.0,153.0,360.0,1,Rural,1.0
|
||||
LP002489,Female,0.0,1,0,0.0,5191,0.0,132.0,360.0,1,Semiurban,1.0
|
||||
LP002493,Male,0.0,0,1,0.0,4166,0.0,98.0,360.0,0,Semiurban,0.0
|
||||
LP002494,Male,0.0,0,1,0.0,6000,0.0,140.0,360.0,1,Rural,1.0
|
||||
LP002500,Male,1.0,3+,0,0.0,2947,1664.0,70.0,180.0,0,Urban,0.0
|
||||
LP002501,,1.0,0,1,0.0,16692,0.0,110.0,360.0,1,Semiurban,1.0
|
||||
LP002502,Female,1.0,2,0,0.0,210,2917.0,98.0,360.0,1,Semiurban,1.0
|
||||
LP002505,Male,1.0,0,1,0.0,4333,2451.0,110.0,360.0,1,Urban,0.0
|
||||
LP002515,Male,1.0,1,1,1.0,3450,2079.0,162.0,360.0,1,Semiurban,1.0
|
||||
LP002517,Male,1.0,1,0,0.0,2653,1500.0,113.0,180.0,0,Rural,0.0
|
||||
LP002519,Male,1.0,3+,1,0.0,4691,0.0,100.0,360.0,1,Semiurban,1.0
|
||||
LP002522,Female,0.0,0,1,1.0,2500,0.0,93.0,360.0,0,Urban,1.0
|
||||
LP002524,Male,0.0,2,1,0.0,5532,4648.0,162.0,360.0,1,Rural,1.0
|
||||
LP002527,Male,1.0,2,1,1.0,16525,1014.0,150.0,360.0,1,Rural,1.0
|
||||
LP002529,Male,1.0,2,1,0.0,6700,1750.0,230.0,300.0,1,Semiurban,1.0
|
||||
LP002530,,1.0,2,1,0.0,2873,1872.0,132.0,360.0,0,Semiurban,0.0
|
||||
LP002531,Male,1.0,1,1,1.0,16667,2250.0,86.0,360.0,1,Semiurban,1.0
|
||||
LP002533,Male,1.0,2,1,0.0,2947,1603.0,0.0,360.0,1,Urban,0.0
|
||||
LP002534,Female,0.0,0,0,0.0,4350,0.0,154.0,360.0,1,Rural,1.0
|
||||
LP002536,Male,1.0,3+,0,0.0,3095,0.0,113.0,360.0,1,Rural,1.0
|
||||
LP002537,Male,1.0,0,1,0.0,2083,3150.0,128.0,360.0,1,Semiurban,1.0
|
||||
LP002541,Male,1.0,0,1,0.0,10833,0.0,234.0,360.0,1,Semiurban,1.0
|
||||
LP002543,Male,1.0,2,1,0.0,8333,0.0,246.0,360.0,1,Semiurban,1.0
|
||||
LP002544,Male,1.0,1,0,0.0,1958,2436.0,131.0,360.0,1,Rural,1.0
|
||||
LP002545,Male,0.0,2,1,0.0,3547,0.0,80.0,360.0,0,Rural,0.0
|
||||
LP002547,Male,1.0,1,1,0.0,18333,0.0,500.0,360.0,1,Urban,0.0
|
||||
LP002555,Male,1.0,2,1,1.0,4583,2083.0,160.0,360.0,1,Semiurban,1.0
|
||||
LP002556,Male,0.0,0,1,0.0,2435,0.0,75.0,360.0,1,Urban,0.0
|
||||
LP002560,Male,0.0,0,0,0.0,2699,2785.0,96.0,360.0,0,Semiurban,1.0
|
||||
LP002562,Male,1.0,1,0,0.0,5333,1131.0,186.0,360.0,0,Urban,1.0
|
||||
LP002571,Male,0.0,0,0,0.0,3691,0.0,110.0,360.0,1,Rural,1.0
|
||||
LP002582,Female,0.0,0,0,1.0,17263,0.0,225.0,360.0,1,Semiurban,1.0
|
||||
LP002585,Male,1.0,0,1,0.0,3597,2157.0,119.0,360.0,0,Rural,0.0
|
||||
LP002586,Female,1.0,1,1,0.0,3326,913.0,105.0,84.0,1,Semiurban,1.0
|
||||
LP002587,Male,1.0,0,0,0.0,2600,1700.0,107.0,360.0,1,Rural,1.0
|
||||
LP002588,Male,1.0,0,1,0.0,4625,2857.0,111.0,12.0,0,Urban,1.0
|
||||
LP002600,Male,1.0,1,1,1.0,2895,0.0,95.0,360.0,1,Semiurban,1.0
|
||||
LP002602,Male,0.0,0,1,0.0,6283,4416.0,209.0,360.0,0,Rural,0.0
|
||||
LP002603,Female,0.0,0,1,0.0,645,3683.0,113.0,480.0,1,Rural,1.0
|
||||
LP002606,Female,0.0,0,1,0.0,3159,0.0,100.0,360.0,1,Semiurban,1.0
|
||||
LP002615,Male,1.0,2,1,0.0,4865,5624.0,208.0,360.0,1,Semiurban,1.0
|
||||
LP002618,Male,1.0,1,0,0.0,4050,5302.0,138.0,360.0,0,Rural,0.0
|
||||
LP002619,Male,1.0,0,0,0.0,3814,1483.0,124.0,300.0,1,Semiurban,1.0
|
||||
LP002622,Male,1.0,2,1,0.0,3510,4416.0,243.0,360.0,1,Rural,1.0
|
||||
LP002624,Male,1.0,0,1,0.0,20833,6667.0,480.0,360.0,0,Urban,1.0
|
||||
LP002625,,0.0,0,1,0.0,3583,0.0,96.0,360.0,1,Urban,0.0
|
||||
LP002626,Male,1.0,0,1,1.0,2479,3013.0,188.0,360.0,1,Urban,1.0
|
||||
LP002634,Female,0.0,1,1,0.0,13262,0.0,40.0,360.0,1,Urban,1.0
|
||||
LP002637,Male,0.0,0,0,0.0,3598,1287.0,100.0,360.0,1,Rural,0.0
|
||||
LP002640,Male,1.0,1,1,0.0,6065,2004.0,250.0,360.0,1,Semiurban,1.0
|
||||
LP002643,Male,1.0,2,1,0.0,3283,2035.0,148.0,360.0,1,Urban,1.0
|
||||
LP002648,Male,1.0,0,1,0.0,2130,6666.0,70.0,180.0,1,Semiurban,0.0
|
||||
LP002652,Male,0.0,0,1,0.0,5815,3666.0,311.0,360.0,1,Rural,0.0
|
||||
LP002659,Male,1.0,3+,1,0.0,3466,3428.0,150.0,360.0,1,Rural,1.0
|
||||
LP002670,Female,1.0,2,1,0.0,2031,1632.0,113.0,480.0,1,Semiurban,1.0
|
||||
LP002682,Male,1.0,,0,0.0,3074,1800.0,123.0,360.0,0,Semiurban,0.0
|
||||
LP002683,Male,0.0,0,1,0.0,4683,1915.0,185.0,360.0,1,Semiurban,0.0
|
||||
LP002684,Female,0.0,0,0,0.0,3400,0.0,95.0,360.0,1,Rural,0.0
|
||||
LP002689,Male,1.0,2,0,0.0,2192,1742.0,45.0,360.0,1,Semiurban,1.0
|
||||
LP002690,Male,0.0,0,1,0.0,2500,0.0,55.0,360.0,1,Semiurban,1.0
|
||||
LP002692,Male,1.0,3+,1,1.0,5677,1424.0,100.0,360.0,1,Rural,1.0
|
||||
LP002693,Male,1.0,2,1,1.0,7948,7166.0,480.0,360.0,1,Rural,1.0
|
||||
LP002697,Male,0.0,0,1,0.0,4680,2087.0,0.0,360.0,1,Semiurban,0.0
|
||||
LP002699,Male,1.0,2,1,1.0,17500,0.0,400.0,360.0,1,Rural,1.0
|
||||
LP002705,Male,1.0,0,1,0.0,3775,0.0,110.0,360.0,1,Semiurban,1.0
|
||||
LP002706,Male,1.0,1,0,0.0,5285,1430.0,161.0,360.0,0,Semiurban,1.0
|
||||
LP002714,Male,0.0,1,0,0.0,2679,1302.0,94.0,360.0,1,Semiurban,1.0
|
||||
LP002716,Male,0.0,0,0,0.0,6783,0.0,130.0,360.0,1,Semiurban,1.0
|
||||
LP002717,Male,1.0,0,1,0.0,1025,5500.0,216.0,360.0,0,Rural,1.0
|
||||
LP002720,Male,1.0,3+,1,0.0,4281,0.0,100.0,360.0,1,Urban,1.0
|
||||
LP002723,Male,0.0,2,1,0.0,3588,0.0,110.0,360.0,0,Rural,0.0
|
||||
LP002729,Male,0.0,1,1,0.0,11250,0.0,196.0,360.0,0,Semiurban,0.0
|
||||
LP002731,Female,0.0,0,0,1.0,18165,0.0,125.0,360.0,1,Urban,1.0
|
||||
LP002732,Male,0.0,0,0,0.0,2550,2042.0,126.0,360.0,1,Rural,1.0
|
||||
LP002734,Male,1.0,0,1,0.0,6133,3906.0,324.0,360.0,1,Urban,1.0
|
||||
LP002738,Male,0.0,2,1,0.0,3617,0.0,107.0,360.0,1,Semiurban,1.0
|
||||
LP002739,Male,1.0,0,0,0.0,2917,536.0,66.0,360.0,1,Rural,0.0
|
||||
LP002740,Male,1.0,3+,1,0.0,6417,0.0,157.0,180.0,1,Rural,1.0
|
||||
LP002741,Female,1.0,1,1,0.0,4608,2845.0,140.0,180.0,1,Semiurban,1.0
|
||||
LP002743,Female,0.0,0,1,0.0,2138,0.0,99.0,360.0,0,Semiurban,0.0
|
||||
LP002753,Female,0.0,1,1,0.0,3652,0.0,95.0,360.0,1,Semiurban,1.0
|
||||
LP002755,Male,1.0,1,0,0.0,2239,2524.0,128.0,360.0,1,Urban,1.0
|
||||
LP002757,Female,1.0,0,0,0.0,3017,663.0,102.0,360.0,0,Semiurban,1.0
|
||||
LP002767,Male,1.0,0,1,0.0,2768,1950.0,155.0,360.0,1,Rural,1.0
|
||||
LP002768,Male,0.0,0,0,0.0,3358,0.0,80.0,36.0,1,Semiurban,0.0
|
||||
LP002772,Male,0.0,0,1,0.0,2526,1783.0,145.0,360.0,1,Rural,1.0
|
||||
LP002776,Female,0.0,0,1,0.0,5000,0.0,103.0,360.0,0,Semiurban,0.0
|
||||
LP002777,Male,1.0,0,1,0.0,2785,2016.0,110.0,360.0,1,Rural,1.0
|
||||
LP002778,Male,1.0,2,1,1.0,6633,0.0,0.0,360.0,0,Rural,0.0
|
||||
LP002784,Male,1.0,1,0,0.0,2492,2375.0,0.0,360.0,1,Rural,1.0
|
||||
LP002785,Male,1.0,1,1,0.0,3333,3250.0,158.0,360.0,1,Urban,1.0
|
||||
LP002788,Male,1.0,0,0,0.0,2454,2333.0,181.0,360.0,0,Urban,0.0
|
||||
LP002789,Male,1.0,0,1,0.0,3593,4266.0,132.0,180.0,0,Rural,0.0
|
||||
LP002792,Male,1.0,1,1,0.0,5468,1032.0,26.0,360.0,1,Semiurban,1.0
|
||||
LP002794,Female,0.0,0,1,0.0,2667,1625.0,84.0,360.0,0,Urban,1.0
|
||||
LP002795,Male,1.0,3+,1,1.0,10139,0.0,260.0,360.0,1,Semiurban,1.0
|
||||
LP002798,Male,1.0,0,1,0.0,3887,2669.0,162.0,360.0,1,Semiurban,1.0
|
||||
LP002804,Female,1.0,0,1,0.0,4180,2306.0,182.0,360.0,1,Semiurban,1.0
|
||||
LP002807,Male,1.0,2,0,0.0,3675,242.0,108.0,360.0,1,Semiurban,1.0
|
||||
LP002813,Female,1.0,1,1,1.0,19484,0.0,600.0,360.0,1,Semiurban,1.0
|
||||
LP002820,Male,1.0,0,1,0.0,5923,2054.0,211.0,360.0,1,Rural,1.0
|
||||
LP002821,Male,0.0,0,0,1.0,5800,0.0,132.0,360.0,1,Semiurban,1.0
|
||||
LP002832,Male,1.0,2,1,0.0,8799,0.0,258.0,360.0,0,Urban,0.0
|
||||
LP002833,Male,1.0,0,0,0.0,4467,0.0,120.0,360.0,0,Rural,1.0
|
||||
LP002836,Male,0.0,0,1,0.0,3333,0.0,70.0,360.0,1,Urban,1.0
|
||||
LP002837,Male,1.0,3+,1,0.0,3400,2500.0,123.0,360.0,0,Rural,0.0
|
||||
LP002840,Female,0.0,0,1,0.0,2378,0.0,9.0,360.0,1,Urban,0.0
|
||||
LP002841,Male,1.0,0,1,0.0,3166,2064.0,104.0,360.0,0,Urban,0.0
|
||||
LP002842,Male,1.0,1,1,0.0,3417,1750.0,186.0,360.0,1,Urban,1.0
|
||||
LP002847,Male,1.0,,1,0.0,5116,1451.0,165.0,360.0,0,Urban,0.0
|
||||
LP002855,Male,1.0,2,1,0.0,16666,0.0,275.0,360.0,1,Urban,1.0
|
||||
LP002862,Male,1.0,2,0,0.0,6125,1625.0,187.0,480.0,1,Semiurban,0.0
|
||||
LP002863,Male,1.0,3+,1,0.0,6406,0.0,150.0,360.0,1,Semiurban,0.0
|
||||
LP002868,Male,1.0,2,1,0.0,3159,461.0,108.0,84.0,1,Urban,1.0
|
||||
LP002872,,1.0,0,1,0.0,3087,2210.0,136.0,360.0,0,Semiurban,0.0
|
||||
LP002874,Male,0.0,0,1,0.0,3229,2739.0,110.0,360.0,1,Urban,1.0
|
||||
LP002877,Male,1.0,1,1,0.0,1782,2232.0,107.0,360.0,1,Rural,1.0
|
||||
LP002888,Male,0.0,0,1,0.0,3182,2917.0,161.0,360.0,1,Urban,1.0
|
||||
LP002892,Male,1.0,2,1,0.0,6540,0.0,205.0,360.0,1,Semiurban,1.0
|
||||
LP002893,Male,0.0,0,1,0.0,1836,33837.0,90.0,360.0,1,Urban,0.0
|
||||
LP002894,Female,1.0,0,1,0.0,3166,0.0,36.0,360.0,1,Semiurban,1.0
|
||||
LP002898,Male,1.0,1,1,0.0,1880,0.0,61.0,360.0,0,Rural,0.0
|
||||
LP002911,Male,1.0,1,1,0.0,2787,1917.0,146.0,360.0,0,Rural,0.0
|
||||
LP002912,Male,1.0,1,1,0.0,4283,3000.0,172.0,84.0,1,Rural,0.0
|
||||
LP002916,Male,1.0,0,1,0.0,2297,1522.0,104.0,360.0,1,Urban,1.0
|
||||
LP002917,Female,0.0,0,0,0.0,2165,0.0,70.0,360.0,1,Semiurban,1.0
|
||||
LP002925,,0.0,0,1,0.0,4750,0.0,94.0,360.0,1,Semiurban,1.0
|
||||
LP002926,Male,1.0,2,1,1.0,2726,0.0,106.0,360.0,0,Semiurban,0.0
|
||||
LP002928,Male,1.0,0,1,0.0,3000,3416.0,56.0,180.0,1,Semiurban,1.0
|
||||
LP002931,Male,1.0,2,1,1.0,6000,0.0,205.0,240.0,1,Semiurban,0.0
|
||||
LP002933,,0.0,3+,1,1.0,9357,0.0,292.0,360.0,1,Semiurban,1.0
|
||||
LP002936,Male,1.0,0,1,0.0,3859,3300.0,142.0,180.0,1,Rural,1.0
|
||||
LP002938,Male,1.0,0,1,1.0,16120,0.0,260.0,360.0,1,Urban,1.0
|
||||
LP002940,Male,0.0,0,0,0.0,3833,0.0,110.0,360.0,1,Rural,1.0
|
||||
LP002941,Male,1.0,2,0,1.0,6383,1000.0,187.0,360.0,1,Rural,0.0
|
||||
LP002943,Male,0.0,,1,0.0,2987,0.0,88.0,360.0,0,Semiurban,0.0
|
||||
LP002945,Male,1.0,0,1,1.0,9963,0.0,180.0,360.0,1,Rural,1.0
|
||||
LP002948,Male,1.0,2,1,0.0,5780,0.0,192.0,360.0,1,Urban,1.0
|
||||
LP002949,Female,0.0,3+,1,0.0,416,41667.0,350.0,180.0,0,Urban,0.0
|
||||
LP002950,Male,1.0,0,0,0.0,2894,2792.0,155.0,360.0,1,Rural,1.0
|
||||
LP002953,Male,1.0,3+,1,0.0,5703,0.0,128.0,360.0,1,Urban,1.0
|
||||
LP002958,Male,0.0,0,1,0.0,3676,4301.0,172.0,360.0,1,Rural,1.0
|
||||
LP002959,Female,1.0,1,1,0.0,12000,0.0,496.0,360.0,1,Semiurban,1.0
|
||||
LP002960,Male,1.0,0,0,0.0,2400,3800.0,0.0,180.0,1,Urban,0.0
|
||||
LP002961,Male,1.0,1,1,0.0,3400,2500.0,173.0,360.0,1,Semiurban,1.0
|
||||
LP002964,Male,1.0,2,0,0.0,3987,1411.0,157.0,360.0,1,Rural,1.0
|
||||
LP002974,Male,1.0,0,1,0.0,3232,1950.0,108.0,360.0,1,Rural,1.0
|
||||
LP002978,Female,0.0,0,1,0.0,2900,0.0,71.0,360.0,1,Rural,1.0
|
||||
LP002979,Male,1.0,3+,1,0.0,4106,0.0,40.0,180.0,1,Rural,1.0
|
||||
LP002983,Male,1.0,1,1,0.0,8072,240.0,253.0,360.0,1,Urban,1.0
|
||||
LP002984,Male,1.0,2,1,0.0,7583,0.0,187.0,360.0,1,Urban,1.0
|
||||
LP002990,Female,0.0,0,1,1.0,4583,0.0,133.0,360.0,0,Semiurban,0.0
|
||||
|
BIN
abanin_danill_lab_6/result_mean.jpg
Normal file
|
After Width: | Height: | Size: 32 KiB |
BIN
abanin_danill_lab_6/score_1.png
Normal file
|
After Width: | Height: | Size: 680 KiB |
BIN
abanin_danill_lab_6/score_2.png
Normal file
|
After Width: | Height: | Size: 452 KiB |
BIN
almukhammetov_bulat_lab_3/1.png
Normal file
|
After Width: | Height: | Size: 73 KiB |
64
almukhammetov_bulat_lab_3/README.md
Normal file
@@ -0,0 +1,64 @@
Variant 2

Task:
Predict the house-age category (housingMedianAge) from the other features, such as latitude, longitude, total number of rooms, and so on.

Data:
This dataset was used in the second chapter of Aurélien Géron's recent book "Hands-On Machine Learning with Scikit-Learn and TensorFlow". It is an excellent introduction to implementing machine-learning algorithms because it needs minimal preprocessing, has an easily understood list of variables, and is conveniently sized - neither too small nor too large.

The data describe houses in a California district together with summary statistics based on the 1990 census. Note that the data have not been cleaned and require some preprocessing steps. The columns include the following variables, whose names describe them quite clearly:

longitude - geographic longitude

latitude - geographic latitude

median_house_value - median house value

total_rooms - total number of rooms

total_bedrooms - total number of bedrooms

population - population

households - number of households

median_income - median income

How to run:
Run the file lab3.py

Program description:

1. Loads the dataset from 'housing.csv', which contains information about California houses, including their coordinates, age, number of rooms, population, income, and other characteristics.

2. Drops rows with missing values to keep the analysis clean.

3. Selects the feature set that will be used to train the regression and classification models.

4. Defines a regression task whose target is 'housing_median_age' and a classification task whose target is 'median_house_value'.

5. Splits the data into training and test sets for both tasks with train_test_split. The test set is 1% of the original data.

6. Creates and trains decision trees for regression and classification using DecisionTreeRegressor and DecisionTreeClassifier.

7. Predicts the target variable on the test sets for both tasks.

8. Evaluates the models with mean squared error (MSE) for regression and accuracy for classification.

9. Prints the regression MSE and the classification accuracy, as well as the feature importances for both tasks.

Results:

![image](1.png)

Conclusions:

For the regression task, where the goal was to predict house age (housing_median_age), the decision tree achieved an MSE of 117.65, i.e. an RMSE of roughly 10.8 years, so the model predicts house age reasonably well from the selected features.

For the classification task, where the goal was to predict house value (median_house_value), the decision tree achieved an accuracy of only 8.29%. This is not surprising: median_house_value is a continuous variable, so every distinct value is treated as a separate class. The low accuracy shows that the model, or the formulation of the task itself, needs to be improved - see the sketch after this section.

Feature-importance analysis for the regression task shows that 'longitude', 'latitude' and 'total_rooms' contribute the most to predicting house age.

For the classification task, 'median_income', 'longitude' and 'latitude' contribute the most to determining the house-value classes.

Overall, the regression task is solved successfully with a decision tree, while the classification task needs further improvement.
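A minimal sketch of the improvement suggested in the conclusions, assuming the same housing.csv file: binning the continuous median_house_value into a handful of price categories (the five quantile bins below are an illustrative choice, not part of the original lab) turns the classification task into one that a DecisionTreeClassifier can score meaningfully.

```
import pandas as pd
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score

data = pd.read_csv('housing.csv').dropna()
features = data[['longitude', 'latitude', 'total_rooms', 'total_bedrooms',
                 'population', 'households', 'median_income']]

# Bin the continuous house value into 5 ordered price categories (quintiles);
# labels=False returns the bin index 0..4 as the class label.
price_class = pd.qcut(data['median_house_value'], q=5, labels=False)

X_train, X_test, y_train, y_test = train_test_split(
    features, price_class, test_size=0.2, random_state=241)

clf = DecisionTreeClassifier(random_state=241)
clf.fit(X_train, y_train)
print("Accuracy on binned price classes:",
      accuracy_score(y_test, clf.predict(X_test)))
```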
48
almukhammetov_bulat_lab_3/lab3(old).py
Normal file
@@ -0,0 +1,48 @@
import pandas as pd
from sklearn.preprocessing import LabelEncoder
from sklearn.tree import DecisionTreeClassifier

# Load the data
data = pd.read_csv('titanic.csv', index_col='PassengerId')


# Convert sex to a numeric value
def Sex_to_bool(sex):
    if sex == "male":
        return 0
    return 1


# Apply the conversion
data['Sex'] = data['Sex'].apply(Sex_to_bool)

# Keep only rows with no missing values in the columns we use
data = data.loc[~data['Name'].isna()
                & ~data['Age'].isna()
                & ~data['Sex'].isna()
                & ~data['Survived'].isna()]

# Select the feature columns (copy to avoid a SettingWithCopyWarning)
features = data[['Name', 'Sex', 'Age']].copy()

# Apply label encoding to the 'Name' column
label_encoder = LabelEncoder()
features['Name'] = label_encoder.fit_transform(features['Name'])

# Target variable
y = data['Survived']

# Create and train the decision tree
clf = DecisionTreeClassifier(random_state=241)
clf.fit(features, y)

# Feature importances
importance = clf.feature_importances_

# Print the importance of each feature
print("Importance of 'Name':", importance[0])
print("Importance of 'Sex':", importance[1])
print("Importance of 'Age':", importance[2])
77
almukhammetov_bulat_lab_3/lab3.py
Normal file
@@ -0,0 +1,77 @@
import pandas as pd
from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error, accuracy_score

# Load the data and drop rows with missing values
data = pd.read_csv('housing.csv')
data = data.dropna()

# Select the feature columns
features = data[
    ['longitude', 'latitude', 'total_rooms', 'total_bedrooms', 'population', 'households', 'median_income']]

# Regression task: predict the median age of the houses
target_regression = data['housing_median_age']

# Split the data into training and test sets for regression (1% held out)
X_train_regression, X_test_regression, y_train_regression, y_test_regression = train_test_split(
    features, target_regression, test_size=0.01, random_state=241)

# Create and train the decision tree for regression
clf_regression = DecisionTreeRegressor(random_state=241)
clf_regression.fit(X_train_regression, y_train_regression)

# Predict on the regression test set
y_pred_regression = clf_regression.predict(X_test_regression)

# Evaluate the regression model (MSE)
mse_regression = mean_squared_error(y_test_regression, y_pred_regression)
print("MSE for regression:", mse_regression)

# Classification task: predict the median house value
# (note: median_house_value is continuous, so every distinct value becomes its own class)
target_classification = data['median_house_value']

# Split the data into training and test sets for classification (1% held out)
X_train_classification, X_test_classification, y_train_classification, y_test_classification = train_test_split(
    features, target_classification, test_size=0.01, random_state=241)

# Create and train the classification tree
clf_classification = DecisionTreeClassifier(random_state=241)
clf_classification.fit(X_train_classification, y_train_classification)

# Predict on the classification test set
y_pred_classification = clf_classification.predict(X_test_classification)

# Evaluate the classification model (accuracy)
accuracy_classification = accuracy_score(y_test_classification, y_pred_classification)
print("Accuracy for classification: {:.2f}%".format(accuracy_classification * 100))

# Feature importances for regression
importance_regression = clf_regression.feature_importances_

print("Feature importances for regression")
print("Importance of 'longitude':", importance_regression[0])        # western longitude of the block
print("Importance of 'latitude':", importance_regression[1])         # northern latitude of the block
print("Importance of 'total_rooms':", importance_regression[2])      # total number of rooms in the block
print("Importance of 'total_bedrooms':", importance_regression[3])   # total number of bedrooms in the block
print("Importance of 'population':", importance_regression[4])       # number of people living in the block
print("Importance of 'households':", importance_regression[5])       # number of households in the block
print("Importance of 'median_income':", importance_regression[6])    # median household income in the block

# Feature importances for classification
importance_classification = clf_classification.feature_importances_

print()
print("Feature importances for classification")
print("Importance of 'longitude':", importance_classification[0])
print("Importance of 'latitude':", importance_classification[1])
print("Importance of 'total_rooms':", importance_classification[2])
print("Importance of 'total_bedrooms':", importance_classification[3])
print("Importance of 'population':", importance_classification[4])
print("Importance of 'households':", importance_classification[5])
print("Importance of 'median_income':", importance_classification[6])
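The hard-coded index prints above are easy to get out of sync with the feature list. A small self-contained sketch of the same idea (assuming the same housing.csv file) pairs each importance with its column name via zip instead:

```
import pandas as pd
from sklearn.tree import DecisionTreeRegressor

data = pd.read_csv('housing.csv').dropna()
cols = ['longitude', 'latitude', 'total_rooms', 'total_bedrooms',
        'population', 'households', 'median_income']
model = DecisionTreeRegressor(random_state=241).fit(data[cols], data['housing_median_age'])

# Pair every importance with its column name instead of indexing by hand
for name, value in zip(cols, model.feature_importances_):
    print(f"Importance of '{name}': {value:.3f}")
```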
42
basharin_sevastyan_lab_2/README.md
Normal file
@@ -0,0 +1,42 @@
## Lab 2. Variant 5.
### Task
Perform feature ranking. Display the score each method/model assigns to each feature and the average score. Analyse the results. Which four features turned out to be the most important by average score?

Models:

- Ridge regression `Ridge`,
- Recursive Feature Elimination `RFE`,
- Feature ranking with random forests `Random Forest Regressor`

### How to run
From the command line, in the root directory of the project files, run:
```
python main.py
```

### Technologies used
- `numpy` (alias `np`): NumPy is a library for scientific computing in Python.
- `sklearn` (scikit-learn): Scikit-learn is a library for machine learning and data analysis in Python. The following modules from this library were used:
  - `LinearRegression` - ordinary linear regression, a model for predicting a continuous target; here it serves as the base estimator for RFE.
  - `Ridge` - the ridge-regression model
  - `RFE` - the Recursive Feature Elimination feature-ranking tool
  - `RandomForestRegressor` - the random-forest regression model

### How it works
1. The program generates data for training the models: a feature matrix X and a target vector y.
1. Builds a DataFrame data whose columns are the features and whose last column is the target variable.
1. Splits the data back into the feature matrix X and the target vector y.
1. Builds a list of fitted models used for ranking the features: ridge regression, recursive feature elimination, and random-forest feature ranking.
1. Stores each model's scores in the model_scores list.
1. Prints each model's feature scores and their average score.
1. Finds the four most important features by average score and prints their indices and values.

### Results
![](ridge.png)
![](rfe.png)
![](rfr.png)
![](res.png)

### Conclusion
The four most important features by average score are
Feature 1, Feature 3, Feature 12 and Feature 6.
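As a toy illustration of how the average score described above is formed for a single feature (the three numbers below are made up, not taken from the lab's output): each model first produces a normalized score in [0, 1], and the final ranking uses their mean.

```
import numpy as np

# Hypothetical normalized scores one feature might receive from Ridge, RFE and RFR
scores = [0.9, 1.0, 0.7]
print("Average score:", np.mean(scores))  # 0.8666...
```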
67
basharin_sevastyan_lab_2/main.py
Normal file
@@ -0,0 +1,67 @@
import numpy as np
import pandas as pd
from sklearn.datasets import make_regression
from sklearn.linear_model import Ridge, LinearRegression
from sklearn.ensemble import RandomForestRegressor
from sklearn.feature_selection import RFE
from sklearn.preprocessing import MinMaxScaler

''' Task
Using the code from [1] (section "Solving the feature-ranking problem", p. 205), rank the features with the models
specified for your variant. Display the scores each method/model assigns to each feature and the average score.
Analyse the results. Which four features turned out to be the most important by average score?
(The names/indices of those features are the answer to the task.)

Variant 5.
Ridge regression (Ridge), Recursive Feature Elimination (RFE),
feature ranking with random forests (Random Forest Regressor).
'''

# Generate the data
random_state = np.random.RandomState(2)
X, y = make_regression(n_samples=750, n_features=15, noise=0.1, random_state=random_state)
data = pd.DataFrame(X, columns=[f'Feature {i}' for i in range(X.shape[1])])
data['Target'] = y
X = data.drop('Target', axis=1)
y = data['Target']

ridge = Ridge(alpha=1)  # Ridge regression
ridge.fit(X, y)

recFE = RFE(LinearRegression(), n_features_to_select=1)  # Recursive feature elimination
recFE.fit(X, y)

rfr = RandomForestRegressor()  # Feature ranking with random forests
rfr.fit(X, y)

models = [('Ridge', ridge),
          ('RFE', recFE),
          ('RFR', rfr)]
model_scores = []

for name, model in models:
    if name == 'Ridge':
        # Min-max-scale the (signed) regression coefficients to [0, 1]
        coef = model.coef_
        normalized_coef = MinMaxScaler().fit_transform(coef.reshape(-1, 1))
        model_scores.append((name, normalized_coef.flatten()))
    elif name == 'RFE':
        # Rank 1 (the best, last-eliminated feature) maps to 1.0, the worst rank to 0.0
        rankings = model.ranking_
        normalized_rankings = 1 - (rankings - 1) / (np.max(rankings) - 1)
        model_scores.append((name, normalized_rankings))
    elif name == 'RFR':
        # Min-max-scale the random-forest feature importances to [0, 1]
        feature_importances = model.feature_importances_
        normalized_importances = MinMaxScaler().fit_transform(feature_importances.reshape(-1, 1))
        model_scores.append((name, normalized_importances.flatten()))

for name, scores in model_scores:
    print(f"{name} feature scores:")
    for feature, score in enumerate(scores, start=1):
        print(f"Feature {feature}: {score:.2f}")
    print(f"Average score: {np.mean(scores):.2f}")

all_feature_scores = np.mean(list(map(lambda x: x[1], model_scores)), axis=0)
sorted_features = sorted(enumerate(all_feature_scores, start=1), key=lambda x: x[1], reverse=True)
top_features = sorted_features[:4]
print("The four most important features:")
for feature, score in top_features:
    print(f"Feature {feature}: {score:.2f}")
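One design note on the Ridge branch in main.py: the coefficients are min-max-scaled with their sign, so a feature with a large negative coefficient would receive a score near 0 even though it strongly influences the target. A hedged variant (my own adjustment, not part of the original lab) ranks by coefficient magnitude instead:

```
import numpy as np
from sklearn.datasets import make_regression
from sklearn.linear_model import Ridge
from sklearn.preprocessing import MinMaxScaler

X, y = make_regression(n_samples=750, n_features=15, noise=0.1, random_state=2)
ridge = Ridge(alpha=1).fit(X, y)

# Use |coef| so that strongly negative coefficients also count as important
abs_coef = np.abs(ridge.coef_)
ridge_scores = MinMaxScaler().fit_transform(abs_coef.reshape(-1, 1)).flatten()

for i, score in enumerate(ridge_scores, start=1):
    print(f"Feature {i}: {score:.2f}")
```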
BIN
basharin_sevastyan_lab_2/res.png
Normal file
|
After Width: | Height: | Size: 6.0 KiB |
BIN
basharin_sevastyan_lab_2/rfe.png
Normal file
|
After Width: | Height: | Size: 11 KiB |
BIN
basharin_sevastyan_lab_2/rfr.png
Normal file
|
After Width: | Height: | Size: 11 KiB |
BIN
basharin_sevastyan_lab_2/ridge.png
Normal file
|
After Width: | Height: | Size: 14 KiB |
125
belyaeva_ekaterina_lab_3/Current_Pub_Meta.csv
Normal file
@@ -0,0 +1,125 @@
|
||||
,Name,Primary Attribute,Roles,Herald Picks,Herald Wins,Herald Win Rate,Guardian Picks,Guardian Wins,Guardian Win Rate,Crusader Picks,Crusader Wins,Crusader Win Rate,Archon Picks,Archon Wins,Archon Win Rate,Legend Picks,Legend Wins,Legend Win Rate,Ancient Picks,Ancient Wins,Ancient Win Rate,Divine Picks,Divine Wins,Divine Win Rate,Immortal Picks,Immortal Wins,Immortal Win Rate
|
||||
0,Abaddon,all,"Support, Carry, Durable",1111,575,51.76,6408,3309,51.64,13811,7050,51.05,16497,8530,51.71,11360,5877,51.73,5571,2893,51.93,2632,1345,51.1,991,497,50.15
|
||||
1,Alchemist,str,"Carry, Support, Durable, Disabler, Initiator, Nuker",1119,486,43.43,6370,2883,45.26,12238,5617,45.9,13028,6130,47.05,8455,4055,47.96,4120,1984,48.16,2021,1023,50.62,860,424,49.3
|
||||
2,Ancient Apparition,int,"Support, Disabler, Nuker",2146,1073,50.0,13697,7069,51.61,30673,16118,52.55,35145,18219,51.84,23114,12166,52.63,10688,5528,51.72,5035,2573,51.1,2134,1076,50.42
|
||||
3,Anti-Mage,agi,"Carry, Escape, Nuker",3765,1818,48.29,22050,10774,48.86,47371,23304,49.19,49115,24074,49.02,28599,13991,48.92,12303,5958,48.43,4866,2349,48.27,1502,751,50.0
|
||||
4,Arc Warden,agi,"Carry, Escape, Nuker",1448,704,48.62,8047,4162,51.72,14946,7982,53.41,14711,7875,53.53,9472,5167,54.55,4323,2309,53.41,2104,1148,54.56,789,435,55.13
|
||||
5,Axe,str,"Initiator, Durable, Disabler, Carry",5343,2880,53.9,32652,17719,54.27,71010,37736,53.14,77869,40559,52.09,49182,25079,50.99,22637,11353,50.15,10114,5000,49.44,3795,1837,48.41
|
||||
6,Bane,all,"Support, Disabler, Nuker, Durable",745,334,44.83,4983,2422,48.61,11332,5504,48.57,13633,6767,49.64,10132,5032,49.66,5596,2861,51.13,3028,1555,51.35,1958,1055,53.88
|
||||
7,Batrider,all,"Initiator, Disabler, Escape",349,136,38.97,1983,812,40.95,4053,1595,39.35,4725,1861,39.39,3173,1275,40.18,1678,731,43.56,802,362,45.14,497,227,45.67
|
||||
8,Beastmaster,all,"Initiator, Disabler, Durable, Nuker",402,174,43.28,2447,1060,43.32,5787,2569,44.39,6930,3092,44.62,5288,2389,45.18,2816,1274,45.24,1593,752,47.21,1176,539,45.83
|
||||
9,Bloodseeker,agi,"Carry, Disabler, Nuker, Initiator",2765,1382,49.98,12589,6270,49.81,21781,10683,49.05,20961,10420,49.71,13035,6430,49.33,6210,3006,48.41,2941,1475,50.15,1465,718,49.01
|
||||
10,Bounty Hunter,agi,"Escape, Nuker",3852,1868,48.49,19609,9535,48.63,36362,17600,48.4,37059,18314,49.42,22934,11518,50.22,10584,5276,49.85,5105,2594,50.81,2498,1325,53.04
|
||||
11,Brewmaster,all,"Carry, Initiator, Durable, Disabler, Nuker",545,280,51.38,3564,1745,48.96,8941,4388,49.08,12340,6111,49.52,11185,5623,50.27,7645,3906,51.09,4812,2478,51.5,3533,1820,51.51
|
||||
12,Bristleback,str,"Carry, Durable, Initiator, Nuker",5884,3262,55.44,27952,14587,52.19,48847,24379,49.91,46702,22927,49.09,27466,13319,48.49,12398,5969,48.14,5865,2915,49.7,2639,1304,49.41
|
||||
13,Broodmother,all,"Carry, Pusher, Escape, Nuker",456,173,37.94,2048,842,41.11,3444,1462,42.45,3392,1448,42.69,2193,1048,47.79,1203,602,50.04,795,422,53.08,453,230,50.77
|
||||
14,Centaur Warrunner,str,"Durable, Initiator, Disabler, Nuker, Escape",1721,911,52.93,11754,6266,53.31,28691,15201,52.98,35369,18741,52.99,25393,13468,53.04,12653,6607,52.22,6124,3181,51.94,2442,1243,50.9
|
||||
15,Chaos Knight,str,"Carry, Disabler, Durable, Pusher, Initiator",3032,1639,54.06,16762,8931,53.28,31892,17139,53.74,30697,16435,53.54,18217,9810,53.85,8572,4620,53.9,4230,2291,54.16,1750,943,53.89
|
||||
16,Chen,all,"Support, Pusher",284,125,44.01,1450,678,46.76,2969,1345,45.3,3258,1604,49.23,2641,1331,50.4,1488,767,51.55,970,512,52.78,770,448,58.18
|
||||
17,Clinkz,agi,"Carry, Escape, Pusher",3151,1608,51.03,13891,7141,51.41,25465,12938,50.81,27327,14066,51.47,18846,9726,51.61,9452,4890,51.74,4765,2475,51.94,2093,1052,50.26
|
||||
18,Clockwerk,all,"Initiator, Disabler, Durable, Nuker",816,397,48.65,5860,2837,48.41,14478,6929,47.86,18466,8843,47.89,13143,6301,47.94,6612,3169,47.93,3286,1581,48.11,1378,658,47.75
|
||||
19,Crystal Maiden,int,"Support, Disabler, Nuker",4821,2529,52.46,26584,13626,51.26,52168,26040,49.92,52258,25365,48.54,30690,14848,48.38,13295,6404,48.17,5602,2680,47.84,1638,771,47.07
|
||||
20,Dark Seer,all,"Initiator, Escape, Disabler",627,320,51.04,3675,1884,51.27,7881,3803,48.26,9589,4844,50.52,7186,3573,49.72,3902,1983,50.82,2145,1095,51.05,1217,593,48.73
|
||||
21,Dark Willow,all,"Support, Nuker, Disabler, Escape",2654,1293,48.72,13829,6657,48.14,28142,13480,47.9,32114,15785,49.15,23100,11331,49.05,12052,5909,49.03,6400,3182,49.72,3708,1915,51.65
|
||||
22,Dawnbreaker,str,"Carry, Durable",1746,875,50.11,12297,6105,49.65,32398,15921,49.14,44846,21936,48.91,35474,17441,49.17,19770,9832,49.73,10637,5263,49.48,6339,3173,50.06
|
||||
23,Dazzle,all,"Support, Nuker, Disabler",2827,1418,50.16,19852,9758,49.15,48236,23691,49.11,56417,27798,49.27,38159,18642,48.85,18695,9199,49.21,8530,4239,49.7,3382,1654,48.91
|
||||
24,Death Prophet,int,"Carry, Pusher, Nuker, Disabler",1372,659,48.03,6643,3145,47.34,11987,5729,47.79,12268,5856,47.73,7455,3606,48.37,3591,1698,47.28,1872,902,48.18,926,459,49.57
|
||||
25,Disruptor,int,"Support, Disabler, Nuker, Initiator",1541,757,49.12,11104,5331,48.01,27746,13542,48.81,33742,16310,48.34,23173,11096,47.88,10907,5201,47.68,4859,2255,46.41,1863,861,46.22
|
||||
26,Doom,str,"Carry, Disabler, Initiator, Durable, Nuker",1049,474,45.19,6112,2767,45.27,13700,6056,44.2,15454,6925,44.81,10727,4842,45.14,5444,2451,45.02,2979,1348,45.25,1545,731,47.31
|
||||
27,Dragon Knight,str,"Carry, Pusher, Durable, Disabler, Initiator, Nuker",1950,942,48.31,10643,5274,49.55,20451,9733,47.59,20326,9671,47.58,11674,5544,47.49,4979,2355,47.3,2024,973,48.07,725,341,47.03
|
||||
28,Drow Ranger,agi,"Carry, Disabler, Pusher",5737,2904,50.62,29675,14831,49.98,57655,28573,49.56,56682,27927,49.27,34310,16607,48.4,15050,7171,47.65,5947,2815,47.33,1768,788,44.57
|
||||
29,Earth Spirit,str,"Nuker, Escape, Disabler, Initiator, Durable",1038,465,44.8,7420,3276,44.15,20807,9432,45.33,30107,14166,47.05,25314,12148,47.99,14579,7041,48.3,7678,3802,49.52,4379,2169,49.53
|
||||
30,Earthshaker,str,"Support, Initiator, Disabler, Nuker",5012,2455,48.98,29784,14662,49.23,67050,33111,49.38,79963,39843,49.83,57108,28961,50.71,28650,14591,50.93,14186,7296,51.43,6151,3165,51.46
|
||||
31,Elder Titan,str,"Initiator, Disabler, Nuker, Durable",471,212,45.01,2551,1248,48.92,5213,2570,49.3,5572,2809,50.41,3847,1942,50.48,1964,998,50.81,1124,613,54.54,550,292,53.09
|
||||
32,Ember Spirit,agi,"Carry, Escape, Nuker, Disabler, Initiator",1514,635,41.94,9180,3836,41.79,20578,8738,42.46,25152,10844,43.11,17703,7814,44.14,8538,3793,44.42,4265,1892,44.36,2065,928,44.94
|
||||
33,Enchantress,int,"Support, Pusher, Durable, Disabler",1794,848,47.27,8050,3622,44.99,12921,5686,44.01,11673,4974,42.61,6863,2840,41.38,2948,1212,41.11,1434,654,45.61,806,318,39.45
|
||||
34,Enigma,all,"Disabler, Initiator, Pusher",1317,588,44.65,6937,3171,45.71,12908,5979,46.32,11687,5428,46.44,6194,2839,45.83,2493,1127,45.21,938,437,46.59,338,159,47.04
|
||||
35,Faceless Void,agi,"Carry, Initiator, Disabler, Escape, Durable",4323,2043,47.26,25618,11902,46.46,54581,25874,47.4,60671,28993,47.79,40137,19611,48.86,19376,9620,49.65,9579,4828,50.4,4439,2256,50.82
|
||||
36,Grimstroke,int,"Support, Nuker, Disabler, Escape",1455,694,47.7,9714,4789,49.3,24688,12430,50.35,32027,16094,50.25,23193,11795,50.86,12102,6100,50.4,6191,3047,49.22,3449,1666,48.3
|
||||
37,Gyrocopter,agi,"Carry, Nuker, Disabler",2560,1213,47.38,16589,7882,47.51,42072,20358,48.39,54200,26229,48.39,39414,19053,48.34,20164,9781,48.51,10164,4937,48.57,5241,2507,47.83
|
||||
38,Hoodwink,agi,"Support, Nuker, Escape, Disabler",2420,1126,46.53,14034,6800,48.45,31382,14964,47.68,35684,16966,47.55,22626,10651,47.07,9949,4690,47.14,4349,2089,48.03,1533,703,45.86
|
||||
39,Huskar,str,"Carry, Durable, Initiator",3501,1603,45.79,14234,6639,46.64,22794,10912,47.87,21801,10763,49.37,13811,6919,50.1,6769,3535,52.22,3556,1822,51.24,1936,993,51.29
|
||||
40,Invoker,all,"Carry, Nuker, Disabler, Escape, Pusher",4330,2042,47.16,27625,13176,47.7,69035,33863,49.05,86745,43479,50.12,61821,31510,50.97,31459,16321,51.88,15431,8195,53.11,7852,4148,52.83
|
||||
41,Io,all,"Support, Escape, Nuker",1274,615,48.27,6158,2999,48.7,12762,6247,48.95,14216,7024,49.41,9564,4843,50.64,5301,2685,50.65,2789,1463,52.46,1464,773,52.8
|
||||
42,Jakiro,int,"Support, Nuker, Pusher, Disabler",3147,1708,54.27,22718,12413,54.64,56736,30984,54.61,70038,37473,53.5,46389,24997,53.89,22084,11639,52.7,9838,5103,51.87,3282,1729,52.68
|
||||
43,Juggernaut,agi,"Carry, Pusher, Escape",5585,2711,48.54,30394,14800,48.69,62313,30581,49.08,65590,32344,49.31,39235,19326,49.26,16334,8012,49.05,6419,3066,47.76,1576,731,46.38
|
||||
44,Keeper of the Light,int,"Support, Nuker, Disabler",896,353,39.4,5051,2216,43.87,10452,4579,43.81,11614,5322,45.82,7870,3627,46.09,4268,2001,46.88,2147,1043,48.58,1333,588,44.11
|
||||
45,Kunkka,str,"Carry, Support, Disabler, Initiator, Durable, Nuker",2251,1124,49.93,13474,6828,50.68,31210,16196,51.89,39691,21293,53.65,30314,16458,54.29,15706,8793,55.98,7884,4339,55.04,3458,1898,54.89
|
||||
46,Legion Commander,str,"Carry, Disabler, Initiator, Durable, Nuker",6263,3264,52.12,37100,19157,51.64,81491,41557,51.0,91431,46558,50.92,59383,29917,50.38,27945,13917,49.8,13193,6587,49.93,5601,2745,49.01
|
||||
47,Leshrac,int,"Carry, Support, Nuker, Pusher, Disabler",674,316,46.88,3872,1799,46.46,7490,3433,45.83,7903,3604,45.6,5322,2526,47.46,2687,1298,48.31,1325,647,48.83,721,357,49.51
|
||||
48,Lich,int,"Support, Nuker",2700,1412,52.3,16646,8820,52.99,37785,19685,52.1,45471,23554,51.8,31203,16108,51.62,15530,7821,50.36,7243,3597,49.66,2520,1258,49.92
|
||||
49,Lifestealer,str,"Carry, Durable, Escape, Disabler",2515,1213,48.23,14131,6978,49.38,29724,14627,49.21,31211,15581,49.92,18970,9481,49.98,8689,4400,50.64,3630,1821,50.17,1229,617,50.2
|
||||
50,Lina,int,"Support, Carry, Nuker, Disabler",4512,2030,44.99,21927,10156,46.32,45301,21210,46.82,54229,25956,47.86,40016,19138,47.83,21072,10112,47.99,10481,5031,48.0,4369,2138,48.94
|
||||
51,Lion,int,"Support, Disabler, Nuker, Initiator",6204,2855,46.02,37869,17465,46.12,80124,36649,45.74,84390,38176,45.24,50720,22914,45.18,21698,9784,45.09,9308,4280,45.98,3220,1496,46.46
|
||||
52,Lone Druid,all,"Carry, Pusher, Durable",909,483,53.14,4714,2421,51.36,10987,5858,53.32,14580,7968,54.65,11810,6490,54.95,7241,3971,54.84,4024,2240,55.67,2303,1259,54.67
|
||||
53,Luna,agi,"Carry, Nuker, Pusher",1927,904,46.91,9091,4271,46.98,16571,7922,47.81,16035,7615,47.49,9728,4634,47.64,4463,2103,47.12,1912,911,47.65,719,322,44.78
|
||||
54,Lycan,all,"Carry, Pusher, Durable, Escape",374,174,46.52,1894,915,48.31,3691,1744,47.25,3824,1905,49.82,2694,1332,49.44,1460,753,51.58,827,411,49.7,532,289,54.32
|
||||
55,Magnus,all,"Initiator, Disabler, Nuker, Escape",770,339,44.03,5789,2651,45.79,17837,7954,44.59,26126,12058,46.15,20634,9592,46.49,10574,5056,47.82,4565,2073,45.41,1606,751,46.76
|
||||
56,Marci,all,"Support, Carry, Initiator, Disabler, Escape",1370,620,45.26,7092,3252,45.85,15199,7240,47.63,18485,8874,48.01,13308,6305,47.38,7176,3476,48.44,3689,1882,51.02,1746,883,50.57
|
||||
57,Mars,str,"Carry, Initiator, Disabler, Durable",862,375,43.5,5719,2529,44.22,15156,6756,44.58,20719,9369,45.22,16419,7387,44.99,9044,4052,44.8,4536,2093,46.14,1926,868,45.07
|
||||
58,Medusa,agi,"Carry, Disabler, Durable",1898,902,47.52,9289,4512,48.57,16504,7818,47.37,14796,6886,46.54,7488,3449,46.06,2775,1270,45.77,1073,482,44.92,394,184,46.7
|
||||
59,Meepo,agi,"Carry, Escape, Nuker, Disabler, Initiator, Pusher",1004,523,52.09,3970,1990,50.13,6904,3587,51.96,7166,3646,50.88,4906,2563,52.24,2383,1282,53.8,1139,588,51.62,585,300,51.28
|
||||
60,Mirana,all,"Carry, Support, Escape, Nuker, Disabler",2499,1193,47.74,16954,8135,47.98,39985,19097,47.76,45169,21554,47.72,28467,13456,47.27,12800,6047,47.24,5272,2500,47.42,1824,874,47.92
|
||||
61,Monkey King,agi,"Carry, Escape, Disabler, Initiator",3191,1384,43.37,17306,7544,43.59,35734,16113,45.09,40778,18322,44.93,27558,12630,45.83,14034,6433,45.84,6650,3152,47.4,3040,1440,47.37
|
||||
62,Morphling,agi,"Carry, Escape, Durable, Nuker, Disabler",1521,690,45.36,8620,4006,46.47,18075,8161,45.15,20414,9235,45.24,14395,6530,45.36,7697,3551,46.13,4432,2050,46.25,2560,1190,46.48
|
||||
63,Muerta,int,"Carry, Nuker, Disabler",2130,1089,51.13,10787,5740,53.21,22602,11898,52.64,27609,14495,52.5,20175,10465,51.87,10662,5518,51.75,5462,2759,50.51,2948,1517,51.46
|
||||
64,Naga Siren,agi,"Carry, Support, Pusher, Disabler, Initiator, Escape",1502,804,53.53,6495,3356,51.67,10423,5234,50.22,9830,4929,50.14,6057,2971,49.05,3216,1675,52.08,1855,933,50.3,1242,634,51.05
|
||||
65,Nature's Prophet,int,"Carry, Pusher, Escape, Nuker",5991,3029,50.56,36433,18143,49.8,83118,42095,50.64,100341,51268,51.09,69436,35870,51.66,34256,17858,52.13,16585,8745,52.73,7182,3755,52.28
|
||||
66,Necrophos,int,"Carry, Nuker, Durable, Disabler",4776,2702,56.57,28535,15771,55.27,62186,34285,55.13,70212,38163,54.35,46539,24708,53.09,21607,11302,52.31,9677,4994,51.61,3418,1733,50.7
|
||||
67,Night Stalker,str,"Carry, Initiator, Durable, Disabler, Nuker",1189,594,49.96,7868,3892,49.47,19446,10004,51.45,25524,13506,52.91,20138,10828,53.77,10767,5651,52.48,5499,2889,52.54,2415,1257,52.05
|
||||
68,Nyx Assassin,all,"Disabler, Nuker, Initiator, Escape",1718,867,50.47,10925,5525,50.57,27207,14073,51.73,34684,18059,52.07,25736,13572,52.74,13313,7093,53.28,6485,3444,53.11,2852,1468,51.47
|
||||
69,Ogre Magi,str,"Support, Nuker, Disabler, Durable, Initiator",5331,2845,53.37,31507,16299,51.73,62954,32248,51.22,61758,31373,50.8,33746,16988,50.34,13262,6654,50.17,4861,2420,49.78,1271,654,51.46
|
||||
70,Omniknight,str,"Support, Durable, Nuker",975,479,49.13,6426,3109,48.38,14641,7319,49.99,17258,8731,50.59,11695,5916,50.59,5746,2993,52.09,2870,1469,51.18,1333,656,49.21
|
||||
71,Oracle,int,"Support, Nuker, Disabler, Escape",796,384,48.24,4857,2417,49.76,13141,6645,50.57,18944,9853,52.01,15221,7964,52.32,8356,4458,53.35,4475,2380,53.18,1905,1018,53.44
|
||||
72,Outworld Destroyer,int,"Carry, Nuker, Disabler",2226,1118,50.22,13388,6864,51.27,33284,17362,52.16,43991,23377,53.14,32021,16994,53.07,16655,8724,52.38,8123,4218,51.93,3176,1649,51.92
|
||||
73,Pangolier,all,"Carry, Nuker, Disabler, Durable, Escape, Initiator",1156,534,46.19,7189,3209,44.64,17802,7937,44.58,25785,11677,45.29,21727,10144,46.69,13064,6351,48.61,7567,3737,49.39,5275,2734,51.83
|
||||
74,Phantom Assassin,agi,"Carry, Escape",8553,4426,51.75,48549,25553,52.63,104756,54881,52.39,119332,62511,52.38,79140,41143,51.99,37399,19325,51.67,17774,9077,51.07,7819,3856,49.32
|
||||
75,Phantom Lancer,agi,"Carry, Escape, Pusher, Nuker",3641,1960,53.83,19550,10374,53.06,38576,20633,53.49,41505,22310,53.75,26401,14268,54.04,12437,6590,52.99,5708,2985,52.3,2383,1243,52.16
|
||||
76,Phoenix,all,"Support, Nuker, Initiator, Escape, Disabler",743,315,42.4,5231,2471,47.24,13950,6633,47.55,18350,8864,48.31,13972,6715,48.06,7787,3761,48.3,4322,2132,49.33,2610,1325,50.77
|
||||
77,Primal Beast,str,"Initiator, Durable, Disabler",1455,701,48.18,9333,4448,47.66,22800,11058,48.5,30084,14643,48.67,24307,11993,49.34,13970,6991,50.04,7742,3890,50.25,4625,2407,52.04
|
||||
78,Puck,int,"Initiator, Disabler, Escape, Nuker",871,399,45.81,5773,2628,45.52,16596,7578,45.66,24480,11315,46.22,20070,9497,47.32,11023,5298,48.06,5656,2714,47.98,2555,1200,46.97
|
||||
79,Pudge,str,"Disabler, Initiator, Durable, Nuker",7677,3796,49.45,50891,24776,48.68,114784,56289,49.04,129604,63097,48.68,85800,41542,48.42,41730,20239,48.5,19823,9530,48.08,7112,3431,48.24
|
||||
80,Pugna,int,"Nuker, Pusher",2075,944,45.49,9998,4695,46.96,18962,8958,47.24,20240,9965,49.23,12807,6199,48.4,5825,2855,49.01,2758,1387,50.29,1195,592,49.54
|
||||
81,Queen of Pain,int,"Carry, Nuker, Escape",2287,1100,48.1,15119,7354,48.64,37137,18118,48.79,47706,23657,49.59,35500,18018,50.75,18405,9289,50.47,9243,4689,50.73,4227,2113,49.99
|
||||
82,Razor,agi,"Carry, Durable, Nuker, Pusher",2470,1231,49.84,12000,5964,49.7,24666,12142,49.23,30334,14844,48.94,21832,10558,48.36,11917,5679,47.65,6092,2912,47.8,3144,1551,49.33
|
||||
83,Riki,agi,"Carry, Escape, Disabler",3684,1929,52.36,19022,9891,52.0,35638,18582,52.14,33908,17415,51.36,20194,10312,51.06,8726,4377,50.16,3735,1855,49.67,1160,559,48.19
|
||||
84,Rubick,int,"Support, Disabler, Nuker",3090,1404,45.44,21639,9303,42.99,57417,24590,42.83,74874,32603,43.54,55186,24219,43.89,28206,12568,44.56,13732,6106,44.47,5764,2642,45.84
|
||||
85,Sand King,all,"Initiator, Disabler, Support, Nuker, Escape",2633,1513,57.46,13097,7323,55.91,25271,13807,54.64,26724,14323,53.6,17384,9144,52.6,7907,4104,51.9,3394,1719,50.65,1211,611,50.45
|
||||
86,Shadow Demon,int,"Support, Disabler, Initiator, Nuker",547,236,43.14,3252,1426,43.85,7920,3524,44.49,9752,4551,46.67,7404,3467,46.83,3956,1876,47.42,2076,1004,48.36,1054,497,47.15
|
||||
87,Shadow Fiend,agi,"Carry, Nuker",5051,2544,50.37,27255,14064,51.6,58589,29830,50.91,65429,33097,50.58,41810,21189,50.68,18766,9401,50.1,8232,4000,48.59,3016,1430,47.41
|
||||
88,Shadow Shaman,int,"Support, Pusher, Disabler, Nuker, Initiator",5323,2795,52.51,29733,15606,52.49,58894,31236,53.04,58765,30895,52.57,34475,18242,52.91,15166,7986,52.66,6377,3323,52.11,2413,1253,51.93
|
||||
89,Silencer,int,"Carry, Support, Disabler, Initiator, Nuker",4229,2324,54.95,27878,14960,53.66,61698,33081,53.62,65256,34458,52.8,38589,19853,51.45,16889,8653,51.23,6836,3416,49.97,2236,1105,49.42
|
||||
90,Skywrath Mage,int,"Support, Nuker, Disabler",4000,2030,50.75,22783,11675,51.24,46512,23624,50.79,51329,25706,50.08,34167,17364,50.82,16693,8415,50.41,8496,4208,49.53,4389,2069,47.14
|
||||
91,Slardar,str,"Carry, Durable, Initiator, Disabler, Escape",3935,2129,54.1,21523,11602,53.91,43947,23701,53.93,47721,25633,53.71,29887,16132,53.98,14233,7722,54.25,6530,3467,53.09,2322,1205,51.89
|
||||
92,Slark,agi,"Carry, Escape, Disabler, Nuker",4815,2521,52.36,29413,14762,50.19,64004,31771,49.64,70173,34411,49.04,44780,21926,48.96,20864,10270,49.22,9969,4962,49.77,4565,2394,52.44
|
||||
93,Snapfire,all,"Support, Nuker, Disabler, Escape",1524,682,44.75,10646,4576,42.98,27103,12120,44.72,34711,15412,44.4,24351,10786,44.29,11723,5131,43.77,5227,2294,43.89,1987,868,43.68
|
||||
94,Sniper,agi,"Carry, Nuker",8022,4079,50.85,44508,22727,51.06,88690,45223,50.99,87190,44086,50.56,47411,23648,49.88,18092,8924,49.33,6130,3040,49.59,1370,662,48.32
|
||||
95,Spectre,agi,"Carry, Durable, Escape",3454,2008,58.14,22097,12356,55.92,49157,26961,54.85,55914,30100,53.83,36321,19338,53.24,16946,8960,52.87,7921,4163,52.56,2568,1370,53.35
|
||||
96,Spirit Breaker,str,"Carry, Initiator, Disabler, Durable, Escape",4788,2423,50.61,26662,13530,50.75,56535,28908,51.13,63991,32249,50.4,42512,21357,50.24,20119,9926,49.34,9499,4814,50.68,3761,1884,50.09
|
||||
97,Storm Spirit,int,"Carry, Escape, Nuker, Initiator, Disabler",2202,1001,45.46,11656,5197,44.59,25644,11806,46.04,30968,14210,45.89,21680,10197,47.03,10810,5025,46.48,5278,2382,45.13,2363,1122,47.48
|
||||
98,Sven,str,"Carry, Disabler, Initiator, Durable, Nuker",3552,1761,49.58,19792,9744,49.23,41296,20478,49.59,48709,24228,49.74,35460,17828,50.28,19795,10065,50.85,11014,5655,51.34,6701,3387,50.54
|
||||
99,Techies,all,"Nuker, Disabler",2356,1131,48.01,13105,6245,47.65,27293,12893,47.24,29180,13507,46.29,18216,8407,46.15,8266,3771,45.62,3459,1644,47.53,1319,591,44.81
|
||||
100,Templar Assassin,agi,"Carry, Escape",2142,955,44.58,10932,4758,43.52,21211,9445,44.53,23928,10909,45.59,17399,8242,47.37,9567,4656,48.67,5525,2708,49.01,3524,1775,50.37
|
||||
101,Terrorblade,agi,"Carry, Pusher, Nuker",1115,484,43.41,5686,2430,42.74,10856,4638,42.72,11518,5041,43.77,8059,3540,43.93,4192,1827,43.58,2419,1082,44.73,1621,700,43.18
|
||||
102,Tidehunter,str,"Initiator, Durable, Disabler, Nuker, Carry",1835,855,46.59,11159,5369,48.11,26222,12699,48.43,30735,14879,48.41,20523,9727,47.4,9731,4740,48.71,4426,2079,46.97,1998,936,46.85
|
||||
103,Timbersaw,all,"Nuker, Durable, Escape",1050,448,42.67,5854,2584,44.14,12301,5391,43.83,14295,6097,42.65,9697,4217,43.49,4992,2163,43.33,2419,1021,42.21,1139,471,41.35
|
||||
104,Tinker,int,"Carry, Nuker, Pusher",2106,944,44.82,11058,5200,47.02,24263,11826,48.74,27531,13614,49.45,19017,9732,51.18,9416,4875,51.77,4700,2466,52.47,1951,1036,53.1
|
||||
105,Tiny,str,"Carry, Nuker, Pusher, Initiator, Durable, Disabler",1434,654,45.61,7742,3452,44.59,15936,6950,43.61,17139,7468,43.57,11269,4991,44.29,5485,2491,45.41,2599,1216,46.79,1058,519,49.05
|
||||
106,Treant Protector,str,"Support, Initiator, Durable, Disabler, Escape",1646,899,54.62,11430,5881,51.45,28752,15124,52.6,36093,19344,53.59,28762,15532,54.0,16751,9227,55.08,9870,5468,55.4,6801,3855,56.68
|
||||
107,Troll Warlord,agi,"Carry, Pusher, Disabler, Durable",3176,1720,54.16,14007,7445,53.15,24729,13022,52.66,25424,13228,52.03,17362,9030,52.01,9427,4913,52.12,4767,2499,52.42,2341,1242,53.05
|
||||
108,Tusk,str,"Initiator, Disabler, Nuker",1263,565,44.73,8338,3777,45.3,19642,8869,45.15,25308,11520,45.52,18927,8853,46.77,10100,4820,47.72,5220,2502,47.93,2350,1157,49.23
|
||||
109,Underlord,str,"Support, Nuker, Disabler, Durable, Escape",797,405,50.82,4583,2341,51.08,10067,5057,50.23,11650,5786,49.67,7224,3561,49.29,3310,1591,48.07,1368,673,49.2,395,190,48.1
|
||||
110,Undying,str,"Support, Durable, Disabler, Nuker",3170,1620,51.1,19403,10116,52.14,40582,21110,52.02,40850,21182,51.85,23985,12454,51.92,10395,5389,51.84,4541,2336,51.44,2064,1012,49.03
|
||||
111,Ursa,agi,"Carry, Durable, Disabler",2801,1273,45.45,15132,7038,46.51,33269,15478,46.52,40822,19264,47.19,29348,14011,47.74,15262,7375,48.32,7507,3622,48.25,3004,1473,49.03
|
||||
112,Vengeful Spirit,all,"Support, Initiator, Disabler, Nuker, Escape",2186,1108,50.69,15817,8285,52.38,41843,21809,52.12,57524,30476,52.98,45512,24120,53.0,25581,13382,52.31,13758,7121,51.76,8276,4303,51.99
|
||||
113,Venomancer,all,"Support, Nuker, Initiator, Pusher, Disabler",2309,1187,51.41,14669,7463,50.88,34787,18020,51.8,41797,21690,51.89,28706,15085,52.55,13974,7338,52.51,6538,3495,53.46,2794,1459,52.22
|
||||
114,Viper,agi,"Carry, Durable, Initiator, Disabler",4100,2057,50.17,18991,9510,50.08,33517,16923,50.49,32728,16677,50.96,18537,9427,50.86,7851,3928,50.03,3260,1652,50.67,1176,610,51.87
|
||||
115,Visage,all,"Support, Nuker, Durable, Disabler, Pusher",331,171,51.66,1638,813,49.63,3240,1577,48.67,3840,1986,51.72,3108,1609,51.77,1995,1055,52.88,1309,702,53.63,858,457,53.26
|
||||
116,Void Spirit,all,"Carry, Escape, Nuker, Disabler",1565,727,46.45,8672,4096,47.23,20010,9694,48.45,25213,12376,49.09,18817,9231,49.06,10026,4920,49.07,4788,2319,48.43,2006,964,48.06
|
||||
117,Warlock,int,"Support, Initiator, Disabler",2547,1369,53.75,18931,10331,54.57,49795,26999,54.22,66697,36220,54.31,48401,25668,53.03,24999,12942,51.77,12575,6356,50.54,6183,2934,47.45
|
||||
118,Weaver,agi,"Carry, Escape",2818,1389,49.29,13873,6770,48.8,23493,11571,49.25,21545,10694,49.64,12911,6427,49.78,5809,2928,50.4,2960,1455,49.16,1303,719,55.18
|
||||
119,Windranger,all,"Carry, Support, Disabler, Escape, Nuker",3861,1814,46.98,19934,9223,46.27,40644,18807,46.27,44476,20652,46.43,28952,13508,46.66,13418,6297,46.93,5898,2782,47.17,2374,1142,48.1
|
||||
120,Winter Wyvern,all,"Support, Disabler, Nuker",821,371,45.19,5168,2424,46.9,10544,5014,47.55,11184,5308,47.46,7426,3512,47.29,3730,1854,49.71,1862,934,50.16,944,464,49.15
|
||||
121,Witch Doctor,int,"Support, Nuker, Disabler",7504,4173,55.61,45501,25616,56.3,99664,54963,55.15,111382,60421,54.25,71830,37860,52.71,33164,17334,52.27,14610,7442,50.94,4196,2076,49.48
|
||||
122,Wraith King,str,"Carry, Support, Durable, Disabler, Initiator",4175,2266,54.28,26362,14516,55.06,58733,32403,55.17,66283,36503,55.07,42360,23083,54.49,19084,10251,53.72,8334,4315,51.78,2707,1376,50.83
|
||||
123,Zeus,int,"Nuker, Carry",4132,2106,50.97,23721,12487,52.64,51568,27475,53.28,58333,31078,53.28,37821,20047,53.0,17901,9504,53.09,8539,4459,52.22,3400,1791,52.68
|
||||
|
35
belyaeva_ekaterina_lab_3/README.md
Normal file
@@ -0,0 +1,35 @@
## Task

Using a library implementation of a decision tree, solve the problem from the "Decision Tree web service" lab of the "Artificial Intelligence Methods" course on 99% of your data. Check the model on the remaining one percent and draw a conclusion.


## How to run the lab
Run the file main.py
## Technologies used
The pandas and scikit-learn libraries and their components
## Description of the lab (program)
The code uses a dataset about Dota 2 heroes that describes each hero's attribute, roles and name, together with how often the hero is picked and what its win rate is at every rank in Dota, from Herald to Immortal.

In my case the task was to predict a hero's win rate at the Herald rank from its primary attribute, its role (I took two roles - Support and Carry) and how often it is picked at Herald.

The program takes the columns Herald Win Rate, Primary Attribute, Herald Picks and Roles, then filters the Roles column and keeps the heroes that have the Support or Carry role. It then creates
two new columns - IsCarry and IsSupport - because the Roles column holds several values and has to be dropped.

The data are then split into training and test sets and the dependence of the win rate on the remaining features is estimated.

Finally, the program prints how important the selected features were for predicting the win rate and the model's score.


## Result

The output is the following:

Feature Importances: [0.08035262 0.82893841 0.00453277 0.08617619]
Score: 0.23055568233652535

Conclusion: the importances are listed in the order of the feature columns (Primary Attribute, Herald Picks, IsCarry, IsSupport), so the dominant feature for predicting the win rate is Herald Picks with an importance of about 0.83; Primary Attribute and IsSupport contribute only around 0.08 each, and IsCarry almost nothing.

The model's score (the regressor's R² on the test set) came out relatively low, but that is easy to explain: in Dota it is impossible to predict a hero's win rate precisely from features like these. A hero's win rate is driven mainly by which heroes are strong in the current meta, which depends on their abilities and on patch changes that are not described in the dataset (and there is no dataset in which they could be).

Nevertheless, the program showed that at Herald rank a hero's win rate in this dataset is related mostly to how often the hero is picked, with the primary attribute playing only a secondary role.
47
belyaeva_ekaterina_lab_3/main.py
Normal file
@@ -0,0 +1,47 @@
import pandas as pd
from sklearn.tree import DecisionTreeRegressor
from sklearn.model_selection import train_test_split

# Load the data
data = pd.read_csv("Current_Pub_Meta.csv")

# Select the relevant columns
selected_columns = ['Herald Win Rate', 'Primary Attribute', 'Herald Picks', 'Roles']
data = data[selected_columns]

# Keep only heroes with the Carry or Support role (copy to avoid a SettingWithCopyWarning)
data = data[data['Roles'].apply(lambda x: 'Carry' in x or 'Support' in x)].copy()

# Create a 0/1 column for each of the two roles
data['IsCarry'] = data['Roles'].apply(lambda x: 1 if 'Carry' in x else 0)
data['IsSupport'] = data['Roles'].apply(lambda x: 1 if 'Support' in x else 0)

# Drop the Roles column (it holds several values per hero)
data.drop('Roles', axis=1, inplace=True)

# Encode the categorical primary attribute as an integer
data['Primary Attribute'] = data['Primary Attribute'].map({'str': 0, 'all': 1, 'int': 2, 'agi': 3})

# Split the data into training and test sets
X = data.drop('Herald Win Rate', axis=1)
y = data['Herald Win Rate']
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Train the model
model = DecisionTreeRegressor()
model.fit(X_train, y_train)

# Predict on the test set
y_pred = model.predict(X_test)

# Show the processed data
print("Processed data:")
print(data)

# Feature importances
feature_importances = model.feature_importances_
print("Feature Importances:", feature_importances)

# Model score (R^2 on the test set)
score = model.score(X_test, y_test)
print("Score:", score)
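To put the Score of about 0.23 reported in the README into context, here is a small hedged check (my own addition, not part of the original lab) that compares the tree against a trivial mean-predicting baseline: a DummyRegressor scores an R² near zero (or slightly below) on the test set, so a score clearly above that means the selected features carry at least some signal.

```
import pandas as pd
from sklearn.dummy import DummyRegressor
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeRegressor

# Same preprocessing as in main.py, condensed
data = pd.read_csv("Current_Pub_Meta.csv")
data = data[data['Roles'].apply(lambda x: 'Carry' in x or 'Support' in x)].copy()
data['IsCarry'] = data['Roles'].str.contains('Carry').astype(int)
data['IsSupport'] = data['Roles'].str.contains('Support').astype(int)
data['Primary Attribute'] = data['Primary Attribute'].map({'str': 0, 'all': 1, 'int': 2, 'agi': 3})

X = data[['Primary Attribute', 'Herald Picks', 'IsCarry', 'IsSupport']]
y = data['Herald Win Rate']
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

baseline = DummyRegressor(strategy='mean').fit(X_train, y_train)
tree = DecisionTreeRegressor(random_state=42).fit(X_train, y_train)

print("Baseline R^2:", baseline.score(X_test, y_test))  # roughly zero or slightly negative
print("Tree R^2:    ", tree.score(X_test, y_test))
```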
125
belyaeva_ekaterina_lab_4/Current_Pub_Meta.csv
Normal file
@@ -0,0 +1,125 @@
|
||||
,Name,Primary Attribute,Roles,Herald Picks,Herald Wins,Herald Win Rate,Guardian Picks,Guardian Wins,Guardian Win Rate,Crusader Picks,Crusader Wins,Crusader Win Rate,Archon Picks,Archon Wins,Archon Win Rate,Legend Picks,Legend Wins,Legend Win Rate,Ancient Picks,Ancient Wins,Ancient Win Rate,Divine Picks,Divine Wins,Divine Win Rate,Immortal Picks,Immortal Wins,Immortal Win Rate
|
||||
0,Abaddon,all,"Support, Carry, Durable",1111,575,51.76,6408,3309,51.64,13811,7050,51.05,16497,8530,51.71,11360,5877,51.73,5571,2893,51.93,2632,1345,51.1,991,497,50.15
|
||||
1,Alchemist,str,"Carry, Support, Durable, Disabler, Initiator, Nuker",1119,486,43.43,6370,2883,45.26,12238,5617,45.9,13028,6130,47.05,8455,4055,47.96,4120,1984,48.16,2021,1023,50.62,860,424,49.3
|
||||
2,Ancient Apparition,int,"Support, Disabler, Nuker",2146,1073,50.0,13697,7069,51.61,30673,16118,52.55,35145,18219,51.84,23114,12166,52.63,10688,5528,51.72,5035,2573,51.1,2134,1076,50.42
|
||||
3,Anti-Mage,agi,"Carry, Escape, Nuker",3765,1818,48.29,22050,10774,48.86,47371,23304,49.19,49115,24074,49.02,28599,13991,48.92,12303,5958,48.43,4866,2349,48.27,1502,751,50.0
|
||||
4,Arc Warden,agi,"Carry, Escape, Nuker",1448,704,48.62,8047,4162,51.72,14946,7982,53.41,14711,7875,53.53,9472,5167,54.55,4323,2309,53.41,2104,1148,54.56,789,435,55.13
|
||||
5,Axe,str,"Initiator, Durable, Disabler, Carry",5343,2880,53.9,32652,17719,54.27,71010,37736,53.14,77869,40559,52.09,49182,25079,50.99,22637,11353,50.15,10114,5000,49.44,3795,1837,48.41
|
||||
6,Bane,all,"Support, Disabler, Nuker, Durable",745,334,44.83,4983,2422,48.61,11332,5504,48.57,13633,6767,49.64,10132,5032,49.66,5596,2861,51.13,3028,1555,51.35,1958,1055,53.88
|
||||
7,Batrider,all,"Initiator, Disabler, Escape",349,136,38.97,1983,812,40.95,4053,1595,39.35,4725,1861,39.39,3173,1275,40.18,1678,731,43.56,802,362,45.14,497,227,45.67
|
||||
8,Beastmaster,all,"Initiator, Disabler, Durable, Nuker",402,174,43.28,2447,1060,43.32,5787,2569,44.39,6930,3092,44.62,5288,2389,45.18,2816,1274,45.24,1593,752,47.21,1176,539,45.83
|
||||
9,Bloodseeker,agi,"Carry, Disabler, Nuker, Initiator",2765,1382,49.98,12589,6270,49.81,21781,10683,49.05,20961,10420,49.71,13035,6430,49.33,6210,3006,48.41,2941,1475,50.15,1465,718,49.01
|
||||
10,Bounty Hunter,agi,"Escape, Nuker",3852,1868,48.49,19609,9535,48.63,36362,17600,48.4,37059,18314,49.42,22934,11518,50.22,10584,5276,49.85,5105,2594,50.81,2498,1325,53.04
|
||||
11,Brewmaster,all,"Carry, Initiator, Durable, Disabler, Nuker",545,280,51.38,3564,1745,48.96,8941,4388,49.08,12340,6111,49.52,11185,5623,50.27,7645,3906,51.09,4812,2478,51.5,3533,1820,51.51
|
||||
12,Bristleback,str,"Carry, Durable, Initiator, Nuker",5884,3262,55.44,27952,14587,52.19,48847,24379,49.91,46702,22927,49.09,27466,13319,48.49,12398,5969,48.14,5865,2915,49.7,2639,1304,49.41
|
||||
13,Broodmother,all,"Carry, Pusher, Escape, Nuker",456,173,37.94,2048,842,41.11,3444,1462,42.45,3392,1448,42.69,2193,1048,47.79,1203,602,50.04,795,422,53.08,453,230,50.77
|
||||
14,Centaur Warrunner,str,"Durable, Initiator, Disabler, Nuker, Escape",1721,911,52.93,11754,6266,53.31,28691,15201,52.98,35369,18741,52.99,25393,13468,53.04,12653,6607,52.22,6124,3181,51.94,2442,1243,50.9
|
||||
15,Chaos Knight,str,"Carry, Disabler, Durable, Pusher, Initiator",3032,1639,54.06,16762,8931,53.28,31892,17139,53.74,30697,16435,53.54,18217,9810,53.85,8572,4620,53.9,4230,2291,54.16,1750,943,53.89
|
||||
16,Chen,all,"Support, Pusher",284,125,44.01,1450,678,46.76,2969,1345,45.3,3258,1604,49.23,2641,1331,50.4,1488,767,51.55,970,512,52.78,770,448,58.18
|
||||
17,Clinkz,agi,"Carry, Escape, Pusher",3151,1608,51.03,13891,7141,51.41,25465,12938,50.81,27327,14066,51.47,18846,9726,51.61,9452,4890,51.74,4765,2475,51.94,2093,1052,50.26
|
||||
18,Clockwerk,all,"Initiator, Disabler, Durable, Nuker",816,397,48.65,5860,2837,48.41,14478,6929,47.86,18466,8843,47.89,13143,6301,47.94,6612,3169,47.93,3286,1581,48.11,1378,658,47.75
|
||||
19,Crystal Maiden,int,"Support, Disabler, Nuker",4821,2529,52.46,26584,13626,51.26,52168,26040,49.92,52258,25365,48.54,30690,14848,48.38,13295,6404,48.17,5602,2680,47.84,1638,771,47.07
|
||||
20,Dark Seer,all,"Initiator, Escape, Disabler",627,320,51.04,3675,1884,51.27,7881,3803,48.26,9589,4844,50.52,7186,3573,49.72,3902,1983,50.82,2145,1095,51.05,1217,593,48.73
|
||||
21,Dark Willow,all,"Support, Nuker, Disabler, Escape",2654,1293,48.72,13829,6657,48.14,28142,13480,47.9,32114,15785,49.15,23100,11331,49.05,12052,5909,49.03,6400,3182,49.72,3708,1915,51.65
|
||||
22,Dawnbreaker,str,"Carry, Durable",1746,875,50.11,12297,6105,49.65,32398,15921,49.14,44846,21936,48.91,35474,17441,49.17,19770,9832,49.73,10637,5263,49.48,6339,3173,50.06
|
||||
23,Dazzle,all,"Support, Nuker, Disabler",2827,1418,50.16,19852,9758,49.15,48236,23691,49.11,56417,27798,49.27,38159,18642,48.85,18695,9199,49.21,8530,4239,49.7,3382,1654,48.91
|
||||
24,Death Prophet,int,"Carry, Pusher, Nuker, Disabler",1372,659,48.03,6643,3145,47.34,11987,5729,47.79,12268,5856,47.73,7455,3606,48.37,3591,1698,47.28,1872,902,48.18,926,459,49.57
|
||||
25,Disruptor,int,"Support, Disabler, Nuker, Initiator",1541,757,49.12,11104,5331,48.01,27746,13542,48.81,33742,16310,48.34,23173,11096,47.88,10907,5201,47.68,4859,2255,46.41,1863,861,46.22
|
||||
26,Doom,str,"Carry, Disabler, Initiator, Durable, Nuker",1049,474,45.19,6112,2767,45.27,13700,6056,44.2,15454,6925,44.81,10727,4842,45.14,5444,2451,45.02,2979,1348,45.25,1545,731,47.31
|
||||
27,Dragon Knight,str,"Carry, Pusher, Durable, Disabler, Initiator, Nuker",1950,942,48.31,10643,5274,49.55,20451,9733,47.59,20326,9671,47.58,11674,5544,47.49,4979,2355,47.3,2024,973,48.07,725,341,47.03
|
||||
28,Drow Ranger,agi,"Carry, Disabler, Pusher",5737,2904,50.62,29675,14831,49.98,57655,28573,49.56,56682,27927,49.27,34310,16607,48.4,15050,7171,47.65,5947,2815,47.33,1768,788,44.57
|
||||
29,Earth Spirit,str,"Nuker, Escape, Disabler, Initiator, Durable",1038,465,44.8,7420,3276,44.15,20807,9432,45.33,30107,14166,47.05,25314,12148,47.99,14579,7041,48.3,7678,3802,49.52,4379,2169,49.53
|
||||
30,Earthshaker,str,"Support, Initiator, Disabler, Nuker",5012,2455,48.98,29784,14662,49.23,67050,33111,49.38,79963,39843,49.83,57108,28961,50.71,28650,14591,50.93,14186,7296,51.43,6151,3165,51.46
|
||||
31,Elder Titan,str,"Initiator, Disabler, Nuker, Durable",471,212,45.01,2551,1248,48.92,5213,2570,49.3,5572,2809,50.41,3847,1942,50.48,1964,998,50.81,1124,613,54.54,550,292,53.09
|
||||
32,Ember Spirit,agi,"Carry, Escape, Nuker, Disabler, Initiator",1514,635,41.94,9180,3836,41.79,20578,8738,42.46,25152,10844,43.11,17703,7814,44.14,8538,3793,44.42,4265,1892,44.36,2065,928,44.94
|
||||
33,Enchantress,int,"Support, Pusher, Durable, Disabler",1794,848,47.27,8050,3622,44.99,12921,5686,44.01,11673,4974,42.61,6863,2840,41.38,2948,1212,41.11,1434,654,45.61,806,318,39.45
|
||||
34,Enigma,all,"Disabler, Initiator, Pusher",1317,588,44.65,6937,3171,45.71,12908,5979,46.32,11687,5428,46.44,6194,2839,45.83,2493,1127,45.21,938,437,46.59,338,159,47.04
|
||||
35,Faceless Void,agi,"Carry, Initiator, Disabler, Escape, Durable",4323,2043,47.26,25618,11902,46.46,54581,25874,47.4,60671,28993,47.79,40137,19611,48.86,19376,9620,49.65,9579,4828,50.4,4439,2256,50.82
|
||||
36,Grimstroke,int,"Support, Nuker, Disabler, Escape",1455,694,47.7,9714,4789,49.3,24688,12430,50.35,32027,16094,50.25,23193,11795,50.86,12102,6100,50.4,6191,3047,49.22,3449,1666,48.3
|
||||
37,Gyrocopter,agi,"Carry, Nuker, Disabler",2560,1213,47.38,16589,7882,47.51,42072,20358,48.39,54200,26229,48.39,39414,19053,48.34,20164,9781,48.51,10164,4937,48.57,5241,2507,47.83
|
||||
38,Hoodwink,agi,"Support, Nuker, Escape, Disabler",2420,1126,46.53,14034,6800,48.45,31382,14964,47.68,35684,16966,47.55,22626,10651,47.07,9949,4690,47.14,4349,2089,48.03,1533,703,45.86
|
||||
39,Huskar,str,"Carry, Durable, Initiator",3501,1603,45.79,14234,6639,46.64,22794,10912,47.87,21801,10763,49.37,13811,6919,50.1,6769,3535,52.22,3556,1822,51.24,1936,993,51.29
|
||||
40,Invoker,all,"Carry, Nuker, Disabler, Escape, Pusher",4330,2042,47.16,27625,13176,47.7,69035,33863,49.05,86745,43479,50.12,61821,31510,50.97,31459,16321,51.88,15431,8195,53.11,7852,4148,52.83
|
||||
41,Io,all,"Support, Escape, Nuker",1274,615,48.27,6158,2999,48.7,12762,6247,48.95,14216,7024,49.41,9564,4843,50.64,5301,2685,50.65,2789,1463,52.46,1464,773,52.8
|
||||
42,Jakiro,int,"Support, Nuker, Pusher, Disabler",3147,1708,54.27,22718,12413,54.64,56736,30984,54.61,70038,37473,53.5,46389,24997,53.89,22084,11639,52.7,9838,5103,51.87,3282,1729,52.68
|
||||
43,Juggernaut,agi,"Carry, Pusher, Escape",5585,2711,48.54,30394,14800,48.69,62313,30581,49.08,65590,32344,49.31,39235,19326,49.26,16334,8012,49.05,6419,3066,47.76,1576,731,46.38
|
||||
44,Keeper of the Light,int,"Support, Nuker, Disabler",896,353,39.4,5051,2216,43.87,10452,4579,43.81,11614,5322,45.82,7870,3627,46.09,4268,2001,46.88,2147,1043,48.58,1333,588,44.11
|
||||
45,Kunkka,str,"Carry, Support, Disabler, Initiator, Durable, Nuker",2251,1124,49.93,13474,6828,50.68,31210,16196,51.89,39691,21293,53.65,30314,16458,54.29,15706,8793,55.98,7884,4339,55.04,3458,1898,54.89
|
||||
46,Legion Commander,str,"Carry, Disabler, Initiator, Durable, Nuker",6263,3264,52.12,37100,19157,51.64,81491,41557,51.0,91431,46558,50.92,59383,29917,50.38,27945,13917,49.8,13193,6587,49.93,5601,2745,49.01
|
||||
47,Leshrac,int,"Carry, Support, Nuker, Pusher, Disabler",674,316,46.88,3872,1799,46.46,7490,3433,45.83,7903,3604,45.6,5322,2526,47.46,2687,1298,48.31,1325,647,48.83,721,357,49.51
|
||||
48,Lich,int,"Support, Nuker",2700,1412,52.3,16646,8820,52.99,37785,19685,52.1,45471,23554,51.8,31203,16108,51.62,15530,7821,50.36,7243,3597,49.66,2520,1258,49.92
|
||||
49,Lifestealer,str,"Carry, Durable, Escape, Disabler",2515,1213,48.23,14131,6978,49.38,29724,14627,49.21,31211,15581,49.92,18970,9481,49.98,8689,4400,50.64,3630,1821,50.17,1229,617,50.2
|
||||
50,Lina,int,"Support, Carry, Nuker, Disabler",4512,2030,44.99,21927,10156,46.32,45301,21210,46.82,54229,25956,47.86,40016,19138,47.83,21072,10112,47.99,10481,5031,48.0,4369,2138,48.94
|
||||
51,Lion,int,"Support, Disabler, Nuker, Initiator",6204,2855,46.02,37869,17465,46.12,80124,36649,45.74,84390,38176,45.24,50720,22914,45.18,21698,9784,45.09,9308,4280,45.98,3220,1496,46.46
|
||||
52,Lone Druid,all,"Carry, Pusher, Durable",909,483,53.14,4714,2421,51.36,10987,5858,53.32,14580,7968,54.65,11810,6490,54.95,7241,3971,54.84,4024,2240,55.67,2303,1259,54.67
|
||||
53,Luna,agi,"Carry, Nuker, Pusher",1927,904,46.91,9091,4271,46.98,16571,7922,47.81,16035,7615,47.49,9728,4634,47.64,4463,2103,47.12,1912,911,47.65,719,322,44.78
|
||||
54,Lycan,all,"Carry, Pusher, Durable, Escape",374,174,46.52,1894,915,48.31,3691,1744,47.25,3824,1905,49.82,2694,1332,49.44,1460,753,51.58,827,411,49.7,532,289,54.32
|
||||
55,Magnus,all,"Initiator, Disabler, Nuker, Escape",770,339,44.03,5789,2651,45.79,17837,7954,44.59,26126,12058,46.15,20634,9592,46.49,10574,5056,47.82,4565,2073,45.41,1606,751,46.76
|
||||
56,Marci,all,"Support, Carry, Initiator, Disabler, Escape",1370,620,45.26,7092,3252,45.85,15199,7240,47.63,18485,8874,48.01,13308,6305,47.38,7176,3476,48.44,3689,1882,51.02,1746,883,50.57
|
||||
57,Mars,str,"Carry, Initiator, Disabler, Durable",862,375,43.5,5719,2529,44.22,15156,6756,44.58,20719,9369,45.22,16419,7387,44.99,9044,4052,44.8,4536,2093,46.14,1926,868,45.07
|
||||
58,Medusa,agi,"Carry, Disabler, Durable",1898,902,47.52,9289,4512,48.57,16504,7818,47.37,14796,6886,46.54,7488,3449,46.06,2775,1270,45.77,1073,482,44.92,394,184,46.7
|
||||
59,Meepo,agi,"Carry, Escape, Nuker, Disabler, Initiator, Pusher",1004,523,52.09,3970,1990,50.13,6904,3587,51.96,7166,3646,50.88,4906,2563,52.24,2383,1282,53.8,1139,588,51.62,585,300,51.28
|
||||
60,Mirana,all,"Carry, Support, Escape, Nuker, Disabler",2499,1193,47.74,16954,8135,47.98,39985,19097,47.76,45169,21554,47.72,28467,13456,47.27,12800,6047,47.24,5272,2500,47.42,1824,874,47.92
|
||||
61,Monkey King,agi,"Carry, Escape, Disabler, Initiator",3191,1384,43.37,17306,7544,43.59,35734,16113,45.09,40778,18322,44.93,27558,12630,45.83,14034,6433,45.84,6650,3152,47.4,3040,1440,47.37
|
||||
62,Morphling,agi,"Carry, Escape, Durable, Nuker, Disabler",1521,690,45.36,8620,4006,46.47,18075,8161,45.15,20414,9235,45.24,14395,6530,45.36,7697,3551,46.13,4432,2050,46.25,2560,1190,46.48
|
||||
63,Muerta,int,"Carry, Nuker, Disabler",2130,1089,51.13,10787,5740,53.21,22602,11898,52.64,27609,14495,52.5,20175,10465,51.87,10662,5518,51.75,5462,2759,50.51,2948,1517,51.46
|
||||
64,Naga Siren,agi,"Carry, Support, Pusher, Disabler, Initiator, Escape",1502,804,53.53,6495,3356,51.67,10423,5234,50.22,9830,4929,50.14,6057,2971,49.05,3216,1675,52.08,1855,933,50.3,1242,634,51.05
|
||||
65,Nature's Prophet,int,"Carry, Pusher, Escape, Nuker",5991,3029,50.56,36433,18143,49.8,83118,42095,50.64,100341,51268,51.09,69436,35870,51.66,34256,17858,52.13,16585,8745,52.73,7182,3755,52.28
|
||||
66,Necrophos,int,"Carry, Nuker, Durable, Disabler",4776,2702,56.57,28535,15771,55.27,62186,34285,55.13,70212,38163,54.35,46539,24708,53.09,21607,11302,52.31,9677,4994,51.61,3418,1733,50.7
|
||||
67,Night Stalker,str,"Carry, Initiator, Durable, Disabler, Nuker",1189,594,49.96,7868,3892,49.47,19446,10004,51.45,25524,13506,52.91,20138,10828,53.77,10767,5651,52.48,5499,2889,52.54,2415,1257,52.05
|
||||
68,Nyx Assassin,all,"Disabler, Nuker, Initiator, Escape",1718,867,50.47,10925,5525,50.57,27207,14073,51.73,34684,18059,52.07,25736,13572,52.74,13313,7093,53.28,6485,3444,53.11,2852,1468,51.47
|
||||
69,Ogre Magi,str,"Support, Nuker, Disabler, Durable, Initiator",5331,2845,53.37,31507,16299,51.73,62954,32248,51.22,61758,31373,50.8,33746,16988,50.34,13262,6654,50.17,4861,2420,49.78,1271,654,51.46
|
||||
70,Omniknight,str,"Support, Durable, Nuker",975,479,49.13,6426,3109,48.38,14641,7319,49.99,17258,8731,50.59,11695,5916,50.59,5746,2993,52.09,2870,1469,51.18,1333,656,49.21
|
||||
71,Oracle,int,"Support, Nuker, Disabler, Escape",796,384,48.24,4857,2417,49.76,13141,6645,50.57,18944,9853,52.01,15221,7964,52.32,8356,4458,53.35,4475,2380,53.18,1905,1018,53.44
|
||||
72,Outworld Destroyer,int,"Carry, Nuker, Disabler",2226,1118,50.22,13388,6864,51.27,33284,17362,52.16,43991,23377,53.14,32021,16994,53.07,16655,8724,52.38,8123,4218,51.93,3176,1649,51.92
|
||||
73,Pangolier,all,"Carry, Nuker, Disabler, Durable, Escape, Initiator",1156,534,46.19,7189,3209,44.64,17802,7937,44.58,25785,11677,45.29,21727,10144,46.69,13064,6351,48.61,7567,3737,49.39,5275,2734,51.83
|
||||
74,Phantom Assassin,agi,"Carry, Escape",8553,4426,51.75,48549,25553,52.63,104756,54881,52.39,119332,62511,52.38,79140,41143,51.99,37399,19325,51.67,17774,9077,51.07,7819,3856,49.32
|
||||
75,Phantom Lancer,agi,"Carry, Escape, Pusher, Nuker",3641,1960,53.83,19550,10374,53.06,38576,20633,53.49,41505,22310,53.75,26401,14268,54.04,12437,6590,52.99,5708,2985,52.3,2383,1243,52.16
|
||||
76,Phoenix,all,"Support, Nuker, Initiator, Escape, Disabler",743,315,42.4,5231,2471,47.24,13950,6633,47.55,18350,8864,48.31,13972,6715,48.06,7787,3761,48.3,4322,2132,49.33,2610,1325,50.77
|
||||
77,Primal Beast,str,"Initiator, Durable, Disabler",1455,701,48.18,9333,4448,47.66,22800,11058,48.5,30084,14643,48.67,24307,11993,49.34,13970,6991,50.04,7742,3890,50.25,4625,2407,52.04
|
||||
78,Puck,int,"Initiator, Disabler, Escape, Nuker",871,399,45.81,5773,2628,45.52,16596,7578,45.66,24480,11315,46.22,20070,9497,47.32,11023,5298,48.06,5656,2714,47.98,2555,1200,46.97
|
||||
79,Pudge,str,"Disabler, Initiator, Durable, Nuker",7677,3796,49.45,50891,24776,48.68,114784,56289,49.04,129604,63097,48.68,85800,41542,48.42,41730,20239,48.5,19823,9530,48.08,7112,3431,48.24
|
||||
80,Pugna,int,"Nuker, Pusher",2075,944,45.49,9998,4695,46.96,18962,8958,47.24,20240,9965,49.23,12807,6199,48.4,5825,2855,49.01,2758,1387,50.29,1195,592,49.54
|
||||
81,Queen of Pain,int,"Carry, Nuker, Escape",2287,1100,48.1,15119,7354,48.64,37137,18118,48.79,47706,23657,49.59,35500,18018,50.75,18405,9289,50.47,9243,4689,50.73,4227,2113,49.99
|
||||
82,Razor,agi,"Carry, Durable, Nuker, Pusher",2470,1231,49.84,12000,5964,49.7,24666,12142,49.23,30334,14844,48.94,21832,10558,48.36,11917,5679,47.65,6092,2912,47.8,3144,1551,49.33
|
||||
83,Riki,agi,"Carry, Escape, Disabler",3684,1929,52.36,19022,9891,52.0,35638,18582,52.14,33908,17415,51.36,20194,10312,51.06,8726,4377,50.16,3735,1855,49.67,1160,559,48.19
|
||||
84,Rubick,int,"Support, Disabler, Nuker",3090,1404,45.44,21639,9303,42.99,57417,24590,42.83,74874,32603,43.54,55186,24219,43.89,28206,12568,44.56,13732,6106,44.47,5764,2642,45.84
|
||||
85,Sand King,all,"Initiator, Disabler, Support, Nuker, Escape",2633,1513,57.46,13097,7323,55.91,25271,13807,54.64,26724,14323,53.6,17384,9144,52.6,7907,4104,51.9,3394,1719,50.65,1211,611,50.45
|
||||
86,Shadow Demon,int,"Support, Disabler, Initiator, Nuker",547,236,43.14,3252,1426,43.85,7920,3524,44.49,9752,4551,46.67,7404,3467,46.83,3956,1876,47.42,2076,1004,48.36,1054,497,47.15
|
||||
87,Shadow Fiend,agi,"Carry, Nuker",5051,2544,50.37,27255,14064,51.6,58589,29830,50.91,65429,33097,50.58,41810,21189,50.68,18766,9401,50.1,8232,4000,48.59,3016,1430,47.41
|
||||
88,Shadow Shaman,int,"Support, Pusher, Disabler, Nuker, Initiator",5323,2795,52.51,29733,15606,52.49,58894,31236,53.04,58765,30895,52.57,34475,18242,52.91,15166,7986,52.66,6377,3323,52.11,2413,1253,51.93
|
||||
89,Silencer,int,"Carry, Support, Disabler, Initiator, Nuker",4229,2324,54.95,27878,14960,53.66,61698,33081,53.62,65256,34458,52.8,38589,19853,51.45,16889,8653,51.23,6836,3416,49.97,2236,1105,49.42
|
||||
90,Skywrath Mage,int,"Support, Nuker, Disabler",4000,2030,50.75,22783,11675,51.24,46512,23624,50.79,51329,25706,50.08,34167,17364,50.82,16693,8415,50.41,8496,4208,49.53,4389,2069,47.14
|
||||
91,Slardar,str,"Carry, Durable, Initiator, Disabler, Escape",3935,2129,54.1,21523,11602,53.91,43947,23701,53.93,47721,25633,53.71,29887,16132,53.98,14233,7722,54.25,6530,3467,53.09,2322,1205,51.89
|
||||
92,Slark,agi,"Carry, Escape, Disabler, Nuker",4815,2521,52.36,29413,14762,50.19,64004,31771,49.64,70173,34411,49.04,44780,21926,48.96,20864,10270,49.22,9969,4962,49.77,4565,2394,52.44
|
||||
93,Snapfire,all,"Support, Nuker, Disabler, Escape",1524,682,44.75,10646,4576,42.98,27103,12120,44.72,34711,15412,44.4,24351,10786,44.29,11723,5131,43.77,5227,2294,43.89,1987,868,43.68
|
||||
94,Sniper,agi,"Carry, Nuker",8022,4079,50.85,44508,22727,51.06,88690,45223,50.99,87190,44086,50.56,47411,23648,49.88,18092,8924,49.33,6130,3040,49.59,1370,662,48.32
|
||||
95,Spectre,agi,"Carry, Durable, Escape",3454,2008,58.14,22097,12356,55.92,49157,26961,54.85,55914,30100,53.83,36321,19338,53.24,16946,8960,52.87,7921,4163,52.56,2568,1370,53.35
|
||||
96,Spirit Breaker,str,"Carry, Initiator, Disabler, Durable, Escape",4788,2423,50.61,26662,13530,50.75,56535,28908,51.13,63991,32249,50.4,42512,21357,50.24,20119,9926,49.34,9499,4814,50.68,3761,1884,50.09
|
||||
97,Storm Spirit,int,"Carry, Escape, Nuker, Initiator, Disabler",2202,1001,45.46,11656,5197,44.59,25644,11806,46.04,30968,14210,45.89,21680,10197,47.03,10810,5025,46.48,5278,2382,45.13,2363,1122,47.48
|
||||
98,Sven,str,"Carry, Disabler, Initiator, Durable, Nuker",3552,1761,49.58,19792,9744,49.23,41296,20478,49.59,48709,24228,49.74,35460,17828,50.28,19795,10065,50.85,11014,5655,51.34,6701,3387,50.54
|
||||
99,Techies,all,"Nuker, Disabler",2356,1131,48.01,13105,6245,47.65,27293,12893,47.24,29180,13507,46.29,18216,8407,46.15,8266,3771,45.62,3459,1644,47.53,1319,591,44.81
|
||||
100,Templar Assassin,agi,"Carry, Escape",2142,955,44.58,10932,4758,43.52,21211,9445,44.53,23928,10909,45.59,17399,8242,47.37,9567,4656,48.67,5525,2708,49.01,3524,1775,50.37
|
||||
101,Terrorblade,agi,"Carry, Pusher, Nuker",1115,484,43.41,5686,2430,42.74,10856,4638,42.72,11518,5041,43.77,8059,3540,43.93,4192,1827,43.58,2419,1082,44.73,1621,700,43.18
|
||||
102,Tidehunter,str,"Initiator, Durable, Disabler, Nuker, Carry",1835,855,46.59,11159,5369,48.11,26222,12699,48.43,30735,14879,48.41,20523,9727,47.4,9731,4740,48.71,4426,2079,46.97,1998,936,46.85
|
||||
103,Timbersaw,all,"Nuker, Durable, Escape",1050,448,42.67,5854,2584,44.14,12301,5391,43.83,14295,6097,42.65,9697,4217,43.49,4992,2163,43.33,2419,1021,42.21,1139,471,41.35
|
||||
104,Tinker,int,"Carry, Nuker, Pusher",2106,944,44.82,11058,5200,47.02,24263,11826,48.74,27531,13614,49.45,19017,9732,51.18,9416,4875,51.77,4700,2466,52.47,1951,1036,53.1
|
||||
105,Tiny,str,"Carry, Nuker, Pusher, Initiator, Durable, Disabler",1434,654,45.61,7742,3452,44.59,15936,6950,43.61,17139,7468,43.57,11269,4991,44.29,5485,2491,45.41,2599,1216,46.79,1058,519,49.05
|
||||
106,Treant Protector,str,"Support, Initiator, Durable, Disabler, Escape",1646,899,54.62,11430,5881,51.45,28752,15124,52.6,36093,19344,53.59,28762,15532,54.0,16751,9227,55.08,9870,5468,55.4,6801,3855,56.68
|
||||
107,Troll Warlord,agi,"Carry, Pusher, Disabler, Durable",3176,1720,54.16,14007,7445,53.15,24729,13022,52.66,25424,13228,52.03,17362,9030,52.01,9427,4913,52.12,4767,2499,52.42,2341,1242,53.05
|
||||
108,Tusk,str,"Initiator, Disabler, Nuker",1263,565,44.73,8338,3777,45.3,19642,8869,45.15,25308,11520,45.52,18927,8853,46.77,10100,4820,47.72,5220,2502,47.93,2350,1157,49.23
|
||||
109,Underlord,str,"Support, Nuker, Disabler, Durable, Escape",797,405,50.82,4583,2341,51.08,10067,5057,50.23,11650,5786,49.67,7224,3561,49.29,3310,1591,48.07,1368,673,49.2,395,190,48.1
|
||||
110,Undying,str,"Support, Durable, Disabler, Nuker",3170,1620,51.1,19403,10116,52.14,40582,21110,52.02,40850,21182,51.85,23985,12454,51.92,10395,5389,51.84,4541,2336,51.44,2064,1012,49.03
|
||||
111,Ursa,agi,"Carry, Durable, Disabler",2801,1273,45.45,15132,7038,46.51,33269,15478,46.52,40822,19264,47.19,29348,14011,47.74,15262,7375,48.32,7507,3622,48.25,3004,1473,49.03
|
||||
112,Vengeful Spirit,all,"Support, Initiator, Disabler, Nuker, Escape",2186,1108,50.69,15817,8285,52.38,41843,21809,52.12,57524,30476,52.98,45512,24120,53.0,25581,13382,52.31,13758,7121,51.76,8276,4303,51.99
|
||||
113,Venomancer,all,"Support, Nuker, Initiator, Pusher, Disabler",2309,1187,51.41,14669,7463,50.88,34787,18020,51.8,41797,21690,51.89,28706,15085,52.55,13974,7338,52.51,6538,3495,53.46,2794,1459,52.22
|
||||
114,Viper,agi,"Carry, Durable, Initiator, Disabler",4100,2057,50.17,18991,9510,50.08,33517,16923,50.49,32728,16677,50.96,18537,9427,50.86,7851,3928,50.03,3260,1652,50.67,1176,610,51.87
|
||||
115,Visage,all,"Support, Nuker, Durable, Disabler, Pusher",331,171,51.66,1638,813,49.63,3240,1577,48.67,3840,1986,51.72,3108,1609,51.77,1995,1055,52.88,1309,702,53.63,858,457,53.26
|
||||
116,Void Spirit,all,"Carry, Escape, Nuker, Disabler",1565,727,46.45,8672,4096,47.23,20010,9694,48.45,25213,12376,49.09,18817,9231,49.06,10026,4920,49.07,4788,2319,48.43,2006,964,48.06
|
||||
117,Warlock,int,"Support, Initiator, Disabler",2547,1369,53.75,18931,10331,54.57,49795,26999,54.22,66697,36220,54.31,48401,25668,53.03,24999,12942,51.77,12575,6356,50.54,6183,2934,47.45
|
||||
118,Weaver,agi,"Carry, Escape",2818,1389,49.29,13873,6770,48.8,23493,11571,49.25,21545,10694,49.64,12911,6427,49.78,5809,2928,50.4,2960,1455,49.16,1303,719,55.18
|
||||
119,Windranger,all,"Carry, Support, Disabler, Escape, Nuker",3861,1814,46.98,19934,9223,46.27,40644,18807,46.27,44476,20652,46.43,28952,13508,46.66,13418,6297,46.93,5898,2782,47.17,2374,1142,48.1
|
||||
120,Winter Wyvern,all,"Support, Disabler, Nuker",821,371,45.19,5168,2424,46.9,10544,5014,47.55,11184,5308,47.46,7426,3512,47.29,3730,1854,49.71,1862,934,50.16,944,464,49.15
|
||||
121,Witch Doctor,int,"Support, Nuker, Disabler",7504,4173,55.61,45501,25616,56.3,99664,54963,55.15,111382,60421,54.25,71830,37860,52.71,33164,17334,52.27,14610,7442,50.94,4196,2076,49.48
|
||||
122,Wraith King,str,"Carry, Support, Durable, Disabler, Initiator",4175,2266,54.28,26362,14516,55.06,58733,32403,55.17,66283,36503,55.07,42360,23083,54.49,19084,10251,53.72,8334,4315,51.78,2707,1376,50.83
|
||||
123,Zeus,int,"Nuker, Carry",4132,2106,50.97,23721,12487,52.64,51568,27475,53.28,58333,31078,53.28,37821,20047,53.0,17901,9504,53.09,8539,4459,52.22,3400,1791,52.68
|
||||
|
BIN
belyaeva_ekaterina_lab_4/ImmortalInfo.png
Normal file
After Width: | Height: | Size: 197 KiB |
31
belyaeva_ekaterina_lab_4/README.md
Normal file
@@ -0,0 +1,31 @@
## Task

Apply the clustering method assigned by your variant to the data from table 1 (per the variant, table 9), formulating the task yourself. Interpret the results and assess how well the method suits the task you formulated.

Variant 6 - dendrogram

## How to run the lab

Run the file main.py.

## Technologies used

The pandas, matplotlib, and scipy libraries and their components.

## Description of the lab (program)

The code works with a dataset of Dota 2 heroes that lists each hero's name, primary attribute, roles, and how often the hero is picked and what win rate it has at every rank in Dota, from Herald to Immortal.

In my case the task was to group heroes by their win rate and pick frequency at a given rank.

The program takes the Name, Herald Win Rate, and Herald Picks columns, builds a matrix for analysis, computes the linkage matrix, and then draws a dendrogram in which heroes are joined according to how often they are picked and what win rate they have.

## Results

The result is a dendrogram in which heroes are grouped by pick frequency and win rate. The visual representation turned out to be quite accurate, and this approach handled the formulated task well.

For example, in the diagram below you can see that at the Herald rank the heroes Phantom Assassin, Witch Doctor, Sniper, and Pudge sit together in the lower right corner. This suggests that the dataset is close to real data and assembled correctly, and also that the program works correctly and produces a plausible, realistic result.



If you look instead at the result for the Immortal rank, you can see other heroes joined with each other by the same principle.



At first I wanted to group heroes by their win rate across all ranks, but that information carries little meaning, so the task described above is formulated correctly, is meaningful, and is solved by the chosen method.

The same statistics can be viewed for any rank by replacing the word Herald in the code with the rank of interest, as sketched below.
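A minimal sketch of how that substitution could be parameterized, assuming the same Current_Pub_Meta.csv layout and the `<Rank> Picks` / `<Rank> Win Rate` column naming used in main.py (the `plot_rank_dendrogram` helper is illustrative, not part of the lab code):

```python
import pandas as pd
import matplotlib.pyplot as plt
from scipy.cluster.hierarchy import dendrogram, linkage

def plot_rank_dendrogram(csv_path: str, rank: str) -> None:
    """Draw the dendrogram for an arbitrary rank, e.g. 'Herald' or 'Immortal'."""
    data = pd.read_csv(csv_path)
    # Columns follow the '<Rank> Picks' / '<Rank> Win Rate' naming pattern
    matrix = data[[f'{rank} Picks', f'{rank} Win Rate']].values
    linked = linkage(matrix, 'ward')
    plt.figure(figsize=(10, 6))
    dendrogram(linked, orientation='top', labels=data['Name'].tolist(), show_leaf_counts=True)
    plt.title(f'Dendrogram of hero picks and win rate ({rank})')
    plt.xlabel('Heroes')
    plt.ylabel('Distance')
    plt.xticks(rotation=90)
    plt.show()

# Example: the Immortal-rank dendrogram shown in ImmortalInfo.png
# plot_rank_dendrogram('Current_Pub_Meta.csv', 'Immortal')
```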
BIN
belyaeva_ekaterina_lab_4/heraldInfo.png
Normal file
After Width: | Height: | Size: 160 KiB |
29
belyaeva_ekaterina_lab_4/main.py
Normal file
@@ -0,0 +1,29 @@
import pandas as pd
import matplotlib.pyplot as plt
from scipy.cluster.hierarchy import dendrogram, linkage

# Load the data
data = pd.read_csv('Current_Pub_Meta.csv')

# Select the required columns
selected_columns = ['Name', 'Herald Picks', 'Herald Win Rate']
data = data[selected_columns]

# Build the matrix for analysis (numeric features only)
matrix = data.drop('Name', axis=1).values

# Compute the linkage matrix
linked = linkage(matrix, 'ward')

# Draw the dendrogram
plt.figure(figsize=(10, 6))
dendrogram(linked,
           orientation='top',
           labels=data['Name'].tolist(),
           distance_sort='descending',
           show_leaf_counts=True)
plt.title('Dendrogram of Hero Win Percentage')
plt.xlabel('Heroes')
plt.ylabel('Distance')
plt.xticks(rotation=90)
plt.show()
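If explicit groups are needed rather than just the picture, the same linkage matrix can be cut into flat clusters. A minimal sketch, assuming the same CSV and features as above; the choice of 4 clusters is arbitrary and only for illustration:

```python
import pandas as pd
from scipy.cluster.hierarchy import fcluster, linkage

data = pd.read_csv('Current_Pub_Meta.csv')
matrix = data[['Herald Picks', 'Herald Win Rate']].values
linked = linkage(matrix, 'ward')

# Cut the tree into a fixed number of flat clusters (4 is an arbitrary choice)
cluster_ids = fcluster(linked, t=4, criterion='maxclust')
for cid, name in sorted(zip(cluster_ids, data['Name'])):
    print(cid, name)
```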
125
belyaeva_ekaterina_lab_5/Current_Pub_Meta.csv
Normal file
@@ -0,0 +1,125 @@
|
||||
,Name,Primary Attribute,Roles,Herald Picks,Herald Wins,Herald Win Rate,Guardian Picks,Guardian Wins,Guardian Win Rate,Crusader Picks,Crusader Wins,Crusader Win Rate,Archon Picks,Archon Wins,Archon Win Rate,Legend Picks,Legend Wins,Legend Win Rate,Ancient Picks,Ancient Wins,Ancient Win Rate,Divine Picks,Divine Wins,Divine Win Rate,Immortal Picks,Immortal Wins,Immortal Win Rate
|
||||
0,Abaddon,all,"Support, Carry, Durable",1111,575,51.76,6408,3309,51.64,13811,7050,51.05,16497,8530,51.71,11360,5877,51.73,5571,2893,51.93,2632,1345,51.1,991,497,50.15
|
||||
1,Alchemist,str,"Carry, Support, Durable, Disabler, Initiator, Nuker",1119,486,43.43,6370,2883,45.26,12238,5617,45.9,13028,6130,47.05,8455,4055,47.96,4120,1984,48.16,2021,1023,50.62,860,424,49.3
|
||||
2,Ancient Apparition,int,"Support, Disabler, Nuker",2146,1073,50.0,13697,7069,51.61,30673,16118,52.55,35145,18219,51.84,23114,12166,52.63,10688,5528,51.72,5035,2573,51.1,2134,1076,50.42
|
||||
3,Anti-Mage,agi,"Carry, Escape, Nuker",3765,1818,48.29,22050,10774,48.86,47371,23304,49.19,49115,24074,49.02,28599,13991,48.92,12303,5958,48.43,4866,2349,48.27,1502,751,50.0
|
||||
4,Arc Warden,agi,"Carry, Escape, Nuker",1448,704,48.62,8047,4162,51.72,14946,7982,53.41,14711,7875,53.53,9472,5167,54.55,4323,2309,53.41,2104,1148,54.56,789,435,55.13
|
||||
5,Axe,str,"Initiator, Durable, Disabler, Carry",5343,2880,53.9,32652,17719,54.27,71010,37736,53.14,77869,40559,52.09,49182,25079,50.99,22637,11353,50.15,10114,5000,49.44,3795,1837,48.41
|
||||
6,Bane,all,"Support, Disabler, Nuker, Durable",745,334,44.83,4983,2422,48.61,11332,5504,48.57,13633,6767,49.64,10132,5032,49.66,5596,2861,51.13,3028,1555,51.35,1958,1055,53.88
|
||||
7,Batrider,all,"Initiator, Disabler, Escape",349,136,38.97,1983,812,40.95,4053,1595,39.35,4725,1861,39.39,3173,1275,40.18,1678,731,43.56,802,362,45.14,497,227,45.67
|
||||
8,Beastmaster,all,"Initiator, Disabler, Durable, Nuker",402,174,43.28,2447,1060,43.32,5787,2569,44.39,6930,3092,44.62,5288,2389,45.18,2816,1274,45.24,1593,752,47.21,1176,539,45.83
|
||||
9,Bloodseeker,agi,"Carry, Disabler, Nuker, Initiator",2765,1382,49.98,12589,6270,49.81,21781,10683,49.05,20961,10420,49.71,13035,6430,49.33,6210,3006,48.41,2941,1475,50.15,1465,718,49.01
|
||||
10,Bounty Hunter,agi,"Escape, Nuker",3852,1868,48.49,19609,9535,48.63,36362,17600,48.4,37059,18314,49.42,22934,11518,50.22,10584,5276,49.85,5105,2594,50.81,2498,1325,53.04
|
||||
11,Brewmaster,all,"Carry, Initiator, Durable, Disabler, Nuker",545,280,51.38,3564,1745,48.96,8941,4388,49.08,12340,6111,49.52,11185,5623,50.27,7645,3906,51.09,4812,2478,51.5,3533,1820,51.51
|
||||
12,Bristleback,str,"Carry, Durable, Initiator, Nuker",5884,3262,55.44,27952,14587,52.19,48847,24379,49.91,46702,22927,49.09,27466,13319,48.49,12398,5969,48.14,5865,2915,49.7,2639,1304,49.41
|
||||
13,Broodmother,all,"Carry, Pusher, Escape, Nuker",456,173,37.94,2048,842,41.11,3444,1462,42.45,3392,1448,42.69,2193,1048,47.79,1203,602,50.04,795,422,53.08,453,230,50.77
|
||||
14,Centaur Warrunner,str,"Durable, Initiator, Disabler, Nuker, Escape",1721,911,52.93,11754,6266,53.31,28691,15201,52.98,35369,18741,52.99,25393,13468,53.04,12653,6607,52.22,6124,3181,51.94,2442,1243,50.9
|
||||
15,Chaos Knight,str,"Carry, Disabler, Durable, Pusher, Initiator",3032,1639,54.06,16762,8931,53.28,31892,17139,53.74,30697,16435,53.54,18217,9810,53.85,8572,4620,53.9,4230,2291,54.16,1750,943,53.89
|
||||
16,Chen,all,"Support, Pusher",284,125,44.01,1450,678,46.76,2969,1345,45.3,3258,1604,49.23,2641,1331,50.4,1488,767,51.55,970,512,52.78,770,448,58.18
|
||||
17,Clinkz,agi,"Carry, Escape, Pusher",3151,1608,51.03,13891,7141,51.41,25465,12938,50.81,27327,14066,51.47,18846,9726,51.61,9452,4890,51.74,4765,2475,51.94,2093,1052,50.26
|
||||
18,Clockwerk,all,"Initiator, Disabler, Durable, Nuker",816,397,48.65,5860,2837,48.41,14478,6929,47.86,18466,8843,47.89,13143,6301,47.94,6612,3169,47.93,3286,1581,48.11,1378,658,47.75
|
||||
19,Crystal Maiden,int,"Support, Disabler, Nuker",4821,2529,52.46,26584,13626,51.26,52168,26040,49.92,52258,25365,48.54,30690,14848,48.38,13295,6404,48.17,5602,2680,47.84,1638,771,47.07
|
||||
20,Dark Seer,all,"Initiator, Escape, Disabler",627,320,51.04,3675,1884,51.27,7881,3803,48.26,9589,4844,50.52,7186,3573,49.72,3902,1983,50.82,2145,1095,51.05,1217,593,48.73
|
||||
21,Dark Willow,all,"Support, Nuker, Disabler, Escape",2654,1293,48.72,13829,6657,48.14,28142,13480,47.9,32114,15785,49.15,23100,11331,49.05,12052,5909,49.03,6400,3182,49.72,3708,1915,51.65
|
||||
22,Dawnbreaker,str,"Carry, Durable",1746,875,50.11,12297,6105,49.65,32398,15921,49.14,44846,21936,48.91,35474,17441,49.17,19770,9832,49.73,10637,5263,49.48,6339,3173,50.06
|
||||
23,Dazzle,all,"Support, Nuker, Disabler",2827,1418,50.16,19852,9758,49.15,48236,23691,49.11,56417,27798,49.27,38159,18642,48.85,18695,9199,49.21,8530,4239,49.7,3382,1654,48.91
|
||||
24,Death Prophet,int,"Carry, Pusher, Nuker, Disabler",1372,659,48.03,6643,3145,47.34,11987,5729,47.79,12268,5856,47.73,7455,3606,48.37,3591,1698,47.28,1872,902,48.18,926,459,49.57
|
||||
25,Disruptor,int,"Support, Disabler, Nuker, Initiator",1541,757,49.12,11104,5331,48.01,27746,13542,48.81,33742,16310,48.34,23173,11096,47.88,10907,5201,47.68,4859,2255,46.41,1863,861,46.22
|
||||
26,Doom,str,"Carry, Disabler, Initiator, Durable, Nuker",1049,474,45.19,6112,2767,45.27,13700,6056,44.2,15454,6925,44.81,10727,4842,45.14,5444,2451,45.02,2979,1348,45.25,1545,731,47.31
|
||||
27,Dragon Knight,str,"Carry, Pusher, Durable, Disabler, Initiator, Nuker",1950,942,48.31,10643,5274,49.55,20451,9733,47.59,20326,9671,47.58,11674,5544,47.49,4979,2355,47.3,2024,973,48.07,725,341,47.03
|
||||
28,Drow Ranger,agi,"Carry, Disabler, Pusher",5737,2904,50.62,29675,14831,49.98,57655,28573,49.56,56682,27927,49.27,34310,16607,48.4,15050,7171,47.65,5947,2815,47.33,1768,788,44.57
|
||||
29,Earth Spirit,str,"Nuker, Escape, Disabler, Initiator, Durable",1038,465,44.8,7420,3276,44.15,20807,9432,45.33,30107,14166,47.05,25314,12148,47.99,14579,7041,48.3,7678,3802,49.52,4379,2169,49.53
|
||||
30,Earthshaker,str,"Support, Initiator, Disabler, Nuker",5012,2455,48.98,29784,14662,49.23,67050,33111,49.38,79963,39843,49.83,57108,28961,50.71,28650,14591,50.93,14186,7296,51.43,6151,3165,51.46
|
||||
31,Elder Titan,str,"Initiator, Disabler, Nuker, Durable",471,212,45.01,2551,1248,48.92,5213,2570,49.3,5572,2809,50.41,3847,1942,50.48,1964,998,50.81,1124,613,54.54,550,292,53.09
|
||||
32,Ember Spirit,agi,"Carry, Escape, Nuker, Disabler, Initiator",1514,635,41.94,9180,3836,41.79,20578,8738,42.46,25152,10844,43.11,17703,7814,44.14,8538,3793,44.42,4265,1892,44.36,2065,928,44.94
|
||||
33,Enchantress,int,"Support, Pusher, Durable, Disabler",1794,848,47.27,8050,3622,44.99,12921,5686,44.01,11673,4974,42.61,6863,2840,41.38,2948,1212,41.11,1434,654,45.61,806,318,39.45
|
||||
34,Enigma,all,"Disabler, Initiator, Pusher",1317,588,44.65,6937,3171,45.71,12908,5979,46.32,11687,5428,46.44,6194,2839,45.83,2493,1127,45.21,938,437,46.59,338,159,47.04
|
||||
35,Faceless Void,agi,"Carry, Initiator, Disabler, Escape, Durable",4323,2043,47.26,25618,11902,46.46,54581,25874,47.4,60671,28993,47.79,40137,19611,48.86,19376,9620,49.65,9579,4828,50.4,4439,2256,50.82
|
||||
36,Grimstroke,int,"Support, Nuker, Disabler, Escape",1455,694,47.7,9714,4789,49.3,24688,12430,50.35,32027,16094,50.25,23193,11795,50.86,12102,6100,50.4,6191,3047,49.22,3449,1666,48.3
|
||||
37,Gyrocopter,agi,"Carry, Nuker, Disabler",2560,1213,47.38,16589,7882,47.51,42072,20358,48.39,54200,26229,48.39,39414,19053,48.34,20164,9781,48.51,10164,4937,48.57,5241,2507,47.83
|
||||
38,Hoodwink,agi,"Support, Nuker, Escape, Disabler",2420,1126,46.53,14034,6800,48.45,31382,14964,47.68,35684,16966,47.55,22626,10651,47.07,9949,4690,47.14,4349,2089,48.03,1533,703,45.86
|
||||
39,Huskar,str,"Carry, Durable, Initiator",3501,1603,45.79,14234,6639,46.64,22794,10912,47.87,21801,10763,49.37,13811,6919,50.1,6769,3535,52.22,3556,1822,51.24,1936,993,51.29
|
||||
40,Invoker,all,"Carry, Nuker, Disabler, Escape, Pusher",4330,2042,47.16,27625,13176,47.7,69035,33863,49.05,86745,43479,50.12,61821,31510,50.97,31459,16321,51.88,15431,8195,53.11,7852,4148,52.83
|
||||
41,Io,all,"Support, Escape, Nuker",1274,615,48.27,6158,2999,48.7,12762,6247,48.95,14216,7024,49.41,9564,4843,50.64,5301,2685,50.65,2789,1463,52.46,1464,773,52.8
|
||||
42,Jakiro,int,"Support, Nuker, Pusher, Disabler",3147,1708,54.27,22718,12413,54.64,56736,30984,54.61,70038,37473,53.5,46389,24997,53.89,22084,11639,52.7,9838,5103,51.87,3282,1729,52.68
|
||||
43,Juggernaut,agi,"Carry, Pusher, Escape",5585,2711,48.54,30394,14800,48.69,62313,30581,49.08,65590,32344,49.31,39235,19326,49.26,16334,8012,49.05,6419,3066,47.76,1576,731,46.38
|
||||
44,Keeper of the Light,int,"Support, Nuker, Disabler",896,353,39.4,5051,2216,43.87,10452,4579,43.81,11614,5322,45.82,7870,3627,46.09,4268,2001,46.88,2147,1043,48.58,1333,588,44.11
|
||||
45,Kunkka,str,"Carry, Support, Disabler, Initiator, Durable, Nuker",2251,1124,49.93,13474,6828,50.68,31210,16196,51.89,39691,21293,53.65,30314,16458,54.29,15706,8793,55.98,7884,4339,55.04,3458,1898,54.89
|
||||
46,Legion Commander,str,"Carry, Disabler, Initiator, Durable, Nuker",6263,3264,52.12,37100,19157,51.64,81491,41557,51.0,91431,46558,50.92,59383,29917,50.38,27945,13917,49.8,13193,6587,49.93,5601,2745,49.01
|
||||
47,Leshrac,int,"Carry, Support, Nuker, Pusher, Disabler",674,316,46.88,3872,1799,46.46,7490,3433,45.83,7903,3604,45.6,5322,2526,47.46,2687,1298,48.31,1325,647,48.83,721,357,49.51
|
||||
48,Lich,int,"Support, Nuker",2700,1412,52.3,16646,8820,52.99,37785,19685,52.1,45471,23554,51.8,31203,16108,51.62,15530,7821,50.36,7243,3597,49.66,2520,1258,49.92
|
||||
49,Lifestealer,str,"Carry, Durable, Escape, Disabler",2515,1213,48.23,14131,6978,49.38,29724,14627,49.21,31211,15581,49.92,18970,9481,49.98,8689,4400,50.64,3630,1821,50.17,1229,617,50.2
|
||||
50,Lina,int,"Support, Carry, Nuker, Disabler",4512,2030,44.99,21927,10156,46.32,45301,21210,46.82,54229,25956,47.86,40016,19138,47.83,21072,10112,47.99,10481,5031,48.0,4369,2138,48.94
|
||||
51,Lion,int,"Support, Disabler, Nuker, Initiator",6204,2855,46.02,37869,17465,46.12,80124,36649,45.74,84390,38176,45.24,50720,22914,45.18,21698,9784,45.09,9308,4280,45.98,3220,1496,46.46
|
||||
52,Lone Druid,all,"Carry, Pusher, Durable",909,483,53.14,4714,2421,51.36,10987,5858,53.32,14580,7968,54.65,11810,6490,54.95,7241,3971,54.84,4024,2240,55.67,2303,1259,54.67
|
||||
53,Luna,agi,"Carry, Nuker, Pusher",1927,904,46.91,9091,4271,46.98,16571,7922,47.81,16035,7615,47.49,9728,4634,47.64,4463,2103,47.12,1912,911,47.65,719,322,44.78
|
||||
54,Lycan,all,"Carry, Pusher, Durable, Escape",374,174,46.52,1894,915,48.31,3691,1744,47.25,3824,1905,49.82,2694,1332,49.44,1460,753,51.58,827,411,49.7,532,289,54.32
|
||||
55,Magnus,all,"Initiator, Disabler, Nuker, Escape",770,339,44.03,5789,2651,45.79,17837,7954,44.59,26126,12058,46.15,20634,9592,46.49,10574,5056,47.82,4565,2073,45.41,1606,751,46.76
|
||||
56,Marci,all,"Support, Carry, Initiator, Disabler, Escape",1370,620,45.26,7092,3252,45.85,15199,7240,47.63,18485,8874,48.01,13308,6305,47.38,7176,3476,48.44,3689,1882,51.02,1746,883,50.57
|
||||
57,Mars,str,"Carry, Initiator, Disabler, Durable",862,375,43.5,5719,2529,44.22,15156,6756,44.58,20719,9369,45.22,16419,7387,44.99,9044,4052,44.8,4536,2093,46.14,1926,868,45.07
|
||||
58,Medusa,agi,"Carry, Disabler, Durable",1898,902,47.52,9289,4512,48.57,16504,7818,47.37,14796,6886,46.54,7488,3449,46.06,2775,1270,45.77,1073,482,44.92,394,184,46.7
|
||||
59,Meepo,agi,"Carry, Escape, Nuker, Disabler, Initiator, Pusher",1004,523,52.09,3970,1990,50.13,6904,3587,51.96,7166,3646,50.88,4906,2563,52.24,2383,1282,53.8,1139,588,51.62,585,300,51.28
|
||||
60,Mirana,all,"Carry, Support, Escape, Nuker, Disabler",2499,1193,47.74,16954,8135,47.98,39985,19097,47.76,45169,21554,47.72,28467,13456,47.27,12800,6047,47.24,5272,2500,47.42,1824,874,47.92
|
||||
61,Monkey King,agi,"Carry, Escape, Disabler, Initiator",3191,1384,43.37,17306,7544,43.59,35734,16113,45.09,40778,18322,44.93,27558,12630,45.83,14034,6433,45.84,6650,3152,47.4,3040,1440,47.37
|
||||
62,Morphling,agi,"Carry, Escape, Durable, Nuker, Disabler",1521,690,45.36,8620,4006,46.47,18075,8161,45.15,20414,9235,45.24,14395,6530,45.36,7697,3551,46.13,4432,2050,46.25,2560,1190,46.48
|
||||
63,Muerta,int,"Carry, Nuker, Disabler",2130,1089,51.13,10787,5740,53.21,22602,11898,52.64,27609,14495,52.5,20175,10465,51.87,10662,5518,51.75,5462,2759,50.51,2948,1517,51.46
|
||||
64,Naga Siren,agi,"Carry, Support, Pusher, Disabler, Initiator, Escape",1502,804,53.53,6495,3356,51.67,10423,5234,50.22,9830,4929,50.14,6057,2971,49.05,3216,1675,52.08,1855,933,50.3,1242,634,51.05
|
||||
65,Nature's Prophet,int,"Carry, Pusher, Escape, Nuker",5991,3029,50.56,36433,18143,49.8,83118,42095,50.64,100341,51268,51.09,69436,35870,51.66,34256,17858,52.13,16585,8745,52.73,7182,3755,52.28
|
||||
66,Necrophos,int,"Carry, Nuker, Durable, Disabler",4776,2702,56.57,28535,15771,55.27,62186,34285,55.13,70212,38163,54.35,46539,24708,53.09,21607,11302,52.31,9677,4994,51.61,3418,1733,50.7
|
||||
67,Night Stalker,str,"Carry, Initiator, Durable, Disabler, Nuker",1189,594,49.96,7868,3892,49.47,19446,10004,51.45,25524,13506,52.91,20138,10828,53.77,10767,5651,52.48,5499,2889,52.54,2415,1257,52.05
|
||||
68,Nyx Assassin,all,"Disabler, Nuker, Initiator, Escape",1718,867,50.47,10925,5525,50.57,27207,14073,51.73,34684,18059,52.07,25736,13572,52.74,13313,7093,53.28,6485,3444,53.11,2852,1468,51.47
|
||||
69,Ogre Magi,str,"Support, Nuker, Disabler, Durable, Initiator",5331,2845,53.37,31507,16299,51.73,62954,32248,51.22,61758,31373,50.8,33746,16988,50.34,13262,6654,50.17,4861,2420,49.78,1271,654,51.46
|
||||
70,Omniknight,str,"Support, Durable, Nuker",975,479,49.13,6426,3109,48.38,14641,7319,49.99,17258,8731,50.59,11695,5916,50.59,5746,2993,52.09,2870,1469,51.18,1333,656,49.21
|
||||
71,Oracle,int,"Support, Nuker, Disabler, Escape",796,384,48.24,4857,2417,49.76,13141,6645,50.57,18944,9853,52.01,15221,7964,52.32,8356,4458,53.35,4475,2380,53.18,1905,1018,53.44
|
||||
72,Outworld Destroyer,int,"Carry, Nuker, Disabler",2226,1118,50.22,13388,6864,51.27,33284,17362,52.16,43991,23377,53.14,32021,16994,53.07,16655,8724,52.38,8123,4218,51.93,3176,1649,51.92
|
||||
73,Pangolier,all,"Carry, Nuker, Disabler, Durable, Escape, Initiator",1156,534,46.19,7189,3209,44.64,17802,7937,44.58,25785,11677,45.29,21727,10144,46.69,13064,6351,48.61,7567,3737,49.39,5275,2734,51.83
|
||||
74,Phantom Assassin,agi,"Carry, Escape",8553,4426,51.75,48549,25553,52.63,104756,54881,52.39,119332,62511,52.38,79140,41143,51.99,37399,19325,51.67,17774,9077,51.07,7819,3856,49.32
|
||||
75,Phantom Lancer,agi,"Carry, Escape, Pusher, Nuker",3641,1960,53.83,19550,10374,53.06,38576,20633,53.49,41505,22310,53.75,26401,14268,54.04,12437,6590,52.99,5708,2985,52.3,2383,1243,52.16
|
||||
76,Phoenix,all,"Support, Nuker, Initiator, Escape, Disabler",743,315,42.4,5231,2471,47.24,13950,6633,47.55,18350,8864,48.31,13972,6715,48.06,7787,3761,48.3,4322,2132,49.33,2610,1325,50.77
|
||||
77,Primal Beast,str,"Initiator, Durable, Disabler",1455,701,48.18,9333,4448,47.66,22800,11058,48.5,30084,14643,48.67,24307,11993,49.34,13970,6991,50.04,7742,3890,50.25,4625,2407,52.04
|
||||
78,Puck,int,"Initiator, Disabler, Escape, Nuker",871,399,45.81,5773,2628,45.52,16596,7578,45.66,24480,11315,46.22,20070,9497,47.32,11023,5298,48.06,5656,2714,47.98,2555,1200,46.97
|
||||
79,Pudge,str,"Disabler, Initiator, Durable, Nuker",7677,3796,49.45,50891,24776,48.68,114784,56289,49.04,129604,63097,48.68,85800,41542,48.42,41730,20239,48.5,19823,9530,48.08,7112,3431,48.24
|
||||
80,Pugna,int,"Nuker, Pusher",2075,944,45.49,9998,4695,46.96,18962,8958,47.24,20240,9965,49.23,12807,6199,48.4,5825,2855,49.01,2758,1387,50.29,1195,592,49.54
|
||||
81,Queen of Pain,int,"Carry, Nuker, Escape",2287,1100,48.1,15119,7354,48.64,37137,18118,48.79,47706,23657,49.59,35500,18018,50.75,18405,9289,50.47,9243,4689,50.73,4227,2113,49.99
|
||||
82,Razor,agi,"Carry, Durable, Nuker, Pusher",2470,1231,49.84,12000,5964,49.7,24666,12142,49.23,30334,14844,48.94,21832,10558,48.36,11917,5679,47.65,6092,2912,47.8,3144,1551,49.33
|
||||
83,Riki,agi,"Carry, Escape, Disabler",3684,1929,52.36,19022,9891,52.0,35638,18582,52.14,33908,17415,51.36,20194,10312,51.06,8726,4377,50.16,3735,1855,49.67,1160,559,48.19
|
||||
84,Rubick,int,"Support, Disabler, Nuker",3090,1404,45.44,21639,9303,42.99,57417,24590,42.83,74874,32603,43.54,55186,24219,43.89,28206,12568,44.56,13732,6106,44.47,5764,2642,45.84
|
||||
85,Sand King,all,"Initiator, Disabler, Support, Nuker, Escape",2633,1513,57.46,13097,7323,55.91,25271,13807,54.64,26724,14323,53.6,17384,9144,52.6,7907,4104,51.9,3394,1719,50.65,1211,611,50.45
|
||||
86,Shadow Demon,int,"Support, Disabler, Initiator, Nuker",547,236,43.14,3252,1426,43.85,7920,3524,44.49,9752,4551,46.67,7404,3467,46.83,3956,1876,47.42,2076,1004,48.36,1054,497,47.15
|
||||
87,Shadow Fiend,agi,"Carry, Nuker",5051,2544,50.37,27255,14064,51.6,58589,29830,50.91,65429,33097,50.58,41810,21189,50.68,18766,9401,50.1,8232,4000,48.59,3016,1430,47.41
|
||||
88,Shadow Shaman,int,"Support, Pusher, Disabler, Nuker, Initiator",5323,2795,52.51,29733,15606,52.49,58894,31236,53.04,58765,30895,52.57,34475,18242,52.91,15166,7986,52.66,6377,3323,52.11,2413,1253,51.93
|
||||
89,Silencer,int,"Carry, Support, Disabler, Initiator, Nuker",4229,2324,54.95,27878,14960,53.66,61698,33081,53.62,65256,34458,52.8,38589,19853,51.45,16889,8653,51.23,6836,3416,49.97,2236,1105,49.42
|
||||
90,Skywrath Mage,int,"Support, Nuker, Disabler",4000,2030,50.75,22783,11675,51.24,46512,23624,50.79,51329,25706,50.08,34167,17364,50.82,16693,8415,50.41,8496,4208,49.53,4389,2069,47.14
|
||||
91,Slardar,str,"Carry, Durable, Initiator, Disabler, Escape",3935,2129,54.1,21523,11602,53.91,43947,23701,53.93,47721,25633,53.71,29887,16132,53.98,14233,7722,54.25,6530,3467,53.09,2322,1205,51.89
|
||||
92,Slark,agi,"Carry, Escape, Disabler, Nuker",4815,2521,52.36,29413,14762,50.19,64004,31771,49.64,70173,34411,49.04,44780,21926,48.96,20864,10270,49.22,9969,4962,49.77,4565,2394,52.44
|
||||
93,Snapfire,all,"Support, Nuker, Disabler, Escape",1524,682,44.75,10646,4576,42.98,27103,12120,44.72,34711,15412,44.4,24351,10786,44.29,11723,5131,43.77,5227,2294,43.89,1987,868,43.68
|
||||
94,Sniper,agi,"Carry, Nuker",8022,4079,50.85,44508,22727,51.06,88690,45223,50.99,87190,44086,50.56,47411,23648,49.88,18092,8924,49.33,6130,3040,49.59,1370,662,48.32
|
||||
95,Spectre,agi,"Carry, Durable, Escape",3454,2008,58.14,22097,12356,55.92,49157,26961,54.85,55914,30100,53.83,36321,19338,53.24,16946,8960,52.87,7921,4163,52.56,2568,1370,53.35
|
||||
96,Spirit Breaker,str,"Carry, Initiator, Disabler, Durable, Escape",4788,2423,50.61,26662,13530,50.75,56535,28908,51.13,63991,32249,50.4,42512,21357,50.24,20119,9926,49.34,9499,4814,50.68,3761,1884,50.09
|
||||
97,Storm Spirit,int,"Carry, Escape, Nuker, Initiator, Disabler",2202,1001,45.46,11656,5197,44.59,25644,11806,46.04,30968,14210,45.89,21680,10197,47.03,10810,5025,46.48,5278,2382,45.13,2363,1122,47.48
|
||||
98,Sven,str,"Carry, Disabler, Initiator, Durable, Nuker",3552,1761,49.58,19792,9744,49.23,41296,20478,49.59,48709,24228,49.74,35460,17828,50.28,19795,10065,50.85,11014,5655,51.34,6701,3387,50.54
|
||||
99,Techies,all,"Nuker, Disabler",2356,1131,48.01,13105,6245,47.65,27293,12893,47.24,29180,13507,46.29,18216,8407,46.15,8266,3771,45.62,3459,1644,47.53,1319,591,44.81
|
||||
100,Templar Assassin,agi,"Carry, Escape",2142,955,44.58,10932,4758,43.52,21211,9445,44.53,23928,10909,45.59,17399,8242,47.37,9567,4656,48.67,5525,2708,49.01,3524,1775,50.37
|
||||
101,Terrorblade,agi,"Carry, Pusher, Nuker",1115,484,43.41,5686,2430,42.74,10856,4638,42.72,11518,5041,43.77,8059,3540,43.93,4192,1827,43.58,2419,1082,44.73,1621,700,43.18
|
||||
102,Tidehunter,str,"Initiator, Durable, Disabler, Nuker, Carry",1835,855,46.59,11159,5369,48.11,26222,12699,48.43,30735,14879,48.41,20523,9727,47.4,9731,4740,48.71,4426,2079,46.97,1998,936,46.85
|
||||
103,Timbersaw,all,"Nuker, Durable, Escape",1050,448,42.67,5854,2584,44.14,12301,5391,43.83,14295,6097,42.65,9697,4217,43.49,4992,2163,43.33,2419,1021,42.21,1139,471,41.35
|
||||
104,Tinker,int,"Carry, Nuker, Pusher",2106,944,44.82,11058,5200,47.02,24263,11826,48.74,27531,13614,49.45,19017,9732,51.18,9416,4875,51.77,4700,2466,52.47,1951,1036,53.1
|
||||
105,Tiny,str,"Carry, Nuker, Pusher, Initiator, Durable, Disabler",1434,654,45.61,7742,3452,44.59,15936,6950,43.61,17139,7468,43.57,11269,4991,44.29,5485,2491,45.41,2599,1216,46.79,1058,519,49.05
|
||||
106,Treant Protector,str,"Support, Initiator, Durable, Disabler, Escape",1646,899,54.62,11430,5881,51.45,28752,15124,52.6,36093,19344,53.59,28762,15532,54.0,16751,9227,55.08,9870,5468,55.4,6801,3855,56.68
|
||||
107,Troll Warlord,agi,"Carry, Pusher, Disabler, Durable",3176,1720,54.16,14007,7445,53.15,24729,13022,52.66,25424,13228,52.03,17362,9030,52.01,9427,4913,52.12,4767,2499,52.42,2341,1242,53.05
|
||||
108,Tusk,str,"Initiator, Disabler, Nuker",1263,565,44.73,8338,3777,45.3,19642,8869,45.15,25308,11520,45.52,18927,8853,46.77,10100,4820,47.72,5220,2502,47.93,2350,1157,49.23
|
||||
109,Underlord,str,"Support, Nuker, Disabler, Durable, Escape",797,405,50.82,4583,2341,51.08,10067,5057,50.23,11650,5786,49.67,7224,3561,49.29,3310,1591,48.07,1368,673,49.2,395,190,48.1
|
||||
110,Undying,str,"Support, Durable, Disabler, Nuker",3170,1620,51.1,19403,10116,52.14,40582,21110,52.02,40850,21182,51.85,23985,12454,51.92,10395,5389,51.84,4541,2336,51.44,2064,1012,49.03
|
||||
111,Ursa,agi,"Carry, Durable, Disabler",2801,1273,45.45,15132,7038,46.51,33269,15478,46.52,40822,19264,47.19,29348,14011,47.74,15262,7375,48.32,7507,3622,48.25,3004,1473,49.03
|
||||
112,Vengeful Spirit,all,"Support, Initiator, Disabler, Nuker, Escape",2186,1108,50.69,15817,8285,52.38,41843,21809,52.12,57524,30476,52.98,45512,24120,53.0,25581,13382,52.31,13758,7121,51.76,8276,4303,51.99
|
||||
113,Venomancer,all,"Support, Nuker, Initiator, Pusher, Disabler",2309,1187,51.41,14669,7463,50.88,34787,18020,51.8,41797,21690,51.89,28706,15085,52.55,13974,7338,52.51,6538,3495,53.46,2794,1459,52.22
|
||||
114,Viper,agi,"Carry, Durable, Initiator, Disabler",4100,2057,50.17,18991,9510,50.08,33517,16923,50.49,32728,16677,50.96,18537,9427,50.86,7851,3928,50.03,3260,1652,50.67,1176,610,51.87
|
||||
115,Visage,all,"Support, Nuker, Durable, Disabler, Pusher",331,171,51.66,1638,813,49.63,3240,1577,48.67,3840,1986,51.72,3108,1609,51.77,1995,1055,52.88,1309,702,53.63,858,457,53.26
|
||||
116,Void Spirit,all,"Carry, Escape, Nuker, Disabler",1565,727,46.45,8672,4096,47.23,20010,9694,48.45,25213,12376,49.09,18817,9231,49.06,10026,4920,49.07,4788,2319,48.43,2006,964,48.06
|
||||
117,Warlock,int,"Support, Initiator, Disabler",2547,1369,53.75,18931,10331,54.57,49795,26999,54.22,66697,36220,54.31,48401,25668,53.03,24999,12942,51.77,12575,6356,50.54,6183,2934,47.45
|
||||
118,Weaver,agi,"Carry, Escape",2818,1389,49.29,13873,6770,48.8,23493,11571,49.25,21545,10694,49.64,12911,6427,49.78,5809,2928,50.4,2960,1455,49.16,1303,719,55.18
|
||||
119,Windranger,all,"Carry, Support, Disabler, Escape, Nuker",3861,1814,46.98,19934,9223,46.27,40644,18807,46.27,44476,20652,46.43,28952,13508,46.66,13418,6297,46.93,5898,2782,47.17,2374,1142,48.1
|
||||
120,Winter Wyvern,all,"Support, Disabler, Nuker",821,371,45.19,5168,2424,46.9,10544,5014,47.55,11184,5308,47.46,7426,3512,47.29,3730,1854,49.71,1862,934,50.16,944,464,49.15
|
||||
121,Witch Doctor,int,"Support, Nuker, Disabler",7504,4173,55.61,45501,25616,56.3,99664,54963,55.15,111382,60421,54.25,71830,37860,52.71,33164,17334,52.27,14610,7442,50.94,4196,2076,49.48
|
||||
122,Wraith King,str,"Carry, Support, Durable, Disabler, Initiator",4175,2266,54.28,26362,14516,55.06,58733,32403,55.17,66283,36503,55.07,42360,23083,54.49,19084,10251,53.72,8334,4315,51.78,2707,1376,50.83
|
||||
123,Zeus,int,"Nuker, Carry",4132,2106,50.97,23721,12487,52.64,51568,27475,53.28,58333,31078,53.28,37821,20047,53.0,17901,9504,53.09,8539,4459,52.22,3400,1791,52.68
|
||||
|
BIN
belyaeva_ekaterina_lab_5/R2Score.png
Normal file
After Width: | Height: | Size: 7.2 KiB |
42
belyaeva_ekaterina_lab_5/README.md
Normal file
@@ -0,0 +1,42 @@
## Task

Apply the regression method assigned by your variant to the data from table 1 (per the variant, table 10), formulating the task yourself. Assess how well it suits the task you formulated.

Variant 6 - polynomial regression

## How to run the lab

Run the file main.py.

## Technologies used

The pandas, matplotlib, and scikit-learn libraries and their components.

## Description of the lab (program)

The code works with a dataset of Dota 2 heroes that lists each hero's name, primary attribute, roles, and how often the hero is picked and what win rate it has at every rank in Dota, from Herald to Immortal.

In my case the task was to predict a hero's win rate from how often the hero is picked and from its win rate at the adjacent ranks (predicting the win rate from pick frequency alone is not possible, because the win rate depends on the current meta).

The program takes the columns Name, Archon Picks, Archon Win Rate, Legend Picks, Legend Win Rate, Ancient Picks, and Ancient Win Rate. All columns except Name and Legend Win Rate are used to train the model; Legend Win Rate is the value to be predicted, and Name is used to label the output.

The rest is standard: the program splits the data into training and test sets, transforms the training data and trains the model, then applies the same transformation to the test data, makes predictions, and prints the model quality score.

Finally, the program draws a plot with the points of the training and test sets; I decided to annotate the test points with hero names so that the plot is more informative without becoming cluttered.

## Results

The result is a plot showing the training and test sets.



In addition, the program prints the model quality score (the R² coefficient of determination; a short sketch of how it is computed follows this README):



From this we can conclude that the model works very well and successfully solves the formulated task.

This is explained by the fact that the model was given a fairly large number of features from which the target can be predicted; moreover, the hero win rates are taken from adjacent ranks.

If the win rates were taken from ranks far from the target one, the model would perform worse, because the larger the gap between ranks, the more the heroes' win rates differ. Likewise, if fewer features were used, the quality score would also be lower.
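For reference, a minimal sketch of what that quality score is: `LinearRegression.score` returns the coefficient of determination R² = 1 - SS_res / SS_tot, the same value that `sklearn.metrics.r2_score` computes (the toy arrays below are purely illustrative):

```python
import numpy as np
from sklearn.metrics import r2_score

y_true = np.array([51.7, 48.4, 53.0, 47.5])     # toy "actual win rate" values
y_pred = np.array([51.2, 48.9, 52.4, 47.9])     # toy model predictions

ss_res = np.sum((y_true - y_pred) ** 2)         # residual sum of squares
ss_tot = np.sum((y_true - y_true.mean()) ** 2)  # total sum of squares
print(1 - ss_res / ss_tot)                      # R^2 by hand
print(r2_score(y_true, y_pred))                 # same value from scikit-learn
```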
BIN
belyaeva_ekaterina_lab_5/diagram.png
Normal file
After Width: | Height: | Size: 81 KiB |
47
belyaeva_ekaterina_lab_5/main.py
Normal file
@@ -0,0 +1,47 @@
import pandas as pd
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt

# Load the data
data = pd.read_csv('Current_Pub_Meta.csv')

# Features: picks and win rates from the adjacent ranks; target: Legend Win Rate
X = data[['Archon Picks', 'Archon Win Rate', 'Legend Picks', 'Ancient Picks', 'Ancient Win Rate']]
y = data['Legend Win Rate']
names = data['Name']

# Split the data into training and test sets
X_train, X_test, y_train, y_test, names_train, names_test = train_test_split(X, y, names, test_size=0.1, random_state=42)

# Apply polynomial features to the training data
poly_features = PolynomialFeatures(degree=2)
X_train_poly = poly_features.fit_transform(X_train)

# Create and train the polynomial regression model
poly_model = LinearRegression()
poly_model.fit(X_train_poly, y_train)

# Apply the polynomial features to the test data and make predictions
X_test_poly = poly_features.transform(X_test)
y_pred = poly_model.predict(X_test_poly)

# Evaluate the model on the test data
r2 = poly_model.score(X_test_poly, y_test)
print(f"R-squared: {r2}")

# Plot with hero names
plt.figure(figsize=(10, 6))
plt.title('Correlation between hero picks and wins at the "Legend" rank')
plt.grid(True)
plt.scatter(X_train['Legend Picks'], y_train, color='blue', alpha=0.5, label='Training set')
plt.scatter(X_test['Legend Picks'], y_test, color='red', alpha=0.5, label='Test set')

# Annotate the test points with hero names (placed at the predicted win rate)
for i, name in enumerate(names_test):
    plt.annotate(name, (X_test['Legend Picks'].iloc[i], y_pred[i]), fontsize=8, alpha=0.7, color='black')

plt.xlabel('Legend Picks')
plt.ylabel('Legend Win Rate')
plt.legend()
plt.show()
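As a design note, the PolynomialFeatures-plus-LinearRegression pair is often wrapped in a scikit-learn Pipeline, which keeps fit_transform/transform from being mixed up between the training and test sets. A minimal sketch under the same column assumptions as main.py:

```python
import pandas as pd
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split

data = pd.read_csv('Current_Pub_Meta.csv')
X = data[['Archon Picks', 'Archon Win Rate', 'Legend Picks', 'Ancient Picks', 'Ancient Win Rate']]
y = data['Legend Win Rate']
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1, random_state=42)

# The pipeline applies the degree-2 expansion and the linear fit as one estimator
model = make_pipeline(PolynomialFeatures(degree=2), LinearRegression())
model.fit(X_train, y_train)
print(model.score(X_test, y_test))  # same R^2 metric as printed by main.py
```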
125
belyaeva_ekaterina_lab_6/Current_Pub_Meta.csv
Normal file
@@ -0,0 +1,125 @@
|
||||
,Name,Primary Attribute,Roles,Herald Picks,Herald Wins,Herald Win Rate,Guardian Picks,Guardian Wins,Guardian Win Rate,Crusader Picks,Crusader Wins,Crusader Win Rate,Archon Picks,Archon Wins,Archon Win Rate,Legend Picks,Legend Wins,Legend Win Rate,Ancient Picks,Ancient Wins,Ancient Win Rate,Divine Picks,Divine Wins,Divine Win Rate,Immortal Picks,Immortal Wins,Immortal Win Rate
|
||||
0,Abaddon,all,"Support, Carry, Durable",1111,575,51.76,6408,3309,51.64,13811,7050,51.05,16497,8530,51.71,11360,5877,51.73,5571,2893,51.93,2632,1345,51.1,991,497,50.15
|
||||
1,Alchemist,str,"Carry, Support, Durable, Disabler, Initiator, Nuker",1119,486,43.43,6370,2883,45.26,12238,5617,45.9,13028,6130,47.05,8455,4055,47.96,4120,1984,48.16,2021,1023,50.62,860,424,49.3
|
||||
2,Ancient Apparition,int,"Support, Disabler, Nuker",2146,1073,50.0,13697,7069,51.61,30673,16118,52.55,35145,18219,51.84,23114,12166,52.63,10688,5528,51.72,5035,2573,51.1,2134,1076,50.42
|
||||
3,Anti-Mage,agi,"Carry, Escape, Nuker",3765,1818,48.29,22050,10774,48.86,47371,23304,49.19,49115,24074,49.02,28599,13991,48.92,12303,5958,48.43,4866,2349,48.27,1502,751,50.0
|
||||
4,Arc Warden,agi,"Carry, Escape, Nuker",1448,704,48.62,8047,4162,51.72,14946,7982,53.41,14711,7875,53.53,9472,5167,54.55,4323,2309,53.41,2104,1148,54.56,789,435,55.13
|
||||
5,Axe,str,"Initiator, Durable, Disabler, Carry",5343,2880,53.9,32652,17719,54.27,71010,37736,53.14,77869,40559,52.09,49182,25079,50.99,22637,11353,50.15,10114,5000,49.44,3795,1837,48.41
|
||||
6,Bane,all,"Support, Disabler, Nuker, Durable",745,334,44.83,4983,2422,48.61,11332,5504,48.57,13633,6767,49.64,10132,5032,49.66,5596,2861,51.13,3028,1555,51.35,1958,1055,53.88
|
||||
7,Batrider,all,"Initiator, Disabler, Escape",349,136,38.97,1983,812,40.95,4053,1595,39.35,4725,1861,39.39,3173,1275,40.18,1678,731,43.56,802,362,45.14,497,227,45.67
|
||||
8,Beastmaster,all,"Initiator, Disabler, Durable, Nuker",402,174,43.28,2447,1060,43.32,5787,2569,44.39,6930,3092,44.62,5288,2389,45.18,2816,1274,45.24,1593,752,47.21,1176,539,45.83
|
||||
9,Bloodseeker,agi,"Carry, Disabler, Nuker, Initiator",2765,1382,49.98,12589,6270,49.81,21781,10683,49.05,20961,10420,49.71,13035,6430,49.33,6210,3006,48.41,2941,1475,50.15,1465,718,49.01
|
||||
10,Bounty Hunter,agi,"Escape, Nuker",3852,1868,48.49,19609,9535,48.63,36362,17600,48.4,37059,18314,49.42,22934,11518,50.22,10584,5276,49.85,5105,2594,50.81,2498,1325,53.04
|
||||
11,Brewmaster,all,"Carry, Initiator, Durable, Disabler, Nuker",545,280,51.38,3564,1745,48.96,8941,4388,49.08,12340,6111,49.52,11185,5623,50.27,7645,3906,51.09,4812,2478,51.5,3533,1820,51.51
|
||||
12,Bristleback,str,"Carry, Durable, Initiator, Nuker",5884,3262,55.44,27952,14587,52.19,48847,24379,49.91,46702,22927,49.09,27466,13319,48.49,12398,5969,48.14,5865,2915,49.7,2639,1304,49.41
|
||||
13,Broodmother,all,"Carry, Pusher, Escape, Nuker",456,173,37.94,2048,842,41.11,3444,1462,42.45,3392,1448,42.69,2193,1048,47.79,1203,602,50.04,795,422,53.08,453,230,50.77
|
||||
14,Centaur Warrunner,str,"Durable, Initiator, Disabler, Nuker, Escape",1721,911,52.93,11754,6266,53.31,28691,15201,52.98,35369,18741,52.99,25393,13468,53.04,12653,6607,52.22,6124,3181,51.94,2442,1243,50.9
|
||||
15,Chaos Knight,str,"Carry, Disabler, Durable, Pusher, Initiator",3032,1639,54.06,16762,8931,53.28,31892,17139,53.74,30697,16435,53.54,18217,9810,53.85,8572,4620,53.9,4230,2291,54.16,1750,943,53.89
|
||||
16,Chen,all,"Support, Pusher",284,125,44.01,1450,678,46.76,2969,1345,45.3,3258,1604,49.23,2641,1331,50.4,1488,767,51.55,970,512,52.78,770,448,58.18
|
||||
17,Clinkz,agi,"Carry, Escape, Pusher",3151,1608,51.03,13891,7141,51.41,25465,12938,50.81,27327,14066,51.47,18846,9726,51.61,9452,4890,51.74,4765,2475,51.94,2093,1052,50.26
|
||||
18,Clockwerk,all,"Initiator, Disabler, Durable, Nuker",816,397,48.65,5860,2837,48.41,14478,6929,47.86,18466,8843,47.89,13143,6301,47.94,6612,3169,47.93,3286,1581,48.11,1378,658,47.75
|
||||
19,Crystal Maiden,int,"Support, Disabler, Nuker",4821,2529,52.46,26584,13626,51.26,52168,26040,49.92,52258,25365,48.54,30690,14848,48.38,13295,6404,48.17,5602,2680,47.84,1638,771,47.07
|
||||
20,Dark Seer,all,"Initiator, Escape, Disabler",627,320,51.04,3675,1884,51.27,7881,3803,48.26,9589,4844,50.52,7186,3573,49.72,3902,1983,50.82,2145,1095,51.05,1217,593,48.73
|
||||
21,Dark Willow,all,"Support, Nuker, Disabler, Escape",2654,1293,48.72,13829,6657,48.14,28142,13480,47.9,32114,15785,49.15,23100,11331,49.05,12052,5909,49.03,6400,3182,49.72,3708,1915,51.65
|
||||
22,Dawnbreaker,str,"Carry, Durable",1746,875,50.11,12297,6105,49.65,32398,15921,49.14,44846,21936,48.91,35474,17441,49.17,19770,9832,49.73,10637,5263,49.48,6339,3173,50.06
|
||||
23,Dazzle,all,"Support, Nuker, Disabler",2827,1418,50.16,19852,9758,49.15,48236,23691,49.11,56417,27798,49.27,38159,18642,48.85,18695,9199,49.21,8530,4239,49.7,3382,1654,48.91
|
||||
24,Death Prophet,int,"Carry, Pusher, Nuker, Disabler",1372,659,48.03,6643,3145,47.34,11987,5729,47.79,12268,5856,47.73,7455,3606,48.37,3591,1698,47.28,1872,902,48.18,926,459,49.57
|
||||
25,Disruptor,int,"Support, Disabler, Nuker, Initiator",1541,757,49.12,11104,5331,48.01,27746,13542,48.81,33742,16310,48.34,23173,11096,47.88,10907,5201,47.68,4859,2255,46.41,1863,861,46.22
|
||||
26,Doom,str,"Carry, Disabler, Initiator, Durable, Nuker",1049,474,45.19,6112,2767,45.27,13700,6056,44.2,15454,6925,44.81,10727,4842,45.14,5444,2451,45.02,2979,1348,45.25,1545,731,47.31
|
||||
27,Dragon Knight,str,"Carry, Pusher, Durable, Disabler, Initiator, Nuker",1950,942,48.31,10643,5274,49.55,20451,9733,47.59,20326,9671,47.58,11674,5544,47.49,4979,2355,47.3,2024,973,48.07,725,341,47.03
|
||||
28,Drow Ranger,agi,"Carry, Disabler, Pusher",5737,2904,50.62,29675,14831,49.98,57655,28573,49.56,56682,27927,49.27,34310,16607,48.4,15050,7171,47.65,5947,2815,47.33,1768,788,44.57
|
||||
29,Earth Spirit,str,"Nuker, Escape, Disabler, Initiator, Durable",1038,465,44.8,7420,3276,44.15,20807,9432,45.33,30107,14166,47.05,25314,12148,47.99,14579,7041,48.3,7678,3802,49.52,4379,2169,49.53
|
||||
30,Earthshaker,str,"Support, Initiator, Disabler, Nuker",5012,2455,48.98,29784,14662,49.23,67050,33111,49.38,79963,39843,49.83,57108,28961,50.71,28650,14591,50.93,14186,7296,51.43,6151,3165,51.46
|
||||
31,Elder Titan,str,"Initiator, Disabler, Nuker, Durable",471,212,45.01,2551,1248,48.92,5213,2570,49.3,5572,2809,50.41,3847,1942,50.48,1964,998,50.81,1124,613,54.54,550,292,53.09
|
||||
32,Ember Spirit,agi,"Carry, Escape, Nuker, Disabler, Initiator",1514,635,41.94,9180,3836,41.79,20578,8738,42.46,25152,10844,43.11,17703,7814,44.14,8538,3793,44.42,4265,1892,44.36,2065,928,44.94
|
||||
33,Enchantress,int,"Support, Pusher, Durable, Disabler",1794,848,47.27,8050,3622,44.99,12921,5686,44.01,11673,4974,42.61,6863,2840,41.38,2948,1212,41.11,1434,654,45.61,806,318,39.45
|
||||
34,Enigma,all,"Disabler, Initiator, Pusher",1317,588,44.65,6937,3171,45.71,12908,5979,46.32,11687,5428,46.44,6194,2839,45.83,2493,1127,45.21,938,437,46.59,338,159,47.04
|
||||
35,Faceless Void,agi,"Carry, Initiator, Disabler, Escape, Durable",4323,2043,47.26,25618,11902,46.46,54581,25874,47.4,60671,28993,47.79,40137,19611,48.86,19376,9620,49.65,9579,4828,50.4,4439,2256,50.82
|
||||
36,Grimstroke,int,"Support, Nuker, Disabler, Escape",1455,694,47.7,9714,4789,49.3,24688,12430,50.35,32027,16094,50.25,23193,11795,50.86,12102,6100,50.4,6191,3047,49.22,3449,1666,48.3
|
||||
37,Gyrocopter,agi,"Carry, Nuker, Disabler",2560,1213,47.38,16589,7882,47.51,42072,20358,48.39,54200,26229,48.39,39414,19053,48.34,20164,9781,48.51,10164,4937,48.57,5241,2507,47.83
|
||||
38,Hoodwink,agi,"Support, Nuker, Escape, Disabler",2420,1126,46.53,14034,6800,48.45,31382,14964,47.68,35684,16966,47.55,22626,10651,47.07,9949,4690,47.14,4349,2089,48.03,1533,703,45.86
|
||||
39,Huskar,str,"Carry, Durable, Initiator",3501,1603,45.79,14234,6639,46.64,22794,10912,47.87,21801,10763,49.37,13811,6919,50.1,6769,3535,52.22,3556,1822,51.24,1936,993,51.29
|
||||
40,Invoker,all,"Carry, Nuker, Disabler, Escape, Pusher",4330,2042,47.16,27625,13176,47.7,69035,33863,49.05,86745,43479,50.12,61821,31510,50.97,31459,16321,51.88,15431,8195,53.11,7852,4148,52.83
|
||||
41,Io,all,"Support, Escape, Nuker",1274,615,48.27,6158,2999,48.7,12762,6247,48.95,14216,7024,49.41,9564,4843,50.64,5301,2685,50.65,2789,1463,52.46,1464,773,52.8
|
||||
42,Jakiro,int,"Support, Nuker, Pusher, Disabler",3147,1708,54.27,22718,12413,54.64,56736,30984,54.61,70038,37473,53.5,46389,24997,53.89,22084,11639,52.7,9838,5103,51.87,3282,1729,52.68
|
||||
43,Juggernaut,agi,"Carry, Pusher, Escape",5585,2711,48.54,30394,14800,48.69,62313,30581,49.08,65590,32344,49.31,39235,19326,49.26,16334,8012,49.05,6419,3066,47.76,1576,731,46.38
|
||||
44,Keeper of the Light,int,"Support, Nuker, Disabler",896,353,39.4,5051,2216,43.87,10452,4579,43.81,11614,5322,45.82,7870,3627,46.09,4268,2001,46.88,2147,1043,48.58,1333,588,44.11
|
||||
45,Kunkka,str,"Carry, Support, Disabler, Initiator, Durable, Nuker",2251,1124,49.93,13474,6828,50.68,31210,16196,51.89,39691,21293,53.65,30314,16458,54.29,15706,8793,55.98,7884,4339,55.04,3458,1898,54.89
|
||||
46,Legion Commander,str,"Carry, Disabler, Initiator, Durable, Nuker",6263,3264,52.12,37100,19157,51.64,81491,41557,51.0,91431,46558,50.92,59383,29917,50.38,27945,13917,49.8,13193,6587,49.93,5601,2745,49.01
|
||||
47,Leshrac,int,"Carry, Support, Nuker, Pusher, Disabler",674,316,46.88,3872,1799,46.46,7490,3433,45.83,7903,3604,45.6,5322,2526,47.46,2687,1298,48.31,1325,647,48.83,721,357,49.51
|
||||
48,Lich,int,"Support, Nuker",2700,1412,52.3,16646,8820,52.99,37785,19685,52.1,45471,23554,51.8,31203,16108,51.62,15530,7821,50.36,7243,3597,49.66,2520,1258,49.92
|
||||
49,Lifestealer,str,"Carry, Durable, Escape, Disabler",2515,1213,48.23,14131,6978,49.38,29724,14627,49.21,31211,15581,49.92,18970,9481,49.98,8689,4400,50.64,3630,1821,50.17,1229,617,50.2
|
||||
50,Lina,int,"Support, Carry, Nuker, Disabler",4512,2030,44.99,21927,10156,46.32,45301,21210,46.82,54229,25956,47.86,40016,19138,47.83,21072,10112,47.99,10481,5031,48.0,4369,2138,48.94
|
||||
51,Lion,int,"Support, Disabler, Nuker, Initiator",6204,2855,46.02,37869,17465,46.12,80124,36649,45.74,84390,38176,45.24,50720,22914,45.18,21698,9784,45.09,9308,4280,45.98,3220,1496,46.46
|
||||
52,Lone Druid,all,"Carry, Pusher, Durable",909,483,53.14,4714,2421,51.36,10987,5858,53.32,14580,7968,54.65,11810,6490,54.95,7241,3971,54.84,4024,2240,55.67,2303,1259,54.67
|
||||
53,Luna,agi,"Carry, Nuker, Pusher",1927,904,46.91,9091,4271,46.98,16571,7922,47.81,16035,7615,47.49,9728,4634,47.64,4463,2103,47.12,1912,911,47.65,719,322,44.78
|
||||
54,Lycan,all,"Carry, Pusher, Durable, Escape",374,174,46.52,1894,915,48.31,3691,1744,47.25,3824,1905,49.82,2694,1332,49.44,1460,753,51.58,827,411,49.7,532,289,54.32
|
||||
55,Magnus,all,"Initiator, Disabler, Nuker, Escape",770,339,44.03,5789,2651,45.79,17837,7954,44.59,26126,12058,46.15,20634,9592,46.49,10574,5056,47.82,4565,2073,45.41,1606,751,46.76
|
||||
56,Marci,all,"Support, Carry, Initiator, Disabler, Escape",1370,620,45.26,7092,3252,45.85,15199,7240,47.63,18485,8874,48.01,13308,6305,47.38,7176,3476,48.44,3689,1882,51.02,1746,883,50.57
|
||||
57,Mars,str,"Carry, Initiator, Disabler, Durable",862,375,43.5,5719,2529,44.22,15156,6756,44.58,20719,9369,45.22,16419,7387,44.99,9044,4052,44.8,4536,2093,46.14,1926,868,45.07
|
||||
58,Medusa,agi,"Carry, Disabler, Durable",1898,902,47.52,9289,4512,48.57,16504,7818,47.37,14796,6886,46.54,7488,3449,46.06,2775,1270,45.77,1073,482,44.92,394,184,46.7
|
||||
59,Meepo,agi,"Carry, Escape, Nuker, Disabler, Initiator, Pusher",1004,523,52.09,3970,1990,50.13,6904,3587,51.96,7166,3646,50.88,4906,2563,52.24,2383,1282,53.8,1139,588,51.62,585,300,51.28
|
||||
60,Mirana,all,"Carry, Support, Escape, Nuker, Disabler",2499,1193,47.74,16954,8135,47.98,39985,19097,47.76,45169,21554,47.72,28467,13456,47.27,12800,6047,47.24,5272,2500,47.42,1824,874,47.92
|
||||
61,Monkey King,agi,"Carry, Escape, Disabler, Initiator",3191,1384,43.37,17306,7544,43.59,35734,16113,45.09,40778,18322,44.93,27558,12630,45.83,14034,6433,45.84,6650,3152,47.4,3040,1440,47.37
|
||||
62,Morphling,agi,"Carry, Escape, Durable, Nuker, Disabler",1521,690,45.36,8620,4006,46.47,18075,8161,45.15,20414,9235,45.24,14395,6530,45.36,7697,3551,46.13,4432,2050,46.25,2560,1190,46.48
|
||||
63,Muerta,int,"Carry, Nuker, Disabler",2130,1089,51.13,10787,5740,53.21,22602,11898,52.64,27609,14495,52.5,20175,10465,51.87,10662,5518,51.75,5462,2759,50.51,2948,1517,51.46
|
||||
64,Naga Siren,agi,"Carry, Support, Pusher, Disabler, Initiator, Escape",1502,804,53.53,6495,3356,51.67,10423,5234,50.22,9830,4929,50.14,6057,2971,49.05,3216,1675,52.08,1855,933,50.3,1242,634,51.05
|
||||
65,Nature's Prophet,int,"Carry, Pusher, Escape, Nuker",5991,3029,50.56,36433,18143,49.8,83118,42095,50.64,100341,51268,51.09,69436,35870,51.66,34256,17858,52.13,16585,8745,52.73,7182,3755,52.28
|
||||
66,Necrophos,int,"Carry, Nuker, Durable, Disabler",4776,2702,56.57,28535,15771,55.27,62186,34285,55.13,70212,38163,54.35,46539,24708,53.09,21607,11302,52.31,9677,4994,51.61,3418,1733,50.7
|
||||
67,Night Stalker,str,"Carry, Initiator, Durable, Disabler, Nuker",1189,594,49.96,7868,3892,49.47,19446,10004,51.45,25524,13506,52.91,20138,10828,53.77,10767,5651,52.48,5499,2889,52.54,2415,1257,52.05
|
||||
68,Nyx Assassin,all,"Disabler, Nuker, Initiator, Escape",1718,867,50.47,10925,5525,50.57,27207,14073,51.73,34684,18059,52.07,25736,13572,52.74,13313,7093,53.28,6485,3444,53.11,2852,1468,51.47
|
||||
69,Ogre Magi,str,"Support, Nuker, Disabler, Durable, Initiator",5331,2845,53.37,31507,16299,51.73,62954,32248,51.22,61758,31373,50.8,33746,16988,50.34,13262,6654,50.17,4861,2420,49.78,1271,654,51.46
|
||||
70,Omniknight,str,"Support, Durable, Nuker",975,479,49.13,6426,3109,48.38,14641,7319,49.99,17258,8731,50.59,11695,5916,50.59,5746,2993,52.09,2870,1469,51.18,1333,656,49.21
|
||||
71,Oracle,int,"Support, Nuker, Disabler, Escape",796,384,48.24,4857,2417,49.76,13141,6645,50.57,18944,9853,52.01,15221,7964,52.32,8356,4458,53.35,4475,2380,53.18,1905,1018,53.44
|
||||
72,Outworld Destroyer,int,"Carry, Nuker, Disabler",2226,1118,50.22,13388,6864,51.27,33284,17362,52.16,43991,23377,53.14,32021,16994,53.07,16655,8724,52.38,8123,4218,51.93,3176,1649,51.92
|
||||
73,Pangolier,all,"Carry, Nuker, Disabler, Durable, Escape, Initiator",1156,534,46.19,7189,3209,44.64,17802,7937,44.58,25785,11677,45.29,21727,10144,46.69,13064,6351,48.61,7567,3737,49.39,5275,2734,51.83
|
||||
74,Phantom Assassin,agi,"Carry, Escape",8553,4426,51.75,48549,25553,52.63,104756,54881,52.39,119332,62511,52.38,79140,41143,51.99,37399,19325,51.67,17774,9077,51.07,7819,3856,49.32
|
||||
75,Phantom Lancer,agi,"Carry, Escape, Pusher, Nuker",3641,1960,53.83,19550,10374,53.06,38576,20633,53.49,41505,22310,53.75,26401,14268,54.04,12437,6590,52.99,5708,2985,52.3,2383,1243,52.16
|
||||
76,Phoenix,all,"Support, Nuker, Initiator, Escape, Disabler",743,315,42.4,5231,2471,47.24,13950,6633,47.55,18350,8864,48.31,13972,6715,48.06,7787,3761,48.3,4322,2132,49.33,2610,1325,50.77
|
||||
77,Primal Beast,str,"Initiator, Durable, Disabler",1455,701,48.18,9333,4448,47.66,22800,11058,48.5,30084,14643,48.67,24307,11993,49.34,13970,6991,50.04,7742,3890,50.25,4625,2407,52.04
|
||||
78,Puck,int,"Initiator, Disabler, Escape, Nuker",871,399,45.81,5773,2628,45.52,16596,7578,45.66,24480,11315,46.22,20070,9497,47.32,11023,5298,48.06,5656,2714,47.98,2555,1200,46.97
|
||||
79,Pudge,str,"Disabler, Initiator, Durable, Nuker",7677,3796,49.45,50891,24776,48.68,114784,56289,49.04,129604,63097,48.68,85800,41542,48.42,41730,20239,48.5,19823,9530,48.08,7112,3431,48.24
|
||||
80,Pugna,int,"Nuker, Pusher",2075,944,45.49,9998,4695,46.96,18962,8958,47.24,20240,9965,49.23,12807,6199,48.4,5825,2855,49.01,2758,1387,50.29,1195,592,49.54
|
||||
81,Queen of Pain,int,"Carry, Nuker, Escape",2287,1100,48.1,15119,7354,48.64,37137,18118,48.79,47706,23657,49.59,35500,18018,50.75,18405,9289,50.47,9243,4689,50.73,4227,2113,49.99
|
||||
82,Razor,agi,"Carry, Durable, Nuker, Pusher",2470,1231,49.84,12000,5964,49.7,24666,12142,49.23,30334,14844,48.94,21832,10558,48.36,11917,5679,47.65,6092,2912,47.8,3144,1551,49.33
|
||||
83,Riki,agi,"Carry, Escape, Disabler",3684,1929,52.36,19022,9891,52.0,35638,18582,52.14,33908,17415,51.36,20194,10312,51.06,8726,4377,50.16,3735,1855,49.67,1160,559,48.19
|
||||
84,Rubick,int,"Support, Disabler, Nuker",3090,1404,45.44,21639,9303,42.99,57417,24590,42.83,74874,32603,43.54,55186,24219,43.89,28206,12568,44.56,13732,6106,44.47,5764,2642,45.84
|
||||
85,Sand King,all,"Initiator, Disabler, Support, Nuker, Escape",2633,1513,57.46,13097,7323,55.91,25271,13807,54.64,26724,14323,53.6,17384,9144,52.6,7907,4104,51.9,3394,1719,50.65,1211,611,50.45
|
||||
86,Shadow Demon,int,"Support, Disabler, Initiator, Nuker",547,236,43.14,3252,1426,43.85,7920,3524,44.49,9752,4551,46.67,7404,3467,46.83,3956,1876,47.42,2076,1004,48.36,1054,497,47.15
|
||||
87,Shadow Fiend,agi,"Carry, Nuker",5051,2544,50.37,27255,14064,51.6,58589,29830,50.91,65429,33097,50.58,41810,21189,50.68,18766,9401,50.1,8232,4000,48.59,3016,1430,47.41
|
||||
88,Shadow Shaman,int,"Support, Pusher, Disabler, Nuker, Initiator",5323,2795,52.51,29733,15606,52.49,58894,31236,53.04,58765,30895,52.57,34475,18242,52.91,15166,7986,52.66,6377,3323,52.11,2413,1253,51.93
|
||||
89,Silencer,int,"Carry, Support, Disabler, Initiator, Nuker",4229,2324,54.95,27878,14960,53.66,61698,33081,53.62,65256,34458,52.8,38589,19853,51.45,16889,8653,51.23,6836,3416,49.97,2236,1105,49.42
|
||||
90,Skywrath Mage,int,"Support, Nuker, Disabler",4000,2030,50.75,22783,11675,51.24,46512,23624,50.79,51329,25706,50.08,34167,17364,50.82,16693,8415,50.41,8496,4208,49.53,4389,2069,47.14
|
||||
91,Slardar,str,"Carry, Durable, Initiator, Disabler, Escape",3935,2129,54.1,21523,11602,53.91,43947,23701,53.93,47721,25633,53.71,29887,16132,53.98,14233,7722,54.25,6530,3467,53.09,2322,1205,51.89
|
||||
92,Slark,agi,"Carry, Escape, Disabler, Nuker",4815,2521,52.36,29413,14762,50.19,64004,31771,49.64,70173,34411,49.04,44780,21926,48.96,20864,10270,49.22,9969,4962,49.77,4565,2394,52.44
|
||||
93,Snapfire,all,"Support, Nuker, Disabler, Escape",1524,682,44.75,10646,4576,42.98,27103,12120,44.72,34711,15412,44.4,24351,10786,44.29,11723,5131,43.77,5227,2294,43.89,1987,868,43.68
|
||||
94,Sniper,agi,"Carry, Nuker",8022,4079,50.85,44508,22727,51.06,88690,45223,50.99,87190,44086,50.56,47411,23648,49.88,18092,8924,49.33,6130,3040,49.59,1370,662,48.32
|
||||
95,Spectre,agi,"Carry, Durable, Escape",3454,2008,58.14,22097,12356,55.92,49157,26961,54.85,55914,30100,53.83,36321,19338,53.24,16946,8960,52.87,7921,4163,52.56,2568,1370,53.35
|
||||
96,Spirit Breaker,str,"Carry, Initiator, Disabler, Durable, Escape",4788,2423,50.61,26662,13530,50.75,56535,28908,51.13,63991,32249,50.4,42512,21357,50.24,20119,9926,49.34,9499,4814,50.68,3761,1884,50.09
|
||||
97,Storm Spirit,int,"Carry, Escape, Nuker, Initiator, Disabler",2202,1001,45.46,11656,5197,44.59,25644,11806,46.04,30968,14210,45.89,21680,10197,47.03,10810,5025,46.48,5278,2382,45.13,2363,1122,47.48
|
||||
98,Sven,str,"Carry, Disabler, Initiator, Durable, Nuker",3552,1761,49.58,19792,9744,49.23,41296,20478,49.59,48709,24228,49.74,35460,17828,50.28,19795,10065,50.85,11014,5655,51.34,6701,3387,50.54
|
||||
99,Techies,all,"Nuker, Disabler",2356,1131,48.01,13105,6245,47.65,27293,12893,47.24,29180,13507,46.29,18216,8407,46.15,8266,3771,45.62,3459,1644,47.53,1319,591,44.81
|
||||
100,Templar Assassin,agi,"Carry, Escape",2142,955,44.58,10932,4758,43.52,21211,9445,44.53,23928,10909,45.59,17399,8242,47.37,9567,4656,48.67,5525,2708,49.01,3524,1775,50.37
|
||||
101,Terrorblade,agi,"Carry, Pusher, Nuker",1115,484,43.41,5686,2430,42.74,10856,4638,42.72,11518,5041,43.77,8059,3540,43.93,4192,1827,43.58,2419,1082,44.73,1621,700,43.18
|
||||
102,Tidehunter,str,"Initiator, Durable, Disabler, Nuker, Carry",1835,855,46.59,11159,5369,48.11,26222,12699,48.43,30735,14879,48.41,20523,9727,47.4,9731,4740,48.71,4426,2079,46.97,1998,936,46.85
|
||||
103,Timbersaw,all,"Nuker, Durable, Escape",1050,448,42.67,5854,2584,44.14,12301,5391,43.83,14295,6097,42.65,9697,4217,43.49,4992,2163,43.33,2419,1021,42.21,1139,471,41.35
|
||||
104,Tinker,int,"Carry, Nuker, Pusher",2106,944,44.82,11058,5200,47.02,24263,11826,48.74,27531,13614,49.45,19017,9732,51.18,9416,4875,51.77,4700,2466,52.47,1951,1036,53.1
|
||||
105,Tiny,str,"Carry, Nuker, Pusher, Initiator, Durable, Disabler",1434,654,45.61,7742,3452,44.59,15936,6950,43.61,17139,7468,43.57,11269,4991,44.29,5485,2491,45.41,2599,1216,46.79,1058,519,49.05
|
||||
106,Treant Protector,str,"Support, Initiator, Durable, Disabler, Escape",1646,899,54.62,11430,5881,51.45,28752,15124,52.6,36093,19344,53.59,28762,15532,54.0,16751,9227,55.08,9870,5468,55.4,6801,3855,56.68
|
||||
107,Troll Warlord,agi,"Carry, Pusher, Disabler, Durable",3176,1720,54.16,14007,7445,53.15,24729,13022,52.66,25424,13228,52.03,17362,9030,52.01,9427,4913,52.12,4767,2499,52.42,2341,1242,53.05
|
||||
108,Tusk,str,"Initiator, Disabler, Nuker",1263,565,44.73,8338,3777,45.3,19642,8869,45.15,25308,11520,45.52,18927,8853,46.77,10100,4820,47.72,5220,2502,47.93,2350,1157,49.23
|
||||
109,Underlord,str,"Support, Nuker, Disabler, Durable, Escape",797,405,50.82,4583,2341,51.08,10067,5057,50.23,11650,5786,49.67,7224,3561,49.29,3310,1591,48.07,1368,673,49.2,395,190,48.1
|
||||
110,Undying,str,"Support, Durable, Disabler, Nuker",3170,1620,51.1,19403,10116,52.14,40582,21110,52.02,40850,21182,51.85,23985,12454,51.92,10395,5389,51.84,4541,2336,51.44,2064,1012,49.03
|
||||
111,Ursa,agi,"Carry, Durable, Disabler",2801,1273,45.45,15132,7038,46.51,33269,15478,46.52,40822,19264,47.19,29348,14011,47.74,15262,7375,48.32,7507,3622,48.25,3004,1473,49.03
|
||||
112,Vengeful Spirit,all,"Support, Initiator, Disabler, Nuker, Escape",2186,1108,50.69,15817,8285,52.38,41843,21809,52.12,57524,30476,52.98,45512,24120,53.0,25581,13382,52.31,13758,7121,51.76,8276,4303,51.99
|
||||
113,Venomancer,all,"Support, Nuker, Initiator, Pusher, Disabler",2309,1187,51.41,14669,7463,50.88,34787,18020,51.8,41797,21690,51.89,28706,15085,52.55,13974,7338,52.51,6538,3495,53.46,2794,1459,52.22
|
||||
114,Viper,agi,"Carry, Durable, Initiator, Disabler",4100,2057,50.17,18991,9510,50.08,33517,16923,50.49,32728,16677,50.96,18537,9427,50.86,7851,3928,50.03,3260,1652,50.67,1176,610,51.87
|
||||
115,Visage,all,"Support, Nuker, Durable, Disabler, Pusher",331,171,51.66,1638,813,49.63,3240,1577,48.67,3840,1986,51.72,3108,1609,51.77,1995,1055,52.88,1309,702,53.63,858,457,53.26
|
||||
116,Void Spirit,all,"Carry, Escape, Nuker, Disabler",1565,727,46.45,8672,4096,47.23,20010,9694,48.45,25213,12376,49.09,18817,9231,49.06,10026,4920,49.07,4788,2319,48.43,2006,964,48.06
|
||||
117,Warlock,int,"Support, Initiator, Disabler",2547,1369,53.75,18931,10331,54.57,49795,26999,54.22,66697,36220,54.31,48401,25668,53.03,24999,12942,51.77,12575,6356,50.54,6183,2934,47.45
|
||||
118,Weaver,agi,"Carry, Escape",2818,1389,49.29,13873,6770,48.8,23493,11571,49.25,21545,10694,49.64,12911,6427,49.78,5809,2928,50.4,2960,1455,49.16,1303,719,55.18
|
||||
119,Windranger,all,"Carry, Support, Disabler, Escape, Nuker",3861,1814,46.98,19934,9223,46.27,40644,18807,46.27,44476,20652,46.43,28952,13508,46.66,13418,6297,46.93,5898,2782,47.17,2374,1142,48.1
|
||||
120,Winter Wyvern,all,"Support, Disabler, Nuker",821,371,45.19,5168,2424,46.9,10544,5014,47.55,11184,5308,47.46,7426,3512,47.29,3730,1854,49.71,1862,934,50.16,944,464,49.15
|
||||
121,Witch Doctor,int,"Support, Nuker, Disabler",7504,4173,55.61,45501,25616,56.3,99664,54963,55.15,111382,60421,54.25,71830,37860,52.71,33164,17334,52.27,14610,7442,50.94,4196,2076,49.48
|
||||
122,Wraith King,str,"Carry, Support, Durable, Disabler, Initiator",4175,2266,54.28,26362,14516,55.06,58733,32403,55.17,66283,36503,55.07,42360,23083,54.49,19084,10251,53.72,8334,4315,51.78,2707,1376,50.83
|
||||
123,Zeus,int,"Nuker, Carry",4132,2106,50.97,23721,12487,52.64,51568,27475,53.28,58333,31078,53.28,37821,20047,53.0,17901,9504,53.09,8539,4459,52.22,3400,1791,52.68
|
||||
|
92
belyaeva_ekaterina_lab_6/README.md
Normal file
@@ -0,0 +1,92 @@
## Task

Use the MLPClassifier neural network on the data from Table 1 for your variant, formulating the task yourself. Interpret the results and assess how well the network suits the task you formulated.

## How to run the lab

Run the main.py file.

## Technologies used

The pandas and scikit-learn libraries and their components.

## Description of the lab (program)

The code works with a Dota 2 hero dataset that describes each hero's primary attribute, roles and name, as well as how often the hero is picked and what its win rate is at every rank in Dota, from the lowest to the highest.

The task I set was to find out whether a hero's position (the game has 5 positions in total: carry, mid, offlane, support, full support) can be determined from its primary attribute and the roles it performs in the game. Dota 2 has 124 heroes and they are all very different, so there was a chance that the model would not find any dependency and would not work at all, which is exactly why I found this task interesting. The dataset contains the hero's primary attribute and roles, but no information about the positions it is played in, so I collected that list from external resources and entered it into the program manually. This can be seen in the code where the position lists are defined.



The program takes the Name, Roles and Primary Attribute columns from the dataset. Since the Roles column contains up to 9 different values, listed in a different number and combination for every hero, 9 additional binary columns were created: for each hero a column is set to 1 if that role is present in its description and to 0 otherwise.

Example:

```
data['IsDurable'] = data['Roles'].apply(lambda x: 1 if 'Durable' in x else 0)
```
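The remaining eight flags are built the same way. A compact equivalent form (a sketch only; the actual main.py simply repeats the `apply` call for each role) could look like this; note that the dataset spells the escape role `Escape`, while the column is named `IsEscaper`:

```python
# One binary flag column per role; the mapping is column name -> token in the Roles string
role_flags = {'IsDurable': 'Durable', 'IsSupport': 'Support', 'IsCarry': 'Carry',
              'IsDisabler': 'Disabler', 'IsInitiator': 'Initiator',
              'IsNuker': 'Nuker', 'IsEscaper': 'Escape', 'IsPusher': 'Pusher'}
for col, token in role_flags.items():
    data[col] = data['Roles'].apply(lambda roles, t=token: int(t in roles))
```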
The Roles column was then dropped.

Since Primary Attribute is stored as a string, it was also converted to a numeric value.

After that the posCarry, posMid, posOfflane, posSupport and posHardSupport (full support) columns had to be filled in: a column is set to 1 if the hero appears in the list of heroes for that position and to 0 otherwise.

The result is a dataset with the hero's name, its primary attribute as a number, its roles (1 if present, 0 if not) and the same kind of encoding for the positions.

Next, the dataframe is split into features (the attribute and role columns) and labels (the position columns). The labels are converted to numeric form with LabelEncoder(), since the model cannot work with them otherwise.

The data is split into training and test sets.

The model is created with these parameters because with fewer iterations or hidden layers it did not manage to finish training.

```
model = MLPClassifier(hidden_layer_sizes=(128, 128, 128), activation='relu', max_iter=1000, random_state=42)
```

The positions are then predicted for the test set and the model is evaluated with accuracy_score and classification_report.

## Result

The run produces the following output:



The model's accuracy is relatively low. However, as noted above, the model might not have worked at all, so I consider this a reasonably good result, and the stated goal was achieved: it turned out that a hero's position does depend on its attribute and on the roles it performs in the game, even though the dependency is not completely clear-cut. If it were, for example if every hero with the Strength attribute played offlane, the model would perform much better.

Next we get the classification report:



The report contains 5 classes, i.e. the positions (0, 1, 2, 3, 4). For each class it shows precision, recall and the F1-score, computed with the corresponding metrics, as well as the support, i.e. the number of samples belonging to that class.

Precision is the share of correctly classified objects among all objects the model assigned to a given class; it measures how accurate the model's positive predictions are.

Recall is the share of correctly classified objects of a given class relative to all objects that actually belong to that class; it measures how completely the model finds the positive class.

The F1-score is the harmonic mean of precision and recall and combines the two into a single metric. It takes values between 0 and 1, where 1 is the ideal value, meaning the model both finds and correctly classifies all objects of the positive class.
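For reference, these metrics have the standard definitions (here TP, FP and FN are the true positives, false positives and false negatives for a given class):

$$\text{Precision}=\frac{TP}{TP+FP},\qquad \text{Recall}=\frac{TP}{TP+FN},\qquad F_1=\frac{2\cdot\text{Precision}\cdot\text{Recall}}{\text{Precision}+\text{Recall}}$$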
For the averaged rows:

micro avg: precision, recall and F1 computed globally over the total number of samples, regardless of class.

macro avg: the unweighted mean of precision, recall and F1 across all classes, ignoring how many samples each class has.

weighted avg: the mean of precision, recall and F1 across all classes, weighted by the number of samples in each class.

samples avg: precision, recall and F1 averaged over individual samples (relevant when a sample can belong to several classes at once).

From the report we can conclude that, based on the attributes and in-game roles, the model identifies heroes for the mid and offlane positions most precisely, but at the same time it misses the largest number of objects that actually belong to these classes and assigns them to other classes, which lowers the precision of those other classes. We have to decide what matters more, precision or recall; in my case precision matters more, because the original question was whether the model can tell which hero belongs where at all. The low recall values, however, show that the low accuracy is understandable: although the model can identify which objects belong to which classes, it does so incompletely and misses some objects.

As for the micro avg, macro avg, weighted avg and samples avg rows, they all show decent results relative to what was expected from the model; for the task at hand I consider these values fairly high.

Conclusion: the accuracy and the report metrics turned out reasonably good for the task, and the question of whether a hero's position depends on its attribute and roles was answered. The developed model therefore handled the task.
BIN
belyaeva_ekaterina_lab_6/accuracy.png
Normal file
|
After Width: | Height: | Size: 3.1 KiB |
BIN
belyaeva_ekaterina_lab_6/classificationReport.png
Normal file
|
After Width: | Height: | Size: 27 KiB |
76
belyaeva_ekaterina_lab_6/main.py
Normal file
@@ -0,0 +1,76 @@
import pandas as pd
from sklearn.neural_network import MLPClassifier
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.preprocessing import LabelEncoder
from sklearn.metrics import accuracy_score, classification_report

# Read the data from Current_Pub_Meta.csv
current_pub_meta = pd.read_csv('Current_Pub_Meta.csv')

# Create an empty DataFrame to hold the prepared data
data = pd.DataFrame(columns=['Name', 'Roles', 'Primary Attribute', 'IsDurable', 'IsSupport', 'IsCarry', 'IsDisabler',
                             'IsInitiator', 'IsNuker', 'IsEscaper', 'IsPusher', 'posCarry', 'posMid',
                             'posOfflane', 'posSupport', 'posHardSupport'])

# Copy the relevant columns from the source file into the data frame
data['Name'] = current_pub_meta['Name']
data['Roles'] = current_pub_meta['Roles']
data['Primary Attribute'] = current_pub_meta['Primary Attribute']
data['Primary Attribute'] = data['Primary Attribute'].map({'str': 0, 'all': 1, 'int': 2, 'agi': 3})

data['IsDurable'] = data['Roles'].apply(lambda x: 1 if 'Durable' in x else 0)
data['IsCarry'] = data['Roles'].apply(lambda x: 1 if 'Carry' in x else 0)
data['IsSupport'] = data['Roles'].apply(lambda x: 1 if 'Support' in x else 0)
data['IsDisabler'] = data['Roles'].apply(lambda x: 1 if 'Disabler' in x else 0)
data['IsInitiator'] = data['Roles'].apply(lambda x: 1 if 'Initiator' in x else 0)
data['IsNuker'] = data['Roles'].apply(lambda x: 1 if 'Nuker' in x else 0)
data['IsEscaper'] = data['Roles'].apply(lambda x: 1 if 'Escape' in x else 0)  # the dataset spells this role 'Escape', not 'Escaper'
data['IsPusher'] = data['Roles'].apply(lambda x: 1 if 'Pusher' in x else 0)

# Drop the Roles column
data.drop('Roles', axis=1, inplace=True)

# Lists of heroes for each position
roles = {
'posHardSupport': ['Undying', 'Pudge', 'Marci', 'Grimstroke', 'Elder Titan', 'Warlock', 'Dazzle', 'Witch Doctor', 'Vengeful Spirit', 'Ancient Apparition', 'Disruptor', 'Keeper of the Light', 'Rubick', 'Jakiro', 'Oracle', 'Visage', 'Silencer', 'Shadow Demon', 'Chen', 'Winter Wyvern', 'Bane', 'Treant Protector', 'Io', 'Enchantress', 'Naga Siren'],
'posSupport': ['Venomancer', 'Tusk', 'Tiny', 'Spirit Breaker', 'Techies', 'Snapfire', 'Pudge', 'Muerta', 'Marci', 'Hoodwink', 'Grimstroke', 'Earth Spirit', 'Bounty Hunter', 'Crystal Maiden', 'Lion', 'Shadow Shaman', 'Lich', 'Ogre Magi', 'Warlock', 'Dazzle', 'Witch Doctor', 'Vengeful Spirit', 'Ancient Apparition', 'Disruptor', 'Keeper of the Light', 'Rubick', 'Jakiro', 'Oracle', 'Visage', 'Silencer', 'Shadow Demon', 'Chen', 'Winter Wyvern', 'Bane', 'Treant Protector', 'Io', 'Enchantress', 'Naga Siren', 'Earthshaker', 'Skywrath Mage', 'Leshrac', 'Shadow Fiend', 'Nyx Assassin', 'Pugna', 'Lina', 'Zeus', "Nature's Prophet", 'Dark Willow'],
'posOfflane': ['Wraith King', 'Spirit Breaker', 'Snapfire', 'Pudge', 'Primal Beast', 'Marci', 'Dragon Knight', 'Tidehunter', 'Centaur Warrunner', 'Dark Seer', 'Beastmaster', 'Mars', 'Brewmaster', 'Timbersaw', 'Bristleback', 'Abaddon', 'Axe', 'Enigma', 'Sand King', 'Clockwerk', 'Doom', 'Underlord', 'Omniknight', 'Legion Commander', "Nature's Prophet", 'Slardar', 'Faceless Void', 'Earthshaker', 'Pangolier', 'Pugna', 'Mars', 'Batrider', 'Windranger', 'Mirana', 'Beastmaster', 'Brewmaster', 'Phoenix', 'Beastmaster', 'Dark Seer', 'Lone Druid', 'Timbersaw', 'Broodmother', "Nature's Prophet", 'Magnus', 'Necrophos', 'Bloodseeker', 'Lycan'],
'posMid': ['Void Spirit', 'Pudge', 'Primal Beast', 'Earth Spirit', 'Dragon Knight', 'Arc Warden', 'Invoker', 'Storm Spirit', 'Shadow Fiend', 'Templar Assassin', 'Queen of Pain', 'Puck', 'Zeus', 'Tinker', 'Lina', 'Ember Spirit', 'Outworld Destroyer', 'Morphling', 'Leshrac', 'Sniper', 'Mirana', 'Viper', 'Death Prophet', 'Razor', 'Pugna', 'Skywrath Mage', "Nature's Prophet", 'Windranger', 'Batrider', 'Lina', 'Shadow Fiend', 'Templar Assassin', 'Ember Spirit', 'Huskar', 'Kunkka', 'Puck', 'Queen of Pain', 'Invoker', 'Storm Spirit', 'Outworld Devourer', 'Death Prophet', 'Razor', 'Lina', 'Sniper', 'Medusa', 'Leshrac', 'Viper'],
'posCarry': ['Pudge', 'Muerta', 'Monkey King', 'Drow Ranger', 'Alchemist', 'Anti-Mage', 'Spectre', 'Juggernaut', 'Phantom Assassin', 'Faceless Void', 'Phantom Lancer', 'Lifestealer', 'Slark', 'Terrorblade', 'Medusa', 'Luna', 'Shadow Fiend', 'Morphling', 'Templar Assassin', 'Ember Spirit', 'Naga Siren', 'Troll Warlord', 'Gyrocopter', 'Lone Druid', 'Ursa', 'Riki', 'Sven', 'Phantom Lancer', 'Chaos Knight', 'Night Stalker', 'Wraith King', 'Meepo', 'Troll Warlord', 'Juggernaut', 'Lifestealer', 'Templar Assassin', 'Ursa', 'Clinkz', 'Weaver', 'Riki', 'Spectre', 'Phantom Assassin', 'Naga Siren', 'Luna', 'Gyrocopter', 'Meepo', 'Lone Druid', 'Slark', 'Morphling', 'Terrorblade', 'Medusa', 'Faceless Void']
}
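# Note: some hero names appear in a position list more than once; membership is checked
# with `in`, so the duplicates do not affect the result.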

# Iterate over every hero and fill in the position columns
for index, row in data.iterrows():
    for role, characters in roles.items():
        data.loc[index, role] = int(row['Name'] in characters)

pd.set_option('display.max_columns', None)
pd.set_option('display.max_rows', None)
print(data)

# Split the dataframe into features and labels
X = data[['Primary Attribute', 'IsDurable', 'IsSupport', 'IsCarry', 'IsDisabler', 'IsInitiator', 'IsNuker', 'IsEscaper', 'IsPusher']]
y = data[['posCarry', 'posMid', 'posOfflane', 'posSupport', 'posHardSupport']]

# Convert the labels to numeric form
label_encoder = LabelEncoder()
y = y.apply(label_encoder.fit_transform)

# Split the data into training and test sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1, random_state=42)

# Create and train the model
model = MLPClassifier(hidden_layer_sizes=(128, 128, 128), activation='relu', max_iter=1000, random_state=42)
model.fit(X_train, y_train)

# Predict the positions for the test set
y_pred = model.predict(X_test)

# Evaluate the model
accuracy = accuracy_score(y_test, y_pred)
class_report = classification_report(y_test, y_pred)
print("Accuracy:", accuracy)
print('Classification Report:')
print(class_report)
BIN
belyaeva_ekaterina_lab_6/positions.png
Normal file
|
After Width: | Height: | Size: 36 KiB |
54
belyaeva_ekaterina_lab_7/README.md
Normal file
@@ -0,0 +1,54 @@
## Task

Choose a literary text (even variants: Russian, odd variants: English) and train a recurrent neural network on it for the text-generation task. Tune the architecture and parameters so that the result is as meaningful as possible. Then pair up (one even and one odd variant), exchange the developed networks and check how your partner's architecture copes with your text.

## How to run the lab

Run the main.py file.

## Technologies used

The tensorflow and numpy libraries and their components.

## Description of the lab (program)

This lab trains models for processing Russian and English text and solves a text-generation task.
The algorithm of one of the models is described below (the second one works the same way); a condensed code sketch of steps 2-10 follows the list:
1. The text is read from a file.
2. A Tokenizer instance is created to tokenize the text.
3. Using the fit_on_texts method, the tokenizer analyses the text and builds a vocabulary of unique words.
4. rus_vocab_size is the vocabulary size.
5. Using the texts_to_sequences method, the text is converted into a sequence of numbers.
6. Training sequences are built from it.
7. The maximum sequence length is computed.
8. The input sequences are padded to the maximum length.
9. Using the to_categorical function, the target words are converted into a one-hot representation.
10. The variables x_rus_train and y_rus_train are initialised with the corresponding values.
11. The same preprocessing is applied to the English text.
12. The model for the Russian text is created:
    - a Sequential model instance is created
    - an Embedding layer is added, mapping words to fixed-length vectors
    - an LSTM layer with 512 units is added
    - a Dense layer with a softmax activation is added to obtain a probability for every word in the vocabulary
    - the model is compiled
13. The model is trained via model.fit().
14. The same is done for the English model.
15. The generate_text function is defined to generate text from the given parameters.
16. The results of both models and the generated texts are printed.
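A minimal sketch of steps 2-10 for the Russian text, condensed from main.py further below (variable names match the actual code; the list comprehensions are just a shorthand for its loops):

```python
import tensorflow as tf
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences

# Step 1: read the training text
with open("rus.txt", "r", encoding="utf-8") as f:
    rus_text = f.read()

# Steps 2-4: build a word-level vocabulary over the whole text
tokenizer_rus = Tokenizer()
tokenizer_rus.fit_on_texts([rus_text])
rus_vocab_size = len(tokenizer_rus.word_index) + 1

# Steps 5-6: every prefix of the word sequence is used to predict the next word
rus_sequences = tokenizer_rus.texts_to_sequences([rus_text])[0]
inputs = [rus_sequences[:i] for i in range(1, len(rus_sequences))]
targets = [rus_sequences[i] for i in range(1, len(rus_sequences))]

# Steps 7-10: pad the prefixes to a common length and one-hot encode the targets
rus_max_sequence_len = max(len(seq) for seq in inputs)
x_rus_train = pad_sequences(inputs, maxlen=rus_max_sequence_len)
y_rus_train = tf.keras.utils.to_categorical(targets, num_classes=rus_vocab_size)
```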
## Result

Результат сгенерированного текста на русском языке: Помню просторный грязный двор и низкие домики обнесённые забором двор стоял у самой реки и по вёснам когда спадала полая вода он был усеян щепой и ракушками а иногда и другими куда более интересными вещами так однажды мы нашли туго набитую письмами сумку а потом вода принесла и осторожно положила на берег и самого почтальона он лежал на спине закинув руки как будто заслонясь от солнца ещё совсем молодой белокурый в форменной тужурке с блестящими пуговицами должно быть отправляясь в свой последний рейс почтальон начистил их мелом мелом мелом спадала щепой мелом мелом мелом мелом мелом спадала полая вода он ракушками а
|
||||
|
||||
Результат сгенерированного текста на английском языке: The old man was thin and gaunt with deep wrinkles in the back of his neck the brown blotches of the benevolent skin cancer the sun brings from its reflection on the tropic sea were on his cheeks the blotches ran well down the sides of his face and his hands had the deep creased scars from handling heavy fish on the cords but none of these scars were fresh they were as old as erosions in a fishless desert fishless desert desert desert desert desert desert desert desert desert desert desert desert desert desert desert desert desert desert desert desert desert fishless
|
||||
Training loss on the training data:



Observation: in both generated texts the words start repeating towards the end. This happens because the model is asked to generate 100 words, while the training text contains fewer words than that, so the generated text first follows the training text and then starts producing essentially random words. It should be noted, though, that these are still real words, not just strings of letters and spaces, which is what earlier model configurations produced.

Since the English model has a lower training loss than the Russian one, the architecture handles the English text slightly better than the Russian text; in the end, however, both models produced meaningful text, which is due to the large number of units and epochs used for training: with 20 epochs instead of 200 the output was barely coherent.
5
belyaeva_ekaterina_lab_7/eng.txt
Normal file
@@ -0,0 +1,5 @@
The old man was thin and gaunt with deep wrinkles in the back of his neck. The
brown blotches of the benevolent skin cancer the sun brings from its reflection on the
tropic sea were on his cheeks. The blotches ran well down the sides of his face and his
hands had the deep-creased scars from handling heavy fish on the cords. But none of
these scars were fresh. They were as old as erosions in a fishless desert.
97
belyaeva_ekaterina_lab_7/main.py
Normal file
@@ -0,0 +1,97 @@
import tensorflow as tf
import numpy as np
from keras.models import Sequential
from keras.layers import LSTM, Dense, Embedding
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences

# Load and preprocess the Russian text
with open("rus.txt", "r", encoding="utf-8") as f:
    rus_text = f.read()

tokenizer_rus = Tokenizer()
tokenizer_rus.fit_on_texts([rus_text])

rus_vocab_size = len(tokenizer_rus.word_index) + 1
rus_sequences = tokenizer_rus.texts_to_sequences([rus_text])[0]
rus_input_sequences = []
rus_output_sequences = []

for i in range(1, len(rus_sequences)):
    rus_input_sequences.append(rus_sequences[:i])
    rus_output_sequences.append(rus_sequences[i])

rus_max_sequence_len = max([len(seq) for seq in rus_input_sequences])
rus_input_sequences = pad_sequences(rus_input_sequences, maxlen=rus_max_sequence_len)

x_rus_train = rus_input_sequences
y_rus_train = tf.keras.utils.to_categorical(rus_output_sequences, num_classes=rus_vocab_size)

# Load and preprocess the English text
with open("eng.txt", "r", encoding="utf-8") as f:
    eng_text = f.read()

tokenizer_eng = Tokenizer()
tokenizer_eng.fit_on_texts([eng_text])

eng_vocab_size = len(tokenizer_eng.word_index) + 1
eng_sequences = tokenizer_eng.texts_to_sequences([eng_text])[0]
eng_input_sequences = []
eng_output_sequences = []

for i in range(1, len(eng_sequences)):
    eng_input_sequences.append(eng_sequences[:i])
    eng_output_sequences.append(eng_sequences[i])

eng_max_sequence_len = max([len(seq) for seq in eng_input_sequences])
eng_input_sequences = pad_sequences(eng_input_sequences, maxlen=eng_max_sequence_len)

x_eng_train = eng_input_sequences
y_eng_train = tf.keras.utils.to_categorical(eng_output_sequences, num_classes=eng_vocab_size)

# Build the model for the Russian text
rus_model = Sequential()
rus_model.add(Embedding(rus_vocab_size, 256, input_length=rus_max_sequence_len))
rus_model.add(LSTM(512))
rus_model.add(Dense(rus_vocab_size, activation='softmax'))

rus_model.compile(loss='categorical_crossentropy', optimizer='adam')

# Train the Russian model
rus_history = rus_model.fit(x_rus_train, y_rus_train, batch_size=128, epochs=200)

# Build the model for the English text
eng_model = Sequential()
eng_model.add(Embedding(eng_vocab_size, 256, input_length=eng_max_sequence_len))
eng_model.add(LSTM(512))
eng_model.add(Dense(eng_vocab_size, activation='softmax'))

eng_model.compile(loss='categorical_crossentropy', optimizer='adam')

# Train the English model
eng_history = eng_model.fit(x_eng_train, y_eng_train, batch_size=128, epochs=200)

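# Note: generate_text below uses greedy argmax decoding (always the most probable next word),
# which is why the generated text starts looping over the same few words once the learned
# context is exhausted (see the observations in the README).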
def generate_text(model, tokenizer, max_sequence_len, seed_text):
    output_text = seed_text
    for _ in range(100):  # generate 100 words
        encoded_text = tokenizer.texts_to_sequences([output_text])[0]
        pad_encoded = pad_sequences([encoded_text], maxlen=max_sequence_len, truncating='pre')
        pred_word_index = np.argmax(model.predict(pad_encoded), axis=-1)
        pred_word = tokenizer.index_word[pred_word_index[0]]
        output_text += " " + pred_word
    return output_text

# Generate text with the Russian and English models
rus_output_text = generate_text(rus_model, tokenizer_rus, rus_max_sequence_len, "Помню просторный")
eng_output_text = generate_text(eng_model, tokenizer_eng, eng_max_sequence_len, "The old man")

# Print the results
print("Русская модель:")
print("Потери на тренировочных данных:", rus_history.history['loss'][-1])
print("Сгенерированный текст:")
print(rus_output_text)

print("Английская модель:")
print("Потери на тренировочных данных:", eng_history.history['loss'][-1])
print("Сгенерированный текст:")
print(eng_output_text)
BIN
belyaeva_ekaterina_lab_7/res.png
Normal file
|
After Width: | Height: | Size: 13 KiB |
1
belyaeva_ekaterina_lab_7/rus.txt
Normal file
@@ -0,0 +1 @@
Помню просторный грязный двор и низкие домики, обнесённые забором. Двор стоял у самой реки, и по вёснам, когда спадала полая вода, он был усеян щепой и ракушками, а иногда и другими, куда более интересными вещами. Так, однажды мы нашли туго набитую письмами сумку, а потом вода принесла и осторожно положила на берег и самого почтальона. Он лежал на спине, закинув руки, как будто заслонясь от солнца, ещё совсем молодой, белокурый, в форменной тужурке с блестящими пуговицами: должно быть, отправляясь в свой последний рейс, почтальон начистил их мелом.
80
degtyarev_mikhail_lab_5/Readme.md
Normal file
@@ -0,0 +1,80 @@
# Lab 5

## Variant 9

## Task

Use Lasso regression, formulating the task yourself. Assess how well it suits the task you formulated.

Task:

Regression can be used to predict salary based on experience level (experience_level), employment type (employment_type), company location (company_location) and company size (company_size).

## Program description

The program is an example of using Lasso regression to predict salary from a set of features.

### Libraries used

- `pandas`: a library for data processing and analysis, used to load and preprocess the data.
- `scikit-learn`:

  train_test_split: used to split the data into training and test sets.

  StandardScaler: used to normalise the numeric features.

  OneHotEncoder: used to encode the categorical features.

  Lasso: the linear Lasso model used for the regression.

  Pipeline: combines the data preprocessing steps and model training into a single pipeline.

- `matplotlib`: used to visualise the model coefficients as a horizontal bar chart.
- `numpy`: used for working with numeric data.

### Program steps

**Loading the data:**

pandas is used to load the data from the ds_salaries.csv file.

**Preprocessing:**

The categorical features ('experience_level', 'employment_type', 'company_location', 'company_size') are encoded with OneHotEncoder, and the numeric feature ('work_year') is normalised with StandardScaler. These steps are combined in a ColumnTransformer that serves as the data preprocessor.

**Feature selection:**

The features that will be used to train the model are selected.

**Splitting the data:**

The data is split into training and test sets in an 80/20 ratio using train_test_split.

**Training the model:**

The linear Lasso regression model is combined with the preprocessor inside a Pipeline and trained (a condensed sketch of the whole pipeline is given after the steps below).

**Evaluating the model:**

The coefficient of determination (R² score) and the mean squared error (MSE) are computed to assess the model's accuracy.
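For reference, the standard definitions of these two metrics, where $y_i$ are the actual values, $\hat{y}_i$ the predictions and $\bar{y}$ the mean of the actual values:

$$\text{MSE}=\frac{1}{n}\sum_{i=1}^{n}(y_i-\hat{y}_i)^2,\qquad R^2=1-\frac{\sum_{i}(y_i-\hat{y}_i)^2}{\sum_{i}(y_i-\bar{y})^2}$$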
**Printing predicted and actual values:**

A DataFrame with the actual and predicted values is created and printed to the console.

**Visualising the model weights (coefficients):**

A horizontal bar chart of the model weights (coefficients) is plotted.
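Since main.py itself is not part of this diff, here is a minimal sketch of the pipeline described above. It assumes `salary_in_usd` as the target column and the default `alpha=1.0`; the actual script may differ in details:

```python
import pandas as pd
from sklearn.compose import ColumnTransformer
from sklearn.linear_model import Lasso
from sklearn.metrics import mean_squared_error, r2_score
from sklearn.model_selection import train_test_split
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import OneHotEncoder, StandardScaler

# Load the data and pick the features described above
df = pd.read_csv('ds_salaries.csv')
categorical = ['experience_level', 'employment_type', 'company_location', 'company_size']
numeric = ['work_year']
X = df[categorical + numeric]
y = df['salary_in_usd']  # assumed target column

# Encode categorical features, scale the numeric one
preprocessor = ColumnTransformer([
    ('cat', OneHotEncoder(handle_unknown='ignore'), categorical),
    ('num', StandardScaler(), numeric),
])

# Preprocessing and Lasso regression combined in a single pipeline
model = Pipeline([
    ('preprocessor', preprocessor),
    ('regressor', Lasso(alpha=1.0)),
])

# 80/20 split, training and evaluation
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
model.fit(X_train, y_train)
y_pred = model.predict(X_test)

print('R^2:', r2_score(y_test, y_pred))
print('MSE:', mean_squared_error(y_test, y_pred))
```

Keeping the preprocessing inside the Pipeline ensures that exactly the same encoding and scaling are applied to the training and test sets.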
### Running the program

- Clone or download the `main.py` code.
- Run the file in an environment that can execute Python: `python main.py`

### Results





The model's accuracy (R² score) is only 39%, which is a rather low value.

The MSE is quite high, which indicates that the model does not fit the data very well and makes noticeable errors in its predictions.

Actual vs. predicted values: the model frequently under- or over-estimates the salary; for example, for indices 563 and 289 the actual salary is higher than the predicted one.

Changing alpha does not noticeably improve the overall picture, so the conclusion is that a different algorithm should be chosen.
BIN
degtyarev_mikhail_lab_5/cli_res.png
Normal file
|
After Width: | Height: | Size: 15 KiB |
608
degtyarev_mikhail_lab_5/ds_salaries.csv
Normal file
@@ -0,0 +1,608 @@
|
||||
,work_year,experience_level,employment_type,job_title,salary,salary_currency,salary_in_usd,employee_residence,remote_ratio,company_location,company_size
|
||||
0,2020,MI,FT,Data Scientist,70000,EUR,79833,DE,0,DE,L
|
||||
1,2020,SE,FT,Machine Learning Scientist,260000,USD,260000,JP,0,JP,S
|
||||
2,2020,SE,FT,Big Data Engineer,85000,GBP,109024,GB,50,GB,M
|
||||
3,2020,MI,FT,Product Data Analyst,20000,USD,20000,HN,0,HN,S
|
||||
4,2020,SE,FT,Machine Learning Engineer,150000,USD,150000,US,50,US,L
|
||||
5,2020,EN,FT,Data Analyst,72000,USD,72000,US,100,US,L
|
||||
6,2020,SE,FT,Lead Data Scientist,190000,USD,190000,US,100,US,S
|
||||
7,2020,MI,FT,Data Scientist,11000000,HUF,35735,HU,50,HU,L
|
||||
8,2020,MI,FT,Business Data Analyst,135000,USD,135000,US,100,US,L
|
||||
9,2020,SE,FT,Lead Data Engineer,125000,USD,125000,NZ,50,NZ,S
|
||||
10,2020,EN,FT,Data Scientist,45000,EUR,51321,FR,0,FR,S
|
||||
11,2020,MI,FT,Data Scientist,3000000,INR,40481,IN,0,IN,L
|
||||
12,2020,EN,FT,Data Scientist,35000,EUR,39916,FR,0,FR,M
|
||||
13,2020,MI,FT,Lead Data Analyst,87000,USD,87000,US,100,US,L
|
||||
14,2020,MI,FT,Data Analyst,85000,USD,85000,US,100,US,L
|
||||
15,2020,MI,FT,Data Analyst,8000,USD,8000,PK,50,PK,L
|
||||
16,2020,EN,FT,Data Engineer,4450000,JPY,41689,JP,100,JP,S
|
||||
17,2020,SE,FT,Big Data Engineer,100000,EUR,114047,PL,100,GB,S
|
||||
18,2020,EN,FT,Data Science Consultant,423000,INR,5707,IN,50,IN,M
|
||||
19,2020,MI,FT,Lead Data Engineer,56000,USD,56000,PT,100,US,M
|
||||
20,2020,MI,FT,Machine Learning Engineer,299000,CNY,43331,CN,0,CN,M
|
||||
21,2020,MI,FT,Product Data Analyst,450000,INR,6072,IN,100,IN,L
|
||||
22,2020,SE,FT,Data Engineer,42000,EUR,47899,GR,50,GR,L
|
||||
23,2020,MI,FT,BI Data Analyst,98000,USD,98000,US,0,US,M
|
||||
24,2020,MI,FT,Lead Data Scientist,115000,USD,115000,AE,0,AE,L
|
||||
25,2020,EX,FT,Director of Data Science,325000,USD,325000,US,100,US,L
|
||||
26,2020,EN,FT,Research Scientist,42000,USD,42000,NL,50,NL,L
|
||||
27,2020,SE,FT,Data Engineer,720000,MXN,33511,MX,0,MX,S
|
||||
28,2020,EN,CT,Business Data Analyst,100000,USD,100000,US,100,US,L
|
||||
29,2020,SE,FT,Machine Learning Manager,157000,CAD,117104,CA,50,CA,L
|
||||
30,2020,MI,FT,Data Engineering Manager,51999,EUR,59303,DE,100,DE,S
|
||||
31,2020,EN,FT,Big Data Engineer,70000,USD,70000,US,100,US,L
|
||||
32,2020,SE,FT,Data Scientist,60000,EUR,68428,GR,100,US,L
|
||||
33,2020,MI,FT,Research Scientist,450000,USD,450000,US,0,US,M
|
||||
34,2020,MI,FT,Data Analyst,41000,EUR,46759,FR,50,FR,L
|
||||
35,2020,MI,FT,Data Engineer,65000,EUR,74130,AT,50,AT,L
|
||||
36,2020,MI,FT,Data Science Consultant,103000,USD,103000,US,100,US,L
|
||||
37,2020,EN,FT,Machine Learning Engineer,250000,USD,250000,US,50,US,L
|
||||
38,2020,EN,FT,Data Analyst,10000,USD,10000,NG,100,NG,S
|
||||
39,2020,EN,FT,Machine Learning Engineer,138000,USD,138000,US,100,US,S
|
||||
40,2020,MI,FT,Data Scientist,45760,USD,45760,PH,100,US,S
|
||||
41,2020,EX,FT,Data Engineering Manager,70000,EUR,79833,ES,50,ES,L
|
||||
42,2020,MI,FT,Machine Learning Infrastructure Engineer,44000,EUR,50180,PT,0,PT,M
|
||||
43,2020,MI,FT,Data Engineer,106000,USD,106000,US,100,US,L
|
||||
44,2020,MI,FT,Data Engineer,88000,GBP,112872,GB,50,GB,L
|
||||
45,2020,EN,PT,ML Engineer,14000,EUR,15966,DE,100,DE,S
|
||||
46,2020,MI,FT,Data Scientist,60000,GBP,76958,GB,100,GB,S
|
||||
47,2020,SE,FT,Data Engineer,188000,USD,188000,US,100,US,L
|
||||
48,2020,MI,FT,Data Scientist,105000,USD,105000,US,100,US,L
|
||||
49,2020,MI,FT,Data Engineer,61500,EUR,70139,FR,50,FR,L
|
||||
50,2020,EN,FT,Data Analyst,450000,INR,6072,IN,0,IN,S
|
||||
51,2020,EN,FT,Data Analyst,91000,USD,91000,US,100,US,L
|
||||
52,2020,EN,FT,AI Scientist,300000,DKK,45896,DK,50,DK,S
|
||||
53,2020,EN,FT,Data Engineer,48000,EUR,54742,PK,100,DE,L
|
||||
54,2020,SE,FL,Computer Vision Engineer,60000,USD,60000,RU,100,US,S
|
||||
55,2020,SE,FT,Principal Data Scientist,130000,EUR,148261,DE,100,DE,M
|
||||
56,2020,MI,FT,Data Scientist,34000,EUR,38776,ES,100,ES,M
|
||||
57,2020,MI,FT,Data Scientist,118000,USD,118000,US,100,US,M
|
||||
58,2020,SE,FT,Data Scientist,120000,USD,120000,US,50,US,L
|
||||
59,2020,MI,FT,Data Scientist,138350,USD,138350,US,100,US,M
|
||||
60,2020,MI,FT,Data Engineer,110000,USD,110000,US,100,US,L
|
||||
61,2020,MI,FT,Data Engineer,130800,USD,130800,ES,100,US,M
|
||||
62,2020,EN,PT,Data Scientist,19000,EUR,21669,IT,50,IT,S
|
||||
63,2020,SE,FT,Data Scientist,412000,USD,412000,US,100,US,L
|
||||
64,2020,SE,FT,Machine Learning Engineer,40000,EUR,45618,HR,100,HR,S
|
||||
65,2020,EN,FT,Data Scientist,55000,EUR,62726,DE,50,DE,S
|
||||
66,2020,EN,FT,Data Scientist,43200,EUR,49268,DE,0,DE,S
|
||||
67,2020,SE,FT,Data Science Manager,190200,USD,190200,US,100,US,M
|
||||
68,2020,EN,FT,Data Scientist,105000,USD,105000,US,100,US,S
|
||||
69,2020,SE,FT,Data Scientist,80000,EUR,91237,AT,0,AT,S
|
||||
70,2020,MI,FT,Data Scientist,55000,EUR,62726,FR,50,LU,S
|
||||
71,2020,MI,FT,Data Scientist,37000,EUR,42197,FR,50,FR,S
|
||||
72,2021,EN,FT,Research Scientist,60000,GBP,82528,GB,50,GB,L
|
||||
73,2021,EX,FT,BI Data Analyst,150000,USD,150000,IN,100,US,L
|
||||
74,2021,EX,FT,Head of Data,235000,USD,235000,US,100,US,L
|
||||
75,2021,SE,FT,Data Scientist,45000,EUR,53192,FR,50,FR,L
|
||||
76,2021,MI,FT,BI Data Analyst,100000,USD,100000,US,100,US,M
|
||||
77,2021,MI,PT,3D Computer Vision Researcher,400000,INR,5409,IN,50,IN,M
|
||||
78,2021,MI,CT,ML Engineer,270000,USD,270000,US,100,US,L
|
||||
79,2021,EN,FT,Data Analyst,80000,USD,80000,US,100,US,M
|
||||
80,2021,SE,FT,Data Analytics Engineer,67000,EUR,79197,DE,100,DE,L
|
||||
81,2021,MI,FT,Data Engineer,140000,USD,140000,US,100,US,L
|
||||
82,2021,MI,FT,Applied Data Scientist,68000,CAD,54238,GB,50,CA,L
|
||||
83,2021,MI,FT,Machine Learning Engineer,40000,EUR,47282,ES,100,ES,S
|
||||
84,2021,EX,FT,Director of Data Science,130000,EUR,153667,IT,100,PL,L
|
||||
85,2021,MI,FT,Data Engineer,110000,PLN,28476,PL,100,PL,L
|
||||
86,2021,EN,FT,Data Analyst,50000,EUR,59102,FR,50,FR,M
|
||||
87,2021,MI,FT,Data Analytics Engineer,110000,USD,110000,US,100,US,L
|
||||
88,2021,SE,FT,Lead Data Analyst,170000,USD,170000,US,100,US,L
|
||||
89,2021,SE,FT,Data Analyst,80000,USD,80000,BG,100,US,S
|
||||
90,2021,SE,FT,Marketing Data Analyst,75000,EUR,88654,GR,100,DK,L
|
||||
91,2021,EN,FT,Data Science Consultant,65000,EUR,76833,DE,100,DE,S
|
||||
92,2021,MI,FT,Lead Data Analyst,1450000,INR,19609,IN,100,IN,L
|
||||
93,2021,SE,FT,Lead Data Engineer,276000,USD,276000,US,0,US,L
|
||||
94,2021,EN,FT,Data Scientist,2200000,INR,29751,IN,50,IN,L
|
||||
95,2021,MI,FT,Cloud Data Engineer,120000,SGD,89294,SG,50,SG,L
|
||||
96,2021,EN,PT,AI Scientist,12000,USD,12000,BR,100,US,S
|
||||
97,2021,MI,FT,Financial Data Analyst,450000,USD,450000,US,100,US,L
|
||||
98,2021,EN,FT,Computer Vision Software Engineer,70000,USD,70000,US,100,US,M
|
||||
99,2021,MI,FT,Computer Vision Software Engineer,81000,EUR,95746,DE,100,US,S
|
||||
100,2021,MI,FT,Data Analyst,75000,USD,75000,US,0,US,L
|
||||
101,2021,SE,FT,Data Engineer,150000,USD,150000,US,100,US,L
|
||||
102,2021,MI,FT,BI Data Analyst,11000000,HUF,36259,HU,50,US,L
|
||||
103,2021,MI,FT,Data Analyst,62000,USD,62000,US,0,US,L
|
||||
104,2021,MI,FT,Data Scientist,73000,USD,73000,US,0,US,L
|
||||
105,2021,MI,FT,Data Analyst,37456,GBP,51519,GB,50,GB,L
|
||||
106,2021,MI,FT,Research Scientist,235000,CAD,187442,CA,100,CA,L
|
||||
107,2021,SE,FT,Data Engineer,115000,USD,115000,US,100,US,S
|
||||
108,2021,SE,FT,Data Engineer,150000,USD,150000,US,100,US,M
|
||||
109,2021,EN,FT,Data Engineer,2250000,INR,30428,IN,100,IN,L
|
||||
110,2021,SE,FT,Machine Learning Engineer,80000,EUR,94564,DE,50,DE,L
|
||||
111,2021,SE,FT,Director of Data Engineering,82500,GBP,113476,GB,100,GB,M
|
||||
112,2021,SE,FT,Lead Data Engineer,75000,GBP,103160,GB,100,GB,S
|
||||
113,2021,EN,PT,AI Scientist,12000,USD,12000,PK,100,US,M
|
||||
114,2021,MI,FT,Data Engineer,38400,EUR,45391,NL,100,NL,L
|
||||
115,2021,EN,FT,Machine Learning Scientist,225000,USD,225000,US,100,US,L
|
||||
116,2021,MI,FT,Data Scientist,50000,USD,50000,NG,100,NG,L
|
||||
117,2021,MI,FT,Data Science Engineer,34000,EUR,40189,GR,100,GR,M
|
||||
118,2021,EN,FT,Data Analyst,90000,USD,90000,US,100,US,S
|
||||
119,2021,MI,FT,Data Engineer,200000,USD,200000,US,100,US,L
|
||||
120,2021,MI,FT,Big Data Engineer,60000,USD,60000,ES,50,RO,M
|
||||
121,2021,SE,FT,Principal Data Engineer,200000,USD,200000,US,100,US,M
|
||||
122,2021,EN,FT,Data Analyst,50000,USD,50000,US,100,US,M
|
||||
123,2021,EN,FT,Applied Data Scientist,80000,GBP,110037,GB,0,GB,L
|
||||
124,2021,EN,PT,Data Analyst,8760,EUR,10354,ES,50,ES,M
|
||||
125,2021,MI,FT,Principal Data Scientist,151000,USD,151000,US,100,US,L
|
||||
126,2021,SE,FT,Machine Learning Scientist,120000,USD,120000,US,50,US,S
|
||||
127,2021,MI,FT,Data Scientist,700000,INR,9466,IN,0,IN,S
|
||||
128,2021,EN,FT,Machine Learning Engineer,20000,USD,20000,IN,100,IN,S
|
||||
129,2021,SE,FT,Lead Data Scientist,3000000,INR,40570,IN,50,IN,L
|
||||
130,2021,EN,FT,Machine Learning Developer,100000,USD,100000,IQ,50,IQ,S
|
||||
131,2021,EN,FT,Data Scientist,42000,EUR,49646,FR,50,FR,M
|
||||
132,2021,MI,FT,Applied Machine Learning Scientist,38400,USD,38400,VN,100,US,M
|
||||
133,2021,SE,FT,Computer Vision Engineer,24000,USD,24000,BR,100,BR,M
|
||||
134,2021,EN,FT,Data Scientist,100000,USD,100000,US,0,US,S
|
||||
135,2021,MI,FT,Data Analyst,90000,USD,90000,US,100,US,M
|
||||
136,2021,MI,FT,ML Engineer,7000000,JPY,63711,JP,50,JP,S
|
||||
137,2021,MI,FT,ML Engineer,8500000,JPY,77364,JP,50,JP,S
|
||||
138,2021,SE,FT,Principal Data Scientist,220000,USD,220000,US,0,US,L
|
||||
139,2021,EN,FT,Data Scientist,80000,USD,80000,US,100,US,M
|
||||
140,2021,MI,FT,Data Analyst,135000,USD,135000,US,100,US,L
|
||||
141,2021,SE,FT,Data Science Manager,240000,USD,240000,US,0,US,L
|
||||
142,2021,SE,FT,Data Engineering Manager,150000,USD,150000,US,0,US,L
|
||||
143,2021,MI,FT,Data Scientist,82500,USD,82500,US,100,US,S
|
||||
144,2021,MI,FT,Data Engineer,100000,USD,100000,US,100,US,L
|
||||
145,2021,SE,FT,Machine Learning Engineer,70000,EUR,82744,BE,50,BE,M
|
||||
146,2021,MI,FT,Research Scientist,53000,EUR,62649,FR,50,FR,M
|
||||
147,2021,MI,FT,Data Engineer,90000,USD,90000,US,100,US,L
|
||||
148,2021,SE,FT,Data Engineering Manager,153000,USD,153000,US,100,US,L
|
||||
149,2021,SE,FT,Cloud Data Engineer,160000,USD,160000,BR,100,US,S
|
||||
150,2021,SE,FT,Director of Data Science,168000,USD,168000,JP,0,JP,S
|
||||
151,2021,MI,FT,Data Scientist,150000,USD,150000,US,100,US,M
|
||||
152,2021,MI,FT,Data Scientist,95000,CAD,75774,CA,100,CA,L
|
||||
153,2021,EN,FT,Data Scientist,13400,USD,13400,UA,100,UA,L
|
||||
154,2021,SE,FT,Data Science Manager,144000,USD,144000,US,100,US,L
|
||||
155,2021,SE,FT,Data Science Engineer,159500,CAD,127221,CA,50,CA,L
|
||||
156,2021,MI,FT,Data Scientist,160000,SGD,119059,SG,100,IL,M
|
||||
157,2021,MI,FT,Applied Machine Learning Scientist,423000,USD,423000,US,50,US,L
|
||||
158,2021,SE,FT,Data Analytics Manager,120000,USD,120000,US,100,US,M
|
||||
159,2021,EN,FT,Machine Learning Engineer,125000,USD,125000,US,100,US,S
|
||||
160,2021,EX,FT,Head of Data,230000,USD,230000,RU,50,RU,L
|
||||
161,2021,EX,FT,Head of Data Science,85000,USD,85000,RU,0,RU,M
|
||||
162,2021,MI,FT,Data Engineer,24000,EUR,28369,MT,50,MT,L
|
||||
163,2021,EN,FT,Data Science Consultant,54000,EUR,63831,DE,50,DE,L
|
||||
164,2021,EX,FT,Director of Data Science,110000,EUR,130026,DE,50,DE,M
|
||||
165,2021,SE,FT,Data Specialist,165000,USD,165000,US,100,US,L
|
||||
166,2021,EN,FT,Data Engineer,80000,USD,80000,US,100,US,L
|
||||
167,2021,EX,FT,Director of Data Science,250000,USD,250000,US,0,US,L
|
||||
168,2021,EN,FT,BI Data Analyst,55000,USD,55000,US,50,US,S
|
||||
169,2021,MI,FT,Data Architect,150000,USD,150000,US,100,US,L
|
||||
170,2021,MI,FT,Data Architect,170000,USD,170000,US,100,US,L
|
||||
171,2021,MI,FT,Data Engineer,60000,GBP,82528,GB,100,GB,L
|
||||
172,2021,EN,FT,Data Analyst,60000,USD,60000,US,100,US,S
|
||||
173,2021,SE,FT,Principal Data Scientist,235000,USD,235000,US,100,US,L
|
||||
174,2021,SE,FT,Research Scientist,51400,EUR,60757,PT,50,PT,L
|
||||
175,2021,SE,FT,Data Engineering Manager,174000,USD,174000,US,100,US,L
|
||||
176,2021,MI,FT,Data Scientist,58000,MXN,2859,MX,0,MX,S
|
||||
177,2021,MI,FT,Data Scientist,30400000,CLP,40038,CL,100,CL,L
|
||||
178,2021,EN,FT,Machine Learning Engineer,81000,USD,81000,US,50,US,S
|
||||
179,2021,MI,FT,Data Scientist,420000,INR,5679,IN,100,US,S
|
||||
180,2021,MI,FT,Big Data Engineer,1672000,INR,22611,IN,0,IN,L
|
||||
181,2021,MI,FT,Data Scientist,76760,EUR,90734,DE,50,DE,L
|
||||
182,2021,MI,FT,Data Engineer,22000,EUR,26005,RO,0,US,L
|
||||
183,2021,SE,FT,Finance Data Analyst,45000,GBP,61896,GB,50,GB,L
|
||||
184,2021,MI,FL,Machine Learning Scientist,12000,USD,12000,PK,50,PK,M
|
||||
185,2021,MI,FT,Data Engineer,4000,USD,4000,IR,100,IR,M
|
||||
186,2021,SE,FT,Data Analytics Engineer,50000,USD,50000,VN,100,GB,M
|
||||
187,2021,EX,FT,Data Science Consultant,59000,EUR,69741,FR,100,ES,S
|
||||
188,2021,SE,FT,Data Engineer,65000,EUR,76833,RO,50,GB,S
|
||||
189,2021,MI,FT,Machine Learning Engineer,74000,USD,74000,JP,50,JP,S
|
||||
190,2021,SE,FT,Data Science Manager,152000,USD,152000,US,100,FR,L
|
||||
191,2021,EN,FT,Machine Learning Engineer,21844,USD,21844,CO,50,CO,M
|
||||
192,2021,MI,FT,Big Data Engineer,18000,USD,18000,MD,0,MD,S
|
||||
193,2021,SE,FT,Data Science Manager,174000,USD,174000,US,100,US,L
|
||||
194,2021,SE,FT,Research Scientist,120500,CAD,96113,CA,50,CA,L
|
||||
195,2021,MI,FT,Data Scientist,147000,USD,147000,US,50,US,L
|
||||
196,2021,EN,FT,BI Data Analyst,9272,USD,9272,KE,100,KE,S
|
||||
197,2021,SE,FT,Machine Learning Engineer,1799997,INR,24342,IN,100,IN,L
|
||||
198,2021,SE,FT,Data Science Manager,4000000,INR,54094,IN,50,US,L
|
||||
199,2021,EN,FT,Data Science Consultant,90000,USD,90000,US,100,US,S
|
||||
200,2021,MI,FT,Data Scientist,52000,EUR,61467,DE,50,AT,M
|
||||
201,2021,SE,FT,Machine Learning Infrastructure Engineer,195000,USD,195000,US,100,US,M
|
||||
202,2021,MI,FT,Data Scientist,32000,EUR,37825,ES,100,ES,L
|
||||
203,2021,SE,FT,Research Scientist,50000,USD,50000,FR,100,US,S
|
||||
204,2021,MI,FT,Data Scientist,160000,USD,160000,US,100,US,L
|
||||
205,2021,MI,FT,Data Scientist,69600,BRL,12901,BR,0,BR,S
|
||||
206,2021,SE,FT,Machine Learning Engineer,200000,USD,200000,US,100,US,L
|
||||
207,2021,SE,FT,Data Engineer,165000,USD,165000,US,0,US,M
|
||||
208,2021,MI,FL,Data Engineer,20000,USD,20000,IT,0,US,L
|
||||
209,2021,SE,FT,Data Analytics Manager,120000,USD,120000,US,0,US,L
|
||||
210,2021,MI,FT,Machine Learning Engineer,21000,EUR,24823,SI,50,SI,L
|
||||
211,2021,MI,FT,Research Scientist,48000,EUR,56738,FR,50,FR,S
|
||||
212,2021,MI,FT,Data Engineer,48000,GBP,66022,HK,50,GB,S
|
||||
213,2021,EN,FT,Big Data Engineer,435000,INR,5882,IN,0,CH,L
|
||||
214,2021,EN,FT,Machine Learning Engineer,21000,EUR,24823,DE,50,DE,M
|
||||
215,2021,SE,FT,Principal Data Engineer,185000,USD,185000,US,100,US,L
|
||||
216,2021,EN,PT,Computer Vision Engineer,180000,DKK,28609,DK,50,DK,S
|
||||
217,2021,MI,FT,Data Scientist,76760,EUR,90734,DE,50,DE,L
|
||||
218,2021,MI,FT,Machine Learning Engineer,75000,EUR,88654,BE,100,BE,M
|
||||
219,2021,SE,FT,Data Analytics Manager,140000,USD,140000,US,100,US,L
|
||||
220,2021,MI,FT,Machine Learning Engineer,180000,PLN,46597,PL,100,PL,L
|
||||
221,2021,MI,FT,Data Scientist,85000,GBP,116914,GB,50,GB,L
|
||||
222,2021,MI,FT,Data Scientist,2500000,INR,33808,IN,0,IN,M
|
||||
223,2021,MI,FT,Data Scientist,40900,GBP,56256,GB,50,GB,L
|
||||
224,2021,SE,FT,Machine Learning Scientist,225000,USD,225000,US,100,CA,L
|
||||
225,2021,EX,CT,Principal Data Scientist,416000,USD,416000,US,100,US,S
|
||||
226,2021,SE,FT,Data Scientist,110000,CAD,87738,CA,100,CA,S
|
||||
227,2021,MI,FT,Data Scientist,75000,EUR,88654,DE,50,DE,L
|
||||
228,2021,SE,FT,Data Scientist,135000,USD,135000,US,0,US,L
|
||||
229,2021,SE,FT,Data Analyst,90000,CAD,71786,CA,100,CA,M
|
||||
230,2021,EN,FT,Big Data Engineer,1200000,INR,16228,IN,100,IN,L
|
||||
231,2021,SE,FT,ML Engineer,256000,USD,256000,US,100,US,S
|
||||
232,2021,SE,FT,Director of Data Engineering,200000,USD,200000,US,100,US,L
|
||||
233,2021,SE,FT,Data Analyst,200000,USD,200000,US,100,US,L
|
||||
234,2021,MI,FT,Data Architect,180000,USD,180000,US,100,US,L
|
||||
235,2021,MI,FT,Head of Data Science,110000,USD,110000,US,0,US,S
|
||||
236,2021,MI,FT,Research Scientist,80000,CAD,63810,CA,100,CA,M
|
||||
237,2021,MI,FT,Data Scientist,39600,EUR,46809,ES,100,ES,M
|
||||
238,2021,EN,FT,Data Scientist,4000,USD,4000,VN,0,VN,M
|
||||
239,2021,EN,FT,Data Engineer,1600000,INR,21637,IN,50,IN,M
|
||||
240,2021,SE,FT,Data Scientist,130000,CAD,103691,CA,100,CA,L
|
||||
241,2021,MI,FT,Data Analyst,80000,USD,80000,US,100,US,L
|
||||
242,2021,MI,FT,Data Engineer,110000,USD,110000,US,100,US,L
|
||||
243,2021,SE,FT,Data Scientist,165000,USD,165000,US,100,US,L
|
||||
244,2021,EN,FT,AI Scientist,1335000,INR,18053,IN,100,AS,S
|
||||
245,2021,MI,FT,Data Engineer,52500,GBP,72212,GB,50,GB,L
|
||||
246,2021,EN,FT,Data Scientist,31000,EUR,36643,FR,50,FR,L
|
||||
247,2021,MI,FT,Data Engineer,108000,TRY,12103,TR,0,TR,M
|
||||
248,2021,SE,FT,Data Engineer,70000,GBP,96282,GB,50,GB,L
|
||||
249,2021,SE,FT,Principal Data Analyst,170000,USD,170000,US,100,US,M
|
||||
250,2021,MI,FT,Data Scientist,115000,USD,115000,US,50,US,L
|
||||
251,2021,EN,FT,Data Scientist,90000,USD,90000,US,100,US,S
|
||||
252,2021,EX,FT,Principal Data Engineer,600000,USD,600000,US,100,US,L
|
||||
253,2021,EN,FT,Data Scientist,2100000,INR,28399,IN,100,IN,M
|
||||
254,2021,MI,FT,Data Analyst,93000,USD,93000,US,100,US,L
|
||||
255,2021,SE,FT,Big Data Architect,125000,CAD,99703,CA,50,CA,M
|
||||
256,2021,MI,FT,Data Engineer,200000,USD,200000,US,100,US,L
|
||||
257,2021,SE,FT,Principal Data Scientist,147000,EUR,173762,DE,100,DE,M
|
||||
258,2021,SE,FT,Machine Learning Engineer,185000,USD,185000,US,50,US,L
|
||||
259,2021,EX,FT,Director of Data Science,120000,EUR,141846,DE,0,DE,L
|
||||
260,2021,MI,FT,Data Scientist,130000,USD,130000,US,50,US,L
|
||||
261,2021,SE,FT,Data Analyst,54000,EUR,63831,DE,50,DE,L
|
||||
262,2021,MI,FT,Data Scientist,1250000,INR,16904,IN,100,IN,S
|
||||
263,2021,SE,FT,Machine Learning Engineer,4900000,INR,66265,IN,0,IN,L
|
||||
264,2021,MI,FT,Data Scientist,21600,EUR,25532,RS,100,DE,S
|
||||
265,2021,SE,FT,Lead Data Engineer,160000,USD,160000,PR,50,US,S
|
||||
266,2021,MI,FT,Data Engineer,93150,USD,93150,US,0,US,M
|
||||
267,2021,MI,FT,Data Engineer,111775,USD,111775,US,0,US,M
|
||||
268,2021,MI,FT,Data Engineer,250000,TRY,28016,TR,100,TR,M
|
||||
269,2021,EN,FT,Data Engineer,55000,EUR,65013,DE,50,DE,M
|
||||
270,2021,EN,FT,Data Engineer,72500,USD,72500,US,100,US,L
|
||||
271,2021,SE,FT,Computer Vision Engineer,102000,BRL,18907,BR,0,BR,M
|
||||
272,2021,EN,FT,Data Science Consultant,65000,EUR,76833,DE,0,DE,L
|
||||
273,2021,EN,FT,Machine Learning Engineer,85000,USD,85000,NL,100,DE,S
|
||||
274,2021,SE,FT,Data Scientist,65720,EUR,77684,FR,50,FR,M
|
||||
275,2021,EN,FT,Data Scientist,100000,USD,100000,US,100,US,M
|
||||
276,2021,EN,FT,Data Scientist,58000,USD,58000,US,50,US,L
|
||||
277,2021,SE,FT,AI Scientist,55000,USD,55000,ES,100,ES,L
|
||||
278,2021,SE,FT,Data Scientist,180000,TRY,20171,TR,50,TR,L
|
||||
279,2021,EN,FT,Business Data Analyst,50000,EUR,59102,LU,100,LU,L
|
||||
280,2021,MI,FT,Data Engineer,112000,USD,112000,US,100,US,L
|
||||
281,2021,EN,FT,Research Scientist,100000,USD,100000,JE,0,CN,L
|
||||
282,2021,MI,PT,Data Engineer,59000,EUR,69741,NL,100,NL,L
|
||||
283,2021,SE,CT,Staff Data Scientist,105000,USD,105000,US,100,US,M
|
||||
284,2021,MI,FT,Research Scientist,69999,USD,69999,CZ,50,CZ,L
|
||||
285,2021,SE,FT,Data Science Manager,7000000,INR,94665,IN,50,IN,L
|
||||
286,2021,SE,FT,Head of Data,87000,EUR,102839,SI,100,SI,L
|
||||
287,2021,MI,FT,Data Scientist,109000,USD,109000,US,50,US,L
|
||||
288,2021,MI,FT,Machine Learning Engineer,43200,EUR,51064,IT,50,IT,L
|
||||
289,2022,SE,FT,Data Engineer,135000,USD,135000,US,100,US,M
|
||||
290,2022,SE,FT,Data Analyst,155000,USD,155000,US,100,US,M
|
||||
291,2022,SE,FT,Data Analyst,120600,USD,120600,US,100,US,M
|
||||
292,2022,MI,FT,Data Scientist,130000,USD,130000,US,0,US,M
|
||||
293,2022,MI,FT,Data Scientist,90000,USD,90000,US,0,US,M
|
||||
294,2022,MI,FT,Data Engineer,170000,USD,170000,US,100,US,M
|
||||
295,2022,MI,FT,Data Engineer,150000,USD,150000,US,100,US,M
|
||||
296,2022,SE,FT,Data Analyst,102100,USD,102100,US,100,US,M
|
||||
297,2022,SE,FT,Data Analyst,84900,USD,84900,US,100,US,M
|
||||
298,2022,SE,FT,Data Scientist,136620,USD,136620,US,100,US,M
|
||||
299,2022,SE,FT,Data Scientist,99360,USD,99360,US,100,US,M
|
||||
300,2022,SE,FT,Data Scientist,90000,GBP,117789,GB,0,GB,M
|
||||
301,2022,SE,FT,Data Scientist,80000,GBP,104702,GB,0,GB,M
|
||||
302,2022,SE,FT,Data Scientist,146000,USD,146000,US,100,US,M
|
||||
303,2022,SE,FT,Data Scientist,123000,USD,123000,US,100,US,M
|
||||
304,2022,EN,FT,Data Engineer,40000,GBP,52351,GB,100,GB,M
|
||||
305,2022,SE,FT,Data Analyst,99000,USD,99000,US,0,US,M
|
||||
306,2022,SE,FT,Data Analyst,116000,USD,116000,US,0,US,M
|
||||
307,2022,MI,FT,Data Analyst,106260,USD,106260,US,0,US,M
|
||||
308,2022,MI,FT,Data Analyst,126500,USD,126500,US,0,US,M
|
||||
309,2022,EX,FT,Data Engineer,242000,USD,242000,US,100,US,M
|
||||
310,2022,EX,FT,Data Engineer,200000,USD,200000,US,100,US,M
|
||||
311,2022,MI,FT,Data Scientist,50000,GBP,65438,GB,0,GB,M
|
||||
312,2022,MI,FT,Data Scientist,30000,GBP,39263,GB,0,GB,M
|
||||
313,2022,MI,FT,Data Engineer,60000,GBP,78526,GB,0,GB,M
|
||||
314,2022,MI,FT,Data Engineer,40000,GBP,52351,GB,0,GB,M
|
||||
315,2022,SE,FT,Data Scientist,165220,USD,165220,US,100,US,M
|
||||
316,2022,EN,FT,Data Engineer,35000,GBP,45807,GB,100,GB,M
|
||||
317,2022,SE,FT,Data Scientist,120160,USD,120160,US,100,US,M
|
||||
318,2022,SE,FT,Data Analyst,90320,USD,90320,US,100,US,M
|
||||
319,2022,SE,FT,Data Engineer,181940,USD,181940,US,0,US,M
|
||||
320,2022,SE,FT,Data Engineer,132320,USD,132320,US,0,US,M
|
||||
321,2022,SE,FT,Data Engineer,220110,USD,220110,US,0,US,M
|
||||
322,2022,SE,FT,Data Engineer,160080,USD,160080,US,0,US,M
|
||||
323,2022,SE,FT,Data Scientist,180000,USD,180000,US,0,US,L
|
||||
324,2022,SE,FT,Data Scientist,120000,USD,120000,US,0,US,L
|
||||
325,2022,SE,FT,Data Analyst,124190,USD,124190,US,100,US,M
|
||||
326,2022,EX,FT,Data Analyst,130000,USD,130000,US,100,US,M
|
||||
327,2022,EX,FT,Data Analyst,110000,USD,110000,US,100,US,M
|
||||
328,2022,SE,FT,Data Analyst,170000,USD,170000,US,100,US,M
|
||||
329,2022,MI,FT,Data Analyst,115500,USD,115500,US,100,US,M
|
||||
330,2022,SE,FT,Data Analyst,112900,USD,112900,US,100,US,M
|
||||
331,2022,SE,FT,Data Analyst,90320,USD,90320,US,100,US,M
|
||||
332,2022,SE,FT,Data Analyst,112900,USD,112900,US,100,US,M
|
||||
333,2022,SE,FT,Data Analyst,90320,USD,90320,US,100,US,M
|
||||
334,2022,SE,FT,Data Engineer,165400,USD,165400,US,100,US,M
|
||||
335,2022,SE,FT,Data Engineer,132320,USD,132320,US,100,US,M
|
||||
336,2022,MI,FT,Data Analyst,167000,USD,167000,US,100,US,M
|
||||
337,2022,SE,FT,Data Engineer,243900,USD,243900,US,100,US,M
|
||||
338,2022,SE,FT,Data Analyst,136600,USD,136600,US,100,US,M
|
||||
339,2022,SE,FT,Data Analyst,109280,USD,109280,US,100,US,M
|
||||
340,2022,SE,FT,Data Engineer,128875,USD,128875,US,100,US,M
|
||||
341,2022,SE,FT,Data Engineer,93700,USD,93700,US,100,US,M
|
||||
342,2022,EX,FT,Head of Data Science,224000,USD,224000,US,100,US,M
|
||||
343,2022,EX,FT,Head of Data Science,167875,USD,167875,US,100,US,M
|
||||
344,2022,EX,FT,Analytics Engineer,175000,USD,175000,US,100,US,M
|
||||
345,2022,SE,FT,Data Engineer,156600,USD,156600,US,100,US,M
|
||||
346,2022,SE,FT,Data Engineer,108800,USD,108800,US,0,US,M
|
||||
347,2022,SE,FT,Data Scientist,95550,USD,95550,US,0,US,M
|
||||
348,2022,SE,FT,Data Engineer,113000,USD,113000,US,0,US,L
|
||||
349,2022,SE,FT,Data Analyst,135000,USD,135000,US,100,US,M
|
||||
350,2022,SE,FT,Data Science Manager,161342,USD,161342,US,100,US,M
|
||||
351,2022,SE,FT,Data Science Manager,137141,USD,137141,US,100,US,M
|
||||
352,2022,SE,FT,Data Scientist,167000,USD,167000,US,100,US,M
|
||||
353,2022,SE,FT,Data Scientist,123000,USD,123000,US,100,US,M
|
||||
354,2022,SE,FT,Data Engineer,60000,GBP,78526,GB,0,GB,M
|
||||
355,2022,SE,FT,Data Engineer,50000,GBP,65438,GB,0,GB,M
|
||||
356,2022,SE,FT,Data Scientist,150000,USD,150000,US,0,US,M
|
||||
357,2022,SE,FT,Data Scientist,211500,USD,211500,US,100,US,M
|
||||
358,2022,SE,FT,Data Architect,192400,USD,192400,CA,100,CA,M
|
||||
359,2022,SE,FT,Data Architect,90700,USD,90700,CA,100,CA,M
|
||||
360,2022,SE,FT,Data Analyst,130000,USD,130000,CA,100,CA,M
|
||||
361,2022,SE,FT,Data Analyst,61300,USD,61300,CA,100,CA,M
|
||||
362,2022,SE,FT,Data Analyst,130000,USD,130000,CA,100,CA,M
|
||||
363,2022,SE,FT,Data Analyst,61300,USD,61300,CA,100,CA,M
|
||||
364,2022,SE,FT,Data Engineer,160000,USD,160000,US,0,US,L
|
||||
365,2022,SE,FT,Data Scientist,138600,USD,138600,US,100,US,M
|
||||
366,2022,SE,FT,Data Engineer,136000,USD,136000,US,0,US,M
|
||||
367,2022,MI,FT,Data Analyst,58000,USD,58000,US,0,US,S
|
||||
368,2022,EX,FT,Analytics Engineer,135000,USD,135000,US,100,US,M
|
||||
369,2022,SE,FT,Data Scientist,170000,USD,170000,US,100,US,M
|
||||
370,2022,SE,FT,Data Scientist,123000,USD,123000,US,100,US,M
|
||||
371,2022,SE,FT,Machine Learning Engineer,189650,USD,189650,US,0,US,M
|
||||
372,2022,SE,FT,Machine Learning Engineer,164996,USD,164996,US,0,US,M
|
||||
373,2022,MI,FT,ETL Developer,50000,EUR,54957,GR,0,GR,M
|
||||
374,2022,MI,FT,ETL Developer,50000,EUR,54957,GR,0,GR,M
|
||||
375,2022,EX,FT,Lead Data Engineer,150000,CAD,118187,CA,100,CA,S
|
||||
376,2022,SE,FT,Data Analyst,132000,USD,132000,US,0,US,M
|
||||
377,2022,SE,FT,Data Engineer,165400,USD,165400,US,100,US,M
|
||||
378,2022,SE,FT,Data Architect,208775,USD,208775,US,100,US,M
|
||||
379,2022,SE,FT,Data Architect,147800,USD,147800,US,100,US,M
|
||||
380,2022,SE,FT,Data Engineer,136994,USD,136994,US,100,US,M
|
||||
381,2022,SE,FT,Data Engineer,101570,USD,101570,US,100,US,M
|
||||
382,2022,SE,FT,Data Analyst,128875,USD,128875,US,100,US,M
|
||||
383,2022,SE,FT,Data Analyst,93700,USD,93700,US,100,US,M
|
||||
384,2022,EX,FT,Head of Machine Learning,6000000,INR,79039,IN,50,IN,L
|
||||
385,2022,SE,FT,Data Engineer,132320,USD,132320,US,100,US,M
|
||||
386,2022,EN,FT,Machine Learning Engineer,28500,GBP,37300,GB,100,GB,L
|
||||
387,2022,SE,FT,Data Analyst,164000,USD,164000,US,0,US,M
|
||||
388,2022,SE,FT,Data Engineer,155000,USD,155000,US,100,US,M
|
||||
389,2022,MI,FT,Machine Learning Engineer,95000,GBP,124333,GB,0,GB,M
|
||||
390,2022,MI,FT,Machine Learning Engineer,75000,GBP,98158,GB,0,GB,M
|
||||
391,2022,MI,FT,AI Scientist,120000,USD,120000,US,0,US,M
|
||||
392,2022,SE,FT,Data Analyst,112900,USD,112900,US,100,US,M
|
||||
393,2022,SE,FT,Data Analyst,90320,USD,90320,US,100,US,M
|
||||
394,2022,SE,FT,Data Analytics Manager,145000,USD,145000,US,100,US,M
|
||||
395,2022,SE,FT,Data Analytics Manager,105400,USD,105400,US,100,US,M
|
||||
396,2022,MI,FT,Machine Learning Engineer,80000,EUR,87932,FR,100,DE,M
|
||||
397,2022,MI,FT,Data Engineer,90000,GBP,117789,GB,0,GB,M
|
||||
398,2022,SE,FT,Data Scientist,215300,USD,215300,US,100,US,L
|
||||
399,2022,SE,FT,Data Scientist,158200,USD,158200,US,100,US,L
|
||||
400,2022,SE,FT,Data Engineer,209100,USD,209100,US,100,US,L
|
||||
401,2022,SE,FT,Data Engineer,154600,USD,154600,US,100,US,L
|
||||
402,2022,SE,FT,Data Analyst,115934,USD,115934,US,0,US,M
|
||||
403,2022,SE,FT,Data Analyst,81666,USD,81666,US,0,US,M
|
||||
404,2022,SE,FT,Data Engineer,175000,USD,175000,US,100,US,M
|
||||
405,2022,MI,FT,Data Engineer,75000,GBP,98158,GB,0,GB,M
|
||||
406,2022,MI,FT,Data Analyst,58000,USD,58000,US,0,US,S
|
||||
407,2022,SE,FT,Data Engineer,183600,USD,183600,US,100,US,L
|
||||
408,2022,MI,FT,Data Analyst,40000,GBP,52351,GB,100,GB,M
|
||||
409,2022,SE,FT,Data Scientist,180000,USD,180000,US,100,US,M
|
||||
410,2022,MI,FT,Data Scientist,55000,GBP,71982,GB,0,GB,M
|
||||
411,2022,MI,FT,Data Scientist,35000,GBP,45807,GB,0,GB,M
|
||||
412,2022,MI,FT,Data Engineer,60000,EUR,65949,GR,100,GR,M
|
||||
413,2022,MI,FT,Data Engineer,45000,EUR,49461,GR,100,GR,M
|
||||
414,2022,MI,FT,Data Engineer,60000,GBP,78526,GB,100,GB,M
|
||||
415,2022,MI,FT,Data Engineer,45000,GBP,58894,GB,100,GB,M
|
||||
416,2022,SE,FT,Data Scientist,260000,USD,260000,US,100,US,M
|
||||
417,2022,SE,FT,Data Science Engineer,60000,USD,60000,AR,100,MX,L
|
||||
418,2022,MI,FT,Data Engineer,63900,USD,63900,US,0,US,M
|
||||
419,2022,MI,FT,Machine Learning Scientist,160000,USD,160000,US,100,US,L
|
||||
420,2022,MI,FT,Machine Learning Scientist,112300,USD,112300,US,100,US,L
|
||||
421,2022,MI,FT,Data Science Manager,241000,USD,241000,US,100,US,M
|
||||
422,2022,MI,FT,Data Science Manager,159000,USD,159000,US,100,US,M
|
||||
423,2022,SE,FT,Data Scientist,180000,USD,180000,US,0,US,M
|
||||
424,2022,SE,FT,Data Scientist,80000,USD,80000,US,0,US,M
|
||||
425,2022,MI,FT,Data Engineer,82900,USD,82900,US,0,US,M
|
||||
426,2022,SE,FT,Data Engineer,100800,USD,100800,US,100,US,L
|
||||
427,2022,MI,FT,Data Engineer,45000,EUR,49461,ES,100,ES,M
|
||||
428,2022,SE,FT,Data Scientist,140400,USD,140400,US,0,US,L
|
||||
429,2022,MI,FT,Data Analyst,30000,GBP,39263,GB,100,GB,M
|
||||
430,2022,MI,FT,Data Analyst,40000,EUR,43966,ES,100,ES,M
|
||||
431,2022,MI,FT,Data Analyst,30000,EUR,32974,ES,100,ES,M
|
||||
432,2022,MI,FT,Data Engineer,80000,EUR,87932,ES,100,ES,M
|
||||
433,2022,MI,FT,Data Engineer,70000,EUR,76940,ES,100,ES,M
|
||||
434,2022,MI,FT,Data Engineer,80000,GBP,104702,GB,100,GB,M
|
||||
435,2022,MI,FT,Data Engineer,70000,GBP,91614,GB,100,GB,M
|
||||
436,2022,MI,FT,Data Engineer,60000,EUR,65949,ES,100,ES,M
|
||||
437,2022,MI,FT,Data Engineer,80000,EUR,87932,GR,100,GR,M
|
||||
438,2022,SE,FT,Machine Learning Engineer,189650,USD,189650,US,0,US,M
|
||||
439,2022,SE,FT,Machine Learning Engineer,164996,USD,164996,US,0,US,M
|
||||
440,2022,MI,FT,Data Analyst,40000,EUR,43966,GR,100,GR,M
|
||||
441,2022,MI,FT,Data Analyst,30000,EUR,32974,GR,100,GR,M
|
||||
442,2022,MI,FT,Data Engineer,75000,GBP,98158,GB,100,GB,M
|
||||
443,2022,MI,FT,Data Engineer,60000,GBP,78526,GB,100,GB,M
|
||||
444,2022,SE,FT,Data Scientist,215300,USD,215300,US,0,US,L
|
||||
445,2022,MI,FT,Data Engineer,70000,EUR,76940,GR,100,GR,M
|
||||
446,2022,SE,FT,Data Engineer,209100,USD,209100,US,100,US,L
|
||||
447,2022,SE,FT,Data Engineer,154600,USD,154600,US,100,US,L
|
||||
448,2022,SE,FT,Data Engineer,180000,USD,180000,US,100,US,M
|
||||
449,2022,EN,FT,ML Engineer,20000,EUR,21983,PT,100,PT,L
|
||||
450,2022,SE,FT,Data Engineer,80000,USD,80000,US,100,US,M
|
||||
451,2022,MI,FT,Machine Learning Developer,100000,CAD,78791,CA,100,CA,M
|
||||
452,2022,EX,FT,Director of Data Science,250000,CAD,196979,CA,50,CA,L
|
||||
453,2022,MI,FT,Machine Learning Engineer,120000,USD,120000,US,100,US,S
|
||||
454,2022,EN,FT,Computer Vision Engineer,125000,USD,125000,US,0,US,M
|
||||
455,2022,MI,FT,NLP Engineer,240000,CNY,37236,US,50,US,L
|
||||
456,2022,SE,FT,Data Engineer,105000,USD,105000,US,100,US,M
|
||||
457,2022,SE,FT,Lead Machine Learning Engineer,80000,EUR,87932,DE,0,DE,M
|
||||
458,2022,MI,FT,Business Data Analyst,1400000,INR,18442,IN,100,IN,M
|
||||
459,2022,MI,FT,Data Scientist,2400000,INR,31615,IN,100,IN,L
|
||||
460,2022,MI,FT,Machine Learning Infrastructure Engineer,53000,EUR,58255,PT,50,PT,L
|
||||
461,2022,EN,FT,Financial Data Analyst,100000,USD,100000,US,50,US,L
|
||||
462,2022,MI,PT,Data Engineer,50000,EUR,54957,DE,50,DE,L
|
||||
463,2022,EN,FT,Data Scientist,1400000,INR,18442,IN,100,IN,M
|
||||
464,2022,SE,FT,Principal Data Scientist,148000,EUR,162674,DE,100,DE,M
|
||||
465,2022,EN,FT,Data Engineer,120000,USD,120000,US,100,US,M
|
||||
466,2022,SE,FT,Research Scientist,144000,USD,144000,US,50,US,L
|
||||
467,2022,SE,FT,Data Scientist,104890,USD,104890,US,100,US,M
|
||||
468,2022,SE,FT,Data Engineer,100000,USD,100000,US,100,US,M
|
||||
469,2022,SE,FT,Data Scientist,140000,USD,140000,US,100,US,M
|
||||
470,2022,MI,FT,Data Analyst,135000,USD,135000,US,100,US,M
|
||||
471,2022,MI,FT,Data Analyst,50000,USD,50000,US,100,US,M
|
||||
472,2022,SE,FT,Data Scientist,220000,USD,220000,US,100,US,M
|
||||
473,2022,SE,FT,Data Scientist,140000,USD,140000,US,100,US,M
|
||||
474,2022,MI,FT,Data Scientist,140000,GBP,183228,GB,0,GB,M
|
||||
475,2022,MI,FT,Data Scientist,70000,GBP,91614,GB,0,GB,M
|
||||
476,2022,SE,FT,Data Scientist,185100,USD,185100,US,100,US,M
|
||||
477,2022,SE,FT,Machine Learning Engineer,220000,USD,220000,US,100,US,M
|
||||
478,2022,MI,FT,Data Scientist,200000,USD,200000,US,100,US,M
|
||||
479,2022,MI,FT,Data Scientist,120000,USD,120000,US,100,US,M
|
||||
480,2022,SE,FT,Machine Learning Engineer,120000,USD,120000,AE,100,AE,S
|
||||
481,2022,SE,FT,Machine Learning Engineer,65000,USD,65000,AE,100,AE,S
|
||||
482,2022,EX,FT,Data Engineer,324000,USD,324000,US,100,US,M
|
||||
483,2022,EX,FT,Data Engineer,216000,USD,216000,US,100,US,M
|
||||
484,2022,SE,FT,Data Engineer,210000,USD,210000,US,100,US,M
|
||||
485,2022,SE,FT,Machine Learning Engineer,120000,USD,120000,US,100,US,M
|
||||
486,2022,SE,FT,Data Scientist,230000,USD,230000,US,100,US,M
|
||||
487,2022,EN,PT,Data Scientist,100000,USD,100000,DZ,50,DZ,M
|
||||
488,2022,MI,FL,Data Scientist,100000,USD,100000,CA,100,US,M
|
||||
489,2022,EN,CT,Applied Machine Learning Scientist,29000,EUR,31875,TN,100,CZ,M
|
||||
490,2022,SE,FT,Head of Data,200000,USD,200000,MY,100,US,M
|
||||
491,2022,MI,FT,Principal Data Analyst,75000,USD,75000,CA,100,CA,S
|
||||
492,2022,MI,FT,Data Scientist,150000,PLN,35590,PL,100,PL,L
|
||||
493,2022,SE,FT,Machine Learning Developer,100000,CAD,78791,CA,100,CA,M
|
||||
494,2022,SE,FT,Data Scientist,100000,USD,100000,BR,100,US,M
|
||||
495,2022,MI,FT,Machine Learning Scientist,153000,USD,153000,US,50,US,M
|
||||
496,2022,EN,FT,Data Engineer,52800,EUR,58035,PK,100,DE,M
|
||||
497,2022,SE,FT,Data Scientist,165000,USD,165000,US,100,US,M
|
||||
498,2022,SE,FT,Research Scientist,85000,EUR,93427,FR,50,FR,L
|
||||
499,2022,EN,FT,Data Scientist,66500,CAD,52396,CA,100,CA,L
|
||||
500,2022,SE,FT,Machine Learning Engineer,57000,EUR,62651,NL,100,NL,L
|
||||
501,2022,MI,FT,Head of Data,30000,EUR,32974,EE,100,EE,S
|
||||
502,2022,EN,FT,Data Scientist,40000,USD,40000,JP,100,MY,L
|
||||
503,2022,MI,FT,Machine Learning Engineer,121000,AUD,87425,AU,100,AU,L
|
||||
504,2022,SE,FT,Data Engineer,115000,USD,115000,US,100,US,M
|
||||
505,2022,EN,FT,Data Scientist,120000,AUD,86703,AU,50,AU,M
|
||||
506,2022,MI,FT,Applied Machine Learning Scientist,75000,USD,75000,BO,100,US,L
|
||||
507,2022,MI,FT,Research Scientist,59000,EUR,64849,AT,0,AT,L
|
||||
508,2022,EN,FT,Research Scientist,120000,USD,120000,US,100,US,L
|
||||
509,2022,MI,FT,Applied Data Scientist,157000,USD,157000,US,100,US,L
|
||||
510,2022,EN,FT,Computer Vision Software Engineer,150000,USD,150000,AU,100,AU,S
|
||||
511,2022,MI,FT,Business Data Analyst,90000,CAD,70912,CA,50,CA,L
|
||||
512,2022,EN,FT,Data Engineer,65000,USD,65000,US,100,US,S
|
||||
513,2022,SE,FT,Machine Learning Engineer,65000,EUR,71444,IE,100,IE,S
|
||||
514,2022,EN,FT,Data Analytics Engineer,20000,USD,20000,PK,0,PK,M
|
||||
515,2022,MI,FT,Data Scientist,48000,USD,48000,RU,100,US,S
|
||||
516,2022,SE,FT,Data Science Manager,152500,USD,152500,US,100,US,M
|
||||
517,2022,MI,FT,Data Engineer,62000,EUR,68147,FR,100,FR,M
|
||||
518,2022,MI,FT,Data Scientist,115000,CHF,122346,CH,0,CH,L
|
||||
519,2022,SE,FT,Applied Data Scientist,380000,USD,380000,US,100,US,L
|
||||
520,2022,MI,FT,Data Scientist,88000,CAD,69336,CA,100,CA,M
|
||||
521,2022,EN,FT,Computer Vision Engineer,10000,USD,10000,PT,100,LU,M
|
||||
522,2022,MI,FT,Data Analyst,20000,USD,20000,GR,100,GR,S
|
||||
523,2022,SE,FT,Data Analytics Lead,405000,USD,405000,US,100,US,L
|
||||
524,2022,MI,FT,Data Scientist,135000,USD,135000,US,100,US,L
|
||||
525,2022,SE,FT,Applied Data Scientist,177000,USD,177000,US,100,US,L
|
||||
526,2022,MI,FT,Data Scientist,78000,USD,78000,US,100,US,M
|
||||
527,2022,SE,FT,Data Analyst,135000,USD,135000,US,100,US,M
|
||||
528,2022,SE,FT,Data Analyst,100000,USD,100000,US,100,US,M
|
||||
529,2022,SE,FT,Data Analyst,90320,USD,90320,US,100,US,M
|
||||
530,2022,MI,FT,Data Analyst,85000,USD,85000,CA,0,CA,M
|
||||
531,2022,MI,FT,Data Analyst,75000,USD,75000,CA,0,CA,M
|
||||
532,2022,SE,FT,Machine Learning Engineer,214000,USD,214000,US,100,US,M
|
||||
533,2022,SE,FT,Machine Learning Engineer,192600,USD,192600,US,100,US,M
|
||||
534,2022,SE,FT,Data Architect,266400,USD,266400,US,100,US,M
|
||||
535,2022,SE,FT,Data Architect,213120,USD,213120,US,100,US,M
|
||||
536,2022,SE,FT,Data Analyst,112900,USD,112900,US,100,US,M
|
||||
537,2022,SE,FT,Data Engineer,155000,USD,155000,US,100,US,M
|
||||
538,2022,MI,FT,Data Scientist,141300,USD,141300,US,0,US,M
|
||||
539,2022,MI,FT,Data Scientist,102100,USD,102100,US,0,US,M
|
||||
540,2022,SE,FT,Data Analyst,115934,USD,115934,US,100,US,M
|
||||
541,2022,SE,FT,Data Analyst,81666,USD,81666,US,100,US,M
|
||||
542,2022,MI,FT,Data Engineer,206699,USD,206699,US,0,US,M
|
||||
543,2022,MI,FT,Data Engineer,99100,USD,99100,US,0,US,M
|
||||
544,2022,SE,FT,Data Engineer,130000,USD,130000,US,100,US,M
|
||||
545,2022,SE,FT,Data Engineer,115000,USD,115000,US,100,US,M
|
||||
546,2022,SE,FT,Data Engineer,110500,USD,110500,US,100,US,M
|
||||
547,2022,SE,FT,Data Engineer,130000,USD,130000,US,100,US,M
|
||||
548,2022,SE,FT,Data Analyst,99050,USD,99050,US,100,US,M
|
||||
549,2022,SE,FT,Data Engineer,160000,USD,160000,US,100,US,M
|
||||
550,2022,SE,FT,Data Scientist,205300,USD,205300,US,0,US,L
|
||||
551,2022,SE,FT,Data Scientist,140400,USD,140400,US,0,US,L
|
||||
552,2022,SE,FT,Data Scientist,176000,USD,176000,US,100,US,M
|
||||
553,2022,SE,FT,Data Scientist,144000,USD,144000,US,100,US,M
|
||||
554,2022,SE,FT,Data Engineer,200100,USD,200100,US,100,US,M
|
||||
555,2022,SE,FT,Data Engineer,160000,USD,160000,US,100,US,M
|
||||
556,2022,SE,FT,Data Engineer,145000,USD,145000,US,100,US,M
|
||||
557,2022,SE,FT,Data Engineer,70500,USD,70500,US,0,US,M
|
||||
558,2022,SE,FT,Data Scientist,205300,USD,205300,US,0,US,M
|
||||
559,2022,SE,FT,Data Scientist,140400,USD,140400,US,0,US,M
|
||||
560,2022,SE,FT,Analytics Engineer,205300,USD,205300,US,0,US,M
|
||||
561,2022,SE,FT,Analytics Engineer,184700,USD,184700,US,0,US,M
|
||||
562,2022,SE,FT,Data Engineer,175100,USD,175100,US,100,US,M
|
||||
563,2022,SE,FT,Data Engineer,140250,USD,140250,US,100,US,M
|
||||
564,2022,SE,FT,Data Analyst,116150,USD,116150,US,100,US,M
|
||||
565,2022,SE,FT,Data Engineer,54000,USD,54000,US,0,US,M
|
||||
566,2022,SE,FT,Data Analyst,170000,USD,170000,US,100,US,M
|
||||
567,2022,MI,FT,Data Analyst,50000,GBP,65438,GB,0,GB,M
|
||||
568,2022,SE,FT,Data Analyst,80000,USD,80000,US,100,US,M
|
||||
569,2022,SE,FT,Data Scientist,140000,USD,140000,US,100,US,M
|
||||
570,2022,SE,FT,Data Scientist,210000,USD,210000,US,100,US,M
|
||||
571,2022,SE,FT,Data Scientist,140000,USD,140000,US,100,US,M
|
||||
572,2022,SE,FT,Data Analyst,100000,USD,100000,US,100,US,M
|
||||
573,2022,SE,FT,Data Analyst,69000,USD,69000,US,100,US,M
|
||||
574,2022,SE,FT,Data Scientist,210000,USD,210000,US,100,US,M
|
||||
575,2022,SE,FT,Data Scientist,140000,USD,140000,US,100,US,M
|
||||
576,2022,SE,FT,Data Scientist,210000,USD,210000,US,100,US,M
|
||||
577,2022,SE,FT,Data Analyst,150075,USD,150075,US,100,US,M
|
||||
578,2022,SE,FT,Data Engineer,100000,USD,100000,US,100,US,M
|
||||
579,2022,SE,FT,Data Engineer,25000,USD,25000,US,100,US,M
|
||||
580,2022,SE,FT,Data Analyst,126500,USD,126500,US,100,US,M
|
||||
581,2022,SE,FT,Data Analyst,106260,USD,106260,US,100,US,M
|
||||
582,2022,SE,FT,Data Engineer,220110,USD,220110,US,100,US,M
|
||||
583,2022,SE,FT,Data Engineer,160080,USD,160080,US,100,US,M
|
||||
584,2022,SE,FT,Data Analyst,105000,USD,105000,US,100,US,M
|
||||
585,2022,SE,FT,Data Analyst,110925,USD,110925,US,100,US,M
|
||||
586,2022,MI,FT,Data Analyst,35000,GBP,45807,GB,0,GB,M
|
||||
587,2022,SE,FT,Data Scientist,140000,USD,140000,US,100,US,M
|
||||
588,2022,SE,FT,Data Analyst,99000,USD,99000,US,0,US,M
|
||||
589,2022,SE,FT,Data Analyst,60000,USD,60000,US,100,US,M
|
||||
590,2022,SE,FT,Data Architect,192564,USD,192564,US,100,US,M
|
||||
591,2022,SE,FT,Data Architect,144854,USD,144854,US,100,US,M
|
||||
592,2022,SE,FT,Data Scientist,230000,USD,230000,US,100,US,M
|
||||
593,2022,SE,FT,Data Scientist,150000,USD,150000,US,100,US,M
|
||||
594,2022,SE,FT,Data Analytics Manager,150260,USD,150260,US,100,US,M
|
||||
595,2022,SE,FT,Data Analytics Manager,109280,USD,109280,US,100,US,M
|
||||
596,2022,SE,FT,Data Scientist,210000,USD,210000,US,100,US,M
|
||||
597,2022,SE,FT,Data Analyst,170000,USD,170000,US,100,US,M
|
||||
598,2022,MI,FT,Data Scientist,160000,USD,160000,US,100,US,M
|
||||
599,2022,MI,FT,Data Scientist,130000,USD,130000,US,100,US,M
|
||||
600,2022,EN,FT,Data Analyst,67000,USD,67000,CA,0,CA,M
|
||||
601,2022,EN,FT,Data Analyst,52000,USD,52000,CA,0,CA,M
|
||||
602,2022,SE,FT,Data Engineer,154000,USD,154000,US,100,US,M
|
||||
603,2022,SE,FT,Data Engineer,126000,USD,126000,US,100,US,M
|
||||
604,2022,SE,FT,Data Analyst,129000,USD,129000,US,0,US,M
|
||||
605,2022,SE,FT,Data Analyst,150000,USD,150000,US,100,US,M
|
||||
606,2022,MI,FT,AI Scientist,200000,USD,200000,IN,100,US,L
|
||||
|
BIN
degtyarev_mikhail_lab_5/img.png
Normal file
|
After Width: | Height: | Size: 123 KiB |
60
degtyarev_mikhail_lab_5/main.py
Normal file
@@ -0,0 +1,60 @@
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import Lasso
from sklearn.metrics import mean_squared_error
from sklearn.preprocessing import StandardScaler, OneHotEncoder
from sklearn.compose import ColumnTransformer
from sklearn.pipeline import Pipeline
import matplotlib.pyplot as plt

# Load the data
file_path = 'ds_salaries.csv'
data = pd.read_csv(file_path)

# Preprocessing: scale the numeric feature, one-hot encode the categorical ones
categorical_features = ['experience_level', 'employment_type', 'company_location', 'company_size']
numeric_features = ['work_year']

preprocessor = ColumnTransformer(
    transformers=[
        ('num', StandardScaler(), numeric_features),
        ('cat', OneHotEncoder(handle_unknown='ignore'), categorical_features)
    ])

# Feature selection
features = ['work_year', 'experience_level', 'employment_type', 'company_location', 'company_size']
X = data[features]
y = data['salary_in_usd']

# Split the data into training and test sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Build and train the model with the preprocessing step included in the pipeline
alpha = 0.01
lasso_model = Pipeline([
    ('preprocessor', preprocessor),
    ('lasso', Lasso(alpha=alpha))
])

lasso_model.fit(X_train, y_train)

# Predictions
y_pred = lasso_model.predict(X_test)

# Evaluate the model
accuracy = lasso_model.score(X_test, y_test)
mse = mean_squared_error(y_test, y_pred)

print(f"R^2 Score: {accuracy:.2f}")
print(f"Mean Squared Error: {mse:.2f}")

# Print predicted vs actual values
predictions_df = pd.DataFrame({'Actual': y_test, 'Predicted': y_pred})
print(predictions_df)

# Visualize the model weights (coefficients).
# Note: get_feature_names_out() is the current scikit-learn API; older releases
# exposed the same information as get_feature_names().
fitted_ohe = lasso_model.named_steps['preprocessor'].transformers_[1][1]
feature_names = numeric_features + list(fitted_ohe.get_feature_names_out(categorical_features))
coefficients = pd.Series(lasso_model.named_steps['lasso'].coef_, index=feature_names)
plt.figure(figsize=(10, 6))
coefficients.sort_values().plot(kind='barh')
plt.title('Lasso Regression Coefficients')
plt.show()
41
gusev_vladislav_lab_7/README.md
Normal file
@@ -0,0 +1,41 @@
### Variant 9
### Lab assignment:
Pick a work of fiction (even variants use a Russian-language text, odd variants an English one) and
train a recurrent neural network on it for the text-generation task.
Tune the architecture and parameters so as to get as close as possible to a meaningful result.
Then pair up (even variant with odd variant), exchange the networks you built and check
how your partner's architecture handles your text.
Finally, find a compromise architecture that copes reasonably well with both kinds of
text.
### How to run the lab:
Run the file gusev_vladislav_lab_7.py; the result is printed to the console.

### Technologies
Keras is a Python library that makes it quick and easy to build neural networks.
NumPy is a library for working with multi-dimensional arrays.

### Code overview
1) Read the file with the text.
2) Create a tokenizer object that turns the text into numbers for the neural network.
3) Build a neural network model with the following layers:

- Embedding is a layer commonly used for vector representations of categorical data such as words or characters. It lets the network learn embeddings, i.e. a mapping of words (or characters) into low-dimensional vectors, which helps the network capture semantic relationships between them.
- LSTM is a recurrent layer able to capture dependencies in sequential data, which makes it well suited to processing sequences such as text.
- Dense is a fully connected layer that applies weights to its inputs; it is typically used at the end of a network for classification or regression.

4) Train the model for 100 epochs (passes over the data) and generate text.


![img.png](img.png)
English, 100 epochs
![img_1.png](img_1.png)

![img_2.png](img_2.png)
Russian, 100 epochs
![img_3.png](img_3.png)
Russian, 17 epochs
![img_4.png](img_4.png)

### Console output
- The English text was generated after 100 epochs; the beginning came out meaningful, but the quality degrades towards the end.
- The Russian text was also generated after 100 epochs and contained many misspelled words. The Russian text generated after 17 epochs had fewer spelling mistakes, but was still not ideal.
61
gusev_vladislav_lab_7/gusev_vladislav_lab_7.py
Normal file
@@ -0,0 +1,61 @@
import numpy as np
from keras.models import Sequential
from keras.layers import Embedding, LSTM, Dense
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences

# Load the text from a file
with open('text_ru.txt', 'r', encoding='utf-8') as file:
    text = file.read()

# Create a character-level Tokenizer instance
tokenizer = Tokenizer(char_level=True)
tokenizer.fit_on_texts(text)

# Convert the text into a flat sequence of character indices
# (characters removed by the tokenizer's default filters are skipped,
# so the training arrays below are plain 2-D integer arrays as Embedding expects)
sequences = [s[0] for s in tokenizer.texts_to_sequences(text) if s]

# Prepare the training data: sliding windows of seq_length characters
seq_length = 100
dataX, dataY = [], []
for i in range(0, len(sequences) - seq_length):
    seq_in = sequences[i:i + seq_length]
    seq_out = sequences[i + seq_length]
    dataX.append(seq_in)
    dataY.append(seq_out)

dataX = np.array(dataX)
dataY = np.array(dataY)

# Build the model
vocab_size = len(tokenizer.word_index) + 1
embedding_dim = 256
rnn_units = 1024

model = Sequential()
model.add(Embedding(input_dim=vocab_size, output_dim=embedding_dim, input_length=seq_length))
model.add(LSTM(units=rnn_units))
model.add(Dense(units=vocab_size, activation='softmax'))

model.compile(loss='sparse_categorical_crossentropy', optimizer='adam')

# Train the model
batch_size = 64
model.fit(dataX, dataY, epochs=17, batch_size=batch_size)


def generate_text(seed_text, gen_length):
    generated_text = seed_text

    for _ in range(gen_length):
        sequence = tokenizer.texts_to_sequences([seed_text])[0]
        sequence = pad_sequences([sequence], maxlen=seq_length)
        prediction = model.predict(sequence)[0]
        predicted_index = np.argmax(prediction)
        predicted_char = tokenizer.index_word[predicted_index]
        generated_text += predicted_char
        seed_text += predicted_char
        seed_text = seed_text[1:]

    return generated_text


# Usage example
generated_text = generate_text("Мультфильмы", 250)
print(generated_text)
BIN
gusev_vladislav_lab_7/img.png
Normal file
|
After Width: | Height: | Size: 24 KiB |
BIN
gusev_vladislav_lab_7/img_1.png
Normal file
|
After Width: | Height: | Size: 27 KiB |
BIN
gusev_vladislav_lab_7/img_2.png
Normal file
|
After Width: | Height: | Size: 29 KiB |
BIN
gusev_vladislav_lab_7/img_3.png
Normal file
|
After Width: | Height: | Size: 24 KiB |
BIN
gusev_vladislav_lab_7/img_4.png
Normal file
|
After Width: | Height: | Size: 20 KiB |
21
gusev_vladislav_lab_7/text_eng.txt
Normal file
@@ -0,0 +1,21 @@
Do you like watching cartoons? Probably you do! But how did they come to be? Who invented them?

This is actually a very tough question. The first cartoons were created long before the TV.
For example, shadow play was a very popular form of entertainment in ancient China. Such shows looked almost like modern cartoons!

A toy called a flip book was made in the late 19th century. It was a small soft book with pictures.
Each picture was drawn in a slightly different way. When you bend this book and release the pages one by one, the images start to move.
Strictly speaking, they don’t, but our eyes see it like that anyway. The first real cartoons were made using this trick, too!

In 1895 brothers Louis and Auguste Lumière created a cinematograph.
It was a camera and a film projector in one device. Using this device, many aspiring film directors started to create their own cartoons.

This developed into a full industry by 1910. Many cartoons of that era are forgotten now, but some are still with us.
For example, Felix the Cat was created by Otto Messmer in 1919, and he’s still with us, more than a hundred years later.
Currently the rights to the character are held by DreamWorks Animation.

One of the pioneers in the industry was famous Walt Disney.
He was not afraid to experiment to make a cartoon, and his Snow White film was among the firsts to use a multiplane camera.
With its help the characters were able to move around the objects, creating an illusion of a 3D world.

Today most of the cartoons are made with computer animation. The last traditional Disney cartoon to date was Winnie the Pooh (2011).
21
gusev_vladislav_lab_7/text_ru.txt
Normal file
@@ -0,0 +1,21 @@
|
||||
Вам нравится смотреть мультфильмы? Вероятно, так оно и есть! Но как они появились на свет? Кто их изобрел?
|
||||
|
||||
На самом деле это очень сложный вопрос. Первые мультфильмы были созданы задолго до появления телевидения.
|
||||
Например, игра с тенью была очень популярной формой развлечения в Древнем Китае. Такие шоу выглядели почти как современные мультфильмы!
|
||||
|
||||
Игрушка под названием книжка-перевертыш была изготовлена в конце 19 века. Это была маленькая мягкая книжка с картинками.
|
||||
Каждая картинка была нарисована немного по-разному. Когда вы сгибаете эту книгу и отпускаете страницы одну за другой, изображения начинают двигаться.
|
||||
Строго говоря, это не так, но наши глаза все равно видят это именно так. Первые настоящие мультфильмы тоже были сделаны с использованием этого трюка!
|
||||
|
||||
В 1895 году братья Луи и Огюст Люмьер создали кинематограф.
|
||||
Это была камера и кинопроектор в одном устройстве. Используя это устройство, многие начинающие режиссеры начали создавать свои собственные мультфильмы.
|
||||
|
||||
К 1910 году это развилось в полноценную индустрию. Многие мультфильмы той эпохи сейчас забыты, но некоторые все еще с нами.
|
||||
Например, кот Феликс был создан Отто Мессмером в 1919 году, и он все еще с нами, более ста лет спустя.
|
||||
В настоящее время правами на персонажа владеет DreamWorks Animation.
|
||||
|
||||
Одним из пионеров в этой отрасли был знаменитый Уолт Дисней.
|
||||
Он не боялся экспериментировать при создании мультфильма, и его фильм "Белоснежка" был одним из первых, в котором использовалась многоплановая камера.
|
||||
С его помощью персонажи смогли передвигаться по объектам, создавая иллюзию трехмерного мира.
|
||||
|
||||
Сегодня большинство мультфильмов создано с использованием компьютерной анимации. Последним традиционным диснеевским мультфильмом на сегодняшний день был "Винни-Пух" (2011).
|
||||
53
kurmyza_pavel_lab_5/README.md
Normal file
@@ -0,0 +1,53 @@
# Laboratory work No. 5

## PIbd-41, Kurmyza Pavel

Dataset for the variant: https://www.kaggle.com/datasets/jessemostipak/hotel-booking-demand.

This dataset contains booking information for a city hotel and a resort hotel and includes details
such as the time of booking, length of stay, the number of adults, children and/or babies, the number
of available parking spaces, and so on.

## How to run the lab

- Run the file main.py

## Technologies used

- The Python programming language
- Libraries: sklearn, numpy, pandas

## What the program does

The program solves a clustering task on the chosen dataset: identifying the most profitable hotel guests based
on their lead time and the average price of one night at the hotel. The solution is reached in several stages:

- Data preprocessing
- Standardizing the data and converting it into a form convenient for ML models
- Applying the K-means clustering model
- Visualizing the results and drawing a conclusion

## Testing

We apply K-means clustering and analyse the results of each cluster to identify the most
profitable customers in our dataset based on lead time and ADR.
The first problem we face when we want to use K-means clustering is choosing the optimal
number of clusters to produce as the result. So, to determine the number of clusters,
we first used the elbow method:

![img.png](img.png)

To find the optimal number of clusters we need to pick the value of k after which the distortion starts to
decrease linearly. We concluded that the optimal number of clusters for this data is 4, so we ran the
K-means algorithm on lead_time and ADR with 4 clusters and printed the cluster centers
(a minimal sketch of how the centers can be printed follows the figure below):

![img_1.png](img_1.png)
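A possible way to print the cluster centers mentioned above, assuming the `kmeans` object fitted on `df[['lead_time', 'adr']]` in main.py later in this diff (a sketch, not part of the original lab code):

```python
import pandas as pd

# Each row of cluster_centers_ is one cluster's (lead_time, adr) centroid.
centers = pd.DataFrame(kmeans.cluster_centers_, columns=['lead_time', 'adr'])
centers.index.name = 'Cluster'
print(centers)
```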
## Conclusion

The most profitable customers are considered to be those with the shortest lead time and the highest ADR, i.e. the
customers that fall into the green cluster, while the red category shows the lowest ADR and the longest (least
profitable) lead time. In our case, after visualizing the plot, we can ask questions such as: why do some
customers have a shorter lead time than others? And is it likely that customers from certain countries fit this
profile? K-means clustering may not answer all of these questions directly, but grouping the data into separate
clusters provides a solid basis for asking them.
BIN
kurmyza_pavel_lab_5/centers.jpg
Normal file
|
After Width: | Height: | Size: 47 KiB |
BIN
kurmyza_pavel_lab_5/clusters.jpg
Normal file
|
After Width: | Height: | Size: 12 KiB |
119391
kurmyza_pavel_lab_5/hotel_bookings.csv
Normal file
81
kurmyza_pavel_lab_5/main.py
Normal file
@@ -0,0 +1,81 @@
import pandas as pd
from sklearn.model_selection import train_test_split
import datetime as dt
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.preprocessing import LabelEncoder
import sklearn.cluster as cluster

# Read the dataset
df = pd.read_csv('hotel_bookings.csv')

# Drop rows that contain missing values
df = df[df['children'].notna()]
df = df[df['country'].notna()]

# Combine the 'arrival_date_year', 'arrival_date_month' and 'arrival_date_day_of_month' columns
# into a single 'arrival_date' datetime column with the guest's day, month and year of arrival
df["arrival_date_month"] = pd.to_datetime(df['arrival_date_month'], format='%B').dt.month
df["arrival_date"] = pd.to_datetime({"year": df["arrival_date_year"].values,
                                     "month": df["arrival_date_month"].values,
                                     "day": df["arrival_date_day_of_month"].values})
df = df.drop(columns=['arrival_date_year', 'arrival_date_month', 'arrival_date_day_of_month'])

# Convert the reservation_status_date column to datetime
df["reservation_status_date"] = pd.to_datetime(df["reservation_status_date"], format='%Y-%m-%d')

# Fill missing values in these columns with the column mean
for column in ['agent', 'company', 'arrival_date']:
    df[column] = df[column].fillna(df[column].mean())

# Remove duplicate rows
df.drop_duplicates(inplace=True)

# Convert categorical variables to numeric ones so the model can work with them
categoricalV = ["hotel", "meal", "country", "market_segment", "distribution_channel", "reserved_room_type",
                "assigned_room_type", "deposit_type", "customer_type"]
df[categoricalV[1:11]] = df[categoricalV[1:11]].astype('category')

df[categoricalV[1:11]] = df[categoricalV[1:11]].apply(lambda x: LabelEncoder().fit_transform(x))

df['hotel_Num'] = LabelEncoder().fit_transform(df['hotel'])

df['numerical_larrival_date'] = df['arrival_date'].map(dt.datetime.toordinal)
df['numerical_reservation_status_date'] = df['reservation_status_date'].map(dt.datetime.toordinal)

df["is_canceled"].replace({'not canceled': 0, 'canceled': 1}, inplace=True)
df["reservation_status"].replace({'Canceled': 0, 'Check-Out': 1, 'No-Show': 2}, inplace=True)

# Define the inputs and the target
usefull_columns = df.columns.difference(['hotel', 'hotel_Num', 'arrival_date', 'reservation_status_date'])
X = df[usefull_columns]
Y = df["hotel_Num"].astype(int)

# Split the data into training and test sets
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.3, random_state=150)

# Determine the optimal number of clusters (elbow method)
df_Short = df[['lead_time', 'adr']]

K = range(1, 12)
wss = []
for k in K:
    kmeans = cluster.KMeans(n_clusters=k, init="k-means++")
    kmeans = kmeans.fit(df_Short)
    wss_iter = kmeans.inertia_
    wss.append(wss_iter)

mycenters = pd.DataFrame({'Clusters': K, 'WSS': wss})

sns.scatterplot(x='Clusters', y='WSS', data=mycenters, marker="+")

# Solve the clustering task with K-Means
kmeans = cluster.KMeans(n_clusters=4, init="k-means++")
kmeans = kmeans.fit(df[['lead_time', 'adr']])
df['Clusters'] = kmeans.labels_

# Visualize the clusters
sns.lmplot(x="lead_time", y="adr", hue='Clusters', data=df)
plt.ylim(0, 600)
plt.xlim(0, 800)
plt.show()
51
kurmyza_pavel_lab_6/README.md
Normal file
@@ -0,0 +1,51 @@
# Laboratory work No. 6

## PIbd-41, Kurmyza Pavel

Dataset for the variant: https://www.kaggle.com/datasets/jessemostipak/hotel-booking-demand.

This dataset contains booking information for a city hotel and a resort hotel and includes details
such as the time of booking, length of stay, the number of adults, children and/or babies, the number
of available parking spaces, and so on.

## How to run the lab

- Run the file main.py

## Technologies used

- The Python programming language
- Libraries: sklearn, numpy, pandas, xgboost, matplotlib, seaborn

## What the program does

The program solves a classification task on the chosen dataset: determining the hotel class
(city hotel or resort hotel). The solution is reached in several stages:

- Data preprocessing
- Data balancing
- Standardizing the data and converting it into a form convenient for the ML model
- Applying the MLPClassifier classification model
- Evaluating the accuracy and specificity of this classification model

## Testing

The MLPClassifier model was chosen for the classification task.

Model accuracy score: 0.9778297119757453

![img.png](img.png)

Evaluation of the MLPClassifier model's ability to predict true positives (TP / (TP + FN)), also
known as the sensitivity, and true negatives (TN / (TN + FP)), also known as
the specificity, via the confusion matrix:

![img_1.png](img_1.png)

The confusion matrix confirms the earlier assessment of the MLPClassifier model. In addition, it indicates
that, besides high accuracy, the model also has high specificity (a sketch of how both rates can be
computed from the matrix follows below).
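A minimal sketch of computing the two rates explicitly, assuming the binary `y_test`/`y_pred` arrays produced in main.py later in this diff (a sketch, not part of the original lab code):

```python
from sklearn.metrics import confusion_matrix

# For binary labels, confusion_matrix returns [[TN, FP], [FN, TP]].
tn, fp, fn, tp = confusion_matrix(y_test, y_pred).ravel()

sensitivity = tp / (tp + fn)  # true positive rate
specificity = tn / (tn + fp)  # true negative rate
print(f"Sensitivity: {sensitivity:.3f}, Specificity: {specificity:.3f}")
```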
## Conclusion

Testing showed that the MLPClassifier model is suitable for the task at hand, as indicated by
the model's high accuracy score (97%) and specificity.
BIN
kurmyza_pavel_lab_6/classification_report.jpg
Normal file
|
After Width: | Height: | Size: 33 KiB |
BIN
kurmyza_pavel_lab_6/confusion_matrix.jpg
Normal file
|
After Width: | Height: | Size: 27 KiB |
119391
kurmyza_pavel_lab_6/hotel_bookings.csv
Normal file
104
kurmyza_pavel_lab_6/main.py
Normal file
@@ -0,0 +1,104 @@
import pandas as pd
from matplotlib import pyplot as plt
from sklearn.preprocessing import LabelEncoder
from sklearn.feature_selection import VarianceThreshold
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import confusion_matrix, classification_report
import seaborn as sns
from sklearn.neural_network import MLPClassifier

# Read the dataset
ds = pd.read_csv('hotel_bookings.csv')

# Drop rows with missing values in the country and children columns.
# These columns were chosen because, according to Kaggle, only they can contain missing values.
ds.dropna(axis=0, subset=['country', 'children'], inplace=True)

# Fill the agent column with its mean to limit its influence on the result,
# since the column holds unimportant data
moa = ds['agent'].mean()
ds['agent'].fillna(value=moa, axis=0, inplace=True)

# Fill the remaining missing cells so that no empty values are left
ds.fillna(method='pad', inplace=True)
ds.dropna(inplace=True, subset=['company'])

# Convert text columns to a numeric representation
# (note: the same `hotel` encoder instance is re-fitted for every column below)
hotel = LabelEncoder()
meal = LabelEncoder()
country = LabelEncoder()
market_segment = LabelEncoder()
distribution_channel = LabelEncoder()
reserved_room_type = LabelEncoder()
assigned_room_type = LabelEncoder()
deposit_type = LabelEncoder()
customer_type = LabelEncoder()
reservation_status = LabelEncoder()
reservation_status_date = LabelEncoder()

ds['hotel_n'] = hotel.fit_transform(ds['hotel'])
ds['arrival_date_month_n'] = hotel.fit_transform(ds['arrival_date_month'])
ds['meal_n'] = hotel.fit_transform(ds['meal'])
ds['country_n'] = hotel.fit_transform(ds['country'])
ds['market_segment_n'] = hotel.fit_transform(ds['market_segment'])
ds['distribution_channel_n'] = hotel.fit_transform(ds['distribution_channel'])
ds['reserved_room_type_n'] = hotel.fit_transform(ds['reserved_room_type'])
ds['assigned_room_type_n'] = hotel.fit_transform(ds['assigned_room_type'])
ds['deposit_type_n'] = hotel.fit_transform(ds['deposit_type'])
ds['customer_type_n'] = hotel.fit_transform(ds['customer_type'])
ds['reservation_status_n'] = hotel.fit_transform(ds['reservation_status'])
ds['reservation_status_date_n'] = hotel.fit_transform(ds['reservation_status_date'])

# Drop the original text columns, they are no longer needed
ds.drop(
    ['hotel', 'arrival_date_month', 'meal', 'country', 'market_segment', 'distribution_channel', 'reserved_room_type',
     'assigned_room_type', 'deposit_type', 'customer_type', 'reservation_status', 'reservation_status_date'], axis=1,
    inplace=True)

# Balance the data so that both hotel classes have the same number of rows
ds_0 = ds[ds['hotel_n'] == 0]
ds_1 = ds[ds['hotel_n'] == 1]
ds_0 = ds_0.sample(ds_1.shape[0])
ds = pd.concat([ds_0, ds_1], ignore_index=True)

# Prepare the data for the model
x = ds.drop('hotel_n', axis=1)
y = ds['hotel_n']

threshold = VarianceThreshold()

x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2)
x_train = threshold.fit_transform(x_train)
x_test = threshold.transform(x_test)

# Standardize the data into the form expected by the MLPClassifier model
scaler = StandardScaler()

x_train = scaler.fit_transform(x_train)
x_test = scaler.transform(x_test)  # reuse the scaler fitted on the training data

y_train = y_train.to_numpy()
y_test = y_test.to_numpy()

# Initialize the MLPClassifier model and train it

mlp = MLPClassifier()
mlp.fit(x_train, y_train)

# Evaluate the accuracy of the classification model

mlp_accuracy = mlp.score(x_test, y_test)
print(f"Model accuracy score: {mlp_accuracy}")

# Estimate the specificity via the confusion matrix

y_pred = mlp.predict(x_test)

cm = confusion_matrix(y_test, y_pred)
plt.figure(figsize=(7, 5))
sns.heatmap(cm, annot=True)
plt.xlabel('Prediction')
plt.ylabel('Actual')
plt.show()

print(classification_report(y_test, y_pred))
118
kutygin_andre_lab_3/README.md
Normal file
@@ -0,0 +1,118 @@
**Task**
***
Using a library implementation of a decision tree, solve the task from the "Decision Tree web service" laboratory work of the "Artificial Intelligence Methods" course on 99% of your data. Check the model on the remaining percent and draw a conclusion.

**How to run the lab**
***
Run the file main.py

**Technologies used**
***
The pandas, scikit-learn and matplotlib libraries and their components

**Description of the lab (program)**
***
In this code we build and train a decision tree model that predicts UFO incidents from the dataset.

1. First, the data is loaded from the CSV file 'ufo_sighting_data.csv' with pd.read_csv(). The data contains information about various UFO incidents.
2. Next, the set of features used to train the model (encounter duration in seconds, latitude and longitude) is selected and stored in the features variable, with UFO_shape as the target.
3. Rows and columns containing NaN are then dropped with dropna(), because the decision tree cannot work with missing values.
4. After that, the data is split into training and test sets with train_test_split(); the training set is used to fit the model and the test set to check its accuracy (a sketch of the 99%/1% split from the task statement is shown right after this list).
5. A decision tree model is created with the DecisionTreeClassifier() class from sklearn.tree.
6. The model is fitted on the training set with the fit() method. During training the tree parameters are tuned to better predict the target feature.
7. After training, predictions are made on the test data with the predict() method.
8. The accuracy of the model on the test set is evaluated with the accuracy_score() function from sklearn.metrics, which compares the actual target values with the predicted ones and returns the model accuracy.
9. The accuracy on the test set is then printed to judge how well the model predicts UFO shapes.
10. Finally, the predictions for the remaining share of the data are printed so that they can be compared with the real values.
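A minimal sketch of the 99% / 1% split described in the task statement, assuming the same columns as in main.py below (the committed script itself uses test_size=0.2):

```python
import pandas as pd
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split

data = pd.read_csv('ufo_sighting_data.csv').dropna()
features = ['length_of_encounter_seconds', 'latitude', 'longitude']
target = 'UFO_shape'

# 99% of the rows for training, the remaining 1% for the check
x_train, x_test, y_train, y_test = train_test_split(
    data[features], data[target], test_size=0.01, random_state=42)

model = DecisionTreeClassifier().fit(x_train, y_train)
print('Accuracy on the 1% hold-out:', accuracy_score(y_test, model.predict(x_test)))
```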
**Result**
***
Accuracy on the test set: 0.1377245508982036

Predictions for the remaining share of the data: 'cylinder' 'circle' 'sphere' 'disk' 'disk' 'fireball' 'disk' 'oval'
|
||||
'circle' 'disk' 'disk' 'other' 'light' 'light' 'oval' 'fireball' 'light'
|
||||
'rectangle' 'chevron' 'unknown' 'sphere' 'oval' 'light' 'circle'
|
||||
'unknown' 'unknown' 'disk' 'triangle' 'triangle' 'unknown' 'formation'
|
||||
'unknown' 'cigar' 'unknown' 'light' 'other' 'rectangle' 'light' 'other'
|
||||
'light' 'cylinder' 'delta' 'sphere' 'other' 'changing' 'fireball'
|
||||
'cylinder' 'cigar' 'circle' 'triangle' 'light' 'fireball' 'fireball'
|
||||
'sphere' 'circle' 'light' 'chevron' 'oval' 'oval' 'light' 'unknown'
|
||||
'triangle' 'other' 'rectangle' 'triangle' 'triangle' 'flash' 'unknown'
|
||||
'sphere' 'unknown' 'other' 'circle' 'oval' 'light' 'oval' 'formation'
|
||||
'sphere' 'triangle' 'changing' 'sphere' 'oval' 'unknown' 'circle'
|
||||
'circle' 'flash' 'light' 'light' 'sphere' 'other' 'other' 'egg' 'unknown'
|
||||
'other' 'light' 'light' 'disk' 'diamond' 'oval' 'unknown' 'light'
|
||||
'triangle' 'other' 'light' 'disk' 'unknown' 'light' 'changing' 'sphere'
|
||||
'triangle' 'circle' 'flash' 'sphere' 'light' 'unknown' 'oval' 'formation'
|
||||
'light' 'circle' 'unknown' 'other' 'triangle' 'other' 'light' 'disk'
|
||||
'formation' 'oval' 'triangle' 'triangle' 'light' 'formation' 'oval'
|
||||
'light' 'light' 'oval' 'disk' 'sphere' 'egg' 'unknown' 'unknown'
|
||||
'unknown' 'light' 'disk' 'changing' 'light' 'light' 'circle' 'circle'
|
||||
'formation' 'light' 'light' 'cigar' 'light' 'triangle' 'oval' 'fireball'
|
||||
'cylinder' 'other' 'circle' 'egg' 'changing' 'triangle' 'circle' 'other'
|
||||
'oval' 'disk' 'light' 'flash' 'fireball' 'circle' 'circle' 'circle'
|
||||
'circle' 'light' 'disk' 'fireball' 'other' 'sphere' 'light' 'changing'
|
||||
'cigar' 'light' 'cylinder' 'rectangle' 'chevron' 'light' 'light' 'light'
|
||||
'light' 'circle' 'circle' 'light' 'light' 'circle' 'sphere' 'triangle'
|
||||
'light' 'egg' 'circle' 'fireball' 'sphere' 'sphere' 'triangle' 'light'
|
||||
'other' 'cigar' 'sphere' 'sphere' 'fireball' 'light' 'light' 'disk'
|
||||
'oval' 'oval' 'other' 'cigar' 'triangle' 'light' 'light' 'light' 'disk'
|
||||
'light' 'light' 'light' 'light' 'other' 'light' 'teardrop' 'triangle'
|
||||
'teardrop' 'fireball' 'sphere' 'cylinder' 'fireball' 'circle' 'egg'
|
||||
'sphere' 'disk' 'chevron' 'triangle' 'light' 'other' 'light' 'circle'
|
||||
'rectangle' 'fireball' 'formation' 'light' 'light' 'circle' 'light'
|
||||
'light' 'formation' 'light' 'triangle' 'light' 'oval' 'light' 'unknown'
|
||||
'fireball' 'diamond' 'light' 'circle' 'light' 'triangle' 'oval' 'oval'
|
||||
'cylinder' 'circle' 'light' 'disk' 'light' 'sphere' 'circle' 'light'
|
||||
'triangle' 'light' 'fireball' 'triangle' 'light' 'flash' 'triangle' 'egg'
|
||||
'disk' 'oval' 'circle' 'flash' 'light' 'oval' 'sphere' 'light' 'triangle'
|
||||
'other' 'chevron' 'other' 'circle' 'unknown' 'unknown' 'sphere' 'light'
|
||||
'cigar' 'light' 'fireball' 'circle' 'diamond' 'fireball' 'triangle'
|
||||
'diamond' 'sphere' 'circle' 'chevron' 'cylinder' 'light' 'circle'
|
||||
'fireball' 'unknown' 'light' 'circle' 'fireball' 'light' 'fireball'
|
||||
'fireball' 'fireball' 'light' 'sphere' 'light' 'sphere' 'sphere'
|
||||
'formation' 'light' 'fireball' 'fireball' 'disk' 'disk' 'circle'
|
||||
'rectangle' 'unknown' 'disk' 'unknown' 'disk' 'triangle' 'other' 'sphere'
|
||||
'diamond' 'light' 'light' 'unknown' 'sphere' 'circle' 'disk' 'circle'
|
||||
'oval' 'changing' 'other' 'other' 'disk' 'unknown' 'unknown' 'disk'
|
||||
'rectangle' 'disk' 'light' 'oval' 'unknown' 'sphere' 'light' 'changing'
|
||||
'disk' 'disk' 'other' 'other' 'disk' 'cylinder' 'disk' 'rectangle'
|
||||
'light' 'disk' 'disk' 'light' 'fireball' 'formation' 'cigar' 'oval'
|
||||
'fireball' 'unknown' 'disk' 'light' 'light' 'triangle' 'triangle' 'light'
|
||||
'sphere' 'triangle' 'sphere' 'circle' 'light' 'oval' 'oval' 'circle'
|
||||
'oval' 'rectangle' 'disk' 'oval' 'light' 'light' 'other' 'cigar'
|
||||
'triangle' 'disk' 'cigar' 'other' 'triangle' 'egg' 'unknown' 'triangle'
|
||||
'light' 'triangle' 'disk' 'changing' 'triangle' 'disk' 'disk' 'rectangle'
|
||||
'other' 'triangle' 'triangle' 'formation' 'triangle' 'egg' 'sphere'
|
||||
'fireball' 'triangle' 'rectangle' 'light' 'triangle' 'triangle' 'other'
|
||||
'light' 'light' 'disk' 'fireball' 'light' 'disk' 'oval' 'triangle'
|
||||
'other' 'fireball' 'light' 'light' 'triangle' 'unknown' 'cigar' 'light'
|
||||
'unknown' 'chevron' 'formation' 'disk' 'cigar' 'light' 'sphere' 'cigar'
|
||||
'unknown' 'triangle' 'other' 'light' 'light' 'triangle' 'diamond' 'light'
|
||||
'triangle' 'oval' 'changing' 'light' 'flash' 'circle' 'oval' 'other'
|
||||
'sphere' 'circle' 'triangle' 'unknown' 'teardrop' 'unknown' 'fireball'
|
||||
'light' 'light' 'cigar' 'cigar' 'light' 'fireball' 'other' 'egg' 'light'
|
||||
'other' 'unknown' 'unknown' 'changing' 'circle' 'light' 'other' 'unknown'
|
||||
'unknown' 'light' 'other' 'light' 'unknown' 'cylinder' 'triangle'
|
||||
'circle' 'light' 'circle' 'circle' 'circle' 'light' 'light' 'changing'
|
||||
'changing' 'circle' 'circle' 'triangle' 'triangle' 'light' 'light'
|
||||
'light' 'light' 'other' 'changing' 'triangle' 'cylinder' 'light'
|
||||
'unknown' 'circle' 'disk' 'sphere' 'oval' 'formation' 'teardrop'
|
||||
'triangle' 'chevron' 'light' 'unknown' 'unknown' 'other' 'egg' 'circle'
|
||||
'oval' 'cigar' 'unknown' 'chevron' 'oval' 'cigar' 'fireball' 'circle'
|
||||
'unknown' 'light' 'sphere' 'fireball' 'changing' 'light' 'circle'
|
||||
'unknown' 'fireball' 'light' 'sphere' 'light' 'formation' 'circle'
|
||||
'fireball' 'formation' 'formation' 'formation' 'light' 'other' 'light'
|
||||
'light' 'circle' 'diamond' 'oval' 'circle' 'oval' 'triangle' 'light'
|
||||
'disk' 'light' 'other' 'triangle' 'triangle' 'cylinder' 'disk' 'cylinder'
|
||||
'light' 'oval' 'cigar' 'circle' 'disk' 'light' 'unknown' 'circle' 'other'
|
||||
'light' 'light' 'light' 'unknown' 'triangle' 'other' 'disk' 'cylinder'
|
||||
'triangle' 'oval' 'disk' 'light' 'triangle' 'circle' 'light' 'other'
|
||||
'light' 'other' 'circle' 'disk' 'other' 'triangle' 'oval' 'unknown'
|
||||
'light' 'triangle' 'unknown' 'circle' 'unknown' 'light' 'fireball'
|
||||
'fireball' 'rectangle' 'light' 'formation' 'unknown' 'light' 'light'
|
||||
'formation' 'fireball' 'light' 'light' 'other' 'unknown' 'light'
|
||||
'triangle' 'fireball' 'triangle' 'triangle' 'flash' 'circle' 'triangle'
|
||||
'disk' 'light' 'unknown' 'light' 'light' 'fireball' 'circle' 'unknown'
|
||||
'unknown' 'circle' 'disk' 'chevron' 'disk' 'disk' 'triangle' 'light'
|
||||
'light' 'disk'
|
||||
|
||||
***Conclusion:*** Our decision tree model showed low prediction accuracy (accuracy on the test set: 0.1377245508982036), which means it does not predict the UFO shape well from the chosen features (encounter duration, latitude and longitude). This suggests that these features are not informative enough, or are only weakly related to the UFO shape.
39
kutygin_andre_lab_3/main.py
Normal file
@@ -0,0 +1,39 @@
import pandas as pd
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split

# Load the data
data = pd.read_csv('ufo_sighting_data.csv')

# Select the features and the target
features = ['length_of_encounter_seconds', 'latitude', 'longitude']
target = 'UFO_shape'

# Drop rows containing NaN
data.dropna(inplace=True)

# Drop columns containing NaN
data.dropna(axis='columns', inplace=True)

# Split the data into training and test sets
train_data, test_data, train_labels, test_labels = train_test_split(data[features], data[target], test_size=0.2, random_state=42)

# Create and train the decision tree model
model = DecisionTreeClassifier()
model.fit(train_data, train_labels)

# Predict on the test set
predictions = model.predict(test_data)

# Evaluate the model accuracy
accuracy = accuracy_score(test_labels, predictions)
print('Accuracy on the test set:', accuracy)

# Predict on the remaining share of the data
remaining_data = data.drop(test_data.index)
remaining_predictions = model.predict(remaining_data[features])

# Print the results
print('Predictions for the remaining share of the data:', remaining_predictions)

# Draw the necessary conclusions
1
kutygin_andre_lab_3/ufo_sighting_data.csv
Normal file
26
lipatov_ilya_lab_4/README.md
Normal file
@@ -0,0 +1,26 @@
## Laboratory work No. 4

### Clustering

## Completed by Lipatov Ilya, group ПИбд-41

### How to run the lab:

* install python, numpy, matplotlib, sklearn
* run the project (entry point — lab4)

### Technologies used:

* The `Python` programming language with the numpy, matplotlib and sklearn libraries
* The `PyCharm` IDE

### What the lab does:

* Clusters the Boston housing data by the per-capita crime rate by town and the percentage of lower-status population. We expect the houses to fall into three clusters (see the sketch after the results).

### Examples of work:

#### Results:
* The clustering split our Boston houses into three large groups, as we expected, so the algorithm handled the task.


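A minimal sketch, assuming the same file and the same two columns as in lab4.py, of how the three expected flat clusters could be extracted and plotted in addition to the dendrogram:

```python
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.cluster import AgglomerativeClustering

# Same file and features as in lab4.py
data = pd.read_csv("boston.csv")
X = data[['LSTAT', 'CRIM']]

# Cut the hierarchy into the three clusters the lab expects
labels = AgglomerativeClustering(n_clusters=3).fit_predict(X)

plt.scatter(X['LSTAT'], X['CRIM'], c=labels)
plt.xlabel('LSTAT (% lower-status population)')
plt.ylabel('CRIM (per-capita crime rate)')
plt.title('Boston houses: 3 agglomerative clusters')
plt.show()
```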
507
lipatov_ilya_lab_4/boston.csv
Normal file
@@ -0,0 +1,507 @@
|
||||
CRIM,ZN,INDUS,CHAS,NOX,RM,AGE,DIS,RAD,TAX,PTRATIO,B,LSTAT,MEDV
|
||||
0.00632,18.00,2.310,0,0.5380,6.5750,65.20,4.0900,1,296.0,15.30,396.90,4.98,24.00
|
||||
0.02731,0.00,7.070,0,0.4690,6.4210,78.90,4.9671,2,242.0,17.80,396.90,9.14,21.60
|
||||
0.02729,0.00,7.070,0,0.4690,7.1850,61.10,4.9671,2,242.0,17.80,392.83,4.03,34.70
|
||||
0.03237,0.00,2.180,0,0.4580,6.9980,45.80,6.0622,3,222.0,18.70,394.63,2.94,33.40
|
||||
0.06905,0.00,2.180,0,0.4580,7.1470,54.20,6.0622,3,222.0,18.70,396.90,5.33,36.20
|
||||
0.02985,0.00,2.180,0,0.4580,6.4300,58.70,6.0622,3,222.0,18.70,394.12,5.21,28.70
|
||||
0.08829,12.50,7.870,0,0.5240,6.0120,66.60,5.5605,5,311.0,15.20,395.60,12.43,22.90
|
||||
0.14455,12.50,7.870,0,0.5240,6.1720,96.10,5.9505,5,311.0,15.20,396.90,19.15,27.10
|
||||
0.21124,12.50,7.870,0,0.5240,5.6310,100.00,6.0821,5,311.0,15.20,386.63,29.93,16.50
|
||||
0.17004,12.50,7.870,0,0.5240,6.0040,85.90,6.5921,5,311.0,15.20,386.71,17.10,18.90
|
||||
0.22489,12.50,7.870,0,0.5240,6.3770,94.30,6.3467,5,311.0,15.20,392.52,20.45,15.00
|
||||
0.11747,12.50,7.870,0,0.5240,6.0090,82.90,6.2267,5,311.0,15.20,396.90,13.27,18.90
|
||||
0.09378,12.50,7.870,0,0.5240,5.8890,39.00,5.4509,5,311.0,15.20,390.50,15.71,21.70
|
||||
0.62976,0.00,8.140,0,0.5380,5.9490,61.80,4.7075,4,307.0,21.00,396.90,8.26,20.40
|
||||
0.63796,0.00,8.140,0,0.5380,6.0960,84.50,4.4619,4,307.0,21.00,380.02,10.26,18.20
|
||||
0.62739,0.00,8.140,0,0.5380,5.8340,56.50,4.4986,4,307.0,21.00,395.62,8.47,19.90
|
||||
1.05393,0.00,8.140,0,0.5380,5.9350,29.30,4.4986,4,307.0,21.00,386.85,6.58,23.10
|
||||
0.78420,0.00,8.140,0,0.5380,5.9900,81.70,4.2579,4,307.0,21.00,386.75,14.67,17.50
|
||||
0.80271,0.00,8.140,0,0.5380,5.4560,36.60,3.7965,4,307.0,21.00,288.99,11.69,20.20
|
||||
0.72580,0.00,8.140,0,0.5380,5.7270,69.50,3.7965,4,307.0,21.00,390.95,11.28,18.20
|
||||
1.25179,0.00,8.140,0,0.5380,5.5700,98.10,3.7979,4,307.0,21.00,376.57,21.02,13.60
|
||||
0.85204,0.00,8.140,0,0.5380,5.9650,89.20,4.0123,4,307.0,21.00,392.53,13.83,19.60
|
||||
1.23247,0.00,8.140,0,0.5380,6.1420,91.70,3.9769,4,307.0,21.00,396.90,18.72,15.20
|
||||
0.98843,0.00,8.140,0,0.5380,5.8130,100.00,4.0952,4,307.0,21.00,394.54,19.88,14.50
|
||||
0.75026,0.00,8.140,0,0.5380,5.9240,94.10,4.3996,4,307.0,21.00,394.33,16.30,15.60
|
||||
0.84054,0.00,8.140,0,0.5380,5.5990,85.70,4.4546,4,307.0,21.00,303.42,16.51,13.90
|
||||
0.67191,0.00,8.140,0,0.5380,5.8130,90.30,4.6820,4,307.0,21.00,376.88,14.81,16.60
|
||||
0.95577,0.00,8.140,0,0.5380,6.0470,88.80,4.4534,4,307.0,21.00,306.38,17.28,14.80
|
||||
0.77299,0.00,8.140,0,0.5380,6.4950,94.40,4.4547,4,307.0,21.00,387.94,12.80,18.40
|
||||
1.00245,0.00,8.140,0,0.5380,6.6740,87.30,4.2390,4,307.0,21.00,380.23,11.98,21.00
|
||||
1.13081,0.00,8.140,0,0.5380,5.7130,94.10,4.2330,4,307.0,21.00,360.17,22.60,12.70
|
||||
1.35472,0.00,8.140,0,0.5380,6.0720,100.00,4.1750,4,307.0,21.00,376.73,13.04,14.50
|
||||
1.38799,0.00,8.140,0,0.5380,5.9500,82.00,3.9900,4,307.0,21.00,232.60,27.71,13.20
|
||||
1.15172,0.00,8.140,0,0.5380,5.7010,95.00,3.7872,4,307.0,21.00,358.77,18.35,13.10
|
||||
1.61282,0.00,8.140,0,0.5380,6.0960,96.90,3.7598,4,307.0,21.00,248.31,20.34,13.50
|
||||
0.06417,0.00,5.960,0,0.4990,5.9330,68.20,3.3603,5,279.0,19.20,396.90,9.68,18.90
|
||||
0.09744,0.00,5.960,0,0.4990,5.8410,61.40,3.3779,5,279.0,19.20,377.56,11.41,20.00
|
||||
0.08014,0.00,5.960,0,0.4990,5.8500,41.50,3.9342,5,279.0,19.20,396.90,8.77,21.00
|
||||
0.17505,0.00,5.960,0,0.4990,5.9660,30.20,3.8473,5,279.0,19.20,393.43,10.13,24.70
|
||||
0.02763,75.00,2.950,0,0.4280,6.5950,21.80,5.4011,3,252.0,18.30,395.63,4.32,30.80
|
||||
0.03359,75.00,2.950,0,0.4280,7.0240,15.80,5.4011,3,252.0,18.30,395.62,1.98,34.90
|
||||
0.12744,0.00,6.910,0,0.4480,6.7700,2.90,5.7209,3,233.0,17.90,385.41,4.84,26.60
|
||||
0.14150,0.00,6.910,0,0.4480,6.1690,6.60,5.7209,3,233.0,17.90,383.37,5.81,25.30
|
||||
0.15936,0.00,6.910,0,0.4480,6.2110,6.50,5.7209,3,233.0,17.90,394.46,7.44,24.70
|
||||
0.12269,0.00,6.910,0,0.4480,6.0690,40.00,5.7209,3,233.0,17.90,389.39,9.55,21.20
|
||||
0.17142,0.00,6.910,0,0.4480,5.6820,33.80,5.1004,3,233.0,17.90,396.90,10.21,19.30
|
||||
0.18836,0.00,6.910,0,0.4480,5.7860,33.30,5.1004,3,233.0,17.90,396.90,14.15,20.00
|
||||
0.22927,0.00,6.910,0,0.4480,6.0300,85.50,5.6894,3,233.0,17.90,392.74,18.80,16.60
|
||||
0.25387,0.00,6.910,0,0.4480,5.3990,95.30,5.8700,3,233.0,17.90,396.90,30.81,14.40
|
||||
0.21977,0.00,6.910,0,0.4480,5.6020,62.00,6.0877,3,233.0,17.90,396.90,16.20,19.40
|
||||
0.08873,21.00,5.640,0,0.4390,5.9630,45.70,6.8147,4,243.0,16.80,395.56,13.45,19.70
|
||||
0.04337,21.00,5.640,0,0.4390,6.1150,63.00,6.8147,4,243.0,16.80,393.97,9.43,20.50
|
||||
0.05360,21.00,5.640,0,0.4390,6.5110,21.10,6.8147,4,243.0,16.80,396.90,5.28,25.00
|
||||
0.04981,21.00,5.640,0,0.4390,5.9980,21.40,6.8147,4,243.0,16.80,396.90,8.43,23.40
|
||||
0.01360,75.00,4.000,0,0.4100,5.8880,47.60,7.3197,3,469.0,21.10,396.90,14.80,18.90
|
||||
0.01311,90.00,1.220,0,0.4030,7.2490,21.90,8.6966,5,226.0,17.90,395.93,4.81,35.40
|
||||
0.02055,85.00,0.740,0,0.4100,6.3830,35.70,9.1876,2,313.0,17.30,396.90,5.77,24.70
|
||||
0.01432,100.00,1.320,0,0.4110,6.8160,40.50,8.3248,5,256.0,15.10,392.90,3.95,31.60
|
||||
0.15445,25.00,5.130,0,0.4530,6.1450,29.20,7.8148,8,284.0,19.70,390.68,6.86,23.30
|
||||
0.10328,25.00,5.130,0,0.4530,5.9270,47.20,6.9320,8,284.0,19.70,396.90,9.22,19.60
|
||||
0.14932,25.00,5.130,0,0.4530,5.7410,66.20,7.2254,8,284.0,19.70,395.11,13.15,18.70
|
||||
0.17171,25.00,5.130,0,0.4530,5.9660,93.40,6.8185,8,284.0,19.70,378.08,14.44,16.00
|
||||
0.11027,25.00,5.130,0,0.4530,6.4560,67.80,7.2255,8,284.0,19.70,396.90,6.73,22.20
|
||||
0.12650,25.00,5.130,0,0.4530,6.7620,43.40,7.9809,8,284.0,19.70,395.58,9.50,25.00
|
||||
0.01951,17.50,1.380,0,0.4161,7.1040,59.50,9.2229,3,216.0,18.60,393.24,8.05,33.00
|
||||
0.03584,80.00,3.370,0,0.3980,6.2900,17.80,6.6115,4,337.0,16.10,396.90,4.67,23.50
|
||||
0.04379,80.00,3.370,0,0.3980,5.7870,31.10,6.6115,4,337.0,16.10,396.90,10.24,19.40
|
||||
0.05789,12.50,6.070,0,0.4090,5.8780,21.40,6.4980,4,345.0,18.90,396.21,8.10,22.00
|
||||
0.13554,12.50,6.070,0,0.4090,5.5940,36.80,6.4980,4,345.0,18.90,396.90,13.09,17.40
|
||||
0.12816,12.50,6.070,0,0.4090,5.8850,33.00,6.4980,4,345.0,18.90,396.90,8.79,20.90
|
||||
0.08826,0.00,10.810,0,0.4130,6.4170,6.60,5.2873,4,305.0,19.20,383.73,6.72,24.20
|
||||
0.15876,0.00,10.810,0,0.4130,5.9610,17.50,5.2873,4,305.0,19.20,376.94,9.88,21.70
|
||||
0.09164,0.00,10.810,0,0.4130,6.0650,7.80,5.2873,4,305.0,19.20,390.91,5.52,22.80
|
||||
0.19539,0.00,10.810,0,0.4130,6.2450,6.20,5.2873,4,305.0,19.20,377.17,7.54,23.40
|
||||
0.07896,0.00,12.830,0,0.4370,6.2730,6.00,4.2515,5,398.0,18.70,394.92,6.78,24.10
|
||||
0.09512,0.00,12.830,0,0.4370,6.2860,45.00,4.5026,5,398.0,18.70,383.23,8.94,21.40
|
||||
0.10153,0.00,12.830,0,0.4370,6.2790,74.50,4.0522,5,398.0,18.70,373.66,11.97,20.00
|
||||
0.08707,0.00,12.830,0,0.4370,6.1400,45.80,4.0905,5,398.0,18.70,386.96,10.27,20.80
|
||||
0.05646,0.00,12.830,0,0.4370,6.2320,53.70,5.0141,5,398.0,18.70,386.40,12.34,21.20
|
||||
0.08387,0.00,12.830,0,0.4370,5.8740,36.60,4.5026,5,398.0,18.70,396.06,9.10,20.30
|
||||
0.04113,25.00,4.860,0,0.4260,6.7270,33.50,5.4007,4,281.0,19.00,396.90,5.29,28.00
|
||||
0.04462,25.00,4.860,0,0.4260,6.6190,70.40,5.4007,4,281.0,19.00,395.63,7.22,23.90
|
||||
0.03659,25.00,4.860,0,0.4260,6.3020,32.20,5.4007,4,281.0,19.00,396.90,6.72,24.80
|
||||
0.03551,25.00,4.860,0,0.4260,6.1670,46.70,5.4007,4,281.0,19.00,390.64,7.51,22.90
|
||||
0.05059,0.00,4.490,0,0.4490,6.3890,48.00,4.7794,3,247.0,18.50,396.90,9.62,23.90
|
||||
0.05735,0.00,4.490,0,0.4490,6.6300,56.10,4.4377,3,247.0,18.50,392.30,6.53,26.60
|
||||
0.05188,0.00,4.490,0,0.4490,6.0150,45.10,4.4272,3,247.0,18.50,395.99,12.86,22.50
|
||||
0.07151,0.00,4.490,0,0.4490,6.1210,56.80,3.7476,3,247.0,18.50,395.15,8.44,22.20
|
||||
0.05660,0.00,3.410,0,0.4890,7.0070,86.30,3.4217,2,270.0,17.80,396.90,5.50,23.60
|
||||
0.05302,0.00,3.410,0,0.4890,7.0790,63.10,3.4145,2,270.0,17.80,396.06,5.70,28.70
|
||||
0.04684,0.00,3.410,0,0.4890,6.4170,66.10,3.0923,2,270.0,17.80,392.18,8.81,22.60
|
||||
0.03932,0.00,3.410,0,0.4890,6.4050,73.90,3.0921,2,270.0,17.80,393.55,8.20,22.00
|
||||
0.04203,28.00,15.040,0,0.4640,6.4420,53.60,3.6659,4,270.0,18.20,395.01,8.16,22.90
|
||||
0.02875,28.00,15.040,0,0.4640,6.2110,28.90,3.6659,4,270.0,18.20,396.33,6.21,25.00
|
||||
0.04294,28.00,15.040,0,0.4640,6.2490,77.30,3.6150,4,270.0,18.20,396.90,10.59,20.60
|
||||
0.12204,0.00,2.890,0,0.4450,6.6250,57.80,3.4952,2,276.0,18.00,357.98,6.65,28.40
|
||||
0.11504,0.00,2.890,0,0.4450,6.1630,69.60,3.4952,2,276.0,18.00,391.83,11.34,21.40
|
||||
0.12083,0.00,2.890,0,0.4450,8.0690,76.00,3.4952,2,276.0,18.00,396.90,4.21,38.70
|
||||
0.08187,0.00,2.890,0,0.4450,7.8200,36.90,3.4952,2,276.0,18.00,393.53,3.57,43.80
|
||||
0.06860,0.00,2.890,0,0.4450,7.4160,62.50,3.4952,2,276.0,18.00,396.90,6.19,33.20
|
||||
0.14866,0.00,8.560,0,0.5200,6.7270,79.90,2.7778,5,384.0,20.90,394.76,9.42,27.50
|
||||
0.11432,0.00,8.560,0,0.5200,6.7810,71.30,2.8561,5,384.0,20.90,395.58,7.67,26.50
|
||||
0.22876,0.00,8.560,0,0.5200,6.4050,85.40,2.7147,5,384.0,20.90,70.80,10.63,18.60
|
||||
0.21161,0.00,8.560,0,0.5200,6.1370,87.40,2.7147,5,384.0,20.90,394.47,13.44,19.30
|
||||
0.13960,0.00,8.560,0,0.5200,6.1670,90.00,2.4210,5,384.0,20.90,392.69,12.33,20.10
|
||||
0.13262,0.00,8.560,0,0.5200,5.8510,96.70,2.1069,5,384.0,20.90,394.05,16.47,19.50
|
||||
0.17120,0.00,8.560,0,0.5200,5.8360,91.90,2.2110,5,384.0,20.90,395.67,18.66,19.50
|
||||
0.13117,0.00,8.560,0,0.5200,6.1270,85.20,2.1224,5,384.0,20.90,387.69,14.09,20.40
|
||||
0.12802,0.00,8.560,0,0.5200,6.4740,97.10,2.4329,5,384.0,20.90,395.24,12.27,19.80
|
||||
0.26363,0.00,8.560,0,0.5200,6.2290,91.20,2.5451,5,384.0,20.90,391.23,15.55,19.40
|
||||
0.10793,0.00,8.560,0,0.5200,6.1950,54.40,2.7778,5,384.0,20.90,393.49,13.00,21.70
|
||||
0.10084,0.00,10.010,0,0.5470,6.7150,81.60,2.6775,6,432.0,17.80,395.59,10.16,22.80
|
||||
0.12329,0.00,10.010,0,0.5470,5.9130,92.90,2.3534,6,432.0,17.80,394.95,16.21,18.80
|
||||
0.22212,0.00,10.010,0,0.5470,6.0920,95.40,2.5480,6,432.0,17.80,396.90,17.09,18.70
|
||||
0.14231,0.00,10.010,0,0.5470,6.2540,84.20,2.2565,6,432.0,17.80,388.74,10.45,18.50
|
||||
0.17134,0.00,10.010,0,0.5470,5.9280,88.20,2.4631,6,432.0,17.80,344.91,15.76,18.30
|
||||
0.13158,0.00,10.010,0,0.5470,6.1760,72.50,2.7301,6,432.0,17.80,393.30,12.04,21.20
|
||||
0.15098,0.00,10.010,0,0.5470,6.0210,82.60,2.7474,6,432.0,17.80,394.51,10.30,19.20
|
||||
0.13058,0.00,10.010,0,0.5470,5.8720,73.10,2.4775,6,432.0,17.80,338.63,15.37,20.40
|
||||
0.14476,0.00,10.010,0,0.5470,5.7310,65.20,2.7592,6,432.0,17.80,391.50,13.61,19.30
|
||||
0.06899,0.00,25.650,0,0.5810,5.8700,69.70,2.2577,2,188.0,19.10,389.15,14.37,22.00
|
||||
0.07165,0.00,25.650,0,0.5810,6.0040,84.10,2.1974,2,188.0,19.10,377.67,14.27,20.30
|
||||
0.09299,0.00,25.650,0,0.5810,5.9610,92.90,2.0869,2,188.0,19.10,378.09,17.93,20.50
|
||||
0.15038,0.00,25.650,0,0.5810,5.8560,97.00,1.9444,2,188.0,19.10,370.31,25.41,17.30
|
||||
0.09849,0.00,25.650,0,0.5810,5.8790,95.80,2.0063,2,188.0,19.10,379.38,17.58,18.80
|
||||
0.16902,0.00,25.650,0,0.5810,5.9860,88.40,1.9929,2,188.0,19.10,385.02,14.81,21.40
|
||||
0.38735,0.00,25.650,0,0.5810,5.6130,95.60,1.7572,2,188.0,19.10,359.29,27.26,15.70
|
||||
0.25915,0.00,21.890,0,0.6240,5.6930,96.00,1.7883,4,437.0,21.20,392.11,17.19,16.20
|
||||
0.32543,0.00,21.890,0,0.6240,6.4310,98.80,1.8125,4,437.0,21.20,396.90,15.39,18.00
|
||||
0.88125,0.00,21.890,0,0.6240,5.6370,94.70,1.9799,4,437.0,21.20,396.90,18.34,14.30
|
||||
0.34006,0.00,21.890,0,0.6240,6.4580,98.90,2.1185,4,437.0,21.20,395.04,12.60,19.20
|
||||
1.19294,0.00,21.890,0,0.6240,6.3260,97.70,2.2710,4,437.0,21.20,396.90,12.26,19.60
|
||||
0.59005,0.00,21.890,0,0.6240,6.3720,97.90,2.3274,4,437.0,21.20,385.76,11.12,23.00
|
||||
0.32982,0.00,21.890,0,0.6240,5.8220,95.40,2.4699,4,437.0,21.20,388.69,15.03,18.40
|
||||
0.97617,0.00,21.890,0,0.6240,5.7570,98.40,2.3460,4,437.0,21.20,262.76,17.31,15.60
|
||||
0.55778,0.00,21.890,0,0.6240,6.3350,98.20,2.1107,4,437.0,21.20,394.67,16.96,18.10
|
||||
0.32264,0.00,21.890,0,0.6240,5.9420,93.50,1.9669,4,437.0,21.20,378.25,16.90,17.40
|
||||
0.35233,0.00,21.890,0,0.6240,6.4540,98.40,1.8498,4,437.0,21.20,394.08,14.59,17.10
|
||||
0.24980,0.00,21.890,0,0.6240,5.8570,98.20,1.6686,4,437.0,21.20,392.04,21.32,13.30
|
||||
0.54452,0.00,21.890,0,0.6240,6.1510,97.90,1.6687,4,437.0,21.20,396.90,18.46,17.80
|
||||
0.29090,0.00,21.890,0,0.6240,6.1740,93.60,1.6119,4,437.0,21.20,388.08,24.16,14.00
|
||||
1.62864,0.00,21.890,0,0.6240,5.0190,100.00,1.4394,4,437.0,21.20,396.90,34.41,14.40
|
||||
3.32105,0.00,19.580,1,0.8710,5.4030,100.00,1.3216,5,403.0,14.70,396.90,26.82,13.40
|
||||
4.09740,0.00,19.580,0,0.8710,5.4680,100.00,1.4118,5,403.0,14.70,396.90,26.42,15.60
|
||||
2.77974,0.00,19.580,0,0.8710,4.9030,97.80,1.3459,5,403.0,14.70,396.90,29.29,11.80
|
||||
2.37934,0.00,19.580,0,0.8710,6.1300,100.00,1.4191,5,403.0,14.70,172.91,27.80,13.80
|
||||
2.15505,0.00,19.580,0,0.8710,5.6280,100.00,1.5166,5,403.0,14.70,169.27,16.65,15.60
|
||||
2.36862,0.00,19.580,0,0.8710,4.9260,95.70,1.4608,5,403.0,14.70,391.71,29.53,14.60
|
||||
2.33099,0.00,19.580,0,0.8710,5.1860,93.80,1.5296,5,403.0,14.70,356.99,28.32,17.80
|
||||
2.73397,0.00,19.580,0,0.8710,5.5970,94.90,1.5257,5,403.0,14.70,351.85,21.45,15.40
|
||||
1.65660,0.00,19.580,0,0.8710,6.1220,97.30,1.6180,5,403.0,14.70,372.80,14.10,21.50
|
||||
1.49632,0.00,19.580,0,0.8710,5.4040,100.00,1.5916,5,403.0,14.70,341.60,13.28,19.60
|
||||
1.12658,0.00,19.580,1,0.8710,5.0120,88.00,1.6102,5,403.0,14.70,343.28,12.12,15.30
|
||||
2.14918,0.00,19.580,0,0.8710,5.7090,98.50,1.6232,5,403.0,14.70,261.95,15.79,19.40
|
||||
1.41385,0.00,19.580,1,0.8710,6.1290,96.00,1.7494,5,403.0,14.70,321.02,15.12,17.00
|
||||
3.53501,0.00,19.580,1,0.8710,6.1520,82.60,1.7455,5,403.0,14.70,88.01,15.02,15.60
|
||||
2.44668,0.00,19.580,0,0.8710,5.2720,94.00,1.7364,5,403.0,14.70,88.63,16.14,13.10
|
||||
1.22358,0.00,19.580,0,0.6050,6.9430,97.40,1.8773,5,403.0,14.70,363.43,4.59,41.30
|
||||
1.34284,0.00,19.580,0,0.6050,6.0660,100.00,1.7573,5,403.0,14.70,353.89,6.43,24.30
|
||||
1.42502,0.00,19.580,0,0.8710,6.5100,100.00,1.7659,5,403.0,14.70,364.31,7.39,23.30
|
||||
1.27346,0.00,19.580,1,0.6050,6.2500,92.60,1.7984,5,403.0,14.70,338.92,5.50,27.00
|
||||
1.46336,0.00,19.580,0,0.6050,7.4890,90.80,1.9709,5,403.0,14.70,374.43,1.73,50.00
|
||||
1.83377,0.00,19.580,1,0.6050,7.8020,98.20,2.0407,5,403.0,14.70,389.61,1.92,50.00
|
||||
1.51902,0.00,19.580,1,0.6050,8.3750,93.90,2.1620,5,403.0,14.70,388.45,3.32,50.00
|
||||
2.24236,0.00,19.580,0,0.6050,5.8540,91.80,2.4220,5,403.0,14.70,395.11,11.64,22.70
|
||||
2.92400,0.00,19.580,0,0.6050,6.1010,93.00,2.2834,5,403.0,14.70,240.16,9.81,25.00
|
||||
2.01019,0.00,19.580,0,0.6050,7.9290,96.20,2.0459,5,403.0,14.70,369.30,3.70,50.00
|
||||
1.80028,0.00,19.580,0,0.6050,5.8770,79.20,2.4259,5,403.0,14.70,227.61,12.14,23.80
|
||||
2.30040,0.00,19.580,0,0.6050,6.3190,96.10,2.1000,5,403.0,14.70,297.09,11.10,23.80
|
||||
2.44953,0.00,19.580,0,0.6050,6.4020,95.20,2.2625,5,403.0,14.70,330.04,11.32,22.30
|
||||
1.20742,0.00,19.580,0,0.6050,5.8750,94.60,2.4259,5,403.0,14.70,292.29,14.43,17.40
|
||||
2.31390,0.00,19.580,0,0.6050,5.8800,97.30,2.3887,5,403.0,14.70,348.13,12.03,19.10
|
||||
0.13914,0.00,4.050,0,0.5100,5.5720,88.50,2.5961,5,296.0,16.60,396.90,14.69,23.10
|
||||
0.09178,0.00,4.050,0,0.5100,6.4160,84.10,2.6463,5,296.0,16.60,395.50,9.04,23.60
|
||||
0.08447,0.00,4.050,0,0.5100,5.8590,68.70,2.7019,5,296.0,16.60,393.23,9.64,22.60
|
||||
0.06664,0.00,4.050,0,0.5100,6.5460,33.10,3.1323,5,296.0,16.60,390.96,5.33,29.40
|
||||
0.07022,0.00,4.050,0,0.5100,6.0200,47.20,3.5549,5,296.0,16.60,393.23,10.11,23.20
|
||||
0.05425,0.00,4.050,0,0.5100,6.3150,73.40,3.3175,5,296.0,16.60,395.60,6.29,24.60
|
||||
0.06642,0.00,4.050,0,0.5100,6.8600,74.40,2.9153,5,296.0,16.60,391.27,6.92,29.90
|
||||
0.05780,0.00,2.460,0,0.4880,6.9800,58.40,2.8290,3,193.0,17.80,396.90,5.04,37.20
|
||||
0.06588,0.00,2.460,0,0.4880,7.7650,83.30,2.7410,3,193.0,17.80,395.56,7.56,39.80
|
||||
0.06888,0.00,2.460,0,0.4880,6.1440,62.20,2.5979,3,193.0,17.80,396.90,9.45,36.20
|
||||
0.09103,0.00,2.460,0,0.4880,7.1550,92.20,2.7006,3,193.0,17.80,394.12,4.82,37.90
|
||||
0.10008,0.00,2.460,0,0.4880,6.5630,95.60,2.8470,3,193.0,17.80,396.90,5.68,32.50
|
||||
0.08308,0.00,2.460,0,0.4880,5.6040,89.80,2.9879,3,193.0,17.80,391.00,13.98,26.40
|
||||
0.06047,0.00,2.460,0,0.4880,6.1530,68.80,3.2797,3,193.0,17.80,387.11,13.15,29.60
|
||||
0.05602,0.00,2.460,0,0.4880,7.8310,53.60,3.1992,3,193.0,17.80,392.63,4.45,50.00
|
||||
0.07875,45.00,3.440,0,0.4370,6.7820,41.10,3.7886,5,398.0,15.20,393.87,6.68,32.00
|
||||
0.12579,45.00,3.440,0,0.4370,6.5560,29.10,4.5667,5,398.0,15.20,382.84,4.56,29.80
|
||||
0.08370,45.00,3.440,0,0.4370,7.1850,38.90,4.5667,5,398.0,15.20,396.90,5.39,34.90
|
||||
0.09068,45.00,3.440,0,0.4370,6.9510,21.50,6.4798,5,398.0,15.20,377.68,5.10,37.00
|
||||
0.06911,45.00,3.440,0,0.4370,6.7390,30.80,6.4798,5,398.0,15.20,389.71,4.69,30.50
|
||||
0.08664,45.00,3.440,0,0.4370,7.1780,26.30,6.4798,5,398.0,15.20,390.49,2.87,36.40
|
||||
0.02187,60.00,2.930,0,0.4010,6.8000,9.90,6.2196,1,265.0,15.60,393.37,5.03,31.10
|
||||
0.01439,60.00,2.930,0,0.4010,6.6040,18.80,6.2196,1,265.0,15.60,376.70,4.38,29.10
|
||||
0.01381,80.00,0.460,0,0.4220,7.8750,32.00,5.6484,4,255.0,14.40,394.23,2.97,50.00
|
||||
0.04011,80.00,1.520,0,0.4040,7.2870,34.10,7.3090,2,329.0,12.60,396.90,4.08,33.30
|
||||
0.04666,80.00,1.520,0,0.4040,7.1070,36.60,7.3090,2,329.0,12.60,354.31,8.61,30.30
|
||||
0.03768,80.00,1.520,0,0.4040,7.2740,38.30,7.3090,2,329.0,12.60,392.20,6.62,34.60
|
||||
0.03150,95.00,1.470,0,0.4030,6.9750,15.30,7.6534,3,402.0,17.00,396.90,4.56,34.90
|
||||
0.01778,95.00,1.470,0,0.4030,7.1350,13.90,7.6534,3,402.0,17.00,384.30,4.45,32.90
|
||||
0.03445,82.50,2.030,0,0.4150,6.1620,38.40,6.2700,2,348.0,14.70,393.77,7.43,24.10
|
||||
0.02177,82.50,2.030,0,0.4150,7.6100,15.70,6.2700,2,348.0,14.70,395.38,3.11,42.30
|
||||
0.03510,95.00,2.680,0,0.4161,7.8530,33.20,5.1180,4,224.0,14.70,392.78,3.81,48.50
|
||||
0.02009,95.00,2.680,0,0.4161,8.0340,31.90,5.1180,4,224.0,14.70,390.55,2.88,50.00
|
||||
0.13642,0.00,10.590,0,0.4890,5.8910,22.30,3.9454,4,277.0,18.60,396.90,10.87,22.60
|
||||
0.22969,0.00,10.590,0,0.4890,6.3260,52.50,4.3549,4,277.0,18.60,394.87,10.97,24.40
|
||||
0.25199,0.00,10.590,0,0.4890,5.7830,72.70,4.3549,4,277.0,18.60,389.43,18.06,22.50
|
||||
0.13587,0.00,10.590,1,0.4890,6.0640,59.10,4.2392,4,277.0,18.60,381.32,14.66,24.40
|
||||
0.43571,0.00,10.590,1,0.4890,5.3440,100.00,3.8750,4,277.0,18.60,396.90,23.09,20.00
|
||||
0.17446,0.00,10.590,1,0.4890,5.9600,92.10,3.8771,4,277.0,18.60,393.25,17.27,21.70
|
||||
0.37578,0.00,10.590,1,0.4890,5.4040,88.60,3.6650,4,277.0,18.60,395.24,23.98,19.30
|
||||
0.21719,0.00,10.590,1,0.4890,5.8070,53.80,3.6526,4,277.0,18.60,390.94,16.03,22.40
|
||||
0.14052,0.00,10.590,0,0.4890,6.3750,32.30,3.9454,4,277.0,18.60,385.81,9.38,28.10
|
||||
0.28955,0.00,10.590,0,0.4890,5.4120,9.80,3.5875,4,277.0,18.60,348.93,29.55,23.70
|
||||
0.19802,0.00,10.590,0,0.4890,6.1820,42.40,3.9454,4,277.0,18.60,393.63,9.47,25.00
|
||||
0.04560,0.00,13.890,1,0.5500,5.8880,56.00,3.1121,5,276.0,16.40,392.80,13.51,23.30
|
||||
0.07013,0.00,13.890,0,0.5500,6.6420,85.10,3.4211,5,276.0,16.40,392.78,9.69,28.70
|
||||
0.11069,0.00,13.890,1,0.5500,5.9510,93.80,2.8893,5,276.0,16.40,396.90,17.92,21.50
|
||||
0.11425,0.00,13.890,1,0.5500,6.3730,92.40,3.3633,5,276.0,16.40,393.74,10.50,23.00
|
||||
0.35809,0.00,6.200,1,0.5070,6.9510,88.50,2.8617,8,307.0,17.40,391.70,9.71,26.70
|
||||
0.40771,0.00,6.200,1,0.5070,6.1640,91.30,3.0480,8,307.0,17.40,395.24,21.46,21.70
|
||||
0.62356,0.00,6.200,1,0.5070,6.8790,77.70,3.2721,8,307.0,17.40,390.39,9.93,27.50
|
||||
0.61470,0.00,6.200,0,0.5070,6.6180,80.80,3.2721,8,307.0,17.40,396.90,7.60,30.10
|
||||
0.31533,0.00,6.200,0,0.5040,8.2660,78.30,2.8944,8,307.0,17.40,385.05,4.14,44.80
|
||||
0.52693,0.00,6.200,0,0.5040,8.7250,83.00,2.8944,8,307.0,17.40,382.00,4.63,50.00
|
||||
0.38214,0.00,6.200,0,0.5040,8.0400,86.50,3.2157,8,307.0,17.40,387.38,3.13,37.60
|
||||
0.41238,0.00,6.200,0,0.5040,7.1630,79.90,3.2157,8,307.0,17.40,372.08,6.36,31.60
|
||||
0.29819,0.00,6.200,0,0.5040,7.6860,17.00,3.3751,8,307.0,17.40,377.51,3.92,46.70
|
||||
0.44178,0.00,6.200,0,0.5040,6.5520,21.40,3.3751,8,307.0,17.40,380.34,3.76,31.50
|
||||
0.53700,0.00,6.200,0,0.5040,5.9810,68.10,3.6715,8,307.0,17.40,378.35,11.65,24.30
|
||||
0.46296,0.00,6.200,0,0.5040,7.4120,76.90,3.6715,8,307.0,17.40,376.14,5.25,31.70
|
||||
0.57529,0.00,6.200,0,0.5070,8.3370,73.30,3.8384,8,307.0,17.40,385.91,2.47,41.70
|
||||
0.33147,0.00,6.200,0,0.5070,8.2470,70.40,3.6519,8,307.0,17.40,378.95,3.95,48.30
|
||||
0.44791,0.00,6.200,1,0.5070,6.7260,66.50,3.6519,8,307.0,17.40,360.20,8.05,29.00
|
||||
0.33045,0.00,6.200,0,0.5070,6.0860,61.50,3.6519,8,307.0,17.40,376.75,10.88,24.00
|
||||
0.52058,0.00,6.200,1,0.5070,6.6310,76.50,4.1480,8,307.0,17.40,388.45,9.54,25.10
|
||||
0.51183,0.00,6.200,0,0.5070,7.3580,71.60,4.1480,8,307.0,17.40,390.07,4.73,31.50
|
||||
0.08244,30.00,4.930,0,0.4280,6.4810,18.50,6.1899,6,300.0,16.60,379.41,6.36,23.70
|
||||
0.09252,30.00,4.930,0,0.4280,6.6060,42.20,6.1899,6,300.0,16.60,383.78,7.37,23.30
|
||||
0.11329,30.00,4.930,0,0.4280,6.8970,54.30,6.3361,6,300.0,16.60,391.25,11.38,22.00
|
||||
0.10612,30.00,4.930,0,0.4280,6.0950,65.10,6.3361,6,300.0,16.60,394.62,12.40,20.10
|
||||
0.10290,30.00,4.930,0,0.4280,6.3580,52.90,7.0355,6,300.0,16.60,372.75,11.22,22.20
|
||||
0.12757,30.00,4.930,0,0.4280,6.3930,7.80,7.0355,6,300.0,16.60,374.71,5.19,23.70
|
||||
0.20608,22.00,5.860,0,0.4310,5.5930,76.50,7.9549,7,330.0,19.10,372.49,12.50,17.60
|
||||
0.19133,22.00,5.860,0,0.4310,5.6050,70.20,7.9549,7,330.0,19.10,389.13,18.46,18.50
|
||||
0.33983,22.00,5.860,0,0.4310,6.1080,34.90,8.0555,7,330.0,19.10,390.18,9.16,24.30
|
||||
0.19657,22.00,5.860,0,0.4310,6.2260,79.20,8.0555,7,330.0,19.10,376.14,10.15,20.50
|
||||
0.16439,22.00,5.860,0,0.4310,6.4330,49.10,7.8265,7,330.0,19.10,374.71,9.52,24.50
|
||||
0.19073,22.00,5.860,0,0.4310,6.7180,17.50,7.8265,7,330.0,19.10,393.74,6.56,26.20
|
||||
0.14030,22.00,5.860,0,0.4310,6.4870,13.00,7.3967,7,330.0,19.10,396.28,5.90,24.40
|
||||
0.21409,22.00,5.860,0,0.4310,6.4380,8.90,7.3967,7,330.0,19.10,377.07,3.59,24.80
|
||||
0.08221,22.00,5.860,0,0.4310,6.9570,6.80,8.9067,7,330.0,19.10,386.09,3.53,29.60
|
||||
0.36894,22.00,5.860,0,0.4310,8.2590,8.40,8.9067,7,330.0,19.10,396.90,3.54,42.80
|
||||
0.04819,80.00,3.640,0,0.3920,6.1080,32.00,9.2203,1,315.0,16.40,392.89,6.57,21.90
|
||||
0.03548,80.00,3.640,0,0.3920,5.8760,19.10,9.2203,1,315.0,16.40,395.18,9.25,20.90
|
||||
0.01538,90.00,3.750,0,0.3940,7.4540,34.20,6.3361,3,244.0,15.90,386.34,3.11,44.00
|
||||
0.61154,20.00,3.970,0,0.6470,8.7040,86.90,1.8010,5,264.0,13.00,389.70,5.12,50.00
|
||||
0.66351,20.00,3.970,0,0.6470,7.3330,100.00,1.8946,5,264.0,13.00,383.29,7.79,36.00
|
||||
0.65665,20.00,3.970,0,0.6470,6.8420,100.00,2.0107,5,264.0,13.00,391.93,6.90,30.10
|
||||
0.54011,20.00,3.970,0,0.6470,7.2030,81.80,2.1121,5,264.0,13.00,392.80,9.59,33.80
|
||||
0.53412,20.00,3.970,0,0.6470,7.5200,89.40,2.1398,5,264.0,13.00,388.37,7.26,43.10
|
||||
0.52014,20.00,3.970,0,0.6470,8.3980,91.50,2.2885,5,264.0,13.00,386.86,5.91,48.80
|
||||
0.82526,20.00,3.970,0,0.6470,7.3270,94.50,2.0788,5,264.0,13.00,393.42,11.25,31.00
|
||||
0.55007,20.00,3.970,0,0.6470,7.2060,91.60,1.9301,5,264.0,13.00,387.89,8.10,36.50
|
||||
0.76162,20.00,3.970,0,0.6470,5.5600,62.80,1.9865,5,264.0,13.00,392.40,10.45,22.80
|
||||
0.78570,20.00,3.970,0,0.6470,7.0140,84.60,2.1329,5,264.0,13.00,384.07,14.79,30.70
|
||||
0.57834,20.00,3.970,0,0.5750,8.2970,67.00,2.4216,5,264.0,13.00,384.54,7.44,50.00
|
||||
0.54050,20.00,3.970,0,0.5750,7.4700,52.60,2.8720,5,264.0,13.00,390.30,3.16,43.50
|
||||
0.09065,20.00,6.960,1,0.4640,5.9200,61.50,3.9175,3,223.0,18.60,391.34,13.65,20.70
|
||||
0.29916,20.00,6.960,0,0.4640,5.8560,42.10,4.4290,3,223.0,18.60,388.65,13.00,21.10
|
||||
0.16211,20.00,6.960,0,0.4640,6.2400,16.30,4.4290,3,223.0,18.60,396.90,6.59,25.20
|
||||
0.11460,20.00,6.960,0,0.4640,6.5380,58.70,3.9175,3,223.0,18.60,394.96,7.73,24.40
|
||||
0.22188,20.00,6.960,1,0.4640,7.6910,51.80,4.3665,3,223.0,18.60,390.77,6.58,35.20
|
||||
0.05644,40.00,6.410,1,0.4470,6.7580,32.90,4.0776,4,254.0,17.60,396.90,3.53,32.40
|
||||
0.09604,40.00,6.410,0,0.4470,6.8540,42.80,4.2673,4,254.0,17.60,396.90,2.98,32.00
|
||||
0.10469,40.00,6.410,1,0.4470,7.2670,49.00,4.7872,4,254.0,17.60,389.25,6.05,33.20
|
||||
0.06127,40.00,6.410,1,0.4470,6.8260,27.60,4.8628,4,254.0,17.60,393.45,4.16,33.10
|
||||
0.07978,40.00,6.410,0,0.4470,6.4820,32.10,4.1403,4,254.0,17.60,396.90,7.19,29.10
|
||||
0.21038,20.00,3.330,0,0.4429,6.8120,32.20,4.1007,5,216.0,14.90,396.90,4.85,35.10
|
||||
0.03578,20.00,3.330,0,0.4429,7.8200,64.50,4.6947,5,216.0,14.90,387.31,3.76,45.40
|
||||
0.03705,20.00,3.330,0,0.4429,6.9680,37.20,5.2447,5,216.0,14.90,392.23,4.59,35.40
|
||||
0.06129,20.00,3.330,1,0.4429,7.6450,49.70,5.2119,5,216.0,14.90,377.07,3.01,46.00
|
||||
0.01501,90.00,1.210,1,0.4010,7.9230,24.80,5.8850,1,198.0,13.60,395.52,3.16,50.00
|
||||
0.00906,90.00,2.970,0,0.4000,7.0880,20.80,7.3073,1,285.0,15.30,394.72,7.85,32.20
|
||||
0.01096,55.00,2.250,0,0.3890,6.4530,31.90,7.3073,1,300.0,15.30,394.72,8.23,22.00
|
||||
0.01965,80.00,1.760,0,0.3850,6.2300,31.50,9.0892,1,241.0,18.20,341.60,12.93,20.10
|
||||
0.03871,52.50,5.320,0,0.4050,6.2090,31.30,7.3172,6,293.0,16.60,396.90,7.14,23.20
|
||||
0.04590,52.50,5.320,0,0.4050,6.3150,45.60,7.3172,6,293.0,16.60,396.90,7.60,22.30
|
||||
0.04297,52.50,5.320,0,0.4050,6.5650,22.90,7.3172,6,293.0,16.60,371.72,9.51,24.80
|
||||
0.03502,80.00,4.950,0,0.4110,6.8610,27.90,5.1167,4,245.0,19.20,396.90,3.33,28.50
|
||||
0.07886,80.00,4.950,0,0.4110,7.1480,27.70,5.1167,4,245.0,19.20,396.90,3.56,37.30
|
||||
0.03615,80.00,4.950,0,0.4110,6.6300,23.40,5.1167,4,245.0,19.20,396.90,4.70,27.90
|
||||
0.08265,0.00,13.920,0,0.4370,6.1270,18.40,5.5027,4,289.0,16.00,396.90,8.58,23.90
|
||||
0.08199,0.00,13.920,0,0.4370,6.0090,42.30,5.5027,4,289.0,16.00,396.90,10.40,21.70
|
||||
0.12932,0.00,13.920,0,0.4370,6.6780,31.10,5.9604,4,289.0,16.00,396.90,6.27,28.60
|
||||
0.05372,0.00,13.920,0,0.4370,6.5490,51.00,5.9604,4,289.0,16.00,392.85,7.39,27.10
|
||||
0.14103,0.00,13.920,0,0.4370,5.7900,58.00,6.3200,4,289.0,16.00,396.90,15.84,20.30
|
||||
0.06466,70.00,2.240,0,0.4000,6.3450,20.10,7.8278,5,358.0,14.80,368.24,4.97,22.50
|
||||
0.05561,70.00,2.240,0,0.4000,7.0410,10.00,7.8278,5,358.0,14.80,371.58,4.74,29.00
|
||||
0.04417,70.00,2.240,0,0.4000,6.8710,47.40,7.8278,5,358.0,14.80,390.86,6.07,24.80
|
||||
0.03537,34.00,6.090,0,0.4330,6.5900,40.40,5.4917,7,329.0,16.10,395.75,9.50,22.00
|
||||
0.09266,34.00,6.090,0,0.4330,6.4950,18.40,5.4917,7,329.0,16.10,383.61,8.67,26.40
|
||||
0.10000,34.00,6.090,0,0.4330,6.9820,17.70,5.4917,7,329.0,16.10,390.43,4.86,33.10
|
||||
0.05515,33.00,2.180,0,0.4720,7.2360,41.10,4.0220,7,222.0,18.40,393.68,6.93,36.10
|
||||
0.05479,33.00,2.180,0,0.4720,6.6160,58.10,3.3700,7,222.0,18.40,393.36,8.93,28.40
|
||||
0.07503,33.00,2.180,0,0.4720,7.4200,71.90,3.0992,7,222.0,18.40,396.90,6.47,33.40
|
||||
0.04932,33.00,2.180,0,0.4720,6.8490,70.30,3.1827,7,222.0,18.40,396.90,7.53,28.20
|
||||
0.49298,0.00,9.900,0,0.5440,6.6350,82.50,3.3175,4,304.0,18.40,396.90,4.54,22.80
|
||||
0.34940,0.00,9.900,0,0.5440,5.9720,76.70,3.1025,4,304.0,18.40,396.24,9.97,20.30
|
||||
2.63548,0.00,9.900,0,0.5440,4.9730,37.80,2.5194,4,304.0,18.40,350.45,12.64,16.10
|
||||
0.79041,0.00,9.900,0,0.5440,6.1220,52.80,2.6403,4,304.0,18.40,396.90,5.98,22.10
|
||||
0.26169,0.00,9.900,0,0.5440,6.0230,90.40,2.8340,4,304.0,18.40,396.30,11.72,19.40
|
||||
0.26938,0.00,9.900,0,0.5440,6.2660,82.80,3.2628,4,304.0,18.40,393.39,7.90,21.60
|
||||
0.36920,0.00,9.900,0,0.5440,6.5670,87.30,3.6023,4,304.0,18.40,395.69,9.28,23.80
|
||||
0.25356,0.00,9.900,0,0.5440,5.7050,77.70,3.9450,4,304.0,18.40,396.42,11.50,16.20
|
||||
0.31827,0.00,9.900,0,0.5440,5.9140,83.20,3.9986,4,304.0,18.40,390.70,18.33,17.80
|
||||
0.24522,0.00,9.900,0,0.5440,5.7820,71.70,4.0317,4,304.0,18.40,396.90,15.94,19.80
|
||||
0.40202,0.00,9.900,0,0.5440,6.3820,67.20,3.5325,4,304.0,18.40,395.21,10.36,23.10
|
||||
0.47547,0.00,9.900,0,0.5440,6.1130,58.80,4.0019,4,304.0,18.40,396.23,12.73,21.00
|
||||
0.16760,0.00,7.380,0,0.4930,6.4260,52.30,4.5404,5,287.0,19.60,396.90,7.20,23.80
|
||||
0.18159,0.00,7.380,0,0.4930,6.3760,54.30,4.5404,5,287.0,19.60,396.90,6.87,23.10
|
||||
0.35114,0.00,7.380,0,0.4930,6.0410,49.90,4.7211,5,287.0,19.60,396.90,7.70,20.40
|
||||
0.28392,0.00,7.380,0,0.4930,5.7080,74.30,4.7211,5,287.0,19.60,391.13,11.74,18.50
|
||||
0.34109,0.00,7.380,0,0.4930,6.4150,40.10,4.7211,5,287.0,19.60,396.90,6.12,25.00
|
||||
0.19186,0.00,7.380,0,0.4930,6.4310,14.70,5.4159,5,287.0,19.60,393.68,5.08,24.60
|
||||
0.30347,0.00,7.380,0,0.4930,6.3120,28.90,5.4159,5,287.0,19.60,396.90,6.15,23.00
|
||||
0.24103,0.00,7.380,0,0.4930,6.0830,43.70,5.4159,5,287.0,19.60,396.90,12.79,22.20
|
||||
0.06617,0.00,3.240,0,0.4600,5.8680,25.80,5.2146,4,430.0,16.90,382.44,9.97,19.30
|
||||
0.06724,0.00,3.240,0,0.4600,6.3330,17.20,5.2146,4,430.0,16.90,375.21,7.34,22.60
|
||||
0.04544,0.00,3.240,0,0.4600,6.1440,32.20,5.8736,4,430.0,16.90,368.57,9.09,19.80
|
||||
0.05023,35.00,6.060,0,0.4379,5.7060,28.40,6.6407,1,304.0,16.90,394.02,12.43,17.10
|
||||
0.03466,35.00,6.060,0,0.4379,6.0310,23.30,6.6407,1,304.0,16.90,362.25,7.83,19.40
|
||||
0.05083,0.00,5.190,0,0.5150,6.3160,38.10,6.4584,5,224.0,20.20,389.71,5.68,22.20
|
||||
0.03738,0.00,5.190,0,0.5150,6.3100,38.50,6.4584,5,224.0,20.20,389.40,6.75,20.70
|
||||
0.03961,0.00,5.190,0,0.5150,6.0370,34.50,5.9853,5,224.0,20.20,396.90,8.01,21.10
|
||||
0.03427,0.00,5.190,0,0.5150,5.8690,46.30,5.2311,5,224.0,20.20,396.90,9.80,19.50
|
||||
0.03041,0.00,5.190,0,0.5150,5.8950,59.60,5.6150,5,224.0,20.20,394.81,10.56,18.50
|
||||
0.03306,0.00,5.190,0,0.5150,6.0590,37.30,4.8122,5,224.0,20.20,396.14,8.51,20.60
|
||||
0.05497,0.00,5.190,0,0.5150,5.9850,45.40,4.8122,5,224.0,20.20,396.90,9.74,19.00
|
||||
0.06151,0.00,5.190,0,0.5150,5.9680,58.50,4.8122,5,224.0,20.20,396.90,9.29,18.70
|
||||
0.01301,35.00,1.520,0,0.4420,7.2410,49.30,7.0379,1,284.0,15.50,394.74,5.49,32.70
|
||||
0.02498,0.00,1.890,0,0.5180,6.5400,59.70,6.2669,1,422.0,15.90,389.96,8.65,16.50
|
||||
0.02543,55.00,3.780,0,0.4840,6.6960,56.40,5.7321,5,370.0,17.60,396.90,7.18,23.90
|
||||
0.03049,55.00,3.780,0,0.4840,6.8740,28.10,6.4654,5,370.0,17.60,387.97,4.61,31.20
|
||||
0.03113,0.00,4.390,0,0.4420,6.0140,48.50,8.0136,3,352.0,18.80,385.64,10.53,17.50
|
||||
0.06162,0.00,4.390,0,0.4420,5.8980,52.30,8.0136,3,352.0,18.80,364.61,12.67,17.20
|
||||
0.01870,85.00,4.150,0,0.4290,6.5160,27.70,8.5353,4,351.0,17.90,392.43,6.36,23.10
|
||||
0.01501,80.00,2.010,0,0.4350,6.6350,29.70,8.3440,4,280.0,17.00,390.94,5.99,24.50
|
||||
0.02899,40.00,1.250,0,0.4290,6.9390,34.50,8.7921,1,335.0,19.70,389.85,5.89,26.60
|
||||
0.06211,40.00,1.250,0,0.4290,6.4900,44.40,8.7921,1,335.0,19.70,396.90,5.98,22.90
|
||||
0.07950,60.00,1.690,0,0.4110,6.5790,35.90,10.7103,4,411.0,18.30,370.78,5.49,24.10
|
||||
0.07244,60.00,1.690,0,0.4110,5.8840,18.50,10.7103,4,411.0,18.30,392.33,7.79,18.60
|
||||
0.01709,90.00,2.020,0,0.4100,6.7280,36.10,12.1265,5,187.0,17.00,384.46,4.50,30.10
|
||||
0.04301,80.00,1.910,0,0.4130,5.6630,21.90,10.5857,4,334.0,22.00,382.80,8.05,18.20
|
||||
0.10659,80.00,1.910,0,0.4130,5.9360,19.50,10.5857,4,334.0,22.00,376.04,5.57,20.60
|
||||
8.98296,0.00,18.100,1,0.7700,6.2120,97.40,2.1222,24,666.0,20.20,377.73,17.60,17.80
|
||||
3.84970,0.00,18.100,1,0.7700,6.3950,91.00,2.5052,24,666.0,20.20,391.34,13.27,21.70
|
||||
5.20177,0.00,18.100,1,0.7700,6.1270,83.40,2.7227,24,666.0,20.20,395.43,11.48,22.70
|
||||
4.26131,0.00,18.100,0,0.7700,6.1120,81.30,2.5091,24,666.0,20.20,390.74,12.67,22.60
|
||||
4.54192,0.00,18.100,0,0.7700,6.3980,88.00,2.5182,24,666.0,20.20,374.56,7.79,25.00
|
||||
3.83684,0.00,18.100,0,0.7700,6.2510,91.10,2.2955,24,666.0,20.20,350.65,14.19,19.90
|
||||
3.67822,0.00,18.100,0,0.7700,5.3620,96.20,2.1036,24,666.0,20.20,380.79,10.19,20.80
|
||||
4.22239,0.00,18.100,1,0.7700,5.8030,89.00,1.9047,24,666.0,20.20,353.04,14.64,16.80
|
||||
3.47428,0.00,18.100,1,0.7180,8.7800,82.90,1.9047,24,666.0,20.20,354.55,5.29,21.90
|
||||
4.55587,0.00,18.100,0,0.7180,3.5610,87.90,1.6132,24,666.0,20.20,354.70,7.12,27.50
|
||||
3.69695,0.00,18.100,0,0.7180,4.9630,91.40,1.7523,24,666.0,20.20,316.03,14.00,21.90
|
||||
13.52220,0.00,18.100,0,0.6310,3.8630,100.00,1.5106,24,666.0,20.20,131.42,13.33,23.10
|
||||
4.89822,0.00,18.100,0,0.6310,4.9700,100.00,1.3325,24,666.0,20.20,375.52,3.26,50.00
|
||||
5.66998,0.00,18.100,1,0.6310,6.6830,96.80,1.3567,24,666.0,20.20,375.33,3.73,50.00
|
||||
6.53876,0.00,18.100,1,0.6310,7.0160,97.50,1.2024,24,666.0,20.20,392.05,2.96,50.00
|
||||
9.23230,0.00,18.100,0,0.6310,6.2160,100.00,1.1691,24,666.0,20.20,366.15,9.53,50.00
|
||||
8.26725,0.00,18.100,1,0.6680,5.8750,89.60,1.1296,24,666.0,20.20,347.88,8.88,50.00
|
||||
11.10810,0.00,18.100,0,0.6680,4.9060,100.00,1.1742,24,666.0,20.20,396.90,34.77,13.80
|
||||
18.49820,0.00,18.100,0,0.6680,4.1380,100.00,1.1370,24,666.0,20.20,396.90,37.97,13.80
|
||||
19.60910,0.00,18.100,0,0.6710,7.3130,97.90,1.3163,24,666.0,20.20,396.90,13.44,15.00
|
||||
15.28800,0.00,18.100,0,0.6710,6.6490,93.30,1.3449,24,666.0,20.20,363.02,23.24,13.90
|
||||
9.82349,0.00,18.100,0,0.6710,6.7940,98.80,1.3580,24,666.0,20.20,396.90,21.24,13.30
|
||||
23.64820,0.00,18.100,0,0.6710,6.3800,96.20,1.3861,24,666.0,20.20,396.90,23.69,13.10
|
||||
17.86670,0.00,18.100,0,0.6710,6.2230,100.00,1.3861,24,666.0,20.20,393.74,21.78,10.20
|
||||
88.97620,0.00,18.100,0,0.6710,6.9680,91.90,1.4165,24,666.0,20.20,396.90,17.21,10.40
|
||||
15.87440,0.00,18.100,0,0.6710,6.5450,99.10,1.5192,24,666.0,20.20,396.90,21.08,10.90
|
||||
9.18702,0.00,18.100,0,0.7000,5.5360,100.00,1.5804,24,666.0,20.20,396.90,23.60,11.30
|
||||
7.99248,0.00,18.100,0,0.7000,5.5200,100.00,1.5331,24,666.0,20.20,396.90,24.56,12.30
|
||||
20.08490,0.00,18.100,0,0.7000,4.3680,91.20,1.4395,24,666.0,20.20,285.83,30.63,8.80
|
||||
16.81180,0.00,18.100,0,0.7000,5.2770,98.10,1.4261,24,666.0,20.20,396.90,30.81,7.20
|
||||
24.39380,0.00,18.100,0,0.7000,4.6520,100.00,1.4672,24,666.0,20.20,396.90,28.28,10.50
|
||||
22.59710,0.00,18.100,0,0.7000,5.0000,89.50,1.5184,24,666.0,20.20,396.90,31.99,7.40
|
||||
14.33370,0.00,18.100,0,0.7000,4.8800,100.00,1.5895,24,666.0,20.20,372.92,30.62,10.20
|
||||
8.15174,0.00,18.100,0,0.7000,5.3900,98.90,1.7281,24,666.0,20.20,396.90,20.85,11.50
|
||||
6.96215,0.00,18.100,0,0.7000,5.7130,97.00,1.9265,24,666.0,20.20,394.43,17.11,15.10
|
||||
5.29305,0.00,18.100,0,0.7000,6.0510,82.50,2.1678,24,666.0,20.20,378.38,18.76,23.20
|
||||
11.57790,0.00,18.100,0,0.7000,5.0360,97.00,1.7700,24,666.0,20.20,396.90,25.68,9.70
|
||||
8.64476,0.00,18.100,0,0.6930,6.1930,92.60,1.7912,24,666.0,20.20,396.90,15.17,13.80
|
||||
13.35980,0.00,18.100,0,0.6930,5.8870,94.70,1.7821,24,666.0,20.20,396.90,16.35,12.70
|
||||
8.71675,0.00,18.100,0,0.6930,6.4710,98.80,1.7257,24,666.0,20.20,391.98,17.12,13.10
|
||||
5.87205,0.00,18.100,0,0.6930,6.4050,96.00,1.6768,24,666.0,20.20,396.90,19.37,12.50
|
||||
7.67202,0.00,18.100,0,0.6930,5.7470,98.90,1.6334,24,666.0,20.20,393.10,19.92,8.50
|
||||
38.35180,0.00,18.100,0,0.6930,5.4530,100.00,1.4896,24,666.0,20.20,396.90,30.59,5.00
|
||||
9.91655,0.00,18.100,0,0.6930,5.8520,77.80,1.5004,24,666.0,20.20,338.16,29.97,6.30
|
||||
25.04610,0.00,18.100,0,0.6930,5.9870,100.00,1.5888,24,666.0,20.20,396.90,26.77,5.60
|
||||
14.23620,0.00,18.100,0,0.6930,6.3430,100.00,1.5741,24,666.0,20.20,396.90,20.32,7.20
|
||||
9.59571,0.00,18.100,0,0.6930,6.4040,100.00,1.6390,24,666.0,20.20,376.11,20.31,12.10
|
||||
24.80170,0.00,18.100,0,0.6930,5.3490,96.00,1.7028,24,666.0,20.20,396.90,19.77,8.30
|
||||
41.52920,0.00,18.100,0,0.6930,5.5310,85.40,1.6074,24,666.0,20.20,329.46,27.38,8.50
|
||||
67.92080,0.00,18.100,0,0.6930,5.6830,100.00,1.4254,24,666.0,20.20,384.97,22.98,5.00
|
||||
20.71620,0.00,18.100,0,0.6590,4.1380,100.00,1.1781,24,666.0,20.20,370.22,23.34,11.90
|
||||
11.95110,0.00,18.100,0,0.6590,5.6080,100.00,1.2852,24,666.0,20.20,332.09,12.13,27.90
|
||||
7.40389,0.00,18.100,0,0.5970,5.6170,97.90,1.4547,24,666.0,20.20,314.64,26.40,17.20
|
||||
14.43830,0.00,18.100,0,0.5970,6.8520,100.00,1.4655,24,666.0,20.20,179.36,19.78,27.50
|
||||
51.13580,0.00,18.100,0,0.5970,5.7570,100.00,1.4130,24,666.0,20.20,2.60,10.11,15.00
|
||||
14.05070,0.00,18.100,0,0.5970,6.6570,100.00,1.5275,24,666.0,20.20,35.05,21.22,17.20
|
||||
18.81100,0.00,18.100,0,0.5970,4.6280,100.00,1.5539,24,666.0,20.20,28.79,34.37,17.90
|
||||
28.65580,0.00,18.100,0,0.5970,5.1550,100.00,1.5894,24,666.0,20.20,210.97,20.08,16.30
|
||||
45.74610,0.00,18.100,0,0.6930,4.5190,100.00,1.6582,24,666.0,20.20,88.27,36.98,7.00
|
||||
18.08460,0.00,18.100,0,0.6790,6.4340,100.00,1.8347,24,666.0,20.20,27.25,29.05,7.20
|
||||
10.83420,0.00,18.100,0,0.6790,6.7820,90.80,1.8195,24,666.0,20.20,21.57,25.79,7.50
|
||||
25.94060,0.00,18.100,0,0.6790,5.3040,89.10,1.6475,24,666.0,20.20,127.36,26.64,10.40
|
||||
73.53410,0.00,18.100,0,0.6790,5.9570,100.00,1.8026,24,666.0,20.20,16.45,20.62,8.80
|
||||
11.81230,0.00,18.100,0,0.7180,6.8240,76.50,1.7940,24,666.0,20.20,48.45,22.74,8.40
|
||||
11.08740,0.00,18.100,0,0.7180,6.4110,100.00,1.8589,24,666.0,20.20,318.75,15.02,16.70
|
||||
7.02259,0.00,18.100,0,0.7180,6.0060,95.30,1.8746,24,666.0,20.20,319.98,15.70,14.20
|
||||
12.04820,0.00,18.100,0,0.6140,5.6480,87.60,1.9512,24,666.0,20.20,291.55,14.10,20.80
|
||||
7.05042,0.00,18.100,0,0.6140,6.1030,85.10,2.0218,24,666.0,20.20,2.52,23.29,13.40
|
||||
8.79212,0.00,18.100,0,0.5840,5.5650,70.60,2.0635,24,666.0,20.20,3.65,17.16,11.70
|
||||
15.86030,0.00,18.100,0,0.6790,5.8960,95.40,1.9096,24,666.0,20.20,7.68,24.39,8.30
|
||||
12.24720,0.00,18.100,0,0.5840,5.8370,59.70,1.9976,24,666.0,20.20,24.65,15.69,10.20
|
||||
37.66190,0.00,18.100,0,0.6790,6.2020,78.70,1.8629,24,666.0,20.20,18.82,14.52,10.90
|
||||
7.36711,0.00,18.100,0,0.6790,6.1930,78.10,1.9356,24,666.0,20.20,96.73,21.52,11.00
|
||||
9.33889,0.00,18.100,0,0.6790,6.3800,95.60,1.9682,24,666.0,20.20,60.72,24.08,9.50
|
||||
8.49213,0.00,18.100,0,0.5840,6.3480,86.10,2.0527,24,666.0,20.20,83.45,17.64,14.50
|
||||
10.06230,0.00,18.100,0,0.5840,6.8330,94.30,2.0882,24,666.0,20.20,81.33,19.69,14.10
|
||||
6.44405,0.00,18.100,0,0.5840,6.4250,74.80,2.2004,24,666.0,20.20,97.95,12.03,16.10
|
||||
5.58107,0.00,18.100,0,0.7130,6.4360,87.90,2.3158,24,666.0,20.20,100.19,16.22,14.30
|
||||
13.91340,0.00,18.100,0,0.7130,6.2080,95.00,2.2222,24,666.0,20.20,100.63,15.17,11.70
|
||||
11.16040,0.00,18.100,0,0.7400,6.6290,94.60,2.1247,24,666.0,20.20,109.85,23.27,13.40
|
||||
14.42080,0.00,18.100,0,0.7400,6.4610,93.30,2.0026,24,666.0,20.20,27.49,18.05,9.60
|
||||
15.17720,0.00,18.100,0,0.7400,6.1520,100.00,1.9142,24,666.0,20.20,9.32,26.45,8.70
|
||||
13.67810,0.00,18.100,0,0.7400,5.9350,87.90,1.8206,24,666.0,20.20,68.95,34.02,8.40
|
||||
9.39063,0.00,18.100,0,0.7400,5.6270,93.90,1.8172,24,666.0,20.20,396.90,22.88,12.80
|
||||
22.05110,0.00,18.100,0,0.7400,5.8180,92.40,1.8662,24,666.0,20.20,391.45,22.11,10.50
|
||||
9.72418,0.00,18.100,0,0.7400,6.4060,97.20,2.0651,24,666.0,20.20,385.96,19.52,17.10
|
||||
5.66637,0.00,18.100,0,0.7400,6.2190,100.00,2.0048,24,666.0,20.20,395.69,16.59,18.40
|
||||
9.96654,0.00,18.100,0,0.7400,6.4850,100.00,1.9784,24,666.0,20.20,386.73,18.85,15.40
|
||||
12.80230,0.00,18.100,0,0.7400,5.8540,96.60,1.8956,24,666.0,20.20,240.52,23.79,10.80
|
||||
10.67180,0.00,18.100,0,0.7400,6.4590,94.80,1.9879,24,666.0,20.20,43.06,23.98,11.80
|
||||
6.28807,0.00,18.100,0,0.7400,6.3410,96.40,2.0720,24,666.0,20.20,318.01,17.79,14.90
|
||||
9.92485,0.00,18.100,0,0.7400,6.2510,96.60,2.1980,24,666.0,20.20,388.52,16.44,12.60
|
||||
9.32909,0.00,18.100,0,0.7130,6.1850,98.70,2.2616,24,666.0,20.20,396.90,18.13,14.10
|
||||
7.52601,0.00,18.100,0,0.7130,6.4170,98.30,2.1850,24,666.0,20.20,304.21,19.31,13.00
|
||||
6.71772,0.00,18.100,0,0.7130,6.7490,92.60,2.3236,24,666.0,20.20,0.32,17.44,13.40
|
||||
5.44114,0.00,18.100,0,0.7130,6.6550,98.20,2.3552,24,666.0,20.20,355.29,17.73,15.20
|
||||
5.09017,0.00,18.100,0,0.7130,6.2970,91.80,2.3682,24,666.0,20.20,385.09,17.27,16.10
|
||||
8.24809,0.00,18.100,0,0.7130,7.3930,99.30,2.4527,24,666.0,20.20,375.87,16.74,17.80
|
||||
9.51363,0.00,18.100,0,0.7130,6.7280,94.10,2.4961,24,666.0,20.20,6.68,18.71,14.90
|
||||
4.75237,0.00,18.100,0,0.7130,6.5250,86.50,2.4358,24,666.0,20.20,50.92,18.13,14.10
|
||||
4.66883,0.00,18.100,0,0.7130,5.9760,87.90,2.5806,24,666.0,20.20,10.48,19.01,12.70
|
||||
8.20058,0.00,18.100,0,0.7130,5.9360,80.30,2.7792,24,666.0,20.20,3.50,16.94,13.50
|
||||
7.75223,0.00,18.100,0,0.7130,6.3010,83.70,2.7831,24,666.0,20.20,272.21,16.23,14.90
|
||||
6.80117,0.00,18.100,0,0.7130,6.0810,84.40,2.7175,24,666.0,20.20,396.90,14.70,20.00
|
||||
4.81213,0.00,18.100,0,0.7130,6.7010,90.00,2.5975,24,666.0,20.20,255.23,16.42,16.40
|
||||
3.69311,0.00,18.100,0,0.7130,6.3760,88.40,2.5671,24,666.0,20.20,391.43,14.65,17.70
|
||||
6.65492,0.00,18.100,0,0.7130,6.3170,83.00,2.7344,24,666.0,20.20,396.90,13.99,19.50
|
||||
5.82115,0.00,18.100,0,0.7130,6.5130,89.90,2.8016,24,666.0,20.20,393.82,10.29,20.20
|
||||
7.83932,0.00,18.100,0,0.6550,6.2090,65.40,2.9634,24,666.0,20.20,396.90,13.22,21.40
|
||||
3.16360,0.00,18.100,0,0.6550,5.7590,48.20,3.0665,24,666.0,20.20,334.40,14.13,19.90
|
||||
3.77498,0.00,18.100,0,0.6550,5.9520,84.70,2.8715,24,666.0,20.20,22.01,17.15,19.00
|
||||
4.42228,0.00,18.100,0,0.5840,6.0030,94.50,2.5403,24,666.0,20.20,331.29,21.32,19.10
|
||||
15.57570,0.00,18.100,0,0.5800,5.9260,71.00,2.9084,24,666.0,20.20,368.74,18.13,19.10
|
||||
13.07510,0.00,18.100,0,0.5800,5.7130,56.70,2.8237,24,666.0,20.20,396.90,14.76,20.10
|
||||
4.34879,0.00,18.100,0,0.5800,6.1670,84.00,3.0334,24,666.0,20.20,396.90,16.29,19.90
|
||||
4.03841,0.00,18.100,0,0.5320,6.2290,90.70,3.0993,24,666.0,20.20,395.33,12.87,19.60
|
||||
3.56868,0.00,18.100,0,0.5800,6.4370,75.00,2.8965,24,666.0,20.20,393.37,14.36,23.20
|
||||
4.64689,0.00,18.100,0,0.6140,6.9800,67.60,2.5329,24,666.0,20.20,374.68,11.66,29.80
|
||||
8.05579,0.00,18.100,0,0.5840,5.4270,95.40,2.4298,24,666.0,20.20,352.58,18.14,13.80
|
||||
6.39312,0.00,18.100,0,0.5840,6.1620,97.40,2.2060,24,666.0,20.20,302.76,24.10,13.30
|
||||
4.87141,0.00,18.100,0,0.6140,6.4840,93.60,2.3053,24,666.0,20.20,396.21,18.68,16.70
|
||||
15.02340,0.00,18.100,0,0.6140,5.3040,97.30,2.1007,24,666.0,20.20,349.48,24.91,12.00
|
||||
10.23300,0.00,18.100,0,0.6140,6.1850,96.70,2.1705,24,666.0,20.20,379.70,18.03,14.60
|
||||
14.33370,0.00,18.100,0,0.6140,6.2290,88.00,1.9512,24,666.0,20.20,383.32,13.11,21.40
|
||||
5.82401,0.00,18.100,0,0.5320,6.2420,64.70,3.4242,24,666.0,20.20,396.90,10.74,23.00
|
||||
5.70818,0.00,18.100,0,0.5320,6.7500,74.90,3.3317,24,666.0,20.20,393.07,7.74,23.70
|
||||
5.73116,0.00,18.100,0,0.5320,7.0610,77.00,3.4106,24,666.0,20.20,395.28,7.01,25.00
|
||||
2.81838,0.00,18.100,0,0.5320,5.7620,40.30,4.0983,24,666.0,20.20,392.92,10.42,21.80
|
||||
2.37857,0.00,18.100,0,0.5830,5.8710,41.90,3.7240,24,666.0,20.20,370.73,13.34,20.60
|
||||
3.67367,0.00,18.100,0,0.5830,6.3120,51.90,3.9917,24,666.0,20.20,388.62,10.58,21.20
|
||||
5.69175,0.00,18.100,0,0.5830,6.1140,79.80,3.5459,24,666.0,20.20,392.68,14.98,19.10
|
||||
4.83567,0.00,18.100,0,0.5830,5.9050,53.20,3.1523,24,666.0,20.20,388.22,11.45,20.60
|
||||
0.15086,0.00,27.740,0,0.6090,5.4540,92.70,1.8209,4,711.0,20.10,395.09,18.06,15.20
|
||||
0.18337,0.00,27.740,0,0.6090,5.4140,98.30,1.7554,4,711.0,20.10,344.05,23.97,7.00
|
||||
0.20746,0.00,27.740,0,0.6090,5.0930,98.00,1.8226,4,711.0,20.10,318.43,29.68,8.10
|
||||
0.10574,0.00,27.740,0,0.6090,5.9830,98.80,1.8681,4,711.0,20.10,390.11,18.07,13.60
|
||||
0.11132,0.00,27.740,0,0.6090,5.9830,83.50,2.1099,4,711.0,20.10,396.90,13.35,20.10
|
||||
0.17331,0.00,9.690,0,0.5850,5.7070,54.00,2.3817,6,391.0,19.20,396.90,12.01,21.80
|
||||
0.27957,0.00,9.690,0,0.5850,5.9260,42.60,2.3817,6,391.0,19.20,396.90,13.59,24.50
|
||||
0.17899,0.00,9.690,0,0.5850,5.6700,28.80,2.7986,6,391.0,19.20,393.29,17.60,23.10
|
||||
0.28960,0.00,9.690,0,0.5850,5.3900,72.90,2.7986,6,391.0,19.20,396.90,21.14,19.70
|
||||
0.26838,0.00,9.690,0,0.5850,5.7940,70.60,2.8927,6,391.0,19.20,396.90,14.10,18.30
|
||||
0.23912,0.00,9.690,0,0.5850,6.0190,65.30,2.4091,6,391.0,19.20,396.90,12.92,21.20
|
||||
0.17783,0.00,9.690,0,0.5850,5.5690,73.50,2.3999,6,391.0,19.20,395.77,15.10,17.50
|
||||
0.22438,0.00,9.690,0,0.5850,6.0270,79.70,2.4982,6,391.0,19.20,396.90,14.33,16.80
|
||||
0.06263,0.00,11.930,0,0.5730,6.5930,69.10,2.4786,1,273.0,21.00,391.99,9.67,22.40
|
||||
0.04527,0.00,11.930,0,0.5730,6.1200,76.70,2.2875,1,273.0,21.00,396.90,9.08,20.60
|
||||
0.06076,0.00,11.930,0,0.5730,6.9760,91.00,2.1675,1,273.0,21.00,396.90,5.64,23.90
|
||||
0.10959,0.00,11.930,0,0.5730,6.7940,89.30,2.3889,1,273.0,21.00,393.45,6.48,22.00
|
||||
0.04741,0.00,11.930,0,0.5730,6.0300,80.80,2.5050,1,273.0,21.00,396.90,7.88,11.90
|
||||
|
36
lipatov_ilya_lab_4/lab4.py
Normal file
@@ -0,0 +1,36 @@
from sklearn.cluster import AgglomerativeClustering
from scipy.cluster.hierarchy import dendrogram
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np

FILE_PATH = "boston.csv"
FEATURES = ['LSTAT', 'CRIM']


def plot_dendrogram(model, **kwargs):
    # Count the number of samples under each node of the merge tree
    counts = np.zeros(model.children_.shape[0])
    n_samples = len(model.labels_)
    for i, merge in enumerate(model.children_):
        current_count = 0
        for child_idx in merge:
            if child_idx < n_samples:
                current_count += 1  # leaf node
            else:
                current_count += counts[child_idx - n_samples]  # previously formed cluster
        counts[i] = current_count

    # Build the linkage matrix expected by scipy's dendrogram
    linkage_matrix = np.column_stack(
        [model.children_, model.distances_, counts]
    ).astype(float)

    dendrogram(linkage_matrix, **kwargs)


data = pd.read_csv(FILE_PATH)
X = data[FEATURES]
# distance_threshold=0 keeps the full tree so that distances_ is available for the dendrogram
model = AgglomerativeClustering(distance_threshold=0, n_clusters=None)
model = model.fit(X)
plt.title("Hierarchical Clustering Dendrogram for Boston House Prices")

plot_dendrogram(model, truncate_mode="level", p=2)
plt.show()
BIN
lipatov_ilya_lab_4/result.png
Normal file
51
lipatov_ilya_lab_5/README.md
Normal file
@@ -0,0 +1,51 @@
## Laboratory Work No. 5

### Regression

## Completed by Ilya Lipatov, group PIbd-41

### How to run the laboratory work:

* install python, numpy, matplotlib, sklearn
* run the project (the entry point is lab5.py)

### Technologies used:

* The `Python` programming language with the numpy, matplotlib and sklearn libraries
* The `PyCharm` IDE

### What the laboratory work does:

* Uses polynomial regression to predict the median home value (in thousands of dollars) from the average number of rooms per dwelling, the per-capita crime rate by town, and the index of accessibility to radial highways.
* Prints the error, the model score and the resulting prediction

### Examples:

### Results:

Tests were run with polynomial degrees from 1 to 6. Overall the error remains fairly large; it is smallest at degree 2 or 4 (with model scores of 0.68 and 0.55, respectively). A sketch of such a degree sweep is shown after this README.

#### Tests

#### degree = 1
* Score: 0.4252542186083391
* Error: 0.22653604605972913

#### degree = 2
* Score: 0.6835376807930289
* Error: 0.1625504540569756

#### degree = 3
* Score: 0.5267438865953347
* Error: 0.195302452251188

#### degree = 4
* Score: 0.5481932964142193
* Error: 0.17852746450144702

#### degree = 5
* Score: -3.372087305867348
* Error: 0.4163026401028063

#### degree = 6
* Score: -69.05174526020205
* Error: 1.3125236408458876
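Not part of the submitted diff: the degree sweep reported above can be reproduced with a short loop. This is a minimal sketch under the assumption that it uses the same boston.csv, the same CRIM/RM/RAD features with MEDV as the target, and the same 80/20 split as `lipatov_ilya_lab_5/lab5.py` shown later in this diff; the exact numbers depend on the random split.

```python
# Sketch of the degree sweep from the README above (assumes boston.csv with
# the CRIM, RM, RAD and MEDV columns used in lab5.py).
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_absolute_percentage_error
from sklearn.model_selection import train_test_split
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures

data = pd.read_csv("boston.csv")
X, y = data[["CRIM", "RM", "RAD"]], data["MEDV"]
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

for degree in range(1, 7):
    pipeline = Pipeline([
        ("polynomial_features", PolynomialFeatures(degree=degree)),
        ("linear_regression", LinearRegression()),
    ])
    pipeline.fit(X_train, y_train)
    y_pred = pipeline.predict(X_test)
    print(f"degree = {degree}: "
          f"score = {pipeline.score(X_test, y_test):.4f}, "
          f"error = {mean_absolute_percentage_error(y_test, y_pred):.4f}")
```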
507
lipatov_ilya_lab_5/boston.csv
Normal file
@@ -0,0 +1,507 @@
|
||||
CRIM,ZN,INDUS,CHAS,NOX,RM,AGE,DIS,RAD,TAX,PTRATIO,B,LSTAT,MEDV
|
||||
0.00632,18.00,2.310,0,0.5380,6.5750,65.20,4.0900,1,296.0,15.30,396.90,4.98,24.00
|
||||
0.02731,0.00,7.070,0,0.4690,6.4210,78.90,4.9671,2,242.0,17.80,396.90,9.14,21.60
|
||||
0.02729,0.00,7.070,0,0.4690,7.1850,61.10,4.9671,2,242.0,17.80,392.83,4.03,34.70
|
||||
0.03237,0.00,2.180,0,0.4580,6.9980,45.80,6.0622,3,222.0,18.70,394.63,2.94,33.40
|
||||
0.06905,0.00,2.180,0,0.4580,7.1470,54.20,6.0622,3,222.0,18.70,396.90,5.33,36.20
|
||||
0.02985,0.00,2.180,0,0.4580,6.4300,58.70,6.0622,3,222.0,18.70,394.12,5.21,28.70
|
||||
0.08829,12.50,7.870,0,0.5240,6.0120,66.60,5.5605,5,311.0,15.20,395.60,12.43,22.90
|
||||
0.14455,12.50,7.870,0,0.5240,6.1720,96.10,5.9505,5,311.0,15.20,396.90,19.15,27.10
|
||||
0.21124,12.50,7.870,0,0.5240,5.6310,100.00,6.0821,5,311.0,15.20,386.63,29.93,16.50
|
||||
0.17004,12.50,7.870,0,0.5240,6.0040,85.90,6.5921,5,311.0,15.20,386.71,17.10,18.90
|
||||
0.22489,12.50,7.870,0,0.5240,6.3770,94.30,6.3467,5,311.0,15.20,392.52,20.45,15.00
|
||||
0.11747,12.50,7.870,0,0.5240,6.0090,82.90,6.2267,5,311.0,15.20,396.90,13.27,18.90
|
||||
0.09378,12.50,7.870,0,0.5240,5.8890,39.00,5.4509,5,311.0,15.20,390.50,15.71,21.70
|
||||
0.62976,0.00,8.140,0,0.5380,5.9490,61.80,4.7075,4,307.0,21.00,396.90,8.26,20.40
|
||||
0.63796,0.00,8.140,0,0.5380,6.0960,84.50,4.4619,4,307.0,21.00,380.02,10.26,18.20
|
||||
0.62739,0.00,8.140,0,0.5380,5.8340,56.50,4.4986,4,307.0,21.00,395.62,8.47,19.90
|
||||
1.05393,0.00,8.140,0,0.5380,5.9350,29.30,4.4986,4,307.0,21.00,386.85,6.58,23.10
|
||||
0.78420,0.00,8.140,0,0.5380,5.9900,81.70,4.2579,4,307.0,21.00,386.75,14.67,17.50
|
||||
0.80271,0.00,8.140,0,0.5380,5.4560,36.60,3.7965,4,307.0,21.00,288.99,11.69,20.20
|
||||
0.72580,0.00,8.140,0,0.5380,5.7270,69.50,3.7965,4,307.0,21.00,390.95,11.28,18.20
|
||||
1.25179,0.00,8.140,0,0.5380,5.5700,98.10,3.7979,4,307.0,21.00,376.57,21.02,13.60
|
||||
0.85204,0.00,8.140,0,0.5380,5.9650,89.20,4.0123,4,307.0,21.00,392.53,13.83,19.60
|
||||
1.23247,0.00,8.140,0,0.5380,6.1420,91.70,3.9769,4,307.0,21.00,396.90,18.72,15.20
|
||||
0.98843,0.00,8.140,0,0.5380,5.8130,100.00,4.0952,4,307.0,21.00,394.54,19.88,14.50
|
||||
0.75026,0.00,8.140,0,0.5380,5.9240,94.10,4.3996,4,307.0,21.00,394.33,16.30,15.60
|
||||
0.84054,0.00,8.140,0,0.5380,5.5990,85.70,4.4546,4,307.0,21.00,303.42,16.51,13.90
|
||||
0.67191,0.00,8.140,0,0.5380,5.8130,90.30,4.6820,4,307.0,21.00,376.88,14.81,16.60
|
||||
0.95577,0.00,8.140,0,0.5380,6.0470,88.80,4.4534,4,307.0,21.00,306.38,17.28,14.80
|
||||
0.77299,0.00,8.140,0,0.5380,6.4950,94.40,4.4547,4,307.0,21.00,387.94,12.80,18.40
|
||||
1.00245,0.00,8.140,0,0.5380,6.6740,87.30,4.2390,4,307.0,21.00,380.23,11.98,21.00
|
||||
1.13081,0.00,8.140,0,0.5380,5.7130,94.10,4.2330,4,307.0,21.00,360.17,22.60,12.70
|
||||
1.35472,0.00,8.140,0,0.5380,6.0720,100.00,4.1750,4,307.0,21.00,376.73,13.04,14.50
|
||||
1.38799,0.00,8.140,0,0.5380,5.9500,82.00,3.9900,4,307.0,21.00,232.60,27.71,13.20
|
||||
1.15172,0.00,8.140,0,0.5380,5.7010,95.00,3.7872,4,307.0,21.00,358.77,18.35,13.10
|
||||
1.61282,0.00,8.140,0,0.5380,6.0960,96.90,3.7598,4,307.0,21.00,248.31,20.34,13.50
|
||||
0.06417,0.00,5.960,0,0.4990,5.9330,68.20,3.3603,5,279.0,19.20,396.90,9.68,18.90
|
||||
0.09744,0.00,5.960,0,0.4990,5.8410,61.40,3.3779,5,279.0,19.20,377.56,11.41,20.00
|
||||
0.08014,0.00,5.960,0,0.4990,5.8500,41.50,3.9342,5,279.0,19.20,396.90,8.77,21.00
|
||||
0.17505,0.00,5.960,0,0.4990,5.9660,30.20,3.8473,5,279.0,19.20,393.43,10.13,24.70
|
||||
0.02763,75.00,2.950,0,0.4280,6.5950,21.80,5.4011,3,252.0,18.30,395.63,4.32,30.80
|
||||
0.03359,75.00,2.950,0,0.4280,7.0240,15.80,5.4011,3,252.0,18.30,395.62,1.98,34.90
|
||||
0.12744,0.00,6.910,0,0.4480,6.7700,2.90,5.7209,3,233.0,17.90,385.41,4.84,26.60
|
||||
0.14150,0.00,6.910,0,0.4480,6.1690,6.60,5.7209,3,233.0,17.90,383.37,5.81,25.30
|
||||
0.15936,0.00,6.910,0,0.4480,6.2110,6.50,5.7209,3,233.0,17.90,394.46,7.44,24.70
|
||||
0.12269,0.00,6.910,0,0.4480,6.0690,40.00,5.7209,3,233.0,17.90,389.39,9.55,21.20
|
||||
0.17142,0.00,6.910,0,0.4480,5.6820,33.80,5.1004,3,233.0,17.90,396.90,10.21,19.30
|
||||
0.18836,0.00,6.910,0,0.4480,5.7860,33.30,5.1004,3,233.0,17.90,396.90,14.15,20.00
|
||||
0.22927,0.00,6.910,0,0.4480,6.0300,85.50,5.6894,3,233.0,17.90,392.74,18.80,16.60
|
||||
0.25387,0.00,6.910,0,0.4480,5.3990,95.30,5.8700,3,233.0,17.90,396.90,30.81,14.40
|
||||
0.21977,0.00,6.910,0,0.4480,5.6020,62.00,6.0877,3,233.0,17.90,396.90,16.20,19.40
|
||||
0.08873,21.00,5.640,0,0.4390,5.9630,45.70,6.8147,4,243.0,16.80,395.56,13.45,19.70
|
||||
0.04337,21.00,5.640,0,0.4390,6.1150,63.00,6.8147,4,243.0,16.80,393.97,9.43,20.50
|
||||
0.05360,21.00,5.640,0,0.4390,6.5110,21.10,6.8147,4,243.0,16.80,396.90,5.28,25.00
|
||||
0.04981,21.00,5.640,0,0.4390,5.9980,21.40,6.8147,4,243.0,16.80,396.90,8.43,23.40
|
||||
0.01360,75.00,4.000,0,0.4100,5.8880,47.60,7.3197,3,469.0,21.10,396.90,14.80,18.90
|
||||
0.01311,90.00,1.220,0,0.4030,7.2490,21.90,8.6966,5,226.0,17.90,395.93,4.81,35.40
|
||||
0.02055,85.00,0.740,0,0.4100,6.3830,35.70,9.1876,2,313.0,17.30,396.90,5.77,24.70
|
||||
0.01432,100.00,1.320,0,0.4110,6.8160,40.50,8.3248,5,256.0,15.10,392.90,3.95,31.60
|
||||
0.15445,25.00,5.130,0,0.4530,6.1450,29.20,7.8148,8,284.0,19.70,390.68,6.86,23.30
|
||||
0.10328,25.00,5.130,0,0.4530,5.9270,47.20,6.9320,8,284.0,19.70,396.90,9.22,19.60
|
||||
0.14932,25.00,5.130,0,0.4530,5.7410,66.20,7.2254,8,284.0,19.70,395.11,13.15,18.70
|
||||
0.17171,25.00,5.130,0,0.4530,5.9660,93.40,6.8185,8,284.0,19.70,378.08,14.44,16.00
|
||||
0.11027,25.00,5.130,0,0.4530,6.4560,67.80,7.2255,8,284.0,19.70,396.90,6.73,22.20
|
||||
0.12650,25.00,5.130,0,0.4530,6.7620,43.40,7.9809,8,284.0,19.70,395.58,9.50,25.00
|
||||
0.01951,17.50,1.380,0,0.4161,7.1040,59.50,9.2229,3,216.0,18.60,393.24,8.05,33.00
|
||||
0.03584,80.00,3.370,0,0.3980,6.2900,17.80,6.6115,4,337.0,16.10,396.90,4.67,23.50
|
||||
0.04379,80.00,3.370,0,0.3980,5.7870,31.10,6.6115,4,337.0,16.10,396.90,10.24,19.40
|
||||
0.05789,12.50,6.070,0,0.4090,5.8780,21.40,6.4980,4,345.0,18.90,396.21,8.10,22.00
|
||||
0.13554,12.50,6.070,0,0.4090,5.5940,36.80,6.4980,4,345.0,18.90,396.90,13.09,17.40
|
||||
0.12816,12.50,6.070,0,0.4090,5.8850,33.00,6.4980,4,345.0,18.90,396.90,8.79,20.90
|
||||
0.08826,0.00,10.810,0,0.4130,6.4170,6.60,5.2873,4,305.0,19.20,383.73,6.72,24.20
|
||||
0.15876,0.00,10.810,0,0.4130,5.9610,17.50,5.2873,4,305.0,19.20,376.94,9.88,21.70
|
||||
0.09164,0.00,10.810,0,0.4130,6.0650,7.80,5.2873,4,305.0,19.20,390.91,5.52,22.80
|
||||
0.19539,0.00,10.810,0,0.4130,6.2450,6.20,5.2873,4,305.0,19.20,377.17,7.54,23.40
|
||||
0.07896,0.00,12.830,0,0.4370,6.2730,6.00,4.2515,5,398.0,18.70,394.92,6.78,24.10
|
||||
0.09512,0.00,12.830,0,0.4370,6.2860,45.00,4.5026,5,398.0,18.70,383.23,8.94,21.40
|
||||
0.10153,0.00,12.830,0,0.4370,6.2790,74.50,4.0522,5,398.0,18.70,373.66,11.97,20.00
|
||||
0.08707,0.00,12.830,0,0.4370,6.1400,45.80,4.0905,5,398.0,18.70,386.96,10.27,20.80
|
||||
0.05646,0.00,12.830,0,0.4370,6.2320,53.70,5.0141,5,398.0,18.70,386.40,12.34,21.20
|
||||
0.08387,0.00,12.830,0,0.4370,5.8740,36.60,4.5026,5,398.0,18.70,396.06,9.10,20.30
|
||||
0.04113,25.00,4.860,0,0.4260,6.7270,33.50,5.4007,4,281.0,19.00,396.90,5.29,28.00
|
||||
0.04462,25.00,4.860,0,0.4260,6.6190,70.40,5.4007,4,281.0,19.00,395.63,7.22,23.90
|
||||
0.03659,25.00,4.860,0,0.4260,6.3020,32.20,5.4007,4,281.0,19.00,396.90,6.72,24.80
|
||||
0.03551,25.00,4.860,0,0.4260,6.1670,46.70,5.4007,4,281.0,19.00,390.64,7.51,22.90
|
||||
0.05059,0.00,4.490,0,0.4490,6.3890,48.00,4.7794,3,247.0,18.50,396.90,9.62,23.90
|
||||
0.05735,0.00,4.490,0,0.4490,6.6300,56.10,4.4377,3,247.0,18.50,392.30,6.53,26.60
|
||||
0.05188,0.00,4.490,0,0.4490,6.0150,45.10,4.4272,3,247.0,18.50,395.99,12.86,22.50
|
||||
0.07151,0.00,4.490,0,0.4490,6.1210,56.80,3.7476,3,247.0,18.50,395.15,8.44,22.20
|
||||
0.05660,0.00,3.410,0,0.4890,7.0070,86.30,3.4217,2,270.0,17.80,396.90,5.50,23.60
|
||||
0.05302,0.00,3.410,0,0.4890,7.0790,63.10,3.4145,2,270.0,17.80,396.06,5.70,28.70
|
||||
0.04684,0.00,3.410,0,0.4890,6.4170,66.10,3.0923,2,270.0,17.80,392.18,8.81,22.60
|
||||
0.03932,0.00,3.410,0,0.4890,6.4050,73.90,3.0921,2,270.0,17.80,393.55,8.20,22.00
|
||||
0.04203,28.00,15.040,0,0.4640,6.4420,53.60,3.6659,4,270.0,18.20,395.01,8.16,22.90
|
||||
0.02875,28.00,15.040,0,0.4640,6.2110,28.90,3.6659,4,270.0,18.20,396.33,6.21,25.00
|
||||
0.04294,28.00,15.040,0,0.4640,6.2490,77.30,3.6150,4,270.0,18.20,396.90,10.59,20.60
|
||||
0.12204,0.00,2.890,0,0.4450,6.6250,57.80,3.4952,2,276.0,18.00,357.98,6.65,28.40
|
||||
0.11504,0.00,2.890,0,0.4450,6.1630,69.60,3.4952,2,276.0,18.00,391.83,11.34,21.40
|
||||
0.12083,0.00,2.890,0,0.4450,8.0690,76.00,3.4952,2,276.0,18.00,396.90,4.21,38.70
|
||||
0.08187,0.00,2.890,0,0.4450,7.8200,36.90,3.4952,2,276.0,18.00,393.53,3.57,43.80
|
||||
0.06860,0.00,2.890,0,0.4450,7.4160,62.50,3.4952,2,276.0,18.00,396.90,6.19,33.20
|
||||
0.14866,0.00,8.560,0,0.5200,6.7270,79.90,2.7778,5,384.0,20.90,394.76,9.42,27.50
|
||||
0.11432,0.00,8.560,0,0.5200,6.7810,71.30,2.8561,5,384.0,20.90,395.58,7.67,26.50
|
||||
0.22876,0.00,8.560,0,0.5200,6.4050,85.40,2.7147,5,384.0,20.90,70.80,10.63,18.60
|
||||
0.21161,0.00,8.560,0,0.5200,6.1370,87.40,2.7147,5,384.0,20.90,394.47,13.44,19.30
|
||||
0.13960,0.00,8.560,0,0.5200,6.1670,90.00,2.4210,5,384.0,20.90,392.69,12.33,20.10
|
||||
0.13262,0.00,8.560,0,0.5200,5.8510,96.70,2.1069,5,384.0,20.90,394.05,16.47,19.50
|
||||
0.17120,0.00,8.560,0,0.5200,5.8360,91.90,2.2110,5,384.0,20.90,395.67,18.66,19.50
|
||||
0.13117,0.00,8.560,0,0.5200,6.1270,85.20,2.1224,5,384.0,20.90,387.69,14.09,20.40
|
||||
0.12802,0.00,8.560,0,0.5200,6.4740,97.10,2.4329,5,384.0,20.90,395.24,12.27,19.80
|
||||
0.26363,0.00,8.560,0,0.5200,6.2290,91.20,2.5451,5,384.0,20.90,391.23,15.55,19.40
|
||||
0.10793,0.00,8.560,0,0.5200,6.1950,54.40,2.7778,5,384.0,20.90,393.49,13.00,21.70
|
||||
0.10084,0.00,10.010,0,0.5470,6.7150,81.60,2.6775,6,432.0,17.80,395.59,10.16,22.80
|
||||
0.12329,0.00,10.010,0,0.5470,5.9130,92.90,2.3534,6,432.0,17.80,394.95,16.21,18.80
|
||||
0.22212,0.00,10.010,0,0.5470,6.0920,95.40,2.5480,6,432.0,17.80,396.90,17.09,18.70
|
||||
0.14231,0.00,10.010,0,0.5470,6.2540,84.20,2.2565,6,432.0,17.80,388.74,10.45,18.50
|
||||
0.17134,0.00,10.010,0,0.5470,5.9280,88.20,2.4631,6,432.0,17.80,344.91,15.76,18.30
|
||||
0.13158,0.00,10.010,0,0.5470,6.1760,72.50,2.7301,6,432.0,17.80,393.30,12.04,21.20
|
||||
0.15098,0.00,10.010,0,0.5470,6.0210,82.60,2.7474,6,432.0,17.80,394.51,10.30,19.20
|
||||
0.13058,0.00,10.010,0,0.5470,5.8720,73.10,2.4775,6,432.0,17.80,338.63,15.37,20.40
|
||||
0.14476,0.00,10.010,0,0.5470,5.7310,65.20,2.7592,6,432.0,17.80,391.50,13.61,19.30
|
||||
0.06899,0.00,25.650,0,0.5810,5.8700,69.70,2.2577,2,188.0,19.10,389.15,14.37,22.00
|
||||
0.07165,0.00,25.650,0,0.5810,6.0040,84.10,2.1974,2,188.0,19.10,377.67,14.27,20.30
|
||||
0.09299,0.00,25.650,0,0.5810,5.9610,92.90,2.0869,2,188.0,19.10,378.09,17.93,20.50
|
||||
0.15038,0.00,25.650,0,0.5810,5.8560,97.00,1.9444,2,188.0,19.10,370.31,25.41,17.30
|
||||
0.09849,0.00,25.650,0,0.5810,5.8790,95.80,2.0063,2,188.0,19.10,379.38,17.58,18.80
|
||||
0.16902,0.00,25.650,0,0.5810,5.9860,88.40,1.9929,2,188.0,19.10,385.02,14.81,21.40
|
||||
0.38735,0.00,25.650,0,0.5810,5.6130,95.60,1.7572,2,188.0,19.10,359.29,27.26,15.70
|
||||
0.25915,0.00,21.890,0,0.6240,5.6930,96.00,1.7883,4,437.0,21.20,392.11,17.19,16.20
|
||||
0.32543,0.00,21.890,0,0.6240,6.4310,98.80,1.8125,4,437.0,21.20,396.90,15.39,18.00
|
||||
0.88125,0.00,21.890,0,0.6240,5.6370,94.70,1.9799,4,437.0,21.20,396.90,18.34,14.30
|
||||
0.34006,0.00,21.890,0,0.6240,6.4580,98.90,2.1185,4,437.0,21.20,395.04,12.60,19.20
|
||||
1.19294,0.00,21.890,0,0.6240,6.3260,97.70,2.2710,4,437.0,21.20,396.90,12.26,19.60
|
||||
0.59005,0.00,21.890,0,0.6240,6.3720,97.90,2.3274,4,437.0,21.20,385.76,11.12,23.00
|
||||
0.32982,0.00,21.890,0,0.6240,5.8220,95.40,2.4699,4,437.0,21.20,388.69,15.03,18.40
|
||||
0.97617,0.00,21.890,0,0.6240,5.7570,98.40,2.3460,4,437.0,21.20,262.76,17.31,15.60
|
||||
0.55778,0.00,21.890,0,0.6240,6.3350,98.20,2.1107,4,437.0,21.20,394.67,16.96,18.10
|
||||
0.32264,0.00,21.890,0,0.6240,5.9420,93.50,1.9669,4,437.0,21.20,378.25,16.90,17.40
|
||||
0.35233,0.00,21.890,0,0.6240,6.4540,98.40,1.8498,4,437.0,21.20,394.08,14.59,17.10
|
||||
0.24980,0.00,21.890,0,0.6240,5.8570,98.20,1.6686,4,437.0,21.20,392.04,21.32,13.30
|
||||
0.54452,0.00,21.890,0,0.6240,6.1510,97.90,1.6687,4,437.0,21.20,396.90,18.46,17.80
|
||||
0.29090,0.00,21.890,0,0.6240,6.1740,93.60,1.6119,4,437.0,21.20,388.08,24.16,14.00
|
||||
1.62864,0.00,21.890,0,0.6240,5.0190,100.00,1.4394,4,437.0,21.20,396.90,34.41,14.40
|
||||
3.32105,0.00,19.580,1,0.8710,5.4030,100.00,1.3216,5,403.0,14.70,396.90,26.82,13.40
|
||||
4.09740,0.00,19.580,0,0.8710,5.4680,100.00,1.4118,5,403.0,14.70,396.90,26.42,15.60
|
||||
2.77974,0.00,19.580,0,0.8710,4.9030,97.80,1.3459,5,403.0,14.70,396.90,29.29,11.80
|
||||
2.37934,0.00,19.580,0,0.8710,6.1300,100.00,1.4191,5,403.0,14.70,172.91,27.80,13.80
|
||||
2.15505,0.00,19.580,0,0.8710,5.6280,100.00,1.5166,5,403.0,14.70,169.27,16.65,15.60
|
||||
2.36862,0.00,19.580,0,0.8710,4.9260,95.70,1.4608,5,403.0,14.70,391.71,29.53,14.60
|
||||
2.33099,0.00,19.580,0,0.8710,5.1860,93.80,1.5296,5,403.0,14.70,356.99,28.32,17.80
|
||||
2.73397,0.00,19.580,0,0.8710,5.5970,94.90,1.5257,5,403.0,14.70,351.85,21.45,15.40
|
||||
1.65660,0.00,19.580,0,0.8710,6.1220,97.30,1.6180,5,403.0,14.70,372.80,14.10,21.50
|
||||
1.49632,0.00,19.580,0,0.8710,5.4040,100.00,1.5916,5,403.0,14.70,341.60,13.28,19.60
|
||||
1.12658,0.00,19.580,1,0.8710,5.0120,88.00,1.6102,5,403.0,14.70,343.28,12.12,15.30
|
||||
2.14918,0.00,19.580,0,0.8710,5.7090,98.50,1.6232,5,403.0,14.70,261.95,15.79,19.40
|
||||
1.41385,0.00,19.580,1,0.8710,6.1290,96.00,1.7494,5,403.0,14.70,321.02,15.12,17.00
|
||||
3.53501,0.00,19.580,1,0.8710,6.1520,82.60,1.7455,5,403.0,14.70,88.01,15.02,15.60
|
||||
2.44668,0.00,19.580,0,0.8710,5.2720,94.00,1.7364,5,403.0,14.70,88.63,16.14,13.10
|
||||
1.22358,0.00,19.580,0,0.6050,6.9430,97.40,1.8773,5,403.0,14.70,363.43,4.59,41.30
|
||||
1.34284,0.00,19.580,0,0.6050,6.0660,100.00,1.7573,5,403.0,14.70,353.89,6.43,24.30
|
||||
1.42502,0.00,19.580,0,0.8710,6.5100,100.00,1.7659,5,403.0,14.70,364.31,7.39,23.30
|
||||
1.27346,0.00,19.580,1,0.6050,6.2500,92.60,1.7984,5,403.0,14.70,338.92,5.50,27.00
|
||||
1.46336,0.00,19.580,0,0.6050,7.4890,90.80,1.9709,5,403.0,14.70,374.43,1.73,50.00
|
||||
1.83377,0.00,19.580,1,0.6050,7.8020,98.20,2.0407,5,403.0,14.70,389.61,1.92,50.00
|
||||
1.51902,0.00,19.580,1,0.6050,8.3750,93.90,2.1620,5,403.0,14.70,388.45,3.32,50.00
|
||||
2.24236,0.00,19.580,0,0.6050,5.8540,91.80,2.4220,5,403.0,14.70,395.11,11.64,22.70
|
||||
2.92400,0.00,19.580,0,0.6050,6.1010,93.00,2.2834,5,403.0,14.70,240.16,9.81,25.00
|
||||
2.01019,0.00,19.580,0,0.6050,7.9290,96.20,2.0459,5,403.0,14.70,369.30,3.70,50.00
|
||||
1.80028,0.00,19.580,0,0.6050,5.8770,79.20,2.4259,5,403.0,14.70,227.61,12.14,23.80
|
||||
2.30040,0.00,19.580,0,0.6050,6.3190,96.10,2.1000,5,403.0,14.70,297.09,11.10,23.80
|
||||
2.44953,0.00,19.580,0,0.6050,6.4020,95.20,2.2625,5,403.0,14.70,330.04,11.32,22.30
|
||||
1.20742,0.00,19.580,0,0.6050,5.8750,94.60,2.4259,5,403.0,14.70,292.29,14.43,17.40
|
||||
2.31390,0.00,19.580,0,0.6050,5.8800,97.30,2.3887,5,403.0,14.70,348.13,12.03,19.10
|
||||
0.13914,0.00,4.050,0,0.5100,5.5720,88.50,2.5961,5,296.0,16.60,396.90,14.69,23.10
|
||||
0.09178,0.00,4.050,0,0.5100,6.4160,84.10,2.6463,5,296.0,16.60,395.50,9.04,23.60
|
||||
0.08447,0.00,4.050,0,0.5100,5.8590,68.70,2.7019,5,296.0,16.60,393.23,9.64,22.60
|
||||
0.06664,0.00,4.050,0,0.5100,6.5460,33.10,3.1323,5,296.0,16.60,390.96,5.33,29.40
|
||||
0.07022,0.00,4.050,0,0.5100,6.0200,47.20,3.5549,5,296.0,16.60,393.23,10.11,23.20
|
||||
0.05425,0.00,4.050,0,0.5100,6.3150,73.40,3.3175,5,296.0,16.60,395.60,6.29,24.60
|
||||
0.06642,0.00,4.050,0,0.5100,6.8600,74.40,2.9153,5,296.0,16.60,391.27,6.92,29.90
|
||||
0.05780,0.00,2.460,0,0.4880,6.9800,58.40,2.8290,3,193.0,17.80,396.90,5.04,37.20
|
||||
0.06588,0.00,2.460,0,0.4880,7.7650,83.30,2.7410,3,193.0,17.80,395.56,7.56,39.80
|
||||
0.06888,0.00,2.460,0,0.4880,6.1440,62.20,2.5979,3,193.0,17.80,396.90,9.45,36.20
|
||||
0.09103,0.00,2.460,0,0.4880,7.1550,92.20,2.7006,3,193.0,17.80,394.12,4.82,37.90
|
||||
0.10008,0.00,2.460,0,0.4880,6.5630,95.60,2.8470,3,193.0,17.80,396.90,5.68,32.50
|
||||
0.08308,0.00,2.460,0,0.4880,5.6040,89.80,2.9879,3,193.0,17.80,391.00,13.98,26.40
|
||||
0.06047,0.00,2.460,0,0.4880,6.1530,68.80,3.2797,3,193.0,17.80,387.11,13.15,29.60
|
||||
0.05602,0.00,2.460,0,0.4880,7.8310,53.60,3.1992,3,193.0,17.80,392.63,4.45,50.00
|
||||
0.07875,45.00,3.440,0,0.4370,6.7820,41.10,3.7886,5,398.0,15.20,393.87,6.68,32.00
|
||||
0.12579,45.00,3.440,0,0.4370,6.5560,29.10,4.5667,5,398.0,15.20,382.84,4.56,29.80
|
||||
0.08370,45.00,3.440,0,0.4370,7.1850,38.90,4.5667,5,398.0,15.20,396.90,5.39,34.90
|
||||
0.09068,45.00,3.440,0,0.4370,6.9510,21.50,6.4798,5,398.0,15.20,377.68,5.10,37.00
|
||||
0.06911,45.00,3.440,0,0.4370,6.7390,30.80,6.4798,5,398.0,15.20,389.71,4.69,30.50
|
||||
0.08664,45.00,3.440,0,0.4370,7.1780,26.30,6.4798,5,398.0,15.20,390.49,2.87,36.40
|
||||
0.02187,60.00,2.930,0,0.4010,6.8000,9.90,6.2196,1,265.0,15.60,393.37,5.03,31.10
|
||||
0.01439,60.00,2.930,0,0.4010,6.6040,18.80,6.2196,1,265.0,15.60,376.70,4.38,29.10
|
||||
0.01381,80.00,0.460,0,0.4220,7.8750,32.00,5.6484,4,255.0,14.40,394.23,2.97,50.00
|
||||
0.04011,80.00,1.520,0,0.4040,7.2870,34.10,7.3090,2,329.0,12.60,396.90,4.08,33.30
|
||||
0.04666,80.00,1.520,0,0.4040,7.1070,36.60,7.3090,2,329.0,12.60,354.31,8.61,30.30
|
||||
0.03768,80.00,1.520,0,0.4040,7.2740,38.30,7.3090,2,329.0,12.60,392.20,6.62,34.60
|
||||
0.03150,95.00,1.470,0,0.4030,6.9750,15.30,7.6534,3,402.0,17.00,396.90,4.56,34.90
|
||||
0.01778,95.00,1.470,0,0.4030,7.1350,13.90,7.6534,3,402.0,17.00,384.30,4.45,32.90
|
||||
0.03445,82.50,2.030,0,0.4150,6.1620,38.40,6.2700,2,348.0,14.70,393.77,7.43,24.10
|
||||
0.02177,82.50,2.030,0,0.4150,7.6100,15.70,6.2700,2,348.0,14.70,395.38,3.11,42.30
|
||||
0.03510,95.00,2.680,0,0.4161,7.8530,33.20,5.1180,4,224.0,14.70,392.78,3.81,48.50
|
||||
0.02009,95.00,2.680,0,0.4161,8.0340,31.90,5.1180,4,224.0,14.70,390.55,2.88,50.00
|
||||
0.13642,0.00,10.590,0,0.4890,5.8910,22.30,3.9454,4,277.0,18.60,396.90,10.87,22.60
|
||||
0.22969,0.00,10.590,0,0.4890,6.3260,52.50,4.3549,4,277.0,18.60,394.87,10.97,24.40
|
||||
0.25199,0.00,10.590,0,0.4890,5.7830,72.70,4.3549,4,277.0,18.60,389.43,18.06,22.50
|
||||
0.13587,0.00,10.590,1,0.4890,6.0640,59.10,4.2392,4,277.0,18.60,381.32,14.66,24.40
|
||||
0.43571,0.00,10.590,1,0.4890,5.3440,100.00,3.8750,4,277.0,18.60,396.90,23.09,20.00
|
||||
0.17446,0.00,10.590,1,0.4890,5.9600,92.10,3.8771,4,277.0,18.60,393.25,17.27,21.70
|
||||
0.37578,0.00,10.590,1,0.4890,5.4040,88.60,3.6650,4,277.0,18.60,395.24,23.98,19.30
|
||||
0.21719,0.00,10.590,1,0.4890,5.8070,53.80,3.6526,4,277.0,18.60,390.94,16.03,22.40
|
||||
0.14052,0.00,10.590,0,0.4890,6.3750,32.30,3.9454,4,277.0,18.60,385.81,9.38,28.10
|
||||
0.28955,0.00,10.590,0,0.4890,5.4120,9.80,3.5875,4,277.0,18.60,348.93,29.55,23.70
|
||||
0.19802,0.00,10.590,0,0.4890,6.1820,42.40,3.9454,4,277.0,18.60,393.63,9.47,25.00
|
||||
0.04560,0.00,13.890,1,0.5500,5.8880,56.00,3.1121,5,276.0,16.40,392.80,13.51,23.30
|
||||
0.07013,0.00,13.890,0,0.5500,6.6420,85.10,3.4211,5,276.0,16.40,392.78,9.69,28.70
|
||||
0.11069,0.00,13.890,1,0.5500,5.9510,93.80,2.8893,5,276.0,16.40,396.90,17.92,21.50
|
||||
0.11425,0.00,13.890,1,0.5500,6.3730,92.40,3.3633,5,276.0,16.40,393.74,10.50,23.00
|
||||
0.35809,0.00,6.200,1,0.5070,6.9510,88.50,2.8617,8,307.0,17.40,391.70,9.71,26.70
|
||||
0.40771,0.00,6.200,1,0.5070,6.1640,91.30,3.0480,8,307.0,17.40,395.24,21.46,21.70
|
||||
0.62356,0.00,6.200,1,0.5070,6.8790,77.70,3.2721,8,307.0,17.40,390.39,9.93,27.50
|
||||
0.61470,0.00,6.200,0,0.5070,6.6180,80.80,3.2721,8,307.0,17.40,396.90,7.60,30.10
|
||||
0.31533,0.00,6.200,0,0.5040,8.2660,78.30,2.8944,8,307.0,17.40,385.05,4.14,44.80
|
||||
0.52693,0.00,6.200,0,0.5040,8.7250,83.00,2.8944,8,307.0,17.40,382.00,4.63,50.00
|
||||
0.38214,0.00,6.200,0,0.5040,8.0400,86.50,3.2157,8,307.0,17.40,387.38,3.13,37.60
|
||||
0.41238,0.00,6.200,0,0.5040,7.1630,79.90,3.2157,8,307.0,17.40,372.08,6.36,31.60
|
||||
0.29819,0.00,6.200,0,0.5040,7.6860,17.00,3.3751,8,307.0,17.40,377.51,3.92,46.70
|
||||
0.44178,0.00,6.200,0,0.5040,6.5520,21.40,3.3751,8,307.0,17.40,380.34,3.76,31.50
|
||||
0.53700,0.00,6.200,0,0.5040,5.9810,68.10,3.6715,8,307.0,17.40,378.35,11.65,24.30
|
||||
0.46296,0.00,6.200,0,0.5040,7.4120,76.90,3.6715,8,307.0,17.40,376.14,5.25,31.70
|
||||
0.57529,0.00,6.200,0,0.5070,8.3370,73.30,3.8384,8,307.0,17.40,385.91,2.47,41.70
|
||||
0.33147,0.00,6.200,0,0.5070,8.2470,70.40,3.6519,8,307.0,17.40,378.95,3.95,48.30
|
||||
0.44791,0.00,6.200,1,0.5070,6.7260,66.50,3.6519,8,307.0,17.40,360.20,8.05,29.00
|
||||
0.33045,0.00,6.200,0,0.5070,6.0860,61.50,3.6519,8,307.0,17.40,376.75,10.88,24.00
|
||||
0.52058,0.00,6.200,1,0.5070,6.6310,76.50,4.1480,8,307.0,17.40,388.45,9.54,25.10
|
||||
0.51183,0.00,6.200,0,0.5070,7.3580,71.60,4.1480,8,307.0,17.40,390.07,4.73,31.50
|
||||
0.08244,30.00,4.930,0,0.4280,6.4810,18.50,6.1899,6,300.0,16.60,379.41,6.36,23.70
|
||||
0.09252,30.00,4.930,0,0.4280,6.6060,42.20,6.1899,6,300.0,16.60,383.78,7.37,23.30
|
||||
0.11329,30.00,4.930,0,0.4280,6.8970,54.30,6.3361,6,300.0,16.60,391.25,11.38,22.00
|
||||
0.10612,30.00,4.930,0,0.4280,6.0950,65.10,6.3361,6,300.0,16.60,394.62,12.40,20.10
|
||||
0.10290,30.00,4.930,0,0.4280,6.3580,52.90,7.0355,6,300.0,16.60,372.75,11.22,22.20
|
||||
0.12757,30.00,4.930,0,0.4280,6.3930,7.80,7.0355,6,300.0,16.60,374.71,5.19,23.70
|
||||
0.20608,22.00,5.860,0,0.4310,5.5930,76.50,7.9549,7,330.0,19.10,372.49,12.50,17.60
|
||||
0.19133,22.00,5.860,0,0.4310,5.6050,70.20,7.9549,7,330.0,19.10,389.13,18.46,18.50
|
||||
0.33983,22.00,5.860,0,0.4310,6.1080,34.90,8.0555,7,330.0,19.10,390.18,9.16,24.30
|
||||
0.19657,22.00,5.860,0,0.4310,6.2260,79.20,8.0555,7,330.0,19.10,376.14,10.15,20.50
|
||||
0.16439,22.00,5.860,0,0.4310,6.4330,49.10,7.8265,7,330.0,19.10,374.71,9.52,24.50
|
||||
0.19073,22.00,5.860,0,0.4310,6.7180,17.50,7.8265,7,330.0,19.10,393.74,6.56,26.20
|
||||
0.14030,22.00,5.860,0,0.4310,6.4870,13.00,7.3967,7,330.0,19.10,396.28,5.90,24.40
|
||||
0.21409,22.00,5.860,0,0.4310,6.4380,8.90,7.3967,7,330.0,19.10,377.07,3.59,24.80
|
||||
0.08221,22.00,5.860,0,0.4310,6.9570,6.80,8.9067,7,330.0,19.10,386.09,3.53,29.60
|
||||
0.36894,22.00,5.860,0,0.4310,8.2590,8.40,8.9067,7,330.0,19.10,396.90,3.54,42.80
|
||||
0.04819,80.00,3.640,0,0.3920,6.1080,32.00,9.2203,1,315.0,16.40,392.89,6.57,21.90
|
||||
0.03548,80.00,3.640,0,0.3920,5.8760,19.10,9.2203,1,315.0,16.40,395.18,9.25,20.90
|
||||
0.01538,90.00,3.750,0,0.3940,7.4540,34.20,6.3361,3,244.0,15.90,386.34,3.11,44.00
|
||||
0.61154,20.00,3.970,0,0.6470,8.7040,86.90,1.8010,5,264.0,13.00,389.70,5.12,50.00
|
||||
0.66351,20.00,3.970,0,0.6470,7.3330,100.00,1.8946,5,264.0,13.00,383.29,7.79,36.00
|
||||
0.65665,20.00,3.970,0,0.6470,6.8420,100.00,2.0107,5,264.0,13.00,391.93,6.90,30.10
|
||||
0.54011,20.00,3.970,0,0.6470,7.2030,81.80,2.1121,5,264.0,13.00,392.80,9.59,33.80
|
||||
0.53412,20.00,3.970,0,0.6470,7.5200,89.40,2.1398,5,264.0,13.00,388.37,7.26,43.10
|
||||
0.52014,20.00,3.970,0,0.6470,8.3980,91.50,2.2885,5,264.0,13.00,386.86,5.91,48.80
|
||||
0.82526,20.00,3.970,0,0.6470,7.3270,94.50,2.0788,5,264.0,13.00,393.42,11.25,31.00
|
||||
0.55007,20.00,3.970,0,0.6470,7.2060,91.60,1.9301,5,264.0,13.00,387.89,8.10,36.50
|
||||
0.76162,20.00,3.970,0,0.6470,5.5600,62.80,1.9865,5,264.0,13.00,392.40,10.45,22.80
|
||||
0.78570,20.00,3.970,0,0.6470,7.0140,84.60,2.1329,5,264.0,13.00,384.07,14.79,30.70
|
||||
0.57834,20.00,3.970,0,0.5750,8.2970,67.00,2.4216,5,264.0,13.00,384.54,7.44,50.00
|
||||
0.54050,20.00,3.970,0,0.5750,7.4700,52.60,2.8720,5,264.0,13.00,390.30,3.16,43.50
|
||||
0.09065,20.00,6.960,1,0.4640,5.9200,61.50,3.9175,3,223.0,18.60,391.34,13.65,20.70
|
||||
0.29916,20.00,6.960,0,0.4640,5.8560,42.10,4.4290,3,223.0,18.60,388.65,13.00,21.10
|
||||
0.16211,20.00,6.960,0,0.4640,6.2400,16.30,4.4290,3,223.0,18.60,396.90,6.59,25.20
|
||||
0.11460,20.00,6.960,0,0.4640,6.5380,58.70,3.9175,3,223.0,18.60,394.96,7.73,24.40
|
||||
0.22188,20.00,6.960,1,0.4640,7.6910,51.80,4.3665,3,223.0,18.60,390.77,6.58,35.20
|
||||
0.05644,40.00,6.410,1,0.4470,6.7580,32.90,4.0776,4,254.0,17.60,396.90,3.53,32.40
|
||||
0.09604,40.00,6.410,0,0.4470,6.8540,42.80,4.2673,4,254.0,17.60,396.90,2.98,32.00
|
||||
0.10469,40.00,6.410,1,0.4470,7.2670,49.00,4.7872,4,254.0,17.60,389.25,6.05,33.20
|
||||
0.06127,40.00,6.410,1,0.4470,6.8260,27.60,4.8628,4,254.0,17.60,393.45,4.16,33.10
|
||||
0.07978,40.00,6.410,0,0.4470,6.4820,32.10,4.1403,4,254.0,17.60,396.90,7.19,29.10
|
||||
0.21038,20.00,3.330,0,0.4429,6.8120,32.20,4.1007,5,216.0,14.90,396.90,4.85,35.10
|
||||
0.03578,20.00,3.330,0,0.4429,7.8200,64.50,4.6947,5,216.0,14.90,387.31,3.76,45.40
|
||||
0.03705,20.00,3.330,0,0.4429,6.9680,37.20,5.2447,5,216.0,14.90,392.23,4.59,35.40
|
||||
0.06129,20.00,3.330,1,0.4429,7.6450,49.70,5.2119,5,216.0,14.90,377.07,3.01,46.00
|
||||
0.01501,90.00,1.210,1,0.4010,7.9230,24.80,5.8850,1,198.0,13.60,395.52,3.16,50.00
|
||||
0.00906,90.00,2.970,0,0.4000,7.0880,20.80,7.3073,1,285.0,15.30,394.72,7.85,32.20
|
||||
0.01096,55.00,2.250,0,0.3890,6.4530,31.90,7.3073,1,300.0,15.30,394.72,8.23,22.00
|
||||
0.01965,80.00,1.760,0,0.3850,6.2300,31.50,9.0892,1,241.0,18.20,341.60,12.93,20.10
|
||||
0.03871,52.50,5.320,0,0.4050,6.2090,31.30,7.3172,6,293.0,16.60,396.90,7.14,23.20
|
||||
0.04590,52.50,5.320,0,0.4050,6.3150,45.60,7.3172,6,293.0,16.60,396.90,7.60,22.30
|
||||
0.04297,52.50,5.320,0,0.4050,6.5650,22.90,7.3172,6,293.0,16.60,371.72,9.51,24.80
|
||||
0.03502,80.00,4.950,0,0.4110,6.8610,27.90,5.1167,4,245.0,19.20,396.90,3.33,28.50
|
||||
0.07886,80.00,4.950,0,0.4110,7.1480,27.70,5.1167,4,245.0,19.20,396.90,3.56,37.30
|
||||
0.03615,80.00,4.950,0,0.4110,6.6300,23.40,5.1167,4,245.0,19.20,396.90,4.70,27.90
|
||||
0.08265,0.00,13.920,0,0.4370,6.1270,18.40,5.5027,4,289.0,16.00,396.90,8.58,23.90
|
||||
0.08199,0.00,13.920,0,0.4370,6.0090,42.30,5.5027,4,289.0,16.00,396.90,10.40,21.70
|
||||
0.12932,0.00,13.920,0,0.4370,6.6780,31.10,5.9604,4,289.0,16.00,396.90,6.27,28.60
|
||||
0.05372,0.00,13.920,0,0.4370,6.5490,51.00,5.9604,4,289.0,16.00,392.85,7.39,27.10
|
||||
0.14103,0.00,13.920,0,0.4370,5.7900,58.00,6.3200,4,289.0,16.00,396.90,15.84,20.30
|
||||
0.06466,70.00,2.240,0,0.4000,6.3450,20.10,7.8278,5,358.0,14.80,368.24,4.97,22.50
|
||||
0.05561,70.00,2.240,0,0.4000,7.0410,10.00,7.8278,5,358.0,14.80,371.58,4.74,29.00
|
||||
0.04417,70.00,2.240,0,0.4000,6.8710,47.40,7.8278,5,358.0,14.80,390.86,6.07,24.80
|
||||
0.03537,34.00,6.090,0,0.4330,6.5900,40.40,5.4917,7,329.0,16.10,395.75,9.50,22.00
|
||||
0.09266,34.00,6.090,0,0.4330,6.4950,18.40,5.4917,7,329.0,16.10,383.61,8.67,26.40
|
||||
0.10000,34.00,6.090,0,0.4330,6.9820,17.70,5.4917,7,329.0,16.10,390.43,4.86,33.10
|
||||
0.05515,33.00,2.180,0,0.4720,7.2360,41.10,4.0220,7,222.0,18.40,393.68,6.93,36.10
|
||||
0.05479,33.00,2.180,0,0.4720,6.6160,58.10,3.3700,7,222.0,18.40,393.36,8.93,28.40
|
||||
0.07503,33.00,2.180,0,0.4720,7.4200,71.90,3.0992,7,222.0,18.40,396.90,6.47,33.40
|
||||
0.04932,33.00,2.180,0,0.4720,6.8490,70.30,3.1827,7,222.0,18.40,396.90,7.53,28.20
|
||||
0.49298,0.00,9.900,0,0.5440,6.6350,82.50,3.3175,4,304.0,18.40,396.90,4.54,22.80
|
||||
0.34940,0.00,9.900,0,0.5440,5.9720,76.70,3.1025,4,304.0,18.40,396.24,9.97,20.30
|
||||
2.63548,0.00,9.900,0,0.5440,4.9730,37.80,2.5194,4,304.0,18.40,350.45,12.64,16.10
|
||||
0.79041,0.00,9.900,0,0.5440,6.1220,52.80,2.6403,4,304.0,18.40,396.90,5.98,22.10
|
||||
0.26169,0.00,9.900,0,0.5440,6.0230,90.40,2.8340,4,304.0,18.40,396.30,11.72,19.40
|
||||
0.26938,0.00,9.900,0,0.5440,6.2660,82.80,3.2628,4,304.0,18.40,393.39,7.90,21.60
|
||||
0.36920,0.00,9.900,0,0.5440,6.5670,87.30,3.6023,4,304.0,18.40,395.69,9.28,23.80
|
||||
0.25356,0.00,9.900,0,0.5440,5.7050,77.70,3.9450,4,304.0,18.40,396.42,11.50,16.20
|
||||
0.31827,0.00,9.900,0,0.5440,5.9140,83.20,3.9986,4,304.0,18.40,390.70,18.33,17.80
|
||||
0.24522,0.00,9.900,0,0.5440,5.7820,71.70,4.0317,4,304.0,18.40,396.90,15.94,19.80
|
||||
0.40202,0.00,9.900,0,0.5440,6.3820,67.20,3.5325,4,304.0,18.40,395.21,10.36,23.10
|
||||
0.47547,0.00,9.900,0,0.5440,6.1130,58.80,4.0019,4,304.0,18.40,396.23,12.73,21.00
|
||||
0.16760,0.00,7.380,0,0.4930,6.4260,52.30,4.5404,5,287.0,19.60,396.90,7.20,23.80
|
||||
0.18159,0.00,7.380,0,0.4930,6.3760,54.30,4.5404,5,287.0,19.60,396.90,6.87,23.10
|
||||
0.35114,0.00,7.380,0,0.4930,6.0410,49.90,4.7211,5,287.0,19.60,396.90,7.70,20.40
|
||||
0.28392,0.00,7.380,0,0.4930,5.7080,74.30,4.7211,5,287.0,19.60,391.13,11.74,18.50
|
||||
0.34109,0.00,7.380,0,0.4930,6.4150,40.10,4.7211,5,287.0,19.60,396.90,6.12,25.00
|
||||
0.19186,0.00,7.380,0,0.4930,6.4310,14.70,5.4159,5,287.0,19.60,393.68,5.08,24.60
|
||||
0.30347,0.00,7.380,0,0.4930,6.3120,28.90,5.4159,5,287.0,19.60,396.90,6.15,23.00
|
||||
0.24103,0.00,7.380,0,0.4930,6.0830,43.70,5.4159,5,287.0,19.60,396.90,12.79,22.20
|
||||
0.06617,0.00,3.240,0,0.4600,5.8680,25.80,5.2146,4,430.0,16.90,382.44,9.97,19.30
|
||||
0.06724,0.00,3.240,0,0.4600,6.3330,17.20,5.2146,4,430.0,16.90,375.21,7.34,22.60
|
||||
0.04544,0.00,3.240,0,0.4600,6.1440,32.20,5.8736,4,430.0,16.90,368.57,9.09,19.80
|
||||
0.05023,35.00,6.060,0,0.4379,5.7060,28.40,6.6407,1,304.0,16.90,394.02,12.43,17.10
|
||||
0.03466,35.00,6.060,0,0.4379,6.0310,23.30,6.6407,1,304.0,16.90,362.25,7.83,19.40
|
||||
0.05083,0.00,5.190,0,0.5150,6.3160,38.10,6.4584,5,224.0,20.20,389.71,5.68,22.20
|
||||
0.03738,0.00,5.190,0,0.5150,6.3100,38.50,6.4584,5,224.0,20.20,389.40,6.75,20.70
|
||||
0.03961,0.00,5.190,0,0.5150,6.0370,34.50,5.9853,5,224.0,20.20,396.90,8.01,21.10
|
||||
0.03427,0.00,5.190,0,0.5150,5.8690,46.30,5.2311,5,224.0,20.20,396.90,9.80,19.50
|
||||
0.03041,0.00,5.190,0,0.5150,5.8950,59.60,5.6150,5,224.0,20.20,394.81,10.56,18.50
|
||||
0.03306,0.00,5.190,0,0.5150,6.0590,37.30,4.8122,5,224.0,20.20,396.14,8.51,20.60
|
||||
0.05497,0.00,5.190,0,0.5150,5.9850,45.40,4.8122,5,224.0,20.20,396.90,9.74,19.00
|
||||
0.06151,0.00,5.190,0,0.5150,5.9680,58.50,4.8122,5,224.0,20.20,396.90,9.29,18.70
|
||||
0.01301,35.00,1.520,0,0.4420,7.2410,49.30,7.0379,1,284.0,15.50,394.74,5.49,32.70
|
||||
0.02498,0.00,1.890,0,0.5180,6.5400,59.70,6.2669,1,422.0,15.90,389.96,8.65,16.50
|
||||
0.02543,55.00,3.780,0,0.4840,6.6960,56.40,5.7321,5,370.0,17.60,396.90,7.18,23.90
|
||||
0.03049,55.00,3.780,0,0.4840,6.8740,28.10,6.4654,5,370.0,17.60,387.97,4.61,31.20
|
||||
0.03113,0.00,4.390,0,0.4420,6.0140,48.50,8.0136,3,352.0,18.80,385.64,10.53,17.50
|
||||
0.06162,0.00,4.390,0,0.4420,5.8980,52.30,8.0136,3,352.0,18.80,364.61,12.67,17.20
|
||||
0.01870,85.00,4.150,0,0.4290,6.5160,27.70,8.5353,4,351.0,17.90,392.43,6.36,23.10
|
||||
0.01501,80.00,2.010,0,0.4350,6.6350,29.70,8.3440,4,280.0,17.00,390.94,5.99,24.50
|
||||
0.02899,40.00,1.250,0,0.4290,6.9390,34.50,8.7921,1,335.0,19.70,389.85,5.89,26.60
|
||||
0.06211,40.00,1.250,0,0.4290,6.4900,44.40,8.7921,1,335.0,19.70,396.90,5.98,22.90
|
||||
0.07950,60.00,1.690,0,0.4110,6.5790,35.90,10.7103,4,411.0,18.30,370.78,5.49,24.10
|
||||
0.07244,60.00,1.690,0,0.4110,5.8840,18.50,10.7103,4,411.0,18.30,392.33,7.79,18.60
|
||||
0.01709,90.00,2.020,0,0.4100,6.7280,36.10,12.1265,5,187.0,17.00,384.46,4.50,30.10
|
||||
0.04301,80.00,1.910,0,0.4130,5.6630,21.90,10.5857,4,334.0,22.00,382.80,8.05,18.20
|
||||
0.10659,80.00,1.910,0,0.4130,5.9360,19.50,10.5857,4,334.0,22.00,376.04,5.57,20.60
|
||||
8.98296,0.00,18.100,1,0.7700,6.2120,97.40,2.1222,24,666.0,20.20,377.73,17.60,17.80
|
||||
3.84970,0.00,18.100,1,0.7700,6.3950,91.00,2.5052,24,666.0,20.20,391.34,13.27,21.70
|
||||
5.20177,0.00,18.100,1,0.7700,6.1270,83.40,2.7227,24,666.0,20.20,395.43,11.48,22.70
|
||||
4.26131,0.00,18.100,0,0.7700,6.1120,81.30,2.5091,24,666.0,20.20,390.74,12.67,22.60
|
||||
4.54192,0.00,18.100,0,0.7700,6.3980,88.00,2.5182,24,666.0,20.20,374.56,7.79,25.00
|
||||
3.83684,0.00,18.100,0,0.7700,6.2510,91.10,2.2955,24,666.0,20.20,350.65,14.19,19.90
|
||||
3.67822,0.00,18.100,0,0.7700,5.3620,96.20,2.1036,24,666.0,20.20,380.79,10.19,20.80
|
||||
4.22239,0.00,18.100,1,0.7700,5.8030,89.00,1.9047,24,666.0,20.20,353.04,14.64,16.80
|
||||
3.47428,0.00,18.100,1,0.7180,8.7800,82.90,1.9047,24,666.0,20.20,354.55,5.29,21.90
|
||||
4.55587,0.00,18.100,0,0.7180,3.5610,87.90,1.6132,24,666.0,20.20,354.70,7.12,27.50
|
||||
3.69695,0.00,18.100,0,0.7180,4.9630,91.40,1.7523,24,666.0,20.20,316.03,14.00,21.90
|
||||
13.52220,0.00,18.100,0,0.6310,3.8630,100.00,1.5106,24,666.0,20.20,131.42,13.33,23.10
|
||||
4.89822,0.00,18.100,0,0.6310,4.9700,100.00,1.3325,24,666.0,20.20,375.52,3.26,50.00
|
||||
5.66998,0.00,18.100,1,0.6310,6.6830,96.80,1.3567,24,666.0,20.20,375.33,3.73,50.00
|
||||
6.53876,0.00,18.100,1,0.6310,7.0160,97.50,1.2024,24,666.0,20.20,392.05,2.96,50.00
|
||||
9.23230,0.00,18.100,0,0.6310,6.2160,100.00,1.1691,24,666.0,20.20,366.15,9.53,50.00
|
||||
8.26725,0.00,18.100,1,0.6680,5.8750,89.60,1.1296,24,666.0,20.20,347.88,8.88,50.00
|
||||
11.10810,0.00,18.100,0,0.6680,4.9060,100.00,1.1742,24,666.0,20.20,396.90,34.77,13.80
|
||||
18.49820,0.00,18.100,0,0.6680,4.1380,100.00,1.1370,24,666.0,20.20,396.90,37.97,13.80
|
||||
19.60910,0.00,18.100,0,0.6710,7.3130,97.90,1.3163,24,666.0,20.20,396.90,13.44,15.00
|
||||
15.28800,0.00,18.100,0,0.6710,6.6490,93.30,1.3449,24,666.0,20.20,363.02,23.24,13.90
|
||||
9.82349,0.00,18.100,0,0.6710,6.7940,98.80,1.3580,24,666.0,20.20,396.90,21.24,13.30
|
||||
23.64820,0.00,18.100,0,0.6710,6.3800,96.20,1.3861,24,666.0,20.20,396.90,23.69,13.10
|
||||
17.86670,0.00,18.100,0,0.6710,6.2230,100.00,1.3861,24,666.0,20.20,393.74,21.78,10.20
|
||||
88.97620,0.00,18.100,0,0.6710,6.9680,91.90,1.4165,24,666.0,20.20,396.90,17.21,10.40
|
||||
15.87440,0.00,18.100,0,0.6710,6.5450,99.10,1.5192,24,666.0,20.20,396.90,21.08,10.90
|
||||
9.18702,0.00,18.100,0,0.7000,5.5360,100.00,1.5804,24,666.0,20.20,396.90,23.60,11.30
|
||||
7.99248,0.00,18.100,0,0.7000,5.5200,100.00,1.5331,24,666.0,20.20,396.90,24.56,12.30
|
||||
20.08490,0.00,18.100,0,0.7000,4.3680,91.20,1.4395,24,666.0,20.20,285.83,30.63,8.80
|
||||
16.81180,0.00,18.100,0,0.7000,5.2770,98.10,1.4261,24,666.0,20.20,396.90,30.81,7.20
|
||||
24.39380,0.00,18.100,0,0.7000,4.6520,100.00,1.4672,24,666.0,20.20,396.90,28.28,10.50
|
||||
22.59710,0.00,18.100,0,0.7000,5.0000,89.50,1.5184,24,666.0,20.20,396.90,31.99,7.40
|
||||
14.33370,0.00,18.100,0,0.7000,4.8800,100.00,1.5895,24,666.0,20.20,372.92,30.62,10.20
|
||||
8.15174,0.00,18.100,0,0.7000,5.3900,98.90,1.7281,24,666.0,20.20,396.90,20.85,11.50
|
||||
6.96215,0.00,18.100,0,0.7000,5.7130,97.00,1.9265,24,666.0,20.20,394.43,17.11,15.10
|
||||
5.29305,0.00,18.100,0,0.7000,6.0510,82.50,2.1678,24,666.0,20.20,378.38,18.76,23.20
|
||||
11.57790,0.00,18.100,0,0.7000,5.0360,97.00,1.7700,24,666.0,20.20,396.90,25.68,9.70
|
||||
8.64476,0.00,18.100,0,0.6930,6.1930,92.60,1.7912,24,666.0,20.20,396.90,15.17,13.80
|
||||
13.35980,0.00,18.100,0,0.6930,5.8870,94.70,1.7821,24,666.0,20.20,396.90,16.35,12.70
|
||||
8.71675,0.00,18.100,0,0.6930,6.4710,98.80,1.7257,24,666.0,20.20,391.98,17.12,13.10
|
||||
5.87205,0.00,18.100,0,0.6930,6.4050,96.00,1.6768,24,666.0,20.20,396.90,19.37,12.50
|
||||
7.67202,0.00,18.100,0,0.6930,5.7470,98.90,1.6334,24,666.0,20.20,393.10,19.92,8.50
|
||||
38.35180,0.00,18.100,0,0.6930,5.4530,100.00,1.4896,24,666.0,20.20,396.90,30.59,5.00
|
||||
9.91655,0.00,18.100,0,0.6930,5.8520,77.80,1.5004,24,666.0,20.20,338.16,29.97,6.30
|
||||
25.04610,0.00,18.100,0,0.6930,5.9870,100.00,1.5888,24,666.0,20.20,396.90,26.77,5.60
|
||||
14.23620,0.00,18.100,0,0.6930,6.3430,100.00,1.5741,24,666.0,20.20,396.90,20.32,7.20
|
||||
9.59571,0.00,18.100,0,0.6930,6.4040,100.00,1.6390,24,666.0,20.20,376.11,20.31,12.10
|
||||
24.80170,0.00,18.100,0,0.6930,5.3490,96.00,1.7028,24,666.0,20.20,396.90,19.77,8.30
|
||||
41.52920,0.00,18.100,0,0.6930,5.5310,85.40,1.6074,24,666.0,20.20,329.46,27.38,8.50
|
||||
67.92080,0.00,18.100,0,0.6930,5.6830,100.00,1.4254,24,666.0,20.20,384.97,22.98,5.00
|
||||
20.71620,0.00,18.100,0,0.6590,4.1380,100.00,1.1781,24,666.0,20.20,370.22,23.34,11.90
|
||||
11.95110,0.00,18.100,0,0.6590,5.6080,100.00,1.2852,24,666.0,20.20,332.09,12.13,27.90
|
||||
7.40389,0.00,18.100,0,0.5970,5.6170,97.90,1.4547,24,666.0,20.20,314.64,26.40,17.20
|
||||
14.43830,0.00,18.100,0,0.5970,6.8520,100.00,1.4655,24,666.0,20.20,179.36,19.78,27.50
|
||||
51.13580,0.00,18.100,0,0.5970,5.7570,100.00,1.4130,24,666.0,20.20,2.60,10.11,15.00
|
||||
14.05070,0.00,18.100,0,0.5970,6.6570,100.00,1.5275,24,666.0,20.20,35.05,21.22,17.20
|
||||
18.81100,0.00,18.100,0,0.5970,4.6280,100.00,1.5539,24,666.0,20.20,28.79,34.37,17.90
|
||||
28.65580,0.00,18.100,0,0.5970,5.1550,100.00,1.5894,24,666.0,20.20,210.97,20.08,16.30
|
||||
45.74610,0.00,18.100,0,0.6930,4.5190,100.00,1.6582,24,666.0,20.20,88.27,36.98,7.00
|
||||
18.08460,0.00,18.100,0,0.6790,6.4340,100.00,1.8347,24,666.0,20.20,27.25,29.05,7.20
|
||||
10.83420,0.00,18.100,0,0.6790,6.7820,90.80,1.8195,24,666.0,20.20,21.57,25.79,7.50
|
||||
25.94060,0.00,18.100,0,0.6790,5.3040,89.10,1.6475,24,666.0,20.20,127.36,26.64,10.40
|
||||
73.53410,0.00,18.100,0,0.6790,5.9570,100.00,1.8026,24,666.0,20.20,16.45,20.62,8.80
|
||||
11.81230,0.00,18.100,0,0.7180,6.8240,76.50,1.7940,24,666.0,20.20,48.45,22.74,8.40
|
||||
11.08740,0.00,18.100,0,0.7180,6.4110,100.00,1.8589,24,666.0,20.20,318.75,15.02,16.70
|
||||
7.02259,0.00,18.100,0,0.7180,6.0060,95.30,1.8746,24,666.0,20.20,319.98,15.70,14.20
|
||||
12.04820,0.00,18.100,0,0.6140,5.6480,87.60,1.9512,24,666.0,20.20,291.55,14.10,20.80
|
||||
7.05042,0.00,18.100,0,0.6140,6.1030,85.10,2.0218,24,666.0,20.20,2.52,23.29,13.40
|
||||
8.79212,0.00,18.100,0,0.5840,5.5650,70.60,2.0635,24,666.0,20.20,3.65,17.16,11.70
|
||||
15.86030,0.00,18.100,0,0.6790,5.8960,95.40,1.9096,24,666.0,20.20,7.68,24.39,8.30
|
||||
12.24720,0.00,18.100,0,0.5840,5.8370,59.70,1.9976,24,666.0,20.20,24.65,15.69,10.20
|
||||
37.66190,0.00,18.100,0,0.6790,6.2020,78.70,1.8629,24,666.0,20.20,18.82,14.52,10.90
|
||||
7.36711,0.00,18.100,0,0.6790,6.1930,78.10,1.9356,24,666.0,20.20,96.73,21.52,11.00
|
||||
9.33889,0.00,18.100,0,0.6790,6.3800,95.60,1.9682,24,666.0,20.20,60.72,24.08,9.50
|
||||
8.49213,0.00,18.100,0,0.5840,6.3480,86.10,2.0527,24,666.0,20.20,83.45,17.64,14.50
|
||||
10.06230,0.00,18.100,0,0.5840,6.8330,94.30,2.0882,24,666.0,20.20,81.33,19.69,14.10
|
||||
6.44405,0.00,18.100,0,0.5840,6.4250,74.80,2.2004,24,666.0,20.20,97.95,12.03,16.10
|
||||
5.58107,0.00,18.100,0,0.7130,6.4360,87.90,2.3158,24,666.0,20.20,100.19,16.22,14.30
|
||||
13.91340,0.00,18.100,0,0.7130,6.2080,95.00,2.2222,24,666.0,20.20,100.63,15.17,11.70
|
||||
11.16040,0.00,18.100,0,0.7400,6.6290,94.60,2.1247,24,666.0,20.20,109.85,23.27,13.40
|
||||
14.42080,0.00,18.100,0,0.7400,6.4610,93.30,2.0026,24,666.0,20.20,27.49,18.05,9.60
|
||||
15.17720,0.00,18.100,0,0.7400,6.1520,100.00,1.9142,24,666.0,20.20,9.32,26.45,8.70
|
||||
13.67810,0.00,18.100,0,0.7400,5.9350,87.90,1.8206,24,666.0,20.20,68.95,34.02,8.40
|
||||
9.39063,0.00,18.100,0,0.7400,5.6270,93.90,1.8172,24,666.0,20.20,396.90,22.88,12.80
|
||||
22.05110,0.00,18.100,0,0.7400,5.8180,92.40,1.8662,24,666.0,20.20,391.45,22.11,10.50
|
||||
9.72418,0.00,18.100,0,0.7400,6.4060,97.20,2.0651,24,666.0,20.20,385.96,19.52,17.10
|
||||
5.66637,0.00,18.100,0,0.7400,6.2190,100.00,2.0048,24,666.0,20.20,395.69,16.59,18.40
|
||||
9.96654,0.00,18.100,0,0.7400,6.4850,100.00,1.9784,24,666.0,20.20,386.73,18.85,15.40
|
||||
12.80230,0.00,18.100,0,0.7400,5.8540,96.60,1.8956,24,666.0,20.20,240.52,23.79,10.80
|
||||
10.67180,0.00,18.100,0,0.7400,6.4590,94.80,1.9879,24,666.0,20.20,43.06,23.98,11.80
|
||||
6.28807,0.00,18.100,0,0.7400,6.3410,96.40,2.0720,24,666.0,20.20,318.01,17.79,14.90
|
||||
9.92485,0.00,18.100,0,0.7400,6.2510,96.60,2.1980,24,666.0,20.20,388.52,16.44,12.60
|
||||
9.32909,0.00,18.100,0,0.7130,6.1850,98.70,2.2616,24,666.0,20.20,396.90,18.13,14.10
|
||||
7.52601,0.00,18.100,0,0.7130,6.4170,98.30,2.1850,24,666.0,20.20,304.21,19.31,13.00
|
||||
6.71772,0.00,18.100,0,0.7130,6.7490,92.60,2.3236,24,666.0,20.20,0.32,17.44,13.40
|
||||
5.44114,0.00,18.100,0,0.7130,6.6550,98.20,2.3552,24,666.0,20.20,355.29,17.73,15.20
|
||||
5.09017,0.00,18.100,0,0.7130,6.2970,91.80,2.3682,24,666.0,20.20,385.09,17.27,16.10
|
||||
8.24809,0.00,18.100,0,0.7130,7.3930,99.30,2.4527,24,666.0,20.20,375.87,16.74,17.80
|
||||
9.51363,0.00,18.100,0,0.7130,6.7280,94.10,2.4961,24,666.0,20.20,6.68,18.71,14.90
|
||||
4.75237,0.00,18.100,0,0.7130,6.5250,86.50,2.4358,24,666.0,20.20,50.92,18.13,14.10
|
||||
4.66883,0.00,18.100,0,0.7130,5.9760,87.90,2.5806,24,666.0,20.20,10.48,19.01,12.70
|
||||
8.20058,0.00,18.100,0,0.7130,5.9360,80.30,2.7792,24,666.0,20.20,3.50,16.94,13.50
|
||||
7.75223,0.00,18.100,0,0.7130,6.3010,83.70,2.7831,24,666.0,20.20,272.21,16.23,14.90
|
||||
6.80117,0.00,18.100,0,0.7130,6.0810,84.40,2.7175,24,666.0,20.20,396.90,14.70,20.00
|
||||
4.81213,0.00,18.100,0,0.7130,6.7010,90.00,2.5975,24,666.0,20.20,255.23,16.42,16.40
|
||||
3.69311,0.00,18.100,0,0.7130,6.3760,88.40,2.5671,24,666.0,20.20,391.43,14.65,17.70
|
||||
6.65492,0.00,18.100,0,0.7130,6.3170,83.00,2.7344,24,666.0,20.20,396.90,13.99,19.50
|
||||
5.82115,0.00,18.100,0,0.7130,6.5130,89.90,2.8016,24,666.0,20.20,393.82,10.29,20.20
|
||||
7.83932,0.00,18.100,0,0.6550,6.2090,65.40,2.9634,24,666.0,20.20,396.90,13.22,21.40
|
||||
3.16360,0.00,18.100,0,0.6550,5.7590,48.20,3.0665,24,666.0,20.20,334.40,14.13,19.90
|
||||
3.77498,0.00,18.100,0,0.6550,5.9520,84.70,2.8715,24,666.0,20.20,22.01,17.15,19.00
|
||||
4.42228,0.00,18.100,0,0.5840,6.0030,94.50,2.5403,24,666.0,20.20,331.29,21.32,19.10
|
||||
15.57570,0.00,18.100,0,0.5800,5.9260,71.00,2.9084,24,666.0,20.20,368.74,18.13,19.10
|
||||
13.07510,0.00,18.100,0,0.5800,5.7130,56.70,2.8237,24,666.0,20.20,396.90,14.76,20.10
|
||||
4.34879,0.00,18.100,0,0.5800,6.1670,84.00,3.0334,24,666.0,20.20,396.90,16.29,19.90
|
||||
4.03841,0.00,18.100,0,0.5320,6.2290,90.70,3.0993,24,666.0,20.20,395.33,12.87,19.60
|
||||
3.56868,0.00,18.100,0,0.5800,6.4370,75.00,2.8965,24,666.0,20.20,393.37,14.36,23.20
|
||||
4.64689,0.00,18.100,0,0.6140,6.9800,67.60,2.5329,24,666.0,20.20,374.68,11.66,29.80
|
||||
8.05579,0.00,18.100,0,0.5840,5.4270,95.40,2.4298,24,666.0,20.20,352.58,18.14,13.80
|
||||
6.39312,0.00,18.100,0,0.5840,6.1620,97.40,2.2060,24,666.0,20.20,302.76,24.10,13.30
|
||||
4.87141,0.00,18.100,0,0.6140,6.4840,93.60,2.3053,24,666.0,20.20,396.21,18.68,16.70
|
||||
15.02340,0.00,18.100,0,0.6140,5.3040,97.30,2.1007,24,666.0,20.20,349.48,24.91,12.00
|
||||
10.23300,0.00,18.100,0,0.6140,6.1850,96.70,2.1705,24,666.0,20.20,379.70,18.03,14.60
|
||||
14.33370,0.00,18.100,0,0.6140,6.2290,88.00,1.9512,24,666.0,20.20,383.32,13.11,21.40
|
||||
5.82401,0.00,18.100,0,0.5320,6.2420,64.70,3.4242,24,666.0,20.20,396.90,10.74,23.00
|
||||
5.70818,0.00,18.100,0,0.5320,6.7500,74.90,3.3317,24,666.0,20.20,393.07,7.74,23.70
|
||||
5.73116,0.00,18.100,0,0.5320,7.0610,77.00,3.4106,24,666.0,20.20,395.28,7.01,25.00
|
||||
2.81838,0.00,18.100,0,0.5320,5.7620,40.30,4.0983,24,666.0,20.20,392.92,10.42,21.80
|
||||
2.37857,0.00,18.100,0,0.5830,5.8710,41.90,3.7240,24,666.0,20.20,370.73,13.34,20.60
|
||||
3.67367,0.00,18.100,0,0.5830,6.3120,51.90,3.9917,24,666.0,20.20,388.62,10.58,21.20
|
||||
5.69175,0.00,18.100,0,0.5830,6.1140,79.80,3.5459,24,666.0,20.20,392.68,14.98,19.10
|
||||
4.83567,0.00,18.100,0,0.5830,5.9050,53.20,3.1523,24,666.0,20.20,388.22,11.45,20.60
|
||||
0.15086,0.00,27.740,0,0.6090,5.4540,92.70,1.8209,4,711.0,20.10,395.09,18.06,15.20
|
||||
0.18337,0.00,27.740,0,0.6090,5.4140,98.30,1.7554,4,711.0,20.10,344.05,23.97,7.00
|
||||
0.20746,0.00,27.740,0,0.6090,5.0930,98.00,1.8226,4,711.0,20.10,318.43,29.68,8.10
|
||||
0.10574,0.00,27.740,0,0.6090,5.9830,98.80,1.8681,4,711.0,20.10,390.11,18.07,13.60
|
||||
0.11132,0.00,27.740,0,0.6090,5.9830,83.50,2.1099,4,711.0,20.10,396.90,13.35,20.10
|
||||
0.17331,0.00,9.690,0,0.5850,5.7070,54.00,2.3817,6,391.0,19.20,396.90,12.01,21.80
|
||||
0.27957,0.00,9.690,0,0.5850,5.9260,42.60,2.3817,6,391.0,19.20,396.90,13.59,24.50
|
||||
0.17899,0.00,9.690,0,0.5850,5.6700,28.80,2.7986,6,391.0,19.20,393.29,17.60,23.10
|
||||
0.28960,0.00,9.690,0,0.5850,5.3900,72.90,2.7986,6,391.0,19.20,396.90,21.14,19.70
|
||||
0.26838,0.00,9.690,0,0.5850,5.7940,70.60,2.8927,6,391.0,19.20,396.90,14.10,18.30
|
||||
0.23912,0.00,9.690,0,0.5850,6.0190,65.30,2.4091,6,391.0,19.20,396.90,12.92,21.20
|
||||
0.17783,0.00,9.690,0,0.5850,5.5690,73.50,2.3999,6,391.0,19.20,395.77,15.10,17.50
|
||||
0.22438,0.00,9.690,0,0.5850,6.0270,79.70,2.4982,6,391.0,19.20,396.90,14.33,16.80
|
||||
0.06263,0.00,11.930,0,0.5730,6.5930,69.10,2.4786,1,273.0,21.00,391.99,9.67,22.40
|
||||
0.04527,0.00,11.930,0,0.5730,6.1200,76.70,2.2875,1,273.0,21.00,396.90,9.08,20.60
|
||||
0.06076,0.00,11.930,0,0.5730,6.9760,91.00,2.1675,1,273.0,21.00,396.90,5.64,23.90
|
||||
0.10959,0.00,11.930,0,0.5730,6.7940,89.30,2.3889,1,273.0,21.00,393.45,6.48,22.00
|
||||
0.04741,0.00,11.930,0,0.5730,6.0300,80.80,2.5050,1,273.0,21.00,396.90,7.88,11.90
20
lipatov_ilya_lab_5/lab5.py
Normal file
@@ -0,0 +1,20 @@
from sklearn.metrics import mean_absolute_percentage_error
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.pipeline import Pipeline
import pandas as pd

data = pd.read_csv('boston.csv')
X = data[['CRIM', 'RM', 'RAD']]
y = data['MEDV']

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
lin = LinearRegression()
polynomial_features = PolynomialFeatures(degree=1)  # degree was varied from 1 to 6 in the tests
pipeline = Pipeline([("polynomial_features", polynomial_features), ("linear_regression", lin)])
pipeline.fit(X_train, y_train)
# Predict through the fitted pipeline instead of re-fitting the transformer on the test data
y_predict = pipeline.predict(X_test)
print('Prediction: ', y_predict)
print('Score:', pipeline.score(X_test, y_test))
print('Error:', mean_absolute_percentage_error(y_test, y_predict))
1001
madyshev_egor_lab_5/StudentsPerformance.csv
Normal file
57
madyshev_egor_lab_5/main.py
Normal file
@@ -0,0 +1,57 @@
import numpy as np
import pandas as pb
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression, Perceptron, LogisticRegression, Lasso, Ridge
from sklearn.neural_network import MLPClassifier, MLPRegressor
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import LabelEncoder, OneHotEncoder, MinMaxScaler
from sklearn.tree import DecisionTreeRegressor, DecisionTreeClassifier
from sklearn.preprocessing import PolynomialFeatures

df = pb.read_csv("StudentsPerformance.csv", sep=",", encoding="windows-1251")
df1 = df
print("Data before preparation:")
with pb.option_context('display.max_rows', None, 'display.max_columns', None, 'display.width', 1000):
    print(df[:5])


def prepareStringData(columnName):
    # Map each unique string value of the column to an integer code
    uniq = df[columnName].unique()
    mp = {}
    for i in uniq:
        mp[i] = len(mp)
    df[columnName] = df[columnName].map(mp)


print()
print("Data after preparation:")
prepareStringData("gender")
prepareStringData("race/ethnicity")
prepareStringData("parental level of education")
prepareStringData("lunch")
prepareStringData("test preparation course")
with pb.option_context('display.max_rows', None, 'display.max_columns', None, 'display.width', 1000):
    print(df[:5])


X = df[["gender", "race/ethnicity", "lunch", "test preparation course", "parental level of education", "reading score", "writing score"]]
y = df["math score"]
X_train, X_Test, y_train, y_test = train_test_split(X, y, test_size=0.26, random_state=42)
lnr = LinearRegression()
lnr = lnr.fit(X_train, y_train)

poly_regression = make_pipeline(PolynomialFeatures(degree=4), LinearRegression())
poly_regression.fit(X_train, y_train)

lasso = Lasso()
lasso.fit(X_train, y_train)

ridge = Ridge()
ridge.fit(X_train, y_train)

print("Linear regression: ", lnr.score(X_Test, y_test))
print("Polynomial regression: ", poly_regression.score(X_Test, y_test))
print("Lasso regression: ", lasso.score(X_Test, y_test))
print("Ridge regression: ", ridge.score(X_Test, y_test))
41
madyshev_egor_lab_5/readme.md
Normal file
@@ -0,0 +1,41 @@
# Task

Use the regression method from your variant on the data from Table 1 for your variant (table 10), formulating the task yourself. Evaluate how well it suits the task you formulated.

## Variant task

Polynomial regression

## Solution

### Running the program

To run the program, run the main.py file containing the program code.

### Technologies used

The program uses the following libraries:

- numpy - a library for working with arrays and matrices.
- matplotlib - a library for creating plots and visualizing data.
- sklearn - a library for machine learning and data analysis.

### What the program does

The program reads data from a csv file, prepares it for the models by converting the text attributes to numbers, and then tries to learn to predict the math score from the remaining columns using several models (a sketch of an equivalent pandas-based encoding is shown after this README).

### Tests

Data before preparation:

gender race/ethnicity parental level of education lunch test preparation course math score reading score writing score
0 female group B bachelor's degree standard none 72 72 74
1 female group C some college standard completed 69 90 88
2 female group B master's degree standard none 90 95 93
3 male group A associate's degree free/reduced none 47 57 44
4 male group C some college standard none 76 78 75

Data after preparation:

gender race/ethnicity parental level of education lunch test preparation course math score reading score writing score
0 0 0 0 0 0 72 72 74
1 0 1 1 0 1 69 90 88
2 0 0 2 0 0 90 95 93
3 1 2 3 1 0 47 57 44
4 1 1 1 0 0 76 78 75

Linear regression: 0.8769480272687482
Polynomial regression: 0.736490555768213
Lasso regression: 0.8299946331354273
Ridge regression: 0.8768384994076267

Logistic regression was not suitable, since it requires the response variable to be binary.

The results of the four models show that linear regression is a reasonably good fit for predicting the math score.
Ridge regression gives very similar results; lasso comes next, and polynomial regression performs worst.

Conclusion: Linear models, namely linear regression and ridge regression, are a reasonably good fit for predicting the math exam score.
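Not part of the submitted diff: the string-to-number preparation described above (implemented as `prepareStringData` in main.py) can also be expressed with pandas' built-in `factorize`, which likewise assigns integer codes in order of first appearance. A minimal equivalent sketch, assuming the same StudentsPerformance.csv and encoding as main.py:

```python
# Equivalent of prepareStringData using pandas.factorize.
import pandas as pd

df = pd.read_csv("StudentsPerformance.csv", sep=",", encoding="windows-1251")

categorical_columns = [
    "gender", "race/ethnicity", "parental level of education",
    "lunch", "test preparation course",
]
for column in categorical_columns:
    # factorize returns (integer codes, unique values); codes follow order of appearance
    df[column], _ = pd.factorize(df[column])

print(df.head())
```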
1001
madyshev_egor_lab_6/StudentsPerformance.csv
Normal file
49
madyshev_egor_lab_6/main.py
Normal file
@@ -0,0 +1,49 @@
import numpy as np
import pandas as pb
import matplotlib.pyplot as plt
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression, Perceptron, LogisticRegression, Lasso, Ridge
from sklearn.neural_network import MLPClassifier, MLPRegressor
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import LabelEncoder, OneHotEncoder, MinMaxScaler
from sklearn.tree import DecisionTreeRegressor, DecisionTreeClassifier
from sklearn.preprocessing import PolynomialFeatures

df = pb.read_csv("StudentsPerformance.csv", sep=",", encoding="windows-1251")
df1 = df
print("Data before preparation:")
with pb.option_context('display.max_rows', None, 'display.max_columns', None, 'display.width', 1000):
    print(df[:5])


def prepareStringData(columnName):
    # Map each unique string value of the column to an integer code
    uniq = df[columnName].unique()
    mp = {}
    for i in uniq:
        mp[i] = len(mp)
    df[columnName] = df[columnName].map(mp)


print()
print("Data after preparation:")
prepareStringData("gender")
prepareStringData("race/ethnicity")
prepareStringData("parental level of education")
prepareStringData("lunch")
prepareStringData("test preparation course")
with pb.option_context('display.max_rows', None, 'display.max_columns', None, 'display.width', 1000):
    print(df[:5])


X = df[["gender", "race/ethnicity", "lunch", "parental level of education", "reading score", "writing score", "math score"]]
y = df["test preparation course"]
X_train, X_Test, y_train, y_test = train_test_split(X, y, test_size=0.26, random_state=42)

mlpr = MLPRegressor()
mlpc = MLPClassifier()
mlpr.fit(X_train, y_train)
mlpc.fit(X_train, y_train)

print("MLPRegressor:", mlpr.score(X_Test, y_test))
print("MLPClassifier:", mlpc.score(X_Test, y_test))
38
madyshev_egor_lab_6/readme.md
Normal file
@@ -0,0 +1,38 @@
# Task

Use a neural network (even variants – MLPRegressor, odd variants – MLPClassifier) on the data from table 1 for your variant, formulating the task yourself. Interpret the results and assess how well the network suits the task you formulated.

## Variant

MLPRegressor

## Solution

### Running the program

Run the file main.py, which contains the program code.

### Technologies used

The program uses the following libraries:

- numpy – a library for working with arrays and matrices.
- matplotlib – a library for plotting and data visualization.
- sklearn – a library for machine learning and data analysis.

### What the program does

The program reads the data from a csv file, prepares it for the models by mapping the text attributes to integer codes, and then tries to learn to predict completion of the test preparation course with neural network models; a sketch of this pipeline is shown below.
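The same preparation and model can be expressed as a single scikit-learn pipeline. This is only a minimal sketch: the scaler, hidden-layer size and max_iter are assumptions for illustration, not the parameters used in main.py (column names come from the dataset shown below).

```python
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import MinMaxScaler
from sklearn.neural_network import MLPClassifier

df = pd.read_csv("StudentsPerformance.csv")
# map every unique string value to an integer code, as in main.py
for col in ["gender", "race/ethnicity", "parental level of education", "lunch", "test preparation course"]:
    df[col] = df[col].map({v: i for i, v in enumerate(df[col].unique())})

X = df.drop(columns=["test preparation course"])
y = df["test preparation course"]
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.26, random_state=42)

# MLPs are sensitive to feature scale, so scaling usually helps
clf = make_pipeline(MinMaxScaler(), MLPClassifier(hidden_layer_sizes=(64,), max_iter=1000, random_state=42))
clf.fit(X_train, y_train)
print("accuracy:", clf.score(X_test, y_test))
```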
### Tests

Данные без подготовки:
gender race/ethnicity parental level of education lunch test preparation course math score reading score writing score
0 female group B bachelor's degree standard none 72 72 74
1 female group C some college standard completed 69 90 88
2 female group B master's degree standard none 90 95 93
3 male group A associate's degree free/reduced none 47 57 44
4 male group C some college standard none 76 78 75

Данные после подготовки:
gender race/ethnicity parental level of education lunch test preparation course math score reading score writing score
0 0 0 0 0 0 72 72 74
1 0 1 1 0 1 69 90 88
2 0 0 2 0 0 90 95 93
3 1 2 3 1 0 47 57 44
4 1 1 1 0 0 76 78 75

MLPRegressor: 0.1347847602324338
MLPClassifier: 0.65

The regression model performed worse than the classification model, and even the classifier's accuracy is not high.
The target is binary (0 or 1 in the test data), so a model that guesses at random can already reach an accuracy of about 0.5; a baseline check is sketched below.

Conclusion: the neural network models MLPRegressor and MLPClassifier are a poor fit for the task as formulated (predicting completion of the preparation course from the other attributes), or the data simply does not contain such a dependency.
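One way to check this claim is to compare the 0.65 accuracy with a trivial baseline. A minimal sketch, not part of main.py (the binary target encoding and the numeric feature subset are assumptions for illustration):

```python
import pandas as pd
from sklearn.dummy import DummyClassifier
from sklearn.model_selection import train_test_split

df = pd.read_csv("StudentsPerformance.csv")
y = (df["test preparation course"] == "none").astype(int)   # binary target, mirroring main.py
X = df[["math score", "reading score", "writing score"]]     # assumed numeric subset

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.26, random_state=42)
# always predicts the most frequent class; a useful lower bound for accuracy
baseline = DummyClassifier(strategy="most_frequent").fit(X_train, y_train)
print("baseline accuracy:", baseline.score(X_test, y_test))
```

If MLPClassifier only marginally beats this baseline, the chosen features carry little signal about course completion.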
65
madyshev_egor_lab_7/main.py
Normal file
@@ -0,0 +1,65 @@
import numpy as np
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from keras.models import Sequential
from keras.layers import LSTM, Dense, Embedding

# Read the source text from a file
with open('mumu.txt', 'r', encoding='utf-8') as file:
    text = file.read()

# Model parameters
seq_length = 50   # length of the input sequences (in words)
num_epochs = 50
gen_length = 200  # length of the generated text (in words)
seed_text = "Начнем с этого"  # seed phrase for generation

# Create a Tokenizer and fit it on the text
tokenizer = Tokenizer()
tokenizer.fit_on_texts([text])
vocab_size = len(tokenizer.word_index) + 1  # vocabulary size

# Convert the text into a sequence of word indices
sequences = tokenizer.texts_to_sequences([text])[0]

# Build input/output pairs: seq_length words -> the next word
X_data = []
y_data = []
for i in range(seq_length, len(sequences)):
    sequence = sequences[i - seq_length:i]
    target = sequences[i]
    X_data.append(sequence)
    y_data.append(target)

X = np.array(X_data)
y = np.array(y_data)

# Build the RNN model
model = Sequential()
model.add(Embedding(input_dim=vocab_size, output_dim=128, input_length=seq_length))
model.add(LSTM(256, return_sequences=True))
model.add(LSTM(256))
model.add(Dense(vocab_size, activation='softmax'))

# Compile the model
model.compile(loss='sparse_categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

# Train the model
model.fit(X, y, epochs=num_epochs, batch_size=64, verbose=1)


# Text generation: repeatedly predict the next word (greedy argmax) and append it to the seed
def generate_text(seed_text, gen_length):
    generated_text = seed_text
    for _ in range(gen_length):
        sequence = tokenizer.texts_to_sequences([seed_text])[0]
        sequence = pad_sequences([sequence], maxlen=seq_length)
        prediction = model.predict(sequence, verbose=0)
        predicted_index = np.argmax(prediction)
        predicted_word = [word for word, index in tokenizer.word_index.items() if index == predicted_index][0]
        generated_text += " " + predicted_word
        seed_text += " " + predicted_word
    return generated_text


# Generate text
generated_text = generate_text(seed_text, gen_length)
print(generated_text)
13
madyshev_egor_lab_7/mumu.txt
Normal file
@@ -0,0 +1,13 @@
Артем поежился, представляя себе туннель за семисотым метром. Страшно было даже помыслить о том, чтобы показаться там. За семисотый метр на север не отваживался ходить никто. Патрули доезжали до пятисотого и, осветив пограничный столб прожектором с дрезины, убедившись, что никакая дрянь не переползла за него, торопливо возвращались. Разведчики, здоровые мужики, бывшие морские пехотинцы, и те останавливались на шестьсот восьмидесятом, прятали горящие сигареты в ладонях и замирали, прильнув к приборам ночного видения. А потом медленно, тихо отходили назад, не спуская глаз с туннеля и ни в коем случае не поворачиваясь к нему спиной.
Дозор, в котором они сейчас стояли, находился на четыреста пятидесятом, в пятидесяти метрах от пограничного столба. Но граница проверялась раз в день, и осмотр закончился уже несколько часов назад. Теперь их пост был крайним, а за часы, прошедшие со времени последней проверки, твари, которых патруль мог спугнуть, наверняка снова начали подползать. Тянуло их на огонек, поближе к людям…
Артем уселся на свое место и спросил:
– А что там с Полежаевской случилось?
И хотя он уже знал эту леденящую кровь историю – рассказывали челноки на станции, его тянуло послушать ее еще раз, как неудержимо тянет детей на страшные байки о безголовых мутантах и упырях, похищающих младенцев.
– С Полежаевской? А ты не слышал? Странная история с ними вышла. Странная и страшная. Сначала у них разведчики стали пропадать. Уходили в туннели и не возвращались. У них, правда, салаги разведчики, не то что наши, но у них ведь и станция поменьше, и народу там не столько живет… Жило. Так вот, стали, значит, у них пропадать разведчики. Один отряд ушел – и нет его. Сначала думали, задержало его что-то, у них там еще туннель петляет, совсем как у нас, – Артему стало не по себе при этих словах, – и ни дозорам, ни тем более со станции ничего не видно, сколько ни свети. Нет их и нет, полчаса нет, час нет, два нет. Казалось бы, где там пропасть – всего ведь на километр уходили, им запретили дальше идти, да они и сами не дураки… В общем, так и не дождались, послали усиленный дозор, те искали, искали, кричали, кричали – все зря. Нету. Пропали разведчики. И ладно еще, что никто не видел, что с ними случилось. Плохо, что слышно ничего не было… Ни звука. И следов никаких.
Артем уже начал жалеть, что попросил Петра Андреевича рассказать о Полежаевской. Тот был то ли лучше осведомлен, то ли сам что-то додумывал, только рассказывал он такие подробности, какие и не снились челнокам, уж на что те были мастера и любители рассказать байку. От подробностей этих мороз шел по коже и неуютно становилось даже у костра, а любые, пусть и совсем безобидные шорохи из туннеля будоражили воображение.
– Ну, так вот. Стрельбы слышно не было, те и решили, что разведчики, наверное, ушли от них – недовольны, может, чем-то были и сбежали. Ну, и шут с ними. Хотят легкой жизни, хотят со всяким отребьем мотаться, с анархистами всякими, пусть себе мотаются. Так проще было думать. Спокойнее. А через неделю еще одна разведгруппа пропала. Те вообще не должны были дальше полукилометра от станции отходить. И опять та же история. Ни звука, ни следа. Как в воду канули. Тут на станции забеспокоились. Это уже непорядок, когда за неделю два отряда исчезают. С этим уже надо что-то делать. Меры, значит, принимать. Ну, они выставили на трехсотом кордон. Мешков с песком натаскали, пулемет установили, прожектор – по всем правилам фортификации. Послали на Беговую гонца – у них с Беговой и с Улицей 1905 года конфедерация. Раньше Октябрьское Поле тоже было с ними, но потом там что-то случилось, никто не знает точно что, авария какая-то: жить там стало нельзя, и оттуда все разбежались, ну, да это неважно. Послали они на Беговую гонца – предупредить, мол, творится что-то неладное, и о помощи попросить в случае чего. Не успел первый гонец до Беговой добраться, дня не прошло – те еще ответ обдумывали, – прибегает второй, весь в мыле, и рассказывает, что их усиленный кордон погиб поголовно, не сделав ни единого выстрела. Всех перерезали. И словно во сне зарезали – вот что страшно-то! А ведь они и не смогли бы заснуть после пережитого страха, не говоря уж о приказах и инструкциях. Тут на Беговой поняли, что, если ничего не сделать, скоро та же петрушка и у них начнется. Снарядили ударный отряд из ветеранов – около сотни человек, пулеметы, гранатометы… Времени, конечно, это заняло порядком, дня полтора, но все же отправили группу на помощь. А когда та вошла на Полежаевскую, там уже ни одной живой души не было. И тел не было – только кровь повсюду. Вот так вот. И черт знает, кто это сделал. Я вот не верю, что люди вообще на такое способны.
17
madyshev_egor_lab_7/readme.md
Normal file
@@ -0,0 +1,17 @@
# Task

Choose a literary text (even variants – in Russian, odd – in English) and train a recurrent neural network on it for the text generation task. Tune the architecture and parameters so that the result is as meaningful as possible.

## Variant

Russian language

## Solution

### Running the program

Run the file main.py, which contains the program code.

### Technologies used

The program uses the following libraries:

- NumPy: used for array and matrix operations, in particular for preparing the data for training the deep learning model.
- Keras: used to build and train the neural network. The code uses the Tokenizer class for text processing, Sequential for building the model, and the LSTM, Dense and Embedding layers.

### What the program does

The program reads a text from a file, trains a word-level LSTM language model on it, and generates new text from a seed phrase.

### Tests

Generated text:

Начнем с этого прильнув к к приборам ночного видения а потом потом тихо отходили не спуская глаз и туннеля и ни в коем случае не поворачиваясь к нему спиной дозор в котором они сейчас стояли находился на четыреста пятидесятом

A test on an English-language text showed that these model parameters do not transfer: the generated output was just a jumble of letters. The word repetitions visible above are consistent with greedy (argmax) decoding; a sampling-based alternative is sketched below.
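One common way to get closer to a "meaningful" result without retraining is to sample the next word instead of always taking the argmax. A minimal sketch (the temperature value is an assumption, and the function is not part of main.py):

```python
import numpy as np

def sample_with_temperature(probabilities, temperature=0.8):
    # rescale the predicted distribution and draw a word index instead of taking argmax
    logits = np.log(np.asarray(probabilities, dtype="float64") + 1e-9) / temperature
    exp_logits = np.exp(logits)
    probs = exp_logits / exp_logits.sum()
    return int(np.random.choice(len(probs), p=probs))

# usage inside generate_text from main.py (hypothetical change):
#   predicted_index = sample_with_temperature(prediction[0], temperature=0.8)
```

Lower temperatures stay closer to greedy decoding; higher temperatures reduce repetition at the cost of coherence.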
44
malkova_anastasia_lab_1/README.md
Normal file
@@ -0,0 +1,44 @@
# Laboratory work No. 1

> Working with typical datasets and different models

# Task

Generate a specific type of data, compare different models on it and visualize their quality on plots.

Data: make_classification (n_samples=500, n_features=2, n_redundant=0, n_informative=2, random_state=rs, n_clusters_per_class=1)

Models:
* Linear regression
* Perceptron
* Ridge polynomial regression (degree 3, alpha = 1.0)

### How to run

1. Install python, numpy, sklearn, matplotlib
2. Run `python main.py` in the project root

### Technologies used

* Programming language `python`
* Libraries `numpy, sklearn, matplotlib`
* IDE `PyCharm`

### What does the program do?

It generates a classification dataset with make_classification.

It trains 3 models on it:

- Linear regression
- Perceptron
- Ridge polynomial regression (degree 3, alpha = 1.0)

It collects the final model scores:

- Linear regression – coefficient of determination R2
- Perceptron – mean accuracy on the test data
- Ridge polynomial regression – cross-validated mean squared error (see the sketch below)
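Note on the cross-validation score: scikit-learn's `cross_val_score` with `scoring="neg_mean_squared_error"` returns negated errors, which is why the mean is negated in models.py before being reported. A self-contained sketch on synthetic data (the data itself is only an illustration, not the lab's dataset):

```python
import numpy as np
from sklearn.linear_model import Ridge
from sklearn.model_selection import cross_val_score
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures

rng = np.random.RandomState(0)
x = rng.uniform(-1, 1, size=(100, 2))
y = x[:, 0] ** 3 + 0.1 * rng.normal(size=100)

pipeline = Pipeline([("polynomial_features", PolynomialFeatures(degree=3, include_bias=False)),
                     ("ridge_regression", Ridge(alpha=1.0))])
# scores are negative MSE values; negate the mean to report an ordinary (positive) MSE
scores = cross_val_score(pipeline, x, y, scoring="neg_mean_squared_error", cv=5)
print("mean MSE:", -scores.mean())   # lower is better, unlike R2 or accuracy
```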


The best result was shown by the perceptron model.
16
malkova_anastasia_lab_1/dataset.py
Normal file
@@ -0,0 +1,16 @@
import numpy as np
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split


def generate_dataset():
    x, y = make_classification(n_samples=500, n_features=2, n_redundant=0,
                               n_informative=2, random_state=0, n_clusters_per_class=1)
    # add uniform noise so the classes are not perfectly separable
    random = np.random.RandomState(2)
    x += 2.5 * random.uniform(size=x.shape)
    return x, y


def split_dataset(x, y):
    return train_test_split(
        x, y, test_size=.05, random_state=42)
19
malkova_anastasia_lab_1/main.py
Normal file
@@ -0,0 +1,19 @@
from dataset import generate_dataset, split_dataset
from models import launch_linear_regression, launch_perceptron, launch_ridge_poly_regression
from plots import show_plot

x, y = generate_dataset()

x_train, x_test, y_train, y_test = split_dataset(x, y)

my_linear_model, linear_model_score = launch_linear_regression(
    x_train, x_test, y_train, y_test)
my_perceptron_model, perceptron_model_score = launch_perceptron(
    x_train, x_test, y_train, y_test)
my_polynomial_model, polynomial_model_score = launch_ridge_poly_regression(
    x_train, x_test, y_train, y_test)

show_plot(x, x_train, x_test, y_train, y_test,
          my_linear_model, linear_model_score,
          my_perceptron_model, perceptron_model_score,
          my_polynomial_model, polynomial_model_score)
37
malkova_anastasia_lab_1/models.py
Normal file
@@ -0,0 +1,37 @@
from sklearn.linear_model import LinearRegression, Perceptron, Ridge
from sklearn.preprocessing import PolynomialFeatures
from sklearn.model_selection import cross_val_score
from sklearn.pipeline import Pipeline


# LinearRegression: score() returns the coefficient of determination R^2
def launch_linear_regression(x_train, x_test, y_train, y_test):
    my_linear_model = LinearRegression()
    my_linear_model.fit(x_train, y_train)
    linear_model_score = my_linear_model.score(
        x_test, y_test)
    print('linear_model_score: ', linear_model_score)
    return my_linear_model, linear_model_score


# Perceptron: score() returns the mean accuracy on the test data
def launch_perceptron(x_train, x_test, y_train, y_test):
    my_perceptron_model = Perceptron()
    my_perceptron_model.fit(x_train, y_train)
    perceptron_model_score = my_perceptron_model.score(
        x_test, y_test)
    print('perceptron_model_score: ', perceptron_model_score)
    return my_perceptron_model, perceptron_model_score


# RidgePolyRegression: polynomial features + ridge regression, scored by cross-validated MSE
def launch_ridge_poly_regression(x_train, x_test, y_train, y_test):
    my_polynomial_model = PolynomialFeatures(degree=3, include_bias=False)
    ridge = Ridge(alpha=1)
    pipeline = Pipeline(
        [("polynomial_features", my_polynomial_model), ("ridge_regression", ridge)])
    pipeline.fit(x_train, y_train)
    # cross_val_score returns negative MSE values, so negate the mean to report a positive error
    scores = cross_val_score(pipeline, x_test, y_test,
                             scoring="neg_mean_squared_error", cv=5)
    polynomial_model_score = -scores.mean()
    print('mean polynomial_model_score: ', polynomial_model_score)
    # return the fitted pipeline (not only the PolynomialFeatures step) so that show_plot can draw its decision surface
    return pipeline, polynomial_model_score
BIN
malkova_anastasia_lab_1/plots.jpg
Normal file
After Width: | Height: | Size: 194 KiB |
71
malkova_anastasia_lab_1/plots.py
Normal file
@@ -0,0 +1,71 @@
import numpy as np
from matplotlib.colors import ListedColormap
from matplotlib.axes import Axes
from matplotlib import pyplot as plt

TRAIN_DATA_ROW_LENGTH = 3
TEST_DATA_ROW_LENGTH = 6
LINEAR_REGRESSION_PLOT_INDEX = 6
PERCEPTRON_REGRESSION_PLOT_INDEX = 7
RIDGE_POLY_REGRESSION_REGRESSION_PLOT_INDEX = 8


def show_plot(x, x_train, x_test, y_train, y_test, my_linear_model, linear_model_score, my_perceptron_model, perceptron_model_score, pipeline, polynomial_model_score):
    h = .02  # step of the regular grid
    x0_min, x0_max = x[:, 0].min() - .5, x[:, 0].max() + .5
    x1_min, x1_max = x[:, 1].min() - .5, x[:, 1].max() + .5
    xx0, xx1 = np.meshgrid(np.arange(x0_min, x0_max, h),
                           np.arange(x1_min, x1_max, h))
    cm = plt.cm.RdBu

    cm_bright = ListedColormap(['#FF0000', '#0000FF'])

    for i in range(9):
        current_subplot = plt.subplot(3, 3, i+1)
        if i < TRAIN_DATA_ROW_LENGTH:
            # first row: training points
            current_subplot.scatter(
                x_train[:, 0], x_train[:, 1], c=y_train, cmap=cm_bright)
        elif i < TEST_DATA_ROW_LENGTH:
            # second row: test points
            current_subplot.scatter(
                x_test[:, 0], x_test[:, 1], c=y_test, cmap=cm_bright, alpha=0.6)
        else:
            # third row: decision surface of each model plus the data points
            if i == LINEAR_REGRESSION_PLOT_INDEX:
                show_gradient(my_linear_model, current_subplot=current_subplot,
                              title='LinearRegression', score=linear_model_score, xx0=xx0, xx1=xx1, cm=cm)

            elif i == PERCEPTRON_REGRESSION_PLOT_INDEX:
                show_gradient(my_perceptron_model, current_subplot=current_subplot,
                              title='Perceptron', score=perceptron_model_score, xx0=xx0, xx1=xx1, cm=cm)

            elif i == RIDGE_POLY_REGRESSION_REGRESSION_PLOT_INDEX:
                current_subplot.set_title('RidgePolyRegression')
                show_gradient(pipeline, current_subplot=current_subplot,
                              title='RidgePolyRegression', score=polynomial_model_score, xx0=xx0, xx1=xx1, cm=cm)

            current_subplot.scatter(
                x_train[:, 0], x_train[:, 1], c=y_train, cmap=cm_bright)
            current_subplot.scatter(
                x_test[:, 0], x_test[:, 1], c=y_test, cmap=cm_bright, alpha=0.6)

    plt.show()


def show_gradient(model, current_subplot: Axes, title: str, score: float, xx0, xx1, cm):
    current_subplot.set_title(title)
    if hasattr(model, "decision_function"):
        Z = model.decision_function(np.c_[xx0.ravel(), xx1.ravel()])
    elif hasattr(model, "predict_proba"):
        Z = model.predict_proba(np.c_[xx0.ravel(), xx1.ravel()])[:, 1]
    elif hasattr(model, "predict"):
        Z = model.predict(np.c_[xx0.ravel(), xx1.ravel()])
    else:
        return

    Z = Z.reshape(xx0.shape)
    current_subplot.contourf(xx0, xx1, Z, cmap=cm, alpha=.8)
    current_subplot.set_xlim(xx0.min(), xx0.max())
    current_subplot.set_ylim(xx1.min(), xx1.max())  # fixed: y-limits now use the xx1 grid
    current_subplot.set_xticks(())
    current_subplot.set_yticks(())
    current_subplot.text(xx0.max() - .3, xx1.min() + .3, ('%.2f' % score),
                         size=15, horizontalalignment='left')
45
podkorytova_yulia_lab_3/README.md
Normal file
@@ -0,0 +1,45 @@
# Laboratory work 3. Decision trees

### Task:

Part 1. Using the Titanic passenger data, solve a classification task with a decision tree: based on the passenger attributes, find the two most important of the three features considered in your variant for the survived passengers.

**Variant 20.**
Pclass, Parch, Fare

Part 2. Using the library implementation of a decision tree, solve the task from the "Decision tree web service" laboratory work of the "Artificial intelligence methods" course on 99% of your data. Check the model on the remaining one percent and draw a conclusion.
***
### How to run:
Open the file `lr3.py`, right-click it and choose "Run" from the context menu.
***
### Technologies:
**NumPy (Numerical Python)** is a library for scientific computing in Python that provides efficient numerical computation and data manipulation.

**Pandas** is a Python library that provides convenient and efficient tools for data processing and analysis. It offers high-level data structures such as DataFrame, which make it easy and flexible to work with tabular data.

**Scikit-learn (Sklearn)** is a Python library that provides tools for building and applying various machine learning algorithms, including classification, regression, clustering, dimensionality reduction and more. Scikit-learn also offers functions for data preprocessing, model evaluation and parameter selection.
***
### What the laboratory work does:
In the first part, the dataset is loaded from `titanic.csv` with *Pandas* and rows with missing values are removed.
Then three features *(Pclass, Parch, Fare)* are selected and the target variable *(Survived)* is defined.
After that a decision tree classifier is trained with *random_state=241* and default values for the remaining parameters.
The result of the first part is the two most important features for the survived passengers.

In the second part, the dataset is loaded from `dataset.csv` with *Pandas*, and the device type and flexibility level are converted to numeric values.
Then two features *(Age and Device)* are selected and the target variable *(Flexibility Level)* is defined.
After that the data is split into training and test sets, and a regression tree with default parameters is created and trained.
The result of the second part is the dependence of the flexibility level on age and device type, together with an estimate of the model quality.
***
### Example output:
***Part 1:***
the first 5 rows of the table with the variant's columns, the feature importances and the 2 most important of the three features are printed.


***Part 2:***
the first 5 rows of the table with the variant's columns, the feature importances of *Age* and *Device*, and the mean squared error are printed.

***
**Conclusion**: the results of the first part show that for the survived passengers the most important features are *Fare* and *Parch*, with *Fare* being the most important one.

The results of the second part suggest that the flexibility level depends almost equally on the device type a person works from and on the student's age.
*Device* turned out to be a more important feature than *Age*, but note that the device type and the flexibility level were converted to numbers, which distorts the nature of the data:
the original column values could not be compared numerically, while after the conversion such an ordering appears (an alternative one-hot encoding is sketched below).
The computed mean squared error is much closer to 0 than to 1, which suggests the model fits the (very small) test split well.
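To check whether the artificial ordering of the *Device* column matters, the nominal feature can be one-hot encoded instead. A minimal sketch, not part of `lr3.py` (the column names and category values are taken from `lr3.py`; everything else is an illustration):

```python
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeRegressor

data = pd.read_csv('dataset.csv')
# keep the ordinal target as integers, but one-hot encode the nominal Device column
data['Flexibility Level'] = data['Flexibility Level'].map({"Low": 0, "Moderate": 1, "High": 2})
X = pd.get_dummies(data[['Age', 'Device']], columns=['Device'])  # Device_Computer, Device_Mobile, Device_Tab
y = data['Flexibility Level']

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.01, random_state=42)
tree_reg = DecisionTreeRegressor().fit(X_train, y_train)
# per-column importances make it easier to compare Age against the individual device types
print(dict(zip(X.columns, tree_reg.feature_importances_)))
```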
1205
podkorytova_yulia_lab_3/dataset.csv
Normal file
79
podkorytova_yulia_lab_3/lr3.py
Normal file
@@ -0,0 +1,79 @@
import pandas as pd
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor
import numpy as np


# Part 1 of the laboratory work
# Variant 20: Pclass, Parch, Fare
def part_one():
    data = pd.read_csv('titanic.csv', index_col='Passengerid')
    # keep only the rows without missing values in the used columns
    data = data.loc[(np.isnan(data['Pclass']) == False) & (np.isnan(data['Fare']) == False) & (np.isnan(data['Parch']) == False) & (np.isnan(data['Survived']) == False)]
    # select the required columns
    corr = data[['Pclass', 'Parch', 'Fare']]
    # print the first 5 rows of the data
    print(corr.head())
    # define the target variable
    y = data['Survived']
    # create and train the decision tree classifier
    clf = DecisionTreeClassifier(random_state=241)
    clf.fit(corr, y)
    # get and print the feature importances
    importances = clf.feature_importances_
    print(importances)
    top_importances = importances.argsort()[-2:][::-1]
    print("Наиболее важные признаки:", corr.columns[top_importances][0], "и", corr.columns[top_importances][1])


# map the device type to a number
def device_to_bool(device):
    if device == "Computer":
        return 0
    elif device == "Mobile":
        return 1
    elif device == "Tab":
        return 2


# map the flexibility level to a number
def flexibility_level_to_bool(flexibility_level):
    if flexibility_level == "Low":
        return 0
    elif flexibility_level == "Moderate":
        return 1
    elif flexibility_level == "High":
        return 2


# Part 2 of the laboratory work
# Variant 20: dependence of the flexibility level on the age and the device a person works from
def part_two():
    data = pd.read_csv('dataset.csv')
    # convert the device type to a number
    data['Device'] = data['Device'].apply(device_to_bool)
    # convert the flexibility level to a number
    data['Flexibility Level'] = data['Flexibility Level'].apply(flexibility_level_to_bool)
    # select the required columns
    X = data[['Age', 'Device']]
    # print the first 5 rows of the data
    print(X.head())
    # define the target variable
    y = data['Flexibility Level']
    # split the data into training and test sets
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.01, random_state=42)
    # create and train the regression tree
    tree_reg = DecisionTreeRegressor()
    tree_reg.fit(X_train, y_train)
    # get and print the feature importances
    importances = tree_reg.feature_importances_
    print(importances)
    top_importances = importances.argsort()[-2:][::-1]
    print("Наиболее важные признаки:", X.columns[top_importances][0], "и", X.columns[top_importances][1])
    # predict on the test data
    y_pred = tree_reg.predict(X_test)
    # evaluate the model
    mse = mean_squared_error(y_test, y_pred)
    print("Средняя квадратичная ошибка:", mse)


print("---ПЕРВАЯ ЧАСТЬ ЛАБОРАТОРНОЙ РАБОТЫ---")
part_one()
print("\n---ВТОРАЯ ЧАСТЬ ЛАБОРАТОРНОЙ РАБОТЫ---")
part_two()
BIN
podkorytova_yulia_lab_3/result1.JPG
Normal file
After Width: | Height: | Size: 23 KiB |