Compare commits
191 Commits
gordeeva_a
...
podkorytov
| Author | SHA1 | Date |
|---|---|---|
|  | c92f833265 |  |
|  | b26c54a7e4 |  |
|  | 9e6286a3a4 |  |
|  | 4a6bb8139e |  |
|  | 8b9050cce3 |  |
|  | 3e08abf42b |  |
|  | c6d41e1157 |  |
|  | 6a9310501a |  |
|  | bed476a27b |  |
|  | 2607c0dbfd |  |
|  | be253bf939 |  |
|  | 9ab1a0f1ca |  |
|  | 8bd93ee83e |  |
|  | 1fddfd2362 |  |
|  | 994129b8a9 |  |
|  | 79b5e5bb12 |  |
|  | 08aa85abbc |  |
|  | de50a5f08d |  |
|  | c37eca50a6 |  |
|  | 2906d3886f |  |
|  | e034d93062 |  |
|  | d19941c6ec |  |
|  | 2a51665e61 |  |
|  | 879a1c5730 |  |
|  | 78bec04c10 |  |
|  | c212c98a90 |  |
|  | 25acce2c79 |  |
|  | db918284b5 |  |
|  | 71cad406c2 |  |
|  | a076fd78ae |  |
|  | 124f682c8b |  |
|  | 8834f99ecf |  |
|  | dd0d45ef93 |  |
|  | c7060e6719 |  |
|  | 23bc64c816 |  |
|  | be1b6a74ae |  |
|  | 32821e551a |  |
|  | 231aa0d062 |  |
|  | 10799cb639 |  |
|  | 0f61b37f8b |  |
|  | 3a68c16a44 |  |
|  | 481361b7e0 |  |
|  | 0c414d7ab4 |  |
|  | d61b7c24f2 |  |
|  | b5fa7754bb |  |
|  | d575910860 |  |
|  | 5894881f24 |  |
|  | 92ec657bcd |  |
|  | 346241253f |  |
|  | ed5c549a0b |  |
|  | 65b47c7d0e |  |
|  | f7af263316 |  |
|  | c45de91019 |  |
|  | 4fad5585c1 |  |
|  | c9d485daca |  |
|  | 1638a80b4a |  |
|  | 6a9602359c |  |
|  | cee99b90a5 |  |
|  | bb7b8e6ac0 |  |
|  | 18ea7ee729 |  |
|  | 200d8dee7e |  |
|  | 4e1980e638 |  |
|  | a43eb72079 |  |
|  | 464b437c69 |  |
|  | 0b422e70f9 |  |
|  | b0accdaf06 |  |
|  | 716e7b7ee6 |  |
|  | 145b7336b8 |  |
|  | bea977d84c |  |
|  | 1e03e8b1d2 |  |
|  | ad5ed23a4c |  |
|  | 1e1a73de10 |  |
|  | 226dd4efe9 |  |
|  | c0217ad0d3 |  |
|  | caab9f2f8b |  |
|  | d2580ffa9e |  |
|  | a98d914e7c |  |
|  | a4985e4d76 |  |
|  | 3bb04b059b |  |
|  | a9e1145b0e |  |
|  | f44ba0d0a2 |  |
|  | ccf3bfb561 |  |
|  | 4f349a1d49 |  |
|  | f8075403a3 |  |
|  | c20695af79 |  |
|  | 33dba33cc4 |  |
|  | 41e0e8598f |  |
|  | 53a25975f9 |  |
|  | 5e00a83340 |  |
|  | 2239c15572 |  |
|  | 07333219ed |  |
|  | 5891b16f9d |  |
|  | 81874f0f84 |  |
|  | ce6105bee6 |  |
|  | ca3b734361 |  |
|  | 2f1d67dc8f |  |
|  | b9ec1fd145 |  |
|  | f84f7abaa9 |  |
|  | 5445cef67d |  |
|  | b967af636c |  |
|  | ad60c6221e |  |
|  | 8942f824d5 |  |
|  | 106e02f76b |  |
|  | 81479f5221 |  |
|  | abd650a641 |  |
|  | 15936c6996 |  |
|  | c03b5e3a94 |  |
|  | 16db685d3d |  |
|  | 84fe84a15a |  |
|  | 406315ddf7 |  |
|  | d592186245 |  |
|  | 1f70bc7eb8 |  |
|  | 7ccd400417 |  |
|  | c15ab42cd4 |  |
|  | 5eb35fe26d |  |
|  | ef485bf514 |  |
|  | 3a868e5545 |  |
|  | fc2fe74052 |  |
|  | 35826f2461 |  |
|  | 7781a379c3 |  |
|  | adca415462 |  |
|  | 9613109f32 |  |
|  | d4d25953d2 |  |
|  | d09383f064 |  |
|  | f1ccc12524 |  |
|  | 0446928927 |  |
|  | 1dffe857da |  |
|  | 19ed166e7b |  |
|  | 1a4d9cb435 |  |
|  | bac437629a |  |
|  | a062f64611 |  |
|  | 04862f1077 |  |
|  | ae4894e12d |  |
|  | 7fe16431a8 |  |
|  | 0c0bbab9e5 |  |
|  | 72507eb3af |  |
|  | 516c7aea4f |  |
|  | 7674b6f48a |  |
|  | 39f0867f3c |  |
|  | 2acd2f9b5b |  |
|  | 5865c2147c |  |
|  | b6ab40cae3 |  |
|  | fd951127b0 |  |
|  | d4e65b3373 |  |
|  | b855fc2dd4 |  |
|  | 2065c480df |  |
|  | 7ce7f86d4b |  |
|  | 5992dba12c |  |
|  | 4e17d37a32 |  |
|  | 78422060f3 |  |
|  | d0fbf61dc0 |  |
|  | 4daf833167 |  |
|  | 964a9042fa |  |
|  | 5a2ec3e827 |  |
|  | 8c47411bf1 |  |
|  | 401a5454ee |  |
|  | a847058d44 |  |
|  | de0b7d831a |  |
|  | eeb3c15730 |  |
|  | bbb46d3cd1 |  |
|  | 88b0909ebf |  |
|  | fcfd628305 |  |
|  | b239521f36 |  |
|  | 4747d4f1db |  |
|  | b049265089 |  |
|  | dfc7f8c06f |  |
|  | 71887f8076 |  |
|  | ae454ae9ef |  |
|  | 5d8a090a38 |  |
|  | 06116369e5 |  |
|  | 6ad79769f3 |  |
|  | fc35bc8158 |  |
|  | 059d5b0b12 |  |
|  | c943260db9 |  |
|  | cfc34f0e10 |  |
|  | d30caee3db |  |
|  | 0b83c390f5 |  |
|  | 8a288f0abf |  |
|  | 3543ab5163 |  |
|  | 9bf1c4845a |  |
|  | 46de7c113c |  |
|  | d26e2f5535 |  |
|  | 63e5a3a708 |  |
|  | 453d40504e |  |
|  | 8ee5b74e58 |  |
|  | 27e65004fa |  |
|  | 9c5a45feed |  |
|  | efa81f50bf |  |
|  | f11ba4d365 |  |
|  | 94a76f47d8 |  |
|  | 9a7b986e00 |  |
3
.idea/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,3 @@
# Default ignored files
/shelf/
/workspace.xml
10
.idea/IIS_2023_1.iml
generated
Normal file
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$">
      <excludeFolder url="file://$MODULE_DIR$/venv" />
    </content>
    <orderEntry type="jdk" jdkName="Python 3.9 (PyCharmProjects)" jdkType="Python SDK" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>
7
.idea/discord.xml
generated
Normal file
@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="DiscordProjectSettings">
    <option name="show" value="ASK" />
    <option name="description" value="" />
  </component>
</project>
6
.idea/inspectionProfiles/profiles_settings.xml
generated
Normal file
@@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
  <settings>
    <option name="USE_PROJECT_PROFILE" value="false" />
    <version value="1.0" />
  </settings>
</component>
10
.idea/misc.xml
generated
Normal file
@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="Black">
    <option name="sdkName" value="Python 3.9 (PyCharmProjects)" />
  </component>
  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.9 (PyCharmProjects)" project-jdk-type="Python SDK" />
  <component name="PyCharmProfessionalAdvertiser">
    <option name="shown" value="true" />
  </component>
</project>
8
.idea/modules.xml
generated
Normal file
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/IIS_2023_1.iml" filepath="$PROJECT_DIR$/.idea/IIS_2023_1.iml" />
    </modules>
  </component>
</project>
6
.idea/vcs.xml
generated
Normal file
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="$PROJECT_DIR$" vcs="Git" />
  </component>
</project>
187
.idea/workspace.xml
generated
Normal file
@@ -0,0 +1,187 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="AutoImportSettings">
    <option name="autoReloadType" value="SELECTIVE" />
  </component>
  <component name="ChangeListManager">
    <list default="true" id="0ceb130e-88da-4a20-aad6-17f5ab4226ac" name="Changes" comment="">
      <change beforePath="$PROJECT_DIR$/.idea/IIS_2023_1.iml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/IIS_2023_1.iml" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/.idea/misc.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/misc.xml" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
    </list>
    <option name="SHOW_DIALOG" value="false" />
    <option name="HIGHLIGHT_CONFLICTS" value="true" />
    <option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
    <option name="LAST_RESOLUTION" value="IGNORE" />
  </component>
  <component name="FileTemplateManagerImpl">
    <option name="RECENT_TEMPLATES">
      <list>
        <option value="Python Script" />
      </list>
    </option>
  </component>
  <component name="Git.Settings">
    <option name="RECENT_BRANCH_BY_REPOSITORY">
      <map>
        <entry key="$PROJECT_DIR$" value="main" />
      </map>
    </option>
    <option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$" />
  </component>
  <component name="MarkdownSettingsMigration">
    <option name="stateVersion" value="1" />
  </component>
  <component name="ProjectColorInfo">{
  "associatedIndex": 2
}</component>
  <component name="ProjectId" id="2VlZqWiOX68aCf0o2y0AtYJWURS" />
  <component name="ProjectLevelVcsManager">
    <ConfirmationsSetting value="1" id="Add" />
  </component>
  <component name="ProjectViewState">
    <option name="hideEmptyMiddlePackages" value="true" />
    <option name="showLibraryContents" value="true" />
  </component>
  <component name="PropertiesComponent">{
  "keyToString": {
    "RunOnceActivity.OpenProjectViewOnStart": "true",
    "RunOnceActivity.ShowReadmeOnStart": "true",
    "WebServerToolWindowFactoryState": "false",
    "git-widget-placeholder": "senkin__alexander__lab__1",
    "last_opened_file_path": "D:/ulstukek/Course4/IIS/labs",
    "node.js.detected.package.eslint": "true",
    "node.js.detected.package.tslint": "true",
    "node.js.selected.package.eslint": "(autodetect)",
    "node.js.selected.package.tslint": "(autodetect)",
    "nodejs_package_manager_path": "npm",
    "settings.editor.selected.configurable": "reference.settings.ide.settings.new.ui",
    "vue.rearranger.settings.migration": "true"
  }
}</component>
  <component name="RecentsManager">
    <key name="CopyFile.RECENT_KEYS">
      <recent name="D:\ulstukek\Course4\IIS\IISLabs\IIS_2023_1\zavrazhnova_svetlana_lab_3" />
      <recent name="D:\ulstukek\Course4\IIS\IISLabs\IIS_2023_1\zavrazhnova_svetlana_lab_1" />
    </key>
  </component>
  <component name="RunManager">
    <configuration name="zavrazhnova_svetlana_lab3_2" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
      <module name="IIS_2023_1" />
      <option name="INTERPRETER_OPTIONS" value="" />
      <option name="PARENT_ENVS" value="true" />
      <envs>
        <env name="PYTHONUNBUFFERED" value="1" />
      </envs>
      <option name="SDK_HOME" value="" />
      <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/zavrazhnova_svetlana_lab_3" />
      <option name="IS_MODULE_SDK" value="true" />
      <option name="ADD_CONTENT_ROOTS" value="true" />
      <option name="ADD_SOURCE_ROOTS" value="true" />
      <EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
      <option name="SCRIPT_NAME" value="$PROJECT_DIR$/zavrazhnova_svetlana_lab_3/zavrazhnova_svetlana_lab3_2.py" />
      <option name="PARAMETERS" value="" />
      <option name="SHOW_COMMAND_LINE" value="false" />
      <option name="EMULATE_TERMINAL" value="false" />
      <option name="MODULE_MODE" value="false" />
      <option name="REDIRECT_INPUT" value="false" />
      <option name="INPUT_FILE" value="" />
      <method v="2" />
    </configuration>
    <configuration name="zavrazhnova_svetlana_lab_2" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
      <module name="IIS_2023_1" />
      <option name="INTERPRETER_OPTIONS" value="" />
      <option name="PARENT_ENVS" value="true" />
      <envs>
        <env name="PYTHONUNBUFFERED" value="1" />
      </envs>
      <option name="SDK_HOME" value="" />
      <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/zavrazhnova_svetlana_lab_2" />
      <option name="IS_MODULE_SDK" value="true" />
      <option name="ADD_CONTENT_ROOTS" value="true" />
      <option name="ADD_SOURCE_ROOTS" value="true" />
      <EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
      <option name="SCRIPT_NAME" value="$PROJECT_DIR$/zavrazhnova_svetlana_lab_2/zavrazhnova_svetlana_lab_2.py" />
      <option name="PARAMETERS" value="" />
      <option name="SHOW_COMMAND_LINE" value="false" />
      <option name="EMULATE_TERMINAL" value="false" />
      <option name="MODULE_MODE" value="false" />
      <option name="REDIRECT_INPUT" value="false" />
      <option name="INPUT_FILE" value="" />
      <method v="2" />
    </configuration>
    <configuration name="zavrazhnova_svetlana_lab_3_1" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
      <module name="IIS_2023_1" />
      <option name="INTERPRETER_OPTIONS" value="" />
      <option name="PARENT_ENVS" value="true" />
      <envs>
        <env name="PYTHONUNBUFFERED" value="1" />
      </envs>
      <option name="SDK_HOME" value="" />
      <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/zavrazhnova_svetlana_lab_3" />
      <option name="IS_MODULE_SDK" value="true" />
      <option name="ADD_CONTENT_ROOTS" value="true" />
      <option name="ADD_SOURCE_ROOTS" value="true" />
      <EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
      <option name="SCRIPT_NAME" value="$PROJECT_DIR$/zavrazhnova_svetlana_lab_3/zavrazhnova_svetlana_lab_3_1.py" />
      <option name="PARAMETERS" value="" />
      <option name="SHOW_COMMAND_LINE" value="false" />
      <option name="EMULATE_TERMINAL" value="false" />
      <option name="MODULE_MODE" value="false" />
      <option name="REDIRECT_INPUT" value="false" />
      <option name="INPUT_FILE" value="" />
      <method v="2" />
    </configuration>
    <recent_temporary>
      <list>
        <item itemvalue="Python.zavrazhnova_svetlana_lab_3_1" />
        <item itemvalue="Python.zavrazhnova_svetlana_lab_2" />
        <item itemvalue="Python.zavrazhnova_svetlana_lab3_2" />
        <item itemvalue="Python.zavrazhnova_svetlana_lab3_2" />
        <item itemvalue="Python.zavrazhnova_svetlana_lab_3_1" />
      </list>
    </recent_temporary>
  </component>
  <component name="SpellCheckerSettings" RuntimeDictionaries="0" Folders="0" CustomDictionaries="0" DefaultDictionary="application-level" UseSingleDictionary="true" transferred="true" />
  <component name="TaskManager">
    <task active="true" id="Default" summary="Default task">
      <changelist id="0ceb130e-88da-4a20-aad6-17f5ab4226ac" name="Changes" comment="" />
      <created>1695412818437</created>
      <option name="number" value="Default" />
      <option name="presentableId" value="Default" />
      <updated>1695412818437</updated>
      <workItem from="1697735437405" duration="1706000" />
      <workItem from="1697740229646" duration="3802000" />
    </task>
    <servers />
  </component>
  <component name="TypeScriptGeneratedFilesManager">
    <option name="version" value="3" />
  </component>
  <component name="Vcs.Log.Tabs.Properties">
    <option name="TAB_STATES">
      <map>
        <entry key="MAIN">
          <value>
            <State>
              <option name="FILTERS">
                <map>
                  <entry key="branch">
                    <value>
                      <list>
                        <option value="HEAD" />
                      </list>
                    </value>
                  </entry>
                </map>
              </option>
            </State>
          </value>
        </entry>
      </map>
    </option>
  </component>
  <component name="com.intellij.coverage.CoverageDataManagerImpl">
    <SUITE FILE_PATH="coverage/PyCharmProjects$senkin_alexander_lab_1.coverage" NAME="senkin_alexander_lab_1 Coverage Results" MODIFIED="1697744262965" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/senkin_alexander_lab_1" />
  </component>
</project>
47
abanin_daniil_lab_1/README.md
Normal file
@@ -0,0 +1,47 @@
## Laboratory work No. 1

### Working with typical datasets and different models

### ПИбд-41 Абанин Даниил

### How to run the laboratory work:

* install python, numpy, matplotlib, sklearn
* run the project (the entry point is lab1)

### Technologies used:

* the `Python` programming language,
* the numpy, matplotlib, sklearn libraries
* the `PyCharm` IDE

### What the laboratory work does:

* The program generates data with make_moons (noise=0.3, random_state=rs)
* It compares three types of models: linear, polynomial, and ridge polynomial regression

### Examples of operation:

#### Results:

MAE is the mean absolute error: it measures the average absolute difference between the model's predicted values and the actual values of the target variable.

MSE is the mean squared error: it measures the average squared difference between the model's predicted values and the actual values of the target variable.

The smaller these values are, the better the model handles the prediction.

Linear regression

MAE 0.2959889435199454

MSE 0.13997968555679302

Polynomial regression

MAE 0.21662135861071705

MSE 0.08198825629271855

Ridge polynomial regression

MAE 0.2102788716636562

MSE 0.07440133949387796

The best result was shown by the **ridge polynomial regression** model.

![Linear regression](lin_reg.jpg)

![Polynomial regression](pol_reg.jpg)

![Ridge polynomial regression](greb_reg.jpg)
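The README above defines MAE and MSE only in words. As a minimal, self-contained sketch (an illustration with made-up arrays, not part of the lab), the two metrics can be computed by hand and compared against the same `sklearn.metrics` functions that lab1.py below calls:

```python
import numpy as np
from sklearn import metrics

# Illustrative values only; these are not the lab's actual predictions.
y_true = np.array([0.0, 1.0, 1.0, 0.0, 1.0])
y_pred = np.array([0.2, 0.9, 0.7, 0.1, 0.6])

# MAE: mean of the absolute differences between predictions and targets.
mae = np.mean(np.abs(y_true - y_pred))
# MSE: mean of the squared differences between predictions and targets.
mse = np.mean((y_true - y_pred) ** 2)

print(mae, metrics.mean_absolute_error(y_true, y_pred))  # both 0.22
print(mse, metrics.mean_squared_error(y_true, y_pred))   # both 0.062
```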
BIN
abanin_daniil_lab_1/greb_reg.jpg
Normal file
After Width: | Height: | Size: 59 KiB |
66
abanin_daniil_lab_1/lab1.py
Normal file
@@ -0,0 +1,66 @@
from matplotlib import pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn.linear_model import LinearRegression, Ridge
from sklearn.model_selection import train_test_split
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.datasets import make_moons
from sklearn import metrics

cm_bright = ListedColormap(['#8B0000', '#FF0000'])
cm_bright1 = ListedColormap(['#FF4500', '#FFA500'])


def create_moons():
    # Generate the two-moons data, split it, and evaluate the three models.
    x, y = make_moons(noise=0.3, random_state=0)
    X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=.4, random_state=42)

    linear_regretion(X_train, X_test, y_train, y_test)
    polynomial_regretion(X_train, X_test, y_train, y_test)
    ridge_regretion(X_train, X_test, y_train, y_test)


def linear_regretion(x_train, x_test, y_train, y_test):
    # Plain linear regression; MAE/MSE are reported on the test part.
    model = LinearRegression().fit(x_train, y_train)
    y_predict = model.intercept_ + model.coef_ * x_test
    plt.title('Линейная регрессия')
    print('Линейная регрессия')
    plt.scatter(x_train[:, 0], x_train[:, 1], c=y_train, cmap=cm_bright)
    plt.scatter(x_test[:, 0], x_test[:, 1], c=y_test, cmap=cm_bright1, alpha=0.7)
    plt.plot(x_test, y_predict, color='red')
    print('MAE', metrics.mean_absolute_error(y_test, y_predict[:, 1]))
    print('MSE', metrics.mean_squared_error(y_test, y_predict[:, 1]))
    plt.show()


def polynomial_regretion(x_train, x_test, y_train, y_test):
    # Expand the features to degree-3 polynomials before fitting a linear model.
    polynomial_features = PolynomialFeatures(degree=3)
    X_polynomial = polynomial_features.fit_transform(x_train, y_train)
    base_model = LinearRegression()
    base_model.fit(X_polynomial, y_train)
    y_predict = base_model.predict(X_polynomial)
    plt.title('Полиномиальная регрессия')
    plt.scatter(x_train[:, 0], x_train[:, 1], c=y_train, cmap=cm_bright)
    plt.scatter(x_test[:, 0], x_test[:, 1], c=y_test, cmap=cm_bright1, alpha=0.7)
    plt.plot(x_train, y_predict, color='blue')
    plt.show()
    print('Полиномиальная регрессия')
    print('MAE', metrics.mean_absolute_error(y_train, y_predict))
    print('MSE', metrics.mean_squared_error(y_train, y_predict))


def ridge_regretion(X_train, X_test, y_train, y_test):
    # Degree-3 polynomial features followed by a ridge (L2-regularized) model.
    model = Pipeline([('poly', PolynomialFeatures(degree=3)), ('ridge', Ridge(alpha=1.0))])
    model.fit(X_train, y_train)
    y_predict = model.predict(X_test)
    plt.title('Гребневая полиномиальная регрессия')
    plt.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm_bright)
    plt.scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm_bright1, alpha=0.7)
    plt.plot(X_test, y_predict, color='blue')
    plt.show()
    print('Гребневая полиномиальная регрессия')
    print('MAE', metrics.mean_absolute_error(y_test, y_predict))
    print('MSE', metrics.mean_squared_error(y_test, y_predict))


create_moons()
BIN
abanin_daniil_lab_1/lin_reg.jpg
Normal file
After Width: | Height: | Size: 30 KiB |
BIN
abanin_daniil_lab_1/pol_reg.jpg
Normal file
After Width: | Height: | Size: 63 KiB |
41
abanin_daniil_lab_2/README.md
Normal file
@@ -0,0 +1,41 @@
## Laboratory work No. 2

### Feature ranking

## ПИбд-41 Абанин Даниил

### How to run the laboratory work:

* install python, numpy, matplotlib, sklearn
* run the project (the entry point is lab2)

### Technologies used:

* the `Python` programming language with the numpy, matplotlib, sklearn libraries
* the `PyCharm` IDE

### What the laboratory work does:

* It generates data and fits the following models: LinearRegression, RandomizedLasso, Recursive Feature Elimination (RFE)
* The features are ranked with the LinearRegression, RandomizedLasso and Recursive Feature Elimination (RFE) models (a small sketch of this ranking scheme follows this README)
* The results are displayed: the 4 most important features by mean score, and the per-model feature scores

### The 4 most important features by mean score
* Feature x4, score 0.56
* Feature x1, score 0.45
* Feature x2, score 0.33
* Feature x9, score 0.33

#### Linear Regression
[('x1', 1.0), ('x4', 0.69), ('x2', 0.61), ('x11', 0.59), ('x3', 0.51), ('x13', 0.48), ('x5', 0.19), ('x12', 0.19), ('x14', 0.12), ('x8', 0.03), ('x6', 0.02), ('x10', 0.01), ('x7', 0.0), ('x9', 0.0)]

#### Recursive Feature Elimination
[('x9', 1.0), ('x7', 0.86), ('x10', 0.71), ('x6', 0.57), ('x8', 0.43), ('x14', 0.29), ('x12', 0.14), ('x1', 0.0), ('x2', 0.0), ('x3', 0.0), ('x4', 0.0), ('x5', 0.0), ('x11', 0.0), ('x13', 0.0)]

#### Randomize Lasso
[('x4', 1.0), ('x2', 0.37), ('x1', 0.36), ('x5', 0.32), ('x6', 0.02), ('x8', 0.02), ('x3', 0.01), ('x7', 0.0), ('x9', 0.0), ('x10', 0.0), ('x11', 0.0), ('x12', 0.0), ('x13', 0.0), ('x14', 0.0)]

#### Results:

![Plots](result.png)
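The ranking scheme described in this README (take each model's scores, scale them to [0, 1] per model, then average them per feature) can be illustrated with a small self-contained sketch. It uses only two of the three models (LinearRegression coefficients and RFE rankings) on made-up data; the complete implementation, including the custom RandomizedLasso, follows in RadomizedLasso.py and lab2.py below.

```python
import numpy as np
from sklearn.feature_selection import RFE
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import MinMaxScaler

np.random.seed(0)
X = np.random.uniform(0, 1, (200, 5))
y = 3 * X[:, 0] + 2 * X[:, 1] + np.random.normal(0, 0.1, 200)
names = ["x%d" % i for i in range(1, 6)]


def to_scores(values):
    # Scale absolute values into [0, 1] so different models become comparable.
    scaled = MinMaxScaler().fit_transform(np.abs(values).reshape(-1, 1)).ravel()
    return dict(zip(names, np.round(scaled, 2)))


lr = LinearRegression().fit(X, y)
rfe = RFE(LinearRegression()).fit(X, y)

# NB: RFE.ranking_ assigns 1 to the kept features, so after scaling the larger
# values belong to features eliminated earlier (the same behaviour as
# rank_to_dict in lab2.py).
ranks = {"Linear Regression": to_scores(lr.coef_),
         "RFE": to_scores(rfe.ranking_)}

# Average the per-model scores feature by feature and sort, largest first.
mean_scores = {n: round(float(np.mean([r[n] for r in ranks.values()])), 2) for n in names}
print(sorted(mean_scores.items(), key=lambda kv: kv[1], reverse=True))
```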
76
abanin_daniil_lab_2/RadomizedLasso.py
Normal file
@@ -0,0 +1,76 @@
from sklearn.utils import check_X_y, check_random_state
from sklearn.linear_model import Lasso
from scipy.sparse import issparse
from scipy import sparse


def _rescale_data(x, weights):
    if issparse(x):
        size = weights.shape[0]
        weight_dia = sparse.dia_matrix((1 - weights, 0), (size, size))
        x_rescaled = x * weight_dia
    else:
        x_rescaled = x * (1 - weights)

    return x_rescaled


class RandomizedLasso(Lasso):
    """
    Randomized version of scikit-learn's Lasso class.

    Randomized LASSO is a generalization of the LASSO. The LASSO penalises
    the absolute value of the coefficients with a penalty term proportional
    to `alpha`, but the randomized LASSO changes the penalty to a randomly
    chosen value in the range `[alpha, alpha/weakness]`.

    Parameters
    ----------
    weakness : float
        Weakness value for randomized LASSO. Must be in (0, 1].

    See also
    --------
    sklearn.linear_model.LogisticRegression : learns logistic regression models
        using the same algorithm.
    """

    def __init__(self, weakness=0.5, alpha=1.0, fit_intercept=True,
                 precompute=False, copy_X=True, max_iter=1000,
                 tol=1e-4, warm_start=False, positive=False,
                 random_state=None, selection='cyclic'):
        self.weakness = weakness
        super(RandomizedLasso, self).__init__(
            alpha=alpha, fit_intercept=fit_intercept, precompute=precompute, copy_X=copy_X,
            max_iter=max_iter, tol=tol, warm_start=warm_start,
            positive=positive, random_state=random_state,
            selection=selection)

    def fit(self, X, y):
        """Fit the model according to the given training data.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            The training input samples.

        y : array-like, shape = [n_samples]
            The target values.
        """
        if not isinstance(self.weakness, float) or not (0.0 < self.weakness <= 1.0):
            raise ValueError('weakness should be a float in (0, 1], got %s' % self.weakness)

        X, y = check_X_y(X, y, accept_sparse=True)

        n_features = X.shape[1]
        weakness = 1. - self.weakness
        random_state = check_random_state(self.random_state)

        weights = weakness * random_state.randint(0, 1 + 1, size=(n_features,))

        # TODO: I am afraid this will do double normalization if set to true
        # X, y, _, _ = _preprocess_data(X, y, self.fit_intercept, normalize=self.normalize, copy=False,
        #                               sample_weight=None, return_mean=False)

        # TODO: Check if this is a problem if it happens before standardization
        X_rescaled = _rescale_data(X, weights)
        return super(RandomizedLasso, self).fit(X_rescaled, y)
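A minimal usage sketch for the RandomizedLasso class defined above. The data shape and the `weakness`/`alpha` values are illustrative assumptions, not taken from the lab; the point is only that the class is fitted like an ordinary scikit-learn Lasso and exposes `coef_` afterwards.

```python
import numpy as np
from RadomizedLasso import RandomizedLasso  # module name as spelled in this lab

np.random.seed(0)
X = np.random.uniform(0, 1, (100, 14))
y = 10 * X[:, 0] + 5 * X[:, 1] + np.random.normal(0, 0.5, 100)

# weakness in (0, 1]: each feature is randomly down-weighted before the
# ordinary Lasso fit, which is what makes the selection "randomized".
model = RandomizedLasso(weakness=0.5, alpha=0.01, random_state=0)
model.fit(X, y)
print(np.round(model.coef_, 2))
```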
BIN
abanin_daniil_lab_2/__pycache__/RadomizedLasso.cpython-39.pyc
Normal file
81
abanin_daniil_lab_2/lab2.py
Normal file
@@ -0,0 +1,81 @@
from matplotlib import pyplot as plt
from sklearn.linear_model import LinearRegression
from RadomizedLasso import RandomizedLasso
from sklearn.feature_selection import RFE
from sklearn.preprocessing import MinMaxScaler
import numpy as np

names = ["x%s" % i for i in range(1, 15)]


def start_point():
    X, Y = generation_data()
    # Linear model
    lr = LinearRegression()
    lr.fit(X, Y)
    # Recursive feature elimination
    rfe = RFE(lr)
    rfe.fit(X, Y)
    # Randomized Lasso
    randomized_lasso = RandomizedLasso(alpha=.01)
    randomized_lasso.fit(X, Y)

    ranks = {"Linear Regression": rank_to_dict(lr.coef_), "Recursive Feature Elimination": rank_to_dict(rfe.ranking_),
             "Randomize Lasso": rank_to_dict(randomized_lasso.coef_)}

    get_estimation(ranks)
    print_sorted_data(ranks)


def generation_data():
    np.random.seed(0)
    size = 750
    X = np.random.uniform(0, 1, (size, 14))
    Y = (10 * np.sin(np.pi * X[:, 0] * X[:, 1]) + 20 * (X[:, 2] - .5) ** 2 +
         10 * X[:, 3] + 5 * X[:, 4] ** 5 + np.random.normal(0, 1))
    X[:, 10:] = X[:, :4] + np.random.normal(0, .025, (size, 4))
    return X, Y


def rank_to_dict(ranks):
    ranks = np.abs(ranks)
    minmax = MinMaxScaler()
    ranks = minmax.fit_transform(np.array(ranks).reshape(14, 1)).ravel()
    ranks = map(lambda x: round(x, 2), ranks)
    return dict(zip(names, ranks))


def get_estimation(ranks: {}):
    mean = {}
    # Walk over the ranks dictionary and sum the score of each feature.
    for key, value in ranks.items():
        for item in value.items():
            if item[0] not in mean:
                mean[item[0]] = 0
            mean[item[0]] += item[1]

    for key, value in mean.items():
        res = value / len(ranks)
        mean[key] = round(res, 2)

    mean_sorted = sorted(mean.items(), key=lambda item: item[1], reverse=True)
    print("Средние значения")
    print(mean_sorted)

    print("4 самых важных признака по среднему значению")
    for item in mean_sorted[:4]:
        print('Параметр - {0}, значение - {1}'.format(item[0], item[1]))


def print_sorted_data(ranks: {}):
    print()
    for key, value in ranks.items():
        ranks[key] = sorted(value.items(), key=lambda item: item[1], reverse=True)
    for key, value in ranks.items():
        print(key)
        print(value)


start_point()
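lab2.py feeds `rfe.ranking_` straight into `rank_to_dict`. For reference, a small sketch (made-up data and shapes) of the two RFE attributes involved: `support_` is the boolean mask of kept features, while `ranking_` assigns 1 to kept features and larger numbers to features eliminated earlier, i.e. judged weaker.

```python
import numpy as np
from sklearn.feature_selection import RFE
from sklearn.linear_model import LinearRegression

np.random.seed(0)
X = np.random.uniform(0, 1, (100, 6))
y = 4 * X[:, 0] + 2 * X[:, 1] + np.random.normal(0, 0.1, 100)

rfe = RFE(LinearRegression(), n_features_to_select=2).fit(X, y)
print(rfe.support_)   # boolean mask of the two kept features
print(rfe.ranking_)   # kept features get rank 1; the rest get 2, 3, ... in elimination order
```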
BIN
abanin_daniil_lab_2/result.png
Normal file
After Width: | Height: | Size: 178 KiB |
27
abanin_daniil_lab_3/README.md
Normal file
@@ -0,0 +1,27 @@
## Laboratory work No. 3

### Decision trees

## Student of group ПИбд-41, Абанин Даниил

### How to run the laboratory work:

* install python, numpy, matplotlib, sklearn
* run the project (lab3)

### Technologies used:

* the `Python` programming language with the numpy, matplotlib, sklearn libraries
* the `PyCharm` IDE

### What the laboratory work does:

* It ranks the features for a regression model
* Using the "Eligibility Prediction for Loan" data it solves a classification task (with a decision tree) in which the risks of issuing a loan have to be identified and the loan status determined (approved or refused). Three features are used as input: Credit_History (whether the credit history meets the bank's standards), ApplicantIncome (the applicant's income) and LoanAmount (the loan amount).

### Examples of operation:

#### Results:
* The most important feature for issuing a loan turned out to be the applicant's income, ApplicantIncome, followed by LoanAmount, the size of the loan

![Feature importance](feature_importance.png)
33
abanin_daniil_lab_3/lab3.py
Normal file
@@ -0,0 +1,33 @@
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
import pandas as pd
import numpy as np

pd.options.mode.chained_assignment = None

FILE_PATH = "loan.csv"
REQUIRED_COLUMNS = ['Credit_History', 'LoanAmount', 'ApplicantIncome']
TARGET_COLUMN = 'Loan_Status'


def print_classifier_info(feature_importance):
    feature_names = REQUIRED_COLUMNS
    embarked_score = feature_importance[-3:].sum()
    scores = np.append(feature_importance[:2], embarked_score)
    scores = map(lambda score: round(score, 2), scores)
    print(dict(zip(feature_names, scores)))


if __name__ == '__main__':
    data = pd.read_csv(FILE_PATH)

    X = data[REQUIRED_COLUMNS]
    y = data[TARGET_COLUMN]

    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

    classifier_tree = DecisionTreeClassifier(random_state=42)
    classifier_tree.fit(X_train, y_train)

    print_classifier_info(classifier_tree.feature_importances_)
    print("Оценка качества (задача классификации) - ", classifier_tree.score(X_test, y_test))
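lab3.py reports only aggregate feature importances and an accuracy score. As a hedged follow-up sketch (it repeats lab3.py's setup: same file name, columns and split; the depth limit is only for readability), the splits behind those importances can be inspected with `sklearn.tree.export_text`:

```python
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier, export_text

# Same data and columns as lab3.py; assumes the columns are already numeric,
# exactly as lab3.py does.
data = pd.read_csv("loan.csv")
features = ["Credit_History", "LoanAmount", "ApplicantIncome"]

X_train, X_test, y_train, y_test = train_test_split(
    data[features], data["Loan_Status"], test_size=0.2, random_state=42)

tree = DecisionTreeClassifier(random_state=42).fit(X_train, y_train)

# Print the first three levels of the learned tree as text.
print(export_text(tree, feature_names=features, max_depth=3))
```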
615
abanin_daniil_lab_3/loan.csv
Normal file
@@ -0,0 +1,615 @@
Loan_ID,Gender,Married,Dependents,Education,Self_Employed,ApplicantIncome,CoapplicantIncome,LoanAmount,Loan_Amount_Term,Credit_History,Property_Area,Loan_Status
|
||||
LP002386,Male,No,0,1,,12876,0.0,405.0,360.0,1,Semiurban,1.0
|
||||
LP002387,Male,Yes,0,1,No,2425,2340.0,143.0,360.0,1,Semiurban,1.0
|
||||
LP002390,Male,No,0,1,No,3750,0.0,100.0,360.0,1,Urban,1.0
|
||||
LP002393,Female,,,1,No,10047,0.0,0.0,240.0,1,Semiurban,1.0
|
||||
LP002398,Male,No,0,1,No,1926,1851.0,50.0,360.0,1,Semiurban,1.0
|
||||
LP002401,Male,Yes,0,1,No,2213,1125.0,0.0,360.0,1,Urban,1.0
|
||||
LP002403,Male,No,0,1,Yes,10416,0.0,187.0,360.0,0,Urban,0.0
|
||||
LP002407,Female,Yes,0,0,Yes,7142,0.0,138.0,360.0,1,Rural,1.0
|
||||
LP002408,Male,No,0,1,No,3660,5064.0,187.0,360.0,1,Semiurban,1.0
|
||||
LP002409,Male,Yes,0,1,No,7901,1833.0,180.0,360.0,1,Rural,1.0
|
||||
LP002418,Male,No,3+,0,No,4707,1993.0,148.0,360.0,1,Semiurban,1.0
|
||||
LP002422,Male,No,1,1,No,37719,0.0,152.0,360.0,1,Semiurban,1.0
|
||||
LP002424,Male,Yes,0,1,No,7333,8333.0,175.0,300.0,0,Rural,1.0
|
||||
LP002429,Male,Yes,1,1,Yes,3466,1210.0,130.0,360.0,1,Rural,1.0
|
||||
LP002434,Male,Yes,2,0,No,4652,0.0,110.0,360.0,1,Rural,1.0
|
||||
LP002435,Male,Yes,0,1,,3539,1376.0,55.0,360.0,1,Rural,0.0
|
||||
LP002443,Male,Yes,2,1,No,3340,1710.0,150.0,360.0,0,Rural,0.0
|
||||
LP002444,Male,No,1,0,Yes,2769,1542.0,190.0,360.0,0,Semiurban,0.0
|
||||
LP002446,Male,Yes,2,0,No,2309,1255.0,125.0,360.0,0,Rural,0.0
|
||||
LP002447,Male,Yes,2,0,No,1958,1456.0,60.0,300.0,0,Urban,1.0
|
||||
LP002448,Male,Yes,0,1,No,3948,1733.0,149.0,360.0,0,Rural,0.0
|
||||
LP002449,Male,Yes,0,1,No,2483,2466.0,90.0,180.0,0,Rural,1.0
|
||||
LP002453,Male,No,0,1,Yes,7085,0.0,84.0,360.0,1,Semiurban,1.0
|
||||
LP002455,Male,Yes,2,1,No,3859,0.0,96.0,360.0,1,Semiurban,1.0
|
||||
LP002459,Male,Yes,0,1,No,4301,0.0,118.0,360.0,1,Urban,1.0
|
||||
LP002467,Male,Yes,0,1,No,3708,2569.0,173.0,360.0,1,Urban,0.0
|
||||
LP002472,Male,No,2,1,No,4354,0.0,136.0,360.0,1,Rural,1.0
|
||||
LP002473,Male,Yes,0,1,No,8334,0.0,160.0,360.0,1,Semiurban,0.0
|
||||
LP002478,,Yes,0,1,Yes,2083,4083.0,160.0,360.0,0,Semiurban,1.0
|
||||
LP002484,Male,Yes,3+,1,No,7740,0.0,128.0,180.0,1,Urban,1.0
|
||||
LP002487,Male,Yes,0,1,No,3015,2188.0,153.0,360.0,1,Rural,1.0
|
||||
LP002489,Female,No,1,0,,5191,0.0,132.0,360.0,1,Semiurban,1.0
|
||||
LP002493,Male,No,0,1,No,4166,0.0,98.0,360.0,0,Semiurban,0.0
|
||||
LP002494,Male,No,0,1,No,6000,0.0,140.0,360.0,1,Rural,1.0
|
||||
LP002500,Male,Yes,3+,0,No,2947,1664.0,70.0,180.0,0,Urban,0.0
|
||||
LP002501,,Yes,0,1,No,16692,0.0,110.0,360.0,1,Semiurban,1.0
|
||||
LP002502,Female,Yes,2,0,,210,2917.0,98.0,360.0,1,Semiurban,1.0
|
||||
LP002505,Male,Yes,0,1,No,4333,2451.0,110.0,360.0,1,Urban,0.0
|
||||
LP002515,Male,Yes,1,1,Yes,3450,2079.0,162.0,360.0,1,Semiurban,1.0
|
||||
LP002517,Male,Yes,1,0,No,2653,1500.0,113.0,180.0,0,Rural,0.0
|
||||
LP002519,Male,Yes,3+,1,No,4691,0.0,100.0,360.0,1,Semiurban,1.0
|
||||
LP002522,Female,No,0,1,Yes,2500,0.0,93.0,360.0,0,Urban,1.0
|
||||
LP002524,Male,No,2,1,No,5532,4648.0,162.0,360.0,1,Rural,1.0
|
||||
LP002527,Male,Yes,2,1,Yes,16525,1014.0,150.0,360.0,1,Rural,1.0
|
||||
LP002529,Male,Yes,2,1,No,6700,1750.0,230.0,300.0,1,Semiurban,1.0
|
||||
LP002530,,Yes,2,1,No,2873,1872.0,132.0,360.0,0,Semiurban,0.0
|
||||
LP002531,Male,Yes,1,1,Yes,16667,2250.0,86.0,360.0,1,Semiurban,1.0
|
||||
LP002533,Male,Yes,2,1,No,2947,1603.0,0.0,360.0,1,Urban,0.0
|
||||
LP002534,Female,No,0,0,No,4350,0.0,154.0,360.0,1,Rural,1.0
|
||||
LP002536,Male,Yes,3+,0,No,3095,0.0,113.0,360.0,1,Rural,1.0
|
||||
LP002537,Male,Yes,0,1,No,2083,3150.0,128.0,360.0,1,Semiurban,1.0
|
||||
LP002541,Male,Yes,0,1,No,10833,0.0,234.0,360.0,1,Semiurban,1.0
|
||||
LP002543,Male,Yes,2,1,No,8333,0.0,246.0,360.0,1,Semiurban,1.0
|
||||
LP002544,Male,Yes,1,0,No,1958,2436.0,131.0,360.0,1,Rural,1.0
|
||||
LP002545,Male,No,2,1,No,3547,0.0,80.0,360.0,0,Rural,0.0
|
||||
LP002547,Male,Yes,1,1,No,18333,0.0,500.0,360.0,1,Urban,0.0
|
||||
LP002555,Male,Yes,2,1,Yes,4583,2083.0,160.0,360.0,1,Semiurban,1.0
|
||||
LP002556,Male,No,0,1,No,2435,0.0,75.0,360.0,1,Urban,0.0
|
||||
LP002560,Male,No,0,0,No,2699,2785.0,96.0,360.0,0,Semiurban,1.0
|
||||
LP002562,Male,Yes,1,0,No,5333,1131.0,186.0,360.0,0,Urban,1.0
|
||||
LP002571,Male,No,0,0,No,3691,0.0,110.0,360.0,1,Rural,1.0
|
||||
LP002582,Female,No,0,0,Yes,17263,0.0,225.0,360.0,1,Semiurban,1.0
|
||||
LP002585,Male,Yes,0,1,No,3597,2157.0,119.0,360.0,0,Rural,0.0
|
||||
LP002586,Female,Yes,1,1,No,3326,913.0,105.0,84.0,1,Semiurban,1.0
|
||||
LP002587,Male,Yes,0,0,No,2600,1700.0,107.0,360.0,1,Rural,1.0
|
||||
LP002588,Male,Yes,0,1,No,4625,2857.0,111.0,12.0,0,Urban,1.0
|
||||
LP002600,Male,Yes,1,1,Yes,2895,0.0,95.0,360.0,1,Semiurban,1.0
|
||||
LP002602,Male,No,0,1,No,6283,4416.0,209.0,360.0,0,Rural,0.0
|
||||
LP002603,Female,No,0,1,No,645,3683.0,113.0,480.0,1,Rural,1.0
|
||||
LP002606,Female,No,0,1,No,3159,0.0,100.0,360.0,1,Semiurban,1.0
|
||||
LP002615,Male,Yes,2,1,No,4865,5624.0,208.0,360.0,1,Semiurban,1.0
|
||||
LP002618,Male,Yes,1,0,No,4050,5302.0,138.0,360.0,0,Rural,0.0
|
||||
LP002619,Male,Yes,0,0,No,3814,1483.0,124.0,300.0,1,Semiurban,1.0
|
||||
LP002622,Male,Yes,2,1,No,3510,4416.0,243.0,360.0,1,Rural,1.0
|
||||
LP002624,Male,Yes,0,1,No,20833,6667.0,480.0,360.0,0,Urban,1.0
|
||||
LP002625,,No,0,1,No,3583,0.0,96.0,360.0,1,Urban,0.0
|
||||
LP002626,Male,Yes,0,1,Yes,2479,3013.0,188.0,360.0,1,Urban,1.0
|
||||
LP002634,Female,No,1,1,No,13262,0.0,40.0,360.0,1,Urban,1.0
|
||||
LP002637,Male,No,0,0,No,3598,1287.0,100.0,360.0,1,Rural,0.0
|
||||
LP002640,Male,Yes,1,1,No,6065,2004.0,250.0,360.0,1,Semiurban,1.0
|
||||
LP002643,Male,Yes,2,1,No,3283,2035.0,148.0,360.0,1,Urban,1.0
|
||||
LP002648,Male,Yes,0,1,No,2130,6666.0,70.0,180.0,1,Semiurban,0.0
|
||||
LP002652,Male,No,0,1,No,5815,3666.0,311.0,360.0,1,Rural,0.0
|
||||
LP002659,Male,Yes,3+,1,No,3466,3428.0,150.0,360.0,1,Rural,1.0
|
||||
LP002670,Female,Yes,2,1,No,2031,1632.0,113.0,480.0,1,Semiurban,1.0
|
||||
LP002682,Male,Yes,,0,No,3074,1800.0,123.0,360.0,0,Semiurban,0.0
|
||||
LP002683,Male,No,0,1,No,4683,1915.0,185.0,360.0,1,Semiurban,0.0
|
||||
LP002684,Female,No,0,0,No,3400,0.0,95.0,360.0,1,Rural,0.0
|
||||
LP002689,Male,Yes,2,0,No,2192,1742.0,45.0,360.0,1,Semiurban,1.0
|
||||
LP002690,Male,No,0,1,No,2500,0.0,55.0,360.0,1,Semiurban,1.0
|
||||
LP002692,Male,Yes,3+,1,Yes,5677,1424.0,100.0,360.0,1,Rural,1.0
|
||||
LP002693,Male,Yes,2,1,Yes,7948,7166.0,480.0,360.0,1,Rural,1.0
|
||||
LP002697,Male,No,0,1,No,4680,2087.0,0.0,360.0,1,Semiurban,0.0
|
||||
LP002699,Male,Yes,2,1,Yes,17500,0.0,400.0,360.0,1,Rural,1.0
|
||||
LP002705,Male,Yes,0,1,No,3775,0.0,110.0,360.0,1,Semiurban,1.0
|
||||
LP002706,Male,Yes,1,0,No,5285,1430.0,161.0,360.0,0,Semiurban,1.0
|
||||
LP002714,Male,No,1,0,No,2679,1302.0,94.0,360.0,1,Semiurban,1.0
|
||||
LP002716,Male,No,0,0,No,6783,0.0,130.0,360.0,1,Semiurban,1.0
|
||||
LP002717,Male,Yes,0,1,No,1025,5500.0,216.0,360.0,0,Rural,1.0
|
||||
LP002720,Male,Yes,3+,1,No,4281,0.0,100.0,360.0,1,Urban,1.0
|
||||
LP002723,Male,No,2,1,No,3588,0.0,110.0,360.0,0,Rural,0.0
|
||||
LP002729,Male,No,1,1,No,11250,0.0,196.0,360.0,0,Semiurban,0.0
|
||||
LP002731,Female,No,0,0,Yes,18165,0.0,125.0,360.0,1,Urban,1.0
|
||||
LP002732,Male,No,0,0,,2550,2042.0,126.0,360.0,1,Rural,1.0
|
||||
LP002734,Male,Yes,0,1,No,6133,3906.0,324.0,360.0,1,Urban,1.0
|
||||
LP002738,Male,No,2,1,No,3617,0.0,107.0,360.0,1,Semiurban,1.0
|
||||
LP002739,Male,Yes,0,0,No,2917,536.0,66.0,360.0,1,Rural,0.0
|
||||
LP002740,Male,Yes,3+,1,No,6417,0.0,157.0,180.0,1,Rural,1.0
|
||||
LP002741,Female,Yes,1,1,No,4608,2845.0,140.0,180.0,1,Semiurban,1.0
|
||||
LP002743,Female,No,0,1,No,2138,0.0,99.0,360.0,0,Semiurban,0.0
|
||||
LP002753,Female,No,1,1,,3652,0.0,95.0,360.0,1,Semiurban,1.0
|
||||
LP002755,Male,Yes,1,0,No,2239,2524.0,128.0,360.0,1,Urban,1.0
|
||||
LP002757,Female,Yes,0,0,No,3017,663.0,102.0,360.0,0,Semiurban,1.0
|
||||
LP002767,Male,Yes,0,1,No,2768,1950.0,155.0,360.0,1,Rural,1.0
|
||||
LP002768,Male,No,0,0,No,3358,0.0,80.0,36.0,1,Semiurban,0.0
|
||||
LP002772,Male,No,0,1,No,2526,1783.0,145.0,360.0,1,Rural,1.0
|
||||
LP002776,Female,No,0,1,No,5000,0.0,103.0,360.0,0,Semiurban,0.0
|
||||
LP002777,Male,Yes,0,1,No,2785,2016.0,110.0,360.0,1,Rural,1.0
|
||||
LP002778,Male,Yes,2,1,Yes,6633,0.0,0.0,360.0,0,Rural,0.0
|
||||
LP002784,Male,Yes,1,0,No,2492,2375.0,0.0,360.0,1,Rural,1.0
|
||||
LP002785,Male,Yes,1,1,No,3333,3250.0,158.0,360.0,1,Urban,1.0
|
||||
LP002788,Male,Yes,0,0,No,2454,2333.0,181.0,360.0,0,Urban,0.0
|
||||
LP002789,Male,Yes,0,1,No,3593,4266.0,132.0,180.0,0,Rural,0.0
|
||||
LP002792,Male,Yes,1,1,No,5468,1032.0,26.0,360.0,1,Semiurban,1.0
|
||||
LP002794,Female,No,0,1,No,2667,1625.0,84.0,360.0,0,Urban,1.0
|
||||
LP002795,Male,Yes,3+,1,Yes,10139,0.0,260.0,360.0,1,Semiurban,1.0
|
||||
LP002798,Male,Yes,0,1,No,3887,2669.0,162.0,360.0,1,Semiurban,1.0
|
||||
LP002804,Female,Yes,0,1,No,4180,2306.0,182.0,360.0,1,Semiurban,1.0
|
||||
LP002807,Male,Yes,2,0,No,3675,242.0,108.0,360.0,1,Semiurban,1.0
|
||||
LP002813,Female,Yes,1,1,Yes,19484,0.0,600.0,360.0,1,Semiurban,1.0
|
||||
LP002820,Male,Yes,0,1,No,5923,2054.0,211.0,360.0,1,Rural,1.0
|
||||
LP002821,Male,No,0,0,Yes,5800,0.0,132.0,360.0,1,Semiurban,1.0
|
||||
LP002832,Male,Yes,2,1,No,8799,0.0,258.0,360.0,0,Urban,0.0
|
||||
LP002833,Male,Yes,0,0,No,4467,0.0,120.0,360.0,0,Rural,1.0
|
||||
LP002836,Male,No,0,1,No,3333,0.0,70.0,360.0,1,Urban,1.0
|
||||
LP002837,Male,Yes,3+,1,No,3400,2500.0,123.0,360.0,0,Rural,0.0
|
||||
LP002840,Female,No,0,1,No,2378,0.0,9.0,360.0,1,Urban,0.0
|
||||
LP002841,Male,Yes,0,1,No,3166,2064.0,104.0,360.0,0,Urban,0.0
|
||||
LP002842,Male,Yes,1,1,No,3417,1750.0,186.0,360.0,1,Urban,1.0
|
||||
LP002847,Male,Yes,,1,No,5116,1451.0,165.0,360.0,0,Urban,0.0
|
||||
LP002855,Male,Yes,2,1,No,16666,0.0,275.0,360.0,1,Urban,1.0
|
||||
LP002862,Male,Yes,2,0,No,6125,1625.0,187.0,480.0,1,Semiurban,0.0
|
||||
LP002863,Male,Yes,3+,1,No,6406,0.0,150.0,360.0,1,Semiurban,0.0
|
||||
LP002868,Male,Yes,2,1,No,3159,461.0,108.0,84.0,1,Urban,1.0
|
||||
LP002872,,Yes,0,1,No,3087,2210.0,136.0,360.0,0,Semiurban,0.0
|
||||
LP002874,Male,No,0,1,No,3229,2739.0,110.0,360.0,1,Urban,1.0
|
||||
LP002877,Male,Yes,1,1,No,1782,2232.0,107.0,360.0,1,Rural,1.0
|
||||
LP002888,Male,No,0,1,,3182,2917.0,161.0,360.0,1,Urban,1.0
|
||||
LP002892,Male,Yes,2,1,No,6540,0.0,205.0,360.0,1,Semiurban,1.0
|
||||
LP002893,Male,No,0,1,No,1836,33837.0,90.0,360.0,1,Urban,0.0
|
||||
LP002894,Female,Yes,0,1,No,3166,0.0,36.0,360.0,1,Semiurban,1.0
|
||||
LP002898,Male,Yes,1,1,No,1880,0.0,61.0,360.0,0,Rural,0.0
|
||||
LP002911,Male,Yes,1,1,No,2787,1917.0,146.0,360.0,0,Rural,0.0
|
||||
LP002912,Male,Yes,1,1,No,4283,3000.0,172.0,84.0,1,Rural,0.0
|
||||
LP002916,Male,Yes,0,1,No,2297,1522.0,104.0,360.0,1,Urban,1.0
|
||||
LP002917,Female,No,0,0,No,2165,0.0,70.0,360.0,1,Semiurban,1.0
|
||||
LP002925,,No,0,1,No,4750,0.0,94.0,360.0,1,Semiurban,1.0
|
||||
LP002926,Male,Yes,2,1,Yes,2726,0.0,106.0,360.0,0,Semiurban,0.0
|
||||
LP002928,Male,Yes,0,1,No,3000,3416.0,56.0,180.0,1,Semiurban,1.0
|
||||
LP002931,Male,Yes,2,1,Yes,6000,0.0,205.0,240.0,1,Semiurban,0.0
|
||||
LP002933,,No,3+,1,Yes,9357,0.0,292.0,360.0,1,Semiurban,1.0
|
||||
LP002936,Male,Yes,0,1,No,3859,3300.0,142.0,180.0,1,Rural,1.0
|
||||
LP002938,Male,Yes,0,1,Yes,16120,0.0,260.0,360.0,1,Urban,1.0
|
||||
LP002940,Male,No,0,0,No,3833,0.0,110.0,360.0,1,Rural,1.0
|
||||
LP002941,Male,Yes,2,0,Yes,6383,1000.0,187.0,360.0,1,Rural,0.0
|
||||
LP002943,Male,No,,1,No,2987,0.0,88.0,360.0,0,Semiurban,0.0
|
||||
LP002945,Male,Yes,0,1,Yes,9963,0.0,180.0,360.0,1,Rural,1.0
|
||||
LP002948,Male,Yes,2,1,No,5780,0.0,192.0,360.0,1,Urban,1.0
|
||||
LP002949,Female,No,3+,1,,416,41667.0,350.0,180.0,0,Urban,0.0
|
||||
LP002950,Male,Yes,0,0,,2894,2792.0,155.0,360.0,1,Rural,1.0
|
||||
LP002953,Male,Yes,3+,1,No,5703,0.0,128.0,360.0,1,Urban,1.0
|
||||
LP002958,Male,No,0,1,No,3676,4301.0,172.0,360.0,1,Rural,1.0
|
||||
LP002959,Female,Yes,1,1,No,12000,0.0,496.0,360.0,1,Semiurban,1.0
|
||||
LP002960,Male,Yes,0,0,No,2400,3800.0,0.0,180.0,1,Urban,0.0
|
||||
LP002961,Male,Yes,1,1,No,3400,2500.0,173.0,360.0,1,Semiurban,1.0
|
||||
LP002964,Male,Yes,2,0,No,3987,1411.0,157.0,360.0,1,Rural,1.0
|
||||
LP002974,Male,Yes,0,1,No,3232,1950.0,108.0,360.0,1,Rural,1.0
|
||||
LP002978,Female,No,0,1,No,2900,0.0,71.0,360.0,1,Rural,1.0
|
||||
LP002979,Male,Yes,3+,1,No,4106,0.0,40.0,180.0,1,Rural,1.0
|
||||
LP002983,Male,Yes,1,1,No,8072,240.0,253.0,360.0,1,Urban,1.0
|
||||
LP002984,Male,Yes,2,1,No,7583,0.0,187.0,360.0,1,Urban,1.0
|
||||
LP002990,Female,No,0,1,Yes,4583,0.0,133.0,360.0,0,Semiurban,0.0
|
||||
BIN
abanin_daniil_lab_3/result.png
Normal file
After Width: | Height: | Size: 27 KiB
26
abanin_daniil_lab_4/README.md
Normal file
@@ -0,0 +1,26 @@
## Laboratory work No. 4

### Clustering

## ПИбд-41 Абанин Даниил

### How to run the lab:

* install Python and the pandas, matplotlib and scipy packages
* run the project (the entry point is lab4.py)

### Technologies used:

* The `Python` programming language with the pandas, matplotlib and scipy libraries
* The `PyCharm` IDE

### What the lab does:

The program reads data from a CSV file and, based on the available information, clusters loan applicants into groups by lending risk.

The clustering uses the following features: ApplicantIncome (the applicant's income), LoanAmount (the loan amount in thousands), Credit_History (the status of the applicant's credit history, i.e. whether it meets the guidelines), Self_Employed (self-employment, Yes/No), Education (whether the applicant has an education).
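As a rough illustration of this step, the sketch below loads the same five features from loan.csv and checks them before clustering. It is not part of lab4.py; the defensive `dropna()` call is an assumption added here, since scipy's `linkage()` rejects missing values (in this lab's loan.csv the selected columns, including Self_Employed and Education, are already numeric).

```python
import pandas as pd

# Load the dataset shipped with the lab and keep the five clustering features.
data = pd.read_csv('loan.csv')
features = ['ApplicantIncome', 'LoanAmount', 'Credit_History', 'Self_Employed', 'Education']
x = data[features].dropna()  # precaution: linkage() cannot handle missing values

print(x.describe())  # quick sanity check of the value ranges before clustering
```

With the dependencies installed, the whole lab is then started with `python lab4.py` from the lab directory, with loan.csv next to the script.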
### Test



The clustering dendrogram shows that the data has been split into distinct groups; the diagram displays the different groups of applicants by lending risk.
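Whether such a split is actually a good one can be checked numerically rather than only by looking at the picture. One common check (a sketch under the same assumptions as above, not part of the lab) is the cophenetic correlation coefficient: it measures how faithfully the dendrogram preserves the pairwise distances between applicants, and values close to 1 support the claim of an effective partition.

```python
from scipy.cluster.hierarchy import linkage, cophenet
from scipy.spatial.distance import pdist
import pandas as pd

data = pd.read_csv('loan.csv')
x = data[['ApplicantIncome', 'LoanAmount', 'Credit_History', 'Self_Employed', 'Education']].dropna()

# Same single-linkage clustering as in lab4.py
links = linkage(x, method='single')

# Cophenetic correlation: how well the tree reproduces the original distances
c, _ = cophenet(links, pdist(x))
print(f'cophenetic correlation: {c:.3f}')
```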
23
abanin_daniil_lab_4/lab4.py
Normal file
@@ -0,0 +1,23 @@
from scipy.cluster import hierarchy
import pandas as pd
from matplotlib import pyplot as plt


def start():
    # Read the loan dataset and keep the five numeric features used for clustering
    data = pd.read_csv('loan.csv')
    x = data[['ApplicantIncome', 'LoanAmount', 'Credit_History', 'Self_Employed', 'Education']]

    plt.figure(1, figsize=(16, 9))
    plt.title('Дендрограмма кластеризации заявителей')  # "Applicant clustering dendrogram"

    # Single-linkage hierarchical clustering, drawn as a dendrogram truncated
    # to the last 20 merged clusters
    hierarchy.dendrogram(hierarchy.linkage(x, method='single'),
                         truncate_mode='lastp',
                         p=20,
                         orientation='top',
                         leaf_rotation=90,
                         leaf_font_size=8,
                         show_contracted=True)

    plt.show()


start()
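lab4.py only draws the dendrogram; it does not assign applicants to concrete groups. If flat risk groups are needed, as the README describes, the same linkage matrix can be cut with `fcluster`. A minimal sketch, assuming loan.csv sits next to the script; the choice of 3 clusters here is arbitrary and only for illustration:

```python
from scipy.cluster import hierarchy
import pandas as pd

data = pd.read_csv('loan.csv')
x = data[['ApplicantIncome', 'LoanAmount', 'Credit_History', 'Self_Employed', 'Education']].dropna()

# Same linkage as in lab4.py, then cut the tree into a fixed number of clusters
links = hierarchy.linkage(x, method='single')
groups = hierarchy.fcluster(links, t=3, criterion='maxclust')

# Number of applicants in each resulting risk group
print(pd.Series(groups).value_counts())
```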
615
abanin_daniil_lab_4/loan.csv
Normal file
@@ -0,0 +1,615 @@
|
||||
Loan_ID,Gender,Married,Dependents,Education,Self_Employed,ApplicantIncome,CoapplicantIncome,LoanAmount,Loan_Amount_Term,Credit_History,Property_Area,Loan_Status
|
||||
LP001002,Male,No,0,1,0.0,5849,0.0,360.0,1.0,0,Y,0.0
|
||||
LP001003,Male,Yes,1,1,0.0,4583,1508.0,128.0,360.0,1,Rural,0.0
|
||||
LP001005,Male,Yes,0,1,1.0,3000,0.0,66.0,360.0,1,Urban,1.0
|
||||
LP001006,Male,Yes,0,0,0.0,2583,2358.0,120.0,360.0,1,Urban,1.0
|
||||
LP001008,Male,No,0,1,0.0,6000,0.0,141.0,360.0,1,Urban,1.0
|
||||
LP001011,Male,Yes,2,1,1.0,5417,4196.0,267.0,360.0,1,Urban,1.0
|
||||
LP001013,Male,Yes,0,0,0.0,2333,1516.0,95.0,360.0,1,Urban,1.0
|
||||
LP001014,Male,Yes,3+,1,0.0,3036,2504.0,158.0,360.0,0,Semiurban,0.0
|
||||
LP001018,Male,Yes,2,1,0.0,4006,1526.0,168.0,360.0,1,Urban,1.0
|
||||
LP001020,Male,Yes,1,1,0.0,12841,10968.0,349.0,360.0,1,Semiurban,0.0
|
||||
LP001024,Male,Yes,2,1,0.0,3200,700.0,70.0,360.0,1,Urban,1.0
|
||||
LP001027,Male,Yes,2,1,0.0,2500,1840.0,109.0,360.0,1,Urban,1.0
|
||||
LP001028,Male,Yes,2,1,0.0,3073,8106.0,200.0,360.0,1,Urban,1.0
|
||||
LP001029,Male,No,0,1,0.0,1853,2840.0,114.0,360.0,1,Rural,0.0
|
||||
LP001030,Male,Yes,2,1,0.0,1299,1086.0,17.0,120.0,1,Urban,1.0
|
||||
LP001032,Male,No,0,1,0.0,4950,0.0,125.0,360.0,1,Urban,1.0
|
||||
LP001034,Male,No,1,0,0.0,3596,0.0,100.0,240.0,0,Urban,1.0
|
||||
LP001036,Female,No,0,1,0.0,3510,0.0,76.0,360.0,0,Urban,0.0
|
||||
LP001038,Male,Yes,0,0,0.0,4887,0.0,133.0,360.0,1,Rural,0.0
|
||||
LP001041,Male,Yes,0,1,0.0,2600,3500.0,115.0,,1,Urban,1.0
|
||||
LP001043,Male,Yes,0,0,0.0,7660,0.0,104.0,360.0,0,Urban,0.0
|
||||
LP001046,Male,Yes,1,1,0.0,5955,5625.0,315.0,360.0,1,Urban,1.0
|
||||
LP001047,Male,Yes,0,0,0.0,2600,1911.0,116.0,360.0,0,Semiurban,0.0
|
||||
LP001050,,Yes,2,0,0.0,3365,1917.0,112.0,360.0,0,Rural,0.0
|
||||
LP001052,Male,Yes,1,1,0.0,3717,2925.0,151.0,360.0,0,Semiurban,0.0
|
||||
LP001066,Male,Yes,0,1,1.0,9560,0.0,191.0,360.0,1,Semiurban,1.0
|
||||
LP001068,Male,Yes,0,1,0.0,2799,2253.0,122.0,360.0,1,Semiurban,1.0
|
||||
LP001073,Male,Yes,2,0,0.0,4226,1040.0,110.0,360.0,1,Urban,1.0
|
||||
LP001086,Male,No,0,0,0.0,1442,0.0,35.0,360.0,1,Urban,0.0
|
||||
LP001087,Female,No,2,1,0.0,3750,2083.0,120.0,360.0,1,Semiurban,1.0
|
||||
LP001091,Male,Yes,1,1,0.0,4166,3369.0,201.0,360.0,0,Urban,0.0
|
||||
LP001095,Male,No,0,1,0.0,3167,0.0,74.0,360.0,1,Urban,0.0
|
||||
LP001097,Male,No,1,1,1.0,4692,0.0,106.0,360.0,1,Rural,0.0
|
||||
LP001098,Male,Yes,0,1,0.0,3500,1667.0,114.0,360.0,1,Semiurban,1.0
|
||||
LP001100,Male,No,3+,1,0.0,12500,3000.0,320.0,360.0,1,Rural,0.0
|
||||
LP001106,Male,Yes,0,1,0.0,2275,2067.0,0.0,360.0,1,Urban,1.0
|
||||
LP001109,Male,Yes,0,1,0.0,1828,1330.0,100.0,,0,Urban,0.0
|
||||
LP001112,Female,Yes,0,1,0.0,3667,1459.0,144.0,360.0,1,Semiurban,1.0
|
||||
LP001114,Male,No,0,1,0.0,4166,7210.0,184.0,360.0,1,Urban,1.0
|
||||
LP001116,Male,No,0,0,0.0,3748,1668.0,110.0,360.0,1,Semiurban,1.0
|
||||
LP001119,Male,No,0,1,0.0,3600,0.0,80.0,360.0,1,Urban,0.0
|
||||
LP001120,Male,No,0,1,0.0,1800,1213.0,47.0,360.0,1,Urban,1.0
|
||||
LP001123,Male,Yes,0,1,0.0,2400,0.0,75.0,360.0,0,Urban,1.0
|
||||
LP001131,Male,Yes,0,1,0.0,3941,2336.0,134.0,360.0,1,Semiurban,1.0
|
||||
LP001136,Male,Yes,0,0,1.0,4695,0.0,96.0,,1,Urban,1.0
|
||||
LP001137,Female,No,0,1,0.0,3410,0.0,88.0,,1,Urban,1.0
|
||||
LP001138,Male,Yes,1,1,0.0,5649,0.0,44.0,360.0,1,Urban,1.0
|
||||
LP001144,Male,Yes,0,1,0.0,5821,0.0,144.0,360.0,1,Urban,1.0
|
||||
LP001146,Female,Yes,0,1,0.0,2645,3440.0,120.0,360.0,0,Urban,0.0
|
||||
LP001151,Female,No,0,1,0.0,4000,2275.0,144.0,360.0,1,Semiurban,1.0
|
||||
LP001155,Female,Yes,0,0,0.0,1928,1644.0,100.0,360.0,1,Semiurban,1.0
|
||||
LP001157,Female,No,0,1,0.0,3086,0.0,120.0,360.0,1,Semiurban,1.0
|
||||
LP001164,Female,No,0,1,0.0,4230,0.0,112.0,360.0,1,Semiurban,0.0
|
||||
LP001179,Male,Yes,2,1,0.0,4616,0.0,134.0,360.0,1,Urban,0.0
|
||||
LP001186,Female,Yes,1,1,1.0,11500,0.0,286.0,360.0,0,Urban,0.0
|
||||
LP001194,Male,Yes,2,1,0.0,2708,1167.0,97.0,360.0,1,Semiurban,1.0
|
||||
LP001195,Male,Yes,0,1,0.0,2132,1591.0,96.0,360.0,1,Semiurban,1.0
|
||||
LP001197,Male,Yes,0,1,0.0,3366,2200.0,135.0,360.0,1,Rural,0.0
|
||||
LP001198,Male,Yes,1,1,0.0,8080,2250.0,180.0,360.0,1,Urban,1.0
|
||||
LP001199,Male,Yes,2,0,0.0,3357,2859.0,144.0,360.0,1,Urban,1.0
|
||||
LP001205,Male,Yes,0,1,0.0,2500,3796.0,120.0,360.0,1,Urban,1.0
|
||||
LP001206,Male,Yes,3+,1,0.0,3029,0.0,99.0,360.0,1,Urban,1.0
|
||||
LP001207,Male,Yes,0,0,1.0,2609,3449.0,165.0,180.0,0,Rural,0.0
|
||||
LP001213,Male,Yes,1,1,0.0,4945,0.0,0.0,360.0,0,Rural,0.0
|
||||
LP001222,Female,No,0,1,0.0,4166,0.0,116.0,360.0,0,Semiurban,0.0
|
||||
LP001225,Male,Yes,0,1,0.0,5726,4595.0,258.0,360.0,1,Semiurban,0.0
|
||||
LP001228,Male,No,0,0,0.0,3200,2254.0,126.0,180.0,0,Urban,0.0
|
||||
LP001233,Male,Yes,1,1,0.0,10750,0.0,312.0,360.0,1,Urban,1.0
|
||||
LP001238,Male,Yes,3+,0,1.0,7100,0.0,125.0,60.0,1,Urban,1.0
|
||||
LP001241,Female,No,0,1,0.0,4300,0.0,136.0,360.0,0,Semiurban,0.0
|
||||
LP001243,Male,Yes,0,1,0.0,3208,3066.0,172.0,360.0,1,Urban,1.0
|
||||
LP001245,Male,Yes,2,0,1.0,1875,1875.0,97.0,360.0,1,Semiurban,1.0
|
||||
LP001248,Male,No,0,1,0.0,3500,0.0,81.0,300.0,1,Semiurban,1.0
|
||||
LP001250,Male,Yes,3+,0,0.0,4755,0.0,95.0,,0,Semiurban,0.0
|
||||
LP001253,Male,Yes,3+,1,1.0,5266,1774.0,187.0,360.0,1,Semiurban,1.0
|
||||
LP001255,Male,No,0,1,0.0,3750,0.0,113.0,480.0,1,Urban,0.0
|
||||
LP001256,Male,No,0,1,0.0,3750,4750.0,176.0,360.0,1,Urban,0.0
|
||||
LP001259,Male,Yes,1,1,1.0,1000,3022.0,110.0,360.0,1,Urban,0.0
|
||||
LP001263,Male,Yes,3+,1,0.0,3167,4000.0,180.0,300.0,0,Semiurban,0.0
|
||||
LP001264,Male,Yes,3+,0,1.0,3333,2166.0,130.0,360.0,0,Semiurban,1.0
|
||||
LP001265,Female,No,0,1,0.0,3846,0.0,111.0,360.0,1,Semiurban,1.0
|
||||
LP001266,Male,Yes,1,1,1.0,2395,0.0,0.0,360.0,1,Semiurban,1.0
|
||||
LP001267,Female,Yes,2,1,0.0,1378,1881.0,167.0,360.0,1,Urban,0.0
|
||||
LP001273,Male,Yes,0,1,0.0,6000,2250.0,265.0,360.0,0,Semiurban,0.0
|
||||
LP001275,Male,Yes,1,1,0.0,3988,0.0,50.0,240.0,1,Urban,1.0
|
||||
LP001279,Male,No,0,1,0.0,2366,2531.0,136.0,360.0,1,Semiurban,1.0
|
||||
LP001280,Male,Yes,2,0,0.0,3333,2000.0,99.0,360.0,0,Semiurban,1.0
|
||||
LP001282,Male,Yes,0,1,0.0,2500,2118.0,104.0,360.0,1,Semiurban,1.0
|
||||
LP001289,Male,No,0,1,0.0,8566,0.0,210.0,360.0,1,Urban,1.0
|
||||
LP001310,Male,Yes,0,1,0.0,5695,4167.0,175.0,360.0,1,Semiurban,1.0
|
||||
LP001316,Male,Yes,0,1,0.0,2958,2900.0,131.0,360.0,1,Semiurban,1.0
|
||||
LP001318,Male,Yes,2,1,0.0,6250,5654.0,188.0,180.0,1,Semiurban,1.0
|
||||
LP001319,Male,Yes,2,0,0.0,3273,1820.0,81.0,360.0,1,Urban,1.0
|
||||
LP001322,Male,No,0,1,0.0,4133,0.0,122.0,360.0,1,Semiurban,1.0
|
||||
LP001325,Male,No,0,0,0.0,3620,0.0,25.0,120.0,1,Semiurban,1.0
|
||||
LP001326,Male,No,0,1,0.0,6782,0.0,0.0,360.0,0,Urban,0.0
|
||||
LP001327,Female,Yes,0,1,0.0,2484,2302.0,137.0,360.0,1,Semiurban,1.0
|
||||
LP001333,Male,Yes,0,1,0.0,1977,997.0,50.0,360.0,1,Semiurban,1.0
|
||||
LP001334,Male,Yes,0,0,0.0,4188,0.0,115.0,180.0,1,Semiurban,1.0
|
||||
LP001343,Male,Yes,0,1,0.0,1759,3541.0,131.0,360.0,1,Semiurban,1.0
|
||||
LP001345,Male,Yes,2,0,0.0,4288,3263.0,133.0,180.0,1,Urban,1.0
|
||||
LP001349,Male,No,0,1,0.0,4843,3806.0,151.0,360.0,1,Semiurban,1.0
|
||||
LP001350,Male,Yes,,1,0.0,13650,0.0,0.0,360.0,1,Urban,1.0
|
||||
LP001356,Male,Yes,0,1,0.0,4652,3583.0,0.0,360.0,1,Semiurban,1.0
|
||||
LP001357,Male,,,1,0.0,3816,754.0,160.0,360.0,1,Urban,1.0
|
||||
LP001367,Male,Yes,1,1,0.0,3052,1030.0,100.0,360.0,1,Urban,1.0
|
||||
LP001369,Male,Yes,2,1,0.0,11417,1126.0,225.0,360.0,1,Urban,1.0
|
||||
LP001370,Male,No,0,0,0.0,7333,0.0,120.0,360.0,1,Rural,0.0
|
||||
LP001379,Male,Yes,2,1,0.0,3800,3600.0,216.0,360.0,0,Urban,0.0
|
||||
LP001384,Male,Yes,3+,0,0.0,2071,754.0,94.0,480.0,1,Semiurban,1.0
|
||||
LP001385,Male,No,0,1,0.0,5316,0.0,136.0,360.0,1,Urban,1.0
|
||||
LP001387,Female,Yes,0,1,0.0,2929,2333.0,139.0,360.0,1,Semiurban,1.0
|
||||
LP001391,Male,Yes,0,0,0.0,3572,4114.0,152.0,,0,Rural,0.0
|
||||
LP001392,Female,No,1,1,1.0,7451,0.0,0.0,360.0,1,Semiurban,1.0
|
||||
LP001398,Male,No,0,1,0.0,5050,0.0,118.0,360.0,1,Semiurban,1.0
|
||||
LP001401,Male,Yes,1,1,0.0,14583,0.0,185.0,180.0,1,Rural,1.0
|
||||
LP001404,Female,Yes,0,1,0.0,3167,2283.0,154.0,360.0,1,Semiurban,1.0
|
||||
LP001405,Male,Yes,1,1,0.0,2214,1398.0,85.0,360.0,0,Urban,1.0
|
||||
LP001421,Male,Yes,0,1,0.0,5568,2142.0,175.0,360.0,1,Rural,0.0
|
||||
LP001422,Female,No,0,1,0.0,10408,0.0,259.0,360.0,1,Urban,1.0
|
||||
LP001426,Male,Yes,,1,0.0,5667,2667.0,180.0,360.0,1,Rural,1.0
|
||||
LP001430,Female,No,0,1,0.0,4166,0.0,44.0,360.0,1,Semiurban,1.0
|
||||
LP001431,Female,No,0,1,0.0,2137,8980.0,137.0,360.0,0,Semiurban,1.0
|
||||
LP001432,Male,Yes,2,1,0.0,2957,0.0,81.0,360.0,1,Semiurban,1.0
|
||||
LP001439,Male,Yes,0,0,0.0,4300,2014.0,194.0,360.0,1,Rural,1.0
|
||||
LP001443,Female,No,0,1,0.0,3692,0.0,93.0,360.0,0,Rural,1.0
|
||||
LP001448,,Yes,3+,1,0.0,23803,0.0,370.0,360.0,1,Rural,1.0
|
||||
LP001449,Male,No,0,1,0.0,3865,1640.0,0.0,360.0,1,Rural,1.0
|
||||
LP001451,Male,Yes,1,1,1.0,10513,3850.0,160.0,180.0,0,Urban,0.0
|
||||
LP001465,Male,Yes,0,1,0.0,6080,2569.0,182.0,360.0,0,Rural,0.0
|
||||
LP001469,Male,No,0,1,1.0,20166,0.0,650.0,480.0,0,Urban,1.0
|
||||
LP001473,Male,No,0,1,0.0,2014,1929.0,74.0,360.0,1,Urban,1.0
|
||||
LP001478,Male,No,0,1,0.0,2718,0.0,70.0,360.0,1,Semiurban,1.0
|
||||
LP001482,Male,Yes,0,1,1.0,3459,0.0,25.0,120.0,1,Semiurban,1.0
|
||||
LP001487,Male,No,0,1,0.0,4895,0.0,102.0,360.0,1,Semiurban,1.0
|
||||
LP001488,Male,Yes,3+,1,0.0,4000,7750.0,290.0,360.0,1,Semiurban,0.0
|
||||
LP001489,Female,Yes,0,1,0.0,4583,0.0,84.0,360.0,1,Rural,0.0
|
||||
LP001491,Male,Yes,2,1,1.0,3316,3500.0,88.0,360.0,1,Urban,1.0
|
||||
LP001492,Male,No,0,1,0.0,14999,0.0,242.0,360.0,0,Semiurban,0.0
|
||||
LP001493,Male,Yes,2,0,0.0,4200,1430.0,129.0,360.0,1,Rural,0.0
|
||||
LP001497,Male,Yes,2,1,0.0,5042,2083.0,185.0,360.0,1,Rural,0.0
|
||||
LP001498,Male,No,0,1,0.0,5417,0.0,168.0,360.0,1,Urban,1.0
|
||||
LP001504,Male,No,0,1,1.0,6950,0.0,175.0,180.0,1,Semiurban,1.0
|
||||
LP001507,Male,Yes,0,1,0.0,2698,2034.0,122.0,360.0,1,Semiurban,1.0
|
||||
LP001508,Male,Yes,2,1,0.0,11757,0.0,187.0,180.0,1,Urban,1.0
|
||||
LP001514,Female,Yes,0,1,0.0,2330,4486.0,100.0,360.0,1,Semiurban,1.0
|
||||
LP001516,Female,Yes,2,1,0.0,14866,0.0,70.0,360.0,1,Urban,1.0
|
||||
LP001518,Male,Yes,1,1,0.0,1538,1425.0,30.0,360.0,1,Urban,1.0
|
||||
LP001519,Female,No,0,1,0.0,10000,1666.0,225.0,360.0,1,Rural,0.0
|
||||
LP001520,Male,Yes,0,1,0.0,4860,830.0,125.0,360.0,1,Semiurban,1.0
|
||||
LP001528,Male,No,0,1,0.0,6277,0.0,118.0,360.0,0,Rural,0.0
|
||||
LP001529,Male,Yes,0,1,1.0,2577,3750.0,152.0,360.0,1,Rural,1.0
|
||||
LP001531,Male,No,0,1,0.0,9166,0.0,244.0,360.0,1,Urban,0.0
|
||||
LP001532,Male,Yes,2,0,0.0,2281,0.0,113.0,360.0,1,Rural,0.0
|
||||
LP001535,Male,No,0,1,0.0,3254,0.0,50.0,360.0,1,Urban,1.0
|
||||
LP001536,Male,Yes,3+,1,0.0,39999,0.0,600.0,180.0,0,Semiurban,1.0
|
||||
LP001541,Male,Yes,1,1,0.0,6000,0.0,160.0,360.0,0,Rural,1.0
|
||||
LP001543,Male,Yes,1,1,0.0,9538,0.0,187.0,360.0,1,Urban,1.0
|
||||
LP001546,Male,No,0,1,0.0,2980,2083.0,120.0,360.0,1,Rural,1.0
|
||||
LP001552,Male,Yes,0,1,0.0,4583,5625.0,255.0,360.0,1,Semiurban,1.0
|
||||
LP001560,Male,Yes,0,0,0.0,1863,1041.0,98.0,360.0,1,Semiurban,1.0
|
||||
LP001562,Male,Yes,0,1,0.0,7933,0.0,275.0,360.0,1,Urban,0.0
|
||||
LP001565,Male,Yes,1,1,0.0,3089,1280.0,121.0,360.0,0,Semiurban,0.0
|
||||
LP001570,Male,Yes,2,1,0.0,4167,1447.0,158.0,360.0,1,Rural,1.0
|
||||
LP001572,Male,Yes,0,1,0.0,9323,0.0,75.0,180.0,1,Urban,1.0
|
||||
LP001574,Male,Yes,0,1,0.0,3707,3166.0,182.0,,1,Rural,1.0
|
||||
LP001577,Female,Yes,0,1,0.0,4583,0.0,112.0,360.0,1,Rural,0.0
|
||||
LP001578,Male,Yes,0,1,0.0,2439,3333.0,129.0,360.0,1,Rural,1.0
|
||||
LP001579,Male,No,0,1,0.0,2237,0.0,63.0,480.0,0,Semiurban,0.0
|
||||
LP001580,Male,Yes,2,1,0.0,8000,0.0,200.0,360.0,1,Semiurban,1.0
|
||||
LP001581,Male,Yes,0,0,0.0,1820,1769.0,95.0,360.0,1,Rural,1.0
|
||||
LP001585,,Yes,3+,1,0.0,51763,0.0,700.0,300.0,1,Urban,1.0
|
||||
LP001586,Male,Yes,3+,0,0.0,3522,0.0,81.0,180.0,1,Rural,0.0
|
||||
LP001594,Male,Yes,0,1,0.0,5708,5625.0,187.0,360.0,1,Semiurban,1.0
|
||||
LP001603,Male,Yes,0,0,1.0,4344,736.0,87.0,360.0,1,Semiurban,0.0
|
||||
LP001606,Male,Yes,0,1,0.0,3497,1964.0,116.0,360.0,1,Rural,1.0
|
||||
LP001608,Male,Yes,2,1,0.0,2045,1619.0,101.0,360.0,1,Rural,1.0
|
||||
LP001610,Male,Yes,3+,1,0.0,5516,11300.0,495.0,360.0,0,Semiurban,0.0
|
||||
LP001616,Male,Yes,1,1,0.0,3750,0.0,116.0,360.0,1,Semiurban,1.0
|
||||
LP001630,Male,No,0,0,0.0,2333,1451.0,102.0,480.0,0,Urban,0.0
|
||||
LP001633,Male,Yes,1,1,0.0,6400,7250.0,180.0,360.0,0,Urban,0.0
|
||||
LP001634,Male,No,0,1,0.0,1916,5063.0,67.0,360.0,0,Rural,0.0
|
||||
LP001636,Male,Yes,0,1,0.0,4600,0.0,73.0,180.0,1,Semiurban,1.0
|
||||
LP001637,Male,Yes,1,1,0.0,33846,0.0,260.0,360.0,1,Semiurban,0.0
|
||||
LP001639,Female,Yes,0,1,0.0,3625,0.0,108.0,360.0,1,Semiurban,1.0
|
||||
LP001640,Male,Yes,0,1,1.0,39147,4750.0,120.0,360.0,1,Semiurban,1.0
|
||||
LP001641,Male,Yes,1,1,1.0,2178,0.0,66.0,300.0,0,Rural,0.0
|
||||
LP001643,Male,Yes,0,1,0.0,2383,2138.0,58.0,360.0,0,Rural,1.0
|
||||
LP001644,,Yes,0,1,1.0,674,5296.0,168.0,360.0,1,Rural,1.0
|
||||
LP001647,Male,Yes,0,1,0.0,9328,0.0,188.0,180.0,1,Rural,1.0
|
||||
LP001653,Male,No,0,0,0.0,4885,0.0,48.0,360.0,1,Rural,1.0
|
||||
LP001656,Male,No,0,1,0.0,12000,0.0,164.0,360.0,1,Semiurban,0.0
|
||||
LP001657,Male,Yes,0,0,0.0,6033,0.0,160.0,360.0,1,Urban,0.0
|
||||
LP001658,Male,No,0,1,0.0,3858,0.0,76.0,360.0,1,Semiurban,1.0
|
||||
LP001664,Male,No,0,1,0.0,4191,0.0,120.0,360.0,1,Rural,1.0
|
||||
LP001665,Male,Yes,1,1,0.0,3125,2583.0,170.0,360.0,1,Semiurban,0.0
|
||||
LP001666,Male,No,0,1,0.0,8333,3750.0,187.0,360.0,1,Rural,1.0
|
||||
LP001669,Female,No,0,0,0.0,1907,2365.0,120.0,,1,Urban,1.0
|
||||
LP001671,Female,Yes,0,1,0.0,3416,2816.0,113.0,360.0,0,Semiurban,1.0
|
||||
LP001673,Male,No,0,1,1.0,11000,0.0,83.0,360.0,1,Urban,0.0
|
||||
LP001674,Male,Yes,1,0,0.0,2600,2500.0,90.0,360.0,1,Semiurban,1.0
|
||||
LP001677,Male,No,2,1,0.0,4923,0.0,166.0,360.0,0,Semiurban,1.0
|
||||
LP001682,Male,Yes,3+,0,0.0,3992,0.0,0.0,180.0,1,Urban,0.0
|
||||
LP001688,Male,Yes,1,0,0.0,3500,1083.0,135.0,360.0,1,Urban,1.0
|
||||
LP001691,Male,Yes,2,0,0.0,3917,0.0,124.0,360.0,1,Semiurban,1.0
|
||||
LP001692,Female,No,0,0,0.0,4408,0.0,120.0,360.0,1,Semiurban,1.0
|
||||
LP001693,Female,No,0,1,0.0,3244,0.0,80.0,360.0,1,Urban,1.0
|
||||
LP001698,Male,No,0,0,0.0,3975,2531.0,55.0,360.0,1,Rural,1.0
|
||||
LP001699,Male,No,0,1,0.0,2479,0.0,59.0,360.0,1,Urban,1.0
|
||||
LP001702,Male,No,0,1,0.0,3418,0.0,127.0,360.0,1,Semiurban,0.0
|
||||
LP001708,Female,No,0,1,0.0,10000,0.0,214.0,360.0,1,Semiurban,0.0
|
||||
LP001711,Male,Yes,3+,1,0.0,3430,1250.0,128.0,360.0,0,Semiurban,0.0
|
||||
LP001713,Male,Yes,1,1,1.0,7787,0.0,240.0,360.0,1,Urban,1.0
|
||||
LP001715,Male,Yes,3+,0,1.0,5703,0.0,130.0,360.0,1,Rural,1.0
|
||||
LP001716,Male,Yes,0,1,0.0,3173,3021.0,137.0,360.0,1,Urban,1.0
|
||||
LP001720,Male,Yes,3+,0,0.0,3850,983.0,100.0,360.0,1,Semiurban,1.0
|
||||
LP001722,Male,Yes,0,1,0.0,150,1800.0,135.0,360.0,1,Rural,0.0
|
||||
LP001726,Male,Yes,0,1,0.0,3727,1775.0,131.0,360.0,1,Semiurban,1.0
|
||||
LP001732,Male,Yes,2,1,0.0,5000,0.0,72.0,360.0,0,Semiurban,0.0
|
||||
LP001734,Female,Yes,2,1,0.0,4283,2383.0,127.0,360.0,0,Semiurban,1.0
|
||||
LP001736,Male,Yes,0,1,0.0,2221,0.0,60.0,360.0,0,Urban,0.0
|
||||
LP001743,Male,Yes,2,1,0.0,4009,1717.0,116.0,360.0,1,Semiurban,1.0
|
||||
LP001744,Male,No,0,1,0.0,2971,2791.0,144.0,360.0,1,Semiurban,1.0
|
||||
LP001749,Male,Yes,0,1,0.0,7578,1010.0,175.0,,1,Semiurban,1.0
|
||||
LP001750,Male,Yes,0,1,0.0,6250,0.0,128.0,360.0,1,Semiurban,1.0
|
||||
LP001751,Male,Yes,0,1,0.0,3250,0.0,170.0,360.0,1,Rural,0.0
|
||||
LP001754,Male,Yes,,0,1.0,4735,0.0,138.0,360.0,1,Urban,0.0
|
||||
LP001758,Male,Yes,2,1,0.0,6250,1695.0,210.0,360.0,1,Semiurban,1.0
|
||||
LP001760,Male,,,1,0.0,4758,0.0,158.0,480.0,1,Semiurban,1.0
|
||||
LP001761,Male,No,0,1,1.0,6400,0.0,200.0,360.0,1,Rural,1.0
|
||||
LP001765,Male,Yes,1,1,0.0,2491,2054.0,104.0,360.0,1,Semiurban,1.0
|
||||
LP001768,Male,Yes,0,1,0.0,3716,0.0,42.0,180.0,1,Rural,1.0
|
||||
LP001770,Male,No,0,0,0.0,3189,2598.0,120.0,,1,Rural,1.0
|
||||
LP001776,Female,No,0,1,0.0,8333,0.0,280.0,360.0,1,Semiurban,1.0
|
||||
LP001778,Male,Yes,1,1,0.0,3155,1779.0,140.0,360.0,1,Semiurban,1.0
|
||||
LP001784,Male,Yes,1,1,0.0,5500,1260.0,170.0,360.0,1,Rural,1.0
|
||||
LP001786,Male,Yes,0,1,0.0,5746,0.0,255.0,360.0,0,Urban,0.0
|
||||
LP001788,Female,No,0,1,1.0,3463,0.0,122.0,360.0,0,Urban,1.0
|
||||
LP001790,Female,No,1,1,0.0,3812,0.0,112.0,360.0,1,Rural,1.0
|
||||
LP001792,Male,Yes,1,1,0.0,3315,0.0,96.0,360.0,1,Semiurban,1.0
|
||||
LP001798,Male,Yes,2,1,0.0,5819,5000.0,120.0,360.0,1,Rural,1.0
|
||||
LP001800,Male,Yes,1,0,0.0,2510,1983.0,140.0,180.0,1,Urban,0.0
|
||||
LP001806,Male,No,0,1,0.0,2965,5701.0,155.0,60.0,1,Urban,1.0
|
||||
LP001807,Male,Yes,2,1,1.0,6250,1300.0,108.0,360.0,1,Rural,1.0
|
||||
LP001811,Male,Yes,0,0,0.0,3406,4417.0,123.0,360.0,1,Semiurban,1.0
|
||||
LP001813,Male,No,0,1,1.0,6050,4333.0,120.0,180.0,1,Urban,0.0
|
||||
LP001814,Male,Yes,2,1,0.0,9703,0.0,112.0,360.0,1,Urban,1.0
|
||||
LP001819,Male,Yes,1,0,0.0,6608,0.0,137.0,180.0,1,Urban,1.0
|
||||
LP001824,Male,Yes,1,1,0.0,2882,1843.0,123.0,480.0,1,Semiurban,1.0
|
||||
LP001825,Male,Yes,0,1,0.0,1809,1868.0,90.0,360.0,1,Urban,1.0
|
||||
LP001835,Male,Yes,0,0,0.0,1668,3890.0,201.0,360.0,0,Semiurban,0.0
|
||||
LP001836,Female,No,2,1,0.0,3427,0.0,138.0,360.0,1,Urban,0.0
|
||||
LP001841,Male,No,0,0,1.0,2583,2167.0,104.0,360.0,1,Rural,1.0
|
||||
LP001843,Male,Yes,1,0,0.0,2661,7101.0,279.0,180.0,1,Semiurban,1.0
|
||||
LP001844,Male,No,0,1,1.0,16250,0.0,192.0,360.0,0,Urban,0.0
|
||||
LP001846,Female,No,3+,1,0.0,3083,0.0,255.0,360.0,1,Rural,1.0
|
||||
LP001849,Male,No,0,0,0.0,6045,0.0,115.0,360.0,0,Rural,0.0
|
||||
LP001854,Male,Yes,3+,1,0.0,5250,0.0,94.0,360.0,1,Urban,0.0
|
||||
LP001859,Male,Yes,0,1,0.0,14683,2100.0,304.0,360.0,1,Rural,0.0
|
||||
LP001864,Male,Yes,3+,0,0.0,4931,0.0,128.0,360.0,0,Semiurban,0.0
|
||||
LP001865,Male,Yes,1,1,0.0,6083,4250.0,330.0,360.0,0,Urban,1.0
|
||||
LP001868,Male,No,0,1,0.0,2060,2209.0,134.0,360.0,1,Semiurban,1.0
|
||||
LP001870,Female,No,1,1,0.0,3481,0.0,155.0,36.0,1,Semiurban,0.0
|
||||
LP001871,Female,No,0,1,0.0,7200,0.0,120.0,360.0,1,Rural,1.0
|
||||
LP001872,Male,No,0,1,1.0,5166,0.0,128.0,360.0,1,Semiurban,1.0
|
||||
LP001875,Male,No,0,1,0.0,4095,3447.0,151.0,360.0,1,Rural,1.0
|
||||
LP001877,Male,Yes,2,1,0.0,4708,1387.0,150.0,360.0,1,Semiurban,1.0
|
||||
LP001882,Male,Yes,3+,1,0.0,4333,1811.0,160.0,360.0,0,Urban,1.0
|
||||
LP001883,Female,No,0,1,0.0,3418,0.0,135.0,360.0,1,Rural,0.0
|
||||
LP001884,Female,No,1,1,0.0,2876,1560.0,90.0,360.0,1,Urban,1.0
|
||||
LP001888,Female,No,0,1,0.0,3237,0.0,30.0,360.0,1,Urban,1.0
|
||||
LP001891,Male,Yes,0,1,0.0,11146,0.0,136.0,360.0,1,Urban,1.0
|
||||
LP001892,Male,No,0,1,0.0,2833,1857.0,126.0,360.0,1,Rural,1.0
|
||||
LP001894,Male,Yes,0,1,0.0,2620,2223.0,150.0,360.0,1,Semiurban,1.0
|
||||
LP001896,Male,Yes,2,1,0.0,3900,0.0,90.0,360.0,1,Semiurban,1.0
|
||||
LP001900,Male,Yes,1,1,0.0,2750,1842.0,115.0,360.0,1,Semiurban,1.0
|
||||
LP001903,Male,Yes,0,1,0.0,3993,3274.0,207.0,360.0,1,Semiurban,1.0
|
||||
LP001904,Male,Yes,0,1,0.0,3103,1300.0,80.0,360.0,1,Urban,1.0
|
||||
LP001907,Male,Yes,0,1,0.0,14583,0.0,436.0,360.0,1,Semiurban,1.0
|
||||
LP001908,Female,Yes,0,0,0.0,4100,0.0,124.0,360.0,0,Rural,1.0
|
||||
LP001910,Male,No,1,0,1.0,4053,2426.0,158.0,360.0,0,Urban,0.0
|
||||
LP001914,Male,Yes,0,1,0.0,3927,800.0,112.0,360.0,1,Semiurban,1.0
|
||||
LP001915,Male,Yes,2,1,0.0,2301,985.7999878,78.0,180.0,1,Urban,1.0
|
||||
LP001917,Female,No,0,1,0.0,1811,1666.0,54.0,360.0,1,Urban,1.0
|
||||
LP001922,Male,Yes,0,1,0.0,20667,0.0,0.0,360.0,1,Rural,0.0
|
||||
LP001924,Male,No,0,1,0.0,3158,3053.0,89.0,360.0,1,Rural,1.0
|
||||
LP001925,Female,No,0,1,1.0,2600,1717.0,99.0,300.0,1,Semiurban,0.0
|
||||
LP001926,Male,Yes,0,1,0.0,3704,2000.0,120.0,360.0,1,Rural,1.0
|
||||
LP001931,Female,No,0,1,0.0,4124,0.0,115.0,360.0,1,Semiurban,1.0
|
||||
LP001935,Male,No,0,1,0.0,9508,0.0,187.0,360.0,1,Rural,1.0
|
||||
LP001936,Male,Yes,0,1,0.0,3075,2416.0,139.0,360.0,1,Rural,1.0
|
||||
LP001938,Male,Yes,2,1,0.0,4400,0.0,127.0,360.0,0,Semiurban,0.0
|
||||
LP001940,Male,Yes,2,1,0.0,3153,1560.0,134.0,360.0,1,Urban,1.0
|
||||
LP001945,Female,No,,1,0.0,5417,0.0,143.0,480.0,0,Urban,0.0
|
||||
LP001947,Male,Yes,0,1,0.0,2383,3334.0,172.0,360.0,1,Semiurban,1.0
|
||||
LP001949,Male,Yes,3+,1,0.0,4416,1250.0,110.0,360.0,1,Urban,1.0
|
||||
LP001953,Male,Yes,1,1,0.0,6875,0.0,200.0,360.0,1,Semiurban,1.0
|
||||
LP001954,Female,Yes,1,1,0.0,4666,0.0,135.0,360.0,1,Urban,1.0
|
||||
LP001955,Female,No,0,1,0.0,5000,2541.0,151.0,480.0,1,Rural,0.0
|
||||
LP001963,Male,Yes,1,1,0.0,2014,2925.0,113.0,360.0,1,Urban,0.0
|
||||
LP001964,Male,Yes,0,0,0.0,1800,2934.0,93.0,360.0,0,Urban,0.0
|
||||
LP001972,Male,Yes,,0,0.0,2875,1750.0,105.0,360.0,1,Semiurban,1.0
|
||||
LP001974,Female,No,0,1,0.0,5000,0.0,132.0,360.0,1,Rural,1.0
|
||||
LP001977,Male,Yes,1,1,0.0,1625,1803.0,96.0,360.0,1,Urban,1.0
|
||||
LP001978,Male,No,0,1,0.0,4000,2500.0,140.0,360.0,1,Rural,1.0
|
||||
LP001990,Male,No,0,0,0.0,2000,0.0,0.0,360.0,1,Urban,0.0
|
||||
LP001993,Female,No,0,1,0.0,3762,1666.0,135.0,360.0,1,Rural,1.0
|
||||
LP001994,Female,No,0,1,0.0,2400,1863.0,104.0,360.0,0,Urban,0.0
|
||||
LP001996,Male,No,0,1,0.0,20233,0.0,480.0,360.0,1,Rural,0.0
|
||||
LP001998,Male,Yes,2,0,0.0,7667,0.0,185.0,360.0,0,Rural,1.0
|
||||
LP002002,Female,No,0,1,0.0,2917,0.0,84.0,360.0,1,Semiurban,1.0
|
||||
LP002004,Male,No,0,0,0.0,2927,2405.0,111.0,360.0,1,Semiurban,1.0
|
||||
LP002006,Female,No,0,1,0.0,2507,0.0,56.0,360.0,1,Rural,1.0
|
||||
LP002008,Male,Yes,2,1,1.0,5746,0.0,144.0,84.0,0,Rural,1.0
|
||||
LP002024,,Yes,0,1,0.0,2473,1843.0,159.0,360.0,1,Rural,0.0
|
||||
LP002031,Male,Yes,1,0,0.0,3399,1640.0,111.0,180.0,1,Urban,1.0
|
||||
LP002035,Male,Yes,2,1,0.0,3717,0.0,120.0,360.0,1,Semiurban,1.0
|
||||
LP002036,Male,Yes,0,1,0.0,2058,2134.0,88.0,360.0,0,Urban,1.0
|
||||
LP002043,Female,No,1,1,0.0,3541,0.0,112.0,360.0,0,Semiurban,1.0
|
||||
LP002050,Male,Yes,1,1,1.0,10000,0.0,155.0,360.0,1,Rural,0.0
|
||||
LP002051,Male,Yes,0,1,0.0,2400,2167.0,115.0,360.0,1,Semiurban,1.0
|
||||
LP002053,Male,Yes,3+,1,0.0,4342,189.0,124.0,360.0,1,Semiurban,1.0
|
||||
LP002054,Male,Yes,2,0,0.0,3601,1590.0,0.0,360.0,1,Rural,1.0
|
||||
LP002055,Female,No,0,1,0.0,3166,2985.0,132.0,360.0,0,Rural,1.0
|
||||
LP002065,Male,Yes,3+,1,0.0,15000,0.0,300.0,360.0,1,Rural,1.0
|
||||
LP002067,Male,Yes,1,1,1.0,8666,4983.0,376.0,360.0,0,Rural,0.0
|
||||
LP002068,Male,No,0,1,0.0,4917,0.0,130.0,360.0,0,Rural,1.0
|
||||
LP002082,Male,Yes,0,1,1.0,5818,2160.0,184.0,360.0,1,Semiurban,1.0
|
||||
LP002086,Female,Yes,0,1,0.0,4333,2451.0,110.0,360.0,1,Urban,0.0
|
||||
LP002087,Female,No,0,1,0.0,2500,0.0,67.0,360.0,1,Urban,1.0
|
||||
LP002097,Male,No,1,1,0.0,4384,1793.0,117.0,360.0,1,Urban,1.0
|
||||
LP002098,Male,No,0,1,0.0,2935,0.0,98.0,360.0,1,Semiurban,1.0
|
||||
LP002100,Male,No,,1,0.0,2833,0.0,71.0,360.0,1,Urban,1.0
|
||||
LP002101,Male,Yes,0,1,0.0,63337,0.0,490.0,180.0,1,Urban,1.0
|
||||
LP002103,,Yes,1,1,1.0,9833,1833.0,182.0,180.0,1,Urban,1.0
|
||||
LP002106,Male,Yes,,1,1.0,5503,4490.0,70.0,,1,Semiurban,1.0
|
||||
LP002110,Male,Yes,1,1,0.0,5250,688.0,160.0,360.0,1,Rural,1.0
|
||||
LP002112,Male,Yes,2,1,1.0,2500,4600.0,176.0,360.0,1,Rural,1.0
|
||||
LP002113,Female,No,3+,0,0.0,1830,0.0,0.0,360.0,0,Urban,0.0
|
||||
LP002114,Female,No,0,1,0.0,4160,0.0,71.0,360.0,1,Semiurban,1.0
|
||||
LP002115,Male,Yes,3+,0,0.0,2647,1587.0,173.0,360.0,1,Rural,0.0
|
||||
LP002116,Female,No,0,1,0.0,2378,0.0,46.0,360.0,1,Rural,0.0
|
||||
LP002119,Male,Yes,1,0,0.0,4554,1229.0,158.0,360.0,1,Urban,1.0
|
||||
LP002126,Male,Yes,3+,0,0.0,3173,0.0,74.0,360.0,1,Semiurban,1.0
|
||||
LP002128,Male,Yes,2,1,0.0,2583,2330.0,125.0,360.0,1,Rural,1.0
|
||||
LP002129,Male,Yes,0,1,0.0,2499,2458.0,160.0,360.0,1,Semiurban,1.0
|
||||
LP002130,Male,Yes,,0,0.0,3523,3230.0,152.0,360.0,0,Rural,0.0
|
||||
LP002131,Male,Yes,2,0,0.0,3083,2168.0,126.0,360.0,1,Urban,1.0
|
||||
LP002137,Male,Yes,0,1,0.0,6333,4583.0,259.0,360.0,0,Semiurban,1.0
|
||||
LP002138,Male,Yes,0,1,0.0,2625,6250.0,187.0,360.0,1,Rural,1.0
|
||||
LP002139,Male,Yes,0,1,0.0,9083,0.0,228.0,360.0,1,Semiurban,1.0
|
||||
LP002140,Male,No,0,1,0.0,8750,4167.0,308.0,360.0,1,Rural,0.0
|
||||
LP002141,Male,Yes,3+,1,0.0,2666,2083.0,95.0,360.0,1,Rural,1.0
|
||||
LP002142,Female,Yes,0,1,1.0,5500,0.0,105.0,360.0,0,Rural,0.0
|
||||
LP002143,Female,Yes,0,1,0.0,2423,505.0,130.0,360.0,1,Semiurban,1.0
|
||||
LP002144,Female,No,,1,0.0,3813,0.0,116.0,180.0,1,Urban,1.0
|
||||
LP002149,Male,Yes,2,1,0.0,8333,3167.0,165.0,360.0,1,Rural,1.0
|
||||
LP002151,Male,Yes,1,1,0.0,3875,0.0,67.0,360.0,1,Urban,0.0
|
||||
LP002158,Male,Yes,0,0,0.0,3000,1666.0,100.0,480.0,0,Urban,0.0
|
||||
LP002160,Male,Yes,3+,1,0.0,5167,3167.0,200.0,360.0,1,Semiurban,1.0
|
||||
LP002161,Female,No,1,1,0.0,4723,0.0,81.0,360.0,1,Semiurban,0.0
|
||||
LP002170,Male,Yes,2,1,0.0,5000,3667.0,236.0,360.0,1,Semiurban,1.0
|
||||
LP002175,Male,Yes,0,1,0.0,4750,2333.0,130.0,360.0,1,Urban,1.0
|
||||
LP002178,Male,Yes,0,1,0.0,3013,3033.0,95.0,300.0,0,Urban,1.0
|
||||
LP002180,Male,No,0,1,1.0,6822,0.0,141.0,360.0,1,Rural,1.0
|
||||
LP002181,Male,No,0,0,0.0,6216,0.0,133.0,360.0,1,Rural,0.0
|
||||
LP002187,Male,No,0,1,0.0,2500,0.0,96.0,480.0,1,Semiurban,0.0
|
||||
LP002188,Male,No,0,1,0.0,5124,0.0,124.0,,0,Rural,0.0
|
||||
LP002190,Male,Yes,1,1,0.0,6325,0.0,175.0,360.0,1,Semiurban,1.0
|
||||
LP002191,Male,Yes,0,1,0.0,19730,5266.0,570.0,360.0,1,Rural,0.0
|
||||
LP002194,Female,No,0,1,1.0,15759,0.0,55.0,360.0,1,Semiurban,1.0
|
||||
LP002197,Male,Yes,2,1,0.0,5185,0.0,155.0,360.0,1,Semiurban,1.0
|
||||
LP002201,Male,Yes,2,1,1.0,9323,7873.0,380.0,300.0,1,Rural,1.0
|
||||
LP002205,Male,No,1,1,0.0,3062,1987.0,111.0,180.0,0,Urban,0.0
|
||||
LP002209,Female,No,0,1,0.0,2764,1459.0,110.0,360.0,1,Urban,1.0
|
||||
LP002211,Male,Yes,0,1,0.0,4817,923.0,120.0,180.0,1,Urban,1.0
|
||||
LP002219,Male,Yes,3+,1,0.0,8750,4996.0,130.0,360.0,1,Rural,1.0
|
||||
LP002223,Male,Yes,0,1,0.0,4310,0.0,130.0,360.0,0,Semiurban,1.0
|
||||
LP002224,Male,No,0,1,0.0,3069,0.0,71.0,480.0,1,Urban,0.0
|
||||
LP002225,Male,Yes,2,1,0.0,5391,0.0,130.0,360.0,1,Urban,1.0
|
||||
LP002226,Male,Yes,0,1,0.0,3333,2500.0,128.0,360.0,1,Semiurban,1.0
|
||||
LP002229,Male,No,0,1,0.0,5941,4232.0,296.0,360.0,1,Semiurban,1.0
|
||||
LP002231,Female,No,0,1,0.0,6000,0.0,156.0,360.0,1,Urban,1.0
|
||||
LP002234,Male,No,0,1,1.0,7167,0.0,128.0,360.0,1,Urban,1.0
|
||||
LP002236,Male,Yes,2,1,0.0,4566,0.0,100.0,360.0,1,Urban,0.0
|
||||
LP002237,Male,No,1,1,0.0,3667,0.0,113.0,180.0,1,Urban,1.0
|
||||
LP002239,Male,No,0,0,0.0,2346,1600.0,132.0,360.0,1,Semiurban,1.0
|
||||
LP002243,Male,Yes,0,0,0.0,3010,3136.0,0.0,360.0,0,Urban,0.0
|
||||
LP002244,Male,Yes,0,1,0.0,2333,2417.0,136.0,360.0,1,Urban,1.0
|
||||
LP002250,Male,Yes,0,1,0.0,5488,0.0,125.0,360.0,1,Rural,1.0
|
||||
LP002255,Male,No,3+,1,0.0,9167,0.0,185.0,360.0,1,Rural,1.0
|
||||
LP002262,Male,Yes,3+,1,0.0,9504,0.0,275.0,360.0,1,Rural,1.0
|
||||
LP002263,Male,Yes,0,1,0.0,2583,2115.0,120.0,360.0,0,Urban,1.0
|
||||
LP002265,Male,Yes,2,0,0.0,1993,1625.0,113.0,180.0,1,Semiurban,1.0
|
||||
LP002266,Male,Yes,2,1,0.0,3100,1400.0,113.0,360.0,1,Urban,1.0
|
||||
LP002272,Male,Yes,2,1,0.0,3276,484.0,135.0,360.0,0,Semiurban,1.0
|
||||
LP002277,Female,No,0,1,0.0,3180,0.0,71.0,360.0,0,Urban,0.0
|
||||
LP002281,Male,Yes,0,1,0.0,3033,1459.0,95.0,360.0,1,Urban,1.0
|
||||
LP002284,Male,No,0,0,0.0,3902,1666.0,109.0,360.0,1,Rural,1.0
|
||||
LP002287,Female,No,0,1,0.0,1500,1800.0,103.0,360.0,0,Semiurban,0.0
|
||||
LP002288,Male,Yes,2,0,0.0,2889,0.0,45.0,180.0,0,Urban,0.0
|
||||
LP002296,Male,No,0,0,0.0,2755,0.0,65.0,300.0,1,Rural,0.0
|
||||
LP002297,Male,No,0,1,0.0,2500,20000.0,103.0,360.0,1,Semiurban,1.0
|
||||
LP002300,Female,No,0,0,0.0,1963,0.0,53.0,360.0,1,Semiurban,1.0
|
||||
LP002301,Female,No,0,1,1.0,7441,0.0,194.0,360.0,1,Rural,0.0
|
||||
LP002305,Female,No,0,1,0.0,4547,0.0,115.0,360.0,1,Semiurban,1.0
|
||||
LP002308,Male,Yes,0,0,0.0,2167,2400.0,115.0,360.0,1,Urban,1.0
|
||||
LP002314,Female,No,0,0,0.0,2213,0.0,66.0,360.0,1,Rural,1.0
|
||||
LP002315,Male,Yes,1,1,0.0,8300,0.0,152.0,300.0,0,Semiurban,0.0
|
||||
LP002317,Male,Yes,3+,1,0.0,81000,0.0,360.0,360.0,0,Rural,0.0
|
||||
LP002318,Female,No,1,0,1.0,3867,0.0,62.0,360.0,1,Semiurban,0.0
|
||||
LP002319,Male,Yes,0,1,0.0,6256,0.0,160.0,360.0,0,Urban,1.0
|
||||
LP002328,Male,Yes,0,0,0.0,6096,0.0,218.0,360.0,0,Rural,0.0
|
||||
LP002332,Male,Yes,0,0,0.0,2253,2033.0,110.0,360.0,1,Rural,1.0
|
||||
LP002335,Female,Yes,0,0,0.0,2149,3237.0,178.0,360.0,0,Semiurban,0.0
|
||||
LP002337,Female,No,0,1,0.0,2995,0.0,60.0,360.0,1,Urban,1.0
|
||||
LP002341,Female,No,1,1,0.0,2600,0.0,160.0,360.0,1,Urban,0.0
|
||||
LP002342,Male,Yes,2,1,1.0,1600,20000.0,239.0,360.0,1,Urban,0.0
|
||||
LP002345,Male,Yes,0,1,0.0,1025,2773.0,112.0,360.0,1,Rural,1.0
|
||||
LP002347,Male,Yes,0,1,0.0,3246,1417.0,138.0,360.0,1,Semiurban,1.0
|
||||
LP002348,Male,Yes,0,1,0.0,5829,0.0,138.0,360.0,1,Rural,1.0
|
||||
LP002357,Female,No,0,0,0.0,2720,0.0,80.0,,0,Urban,0.0
|
||||
LP002361,Male,Yes,0,1,0.0,1820,1719.0,100.0,360.0,1,Urban,1.0
|
||||
LP002362,Male,Yes,1,1,0.0,7250,1667.0,110.0,,0,Urban,0.0
|
||||
LP002364,Male,Yes,0,1,0.0,14880,0.0,96.0,360.0,1,Semiurban,1.0
|
||||
LP002366,Male,Yes,0,1,0.0,2666,4300.0,121.0,360.0,1,Rural,1.0
|
||||
LP002367,Female,No,1,0,0.0,4606,0.0,81.0,360.0,1,Rural,0.0
|
||||
LP002368,Male,Yes,2,1,0.0,5935,0.0,133.0,360.0,1,Semiurban,1.0
|
||||
LP002369,Male,Yes,0,1,0.0,2920,16.12000084,87.0,360.0,1,Rural,1.0
|
||||
LP002370,Male,No,0,0,0.0,2717,0.0,60.0,180.0,1,Urban,1.0
|
||||
LP002377,Female,No,1,1,1.0,8624,0.0,150.0,360.0,1,Semiurban,1.0
|
||||
LP002379,Male,No,0,1,0.0,6500,0.0,105.0,360.0,0,Rural,0.0
|
||||
LP002386,Male,No,0,1,0.0,12876,0.0,405.0,360.0,1,Semiurban,1.0
|
||||
LP002387,Male,Yes,0,1,0.0,2425,2340.0,143.0,360.0,1,Semiurban,1.0
|
||||
LP002390,Male,No,0,1,0.0,3750,0.0,100.0,360.0,1,Urban,1.0
|
||||
LP002393,Female,,,1,0.0,10047,0.0,0.0,240.0,1,Semiurban,1.0
|
||||
LP002398,Male,No,0,1,0.0,1926,1851.0,50.0,360.0,1,Semiurban,1.0
|
||||
LP002401,Male,Yes,0,1,0.0,2213,1125.0,0.0,360.0,1,Urban,1.0
|
||||
LP002403,Male,No,0,1,1.0,10416,0.0,187.0,360.0,0,Urban,0.0
|
||||
LP002407,Female,Yes,0,0,1.0,7142,0.0,138.0,360.0,1,Rural,1.0
|
||||
LP002408,Male,No,0,1,0.0,3660,5064.0,187.0,360.0,1,Semiurban,1.0
|
||||
LP002409,Male,Yes,0,1,0.0,7901,1833.0,180.0,360.0,1,Rural,1.0
|
||||
LP002418,Male,No,3+,0,0.0,4707,1993.0,148.0,360.0,1,Semiurban,1.0
|
||||
LP002422,Male,No,1,1,0.0,37719,0.0,152.0,360.0,1,Semiurban,1.0
|
||||
LP002424,Male,Yes,0,1,0.0,7333,8333.0,175.0,300.0,0,Rural,1.0
|
||||
LP002429,Male,Yes,1,1,1.0,3466,1210.0,130.0,360.0,1,Rural,1.0
|
||||
LP002434,Male,Yes,2,0,0.0,4652,0.0,110.0,360.0,1,Rural,1.0
|
||||
LP002435,Male,Yes,0,1,0.0,3539,1376.0,55.0,360.0,1,Rural,0.0
|
||||
LP002443,Male,Yes,2,1,0.0,3340,1710.0,150.0,360.0,0,Rural,0.0
|
||||
LP002444,Male,No,1,0,1.0,2769,1542.0,190.0,360.0,0,Semiurban,0.0
|
||||
LP002446,Male,Yes,2,0,0.0,2309,1255.0,125.0,360.0,0,Rural,0.0
|
||||
LP002447,Male,Yes,2,0,0.0,1958,1456.0,60.0,300.0,0,Urban,1.0
|
||||
LP002448,Male,Yes,0,1,0.0,3948,1733.0,149.0,360.0,0,Rural,0.0
|
||||
LP002449,Male,Yes,0,1,0.0,2483,2466.0,90.0,180.0,0,Rural,1.0
|
||||
LP002453,Male,No,0,1,1.0,7085,0.0,84.0,360.0,1,Semiurban,1.0
|
||||
LP002455,Male,Yes,2,1,0.0,3859,0.0,96.0,360.0,1,Semiurban,1.0
|
||||
LP002459,Male,Yes,0,1,0.0,4301,0.0,118.0,360.0,1,Urban,1.0
|
||||
LP002467,Male,Yes,0,1,0.0,3708,2569.0,173.0,360.0,1,Urban,0.0
|
||||
LP002472,Male,No,2,1,0.0,4354,0.0,136.0,360.0,1,Rural,1.0
|
||||
LP002473,Male,Yes,0,1,0.0,8334,0.0,160.0,360.0,1,Semiurban,0.0
|
||||
LP002478,,Yes,0,1,1.0,2083,4083.0,160.0,360.0,0,Semiurban,1.0
|
||||
LP002484,Male,Yes,3+,1,0.0,7740,0.0,128.0,180.0,1,Urban,1.0
|
||||
LP002487,Male,Yes,0,1,0.0,3015,2188.0,153.0,360.0,1,Rural,1.0
|
||||
LP002489,Female,No,1,0,0.0,5191,0.0,132.0,360.0,1,Semiurban,1.0
|
||||
LP002493,Male,No,0,1,0.0,4166,0.0,98.0,360.0,0,Semiurban,0.0
|
||||
LP002494,Male,No,0,1,0.0,6000,0.0,140.0,360.0,1,Rural,1.0
|
||||
LP002500,Male,Yes,3+,0,0.0,2947,1664.0,70.0,180.0,0,Urban,0.0
|
||||
LP002501,,Yes,0,1,0.0,16692,0.0,110.0,360.0,1,Semiurban,1.0
|
||||
LP002502,Female,Yes,2,0,0.0,210,2917.0,98.0,360.0,1,Semiurban,1.0
|
||||
LP002505,Male,Yes,0,1,0.0,4333,2451.0,110.0,360.0,1,Urban,0.0
|
||||
LP002515,Male,Yes,1,1,1.0,3450,2079.0,162.0,360.0,1,Semiurban,1.0
|
||||
LP002517,Male,Yes,1,0,0.0,2653,1500.0,113.0,180.0,0,Rural,0.0
|
||||
LP002519,Male,Yes,3+,1,0.0,4691,0.0,100.0,360.0,1,Semiurban,1.0
|
||||
LP002522,Female,No,0,1,1.0,2500,0.0,93.0,360.0,0,Urban,1.0
|
||||
LP002524,Male,No,2,1,0.0,5532,4648.0,162.0,360.0,1,Rural,1.0
|
||||
LP002527,Male,Yes,2,1,1.0,16525,1014.0,150.0,360.0,1,Rural,1.0
|
||||
LP002529,Male,Yes,2,1,0.0,6700,1750.0,230.0,300.0,1,Semiurban,1.0
|
||||
LP002530,,Yes,2,1,0.0,2873,1872.0,132.0,360.0,0,Semiurban,0.0
|
||||
LP002531,Male,Yes,1,1,1.0,16667,2250.0,86.0,360.0,1,Semiurban,1.0
|
||||
LP002533,Male,Yes,2,1,0.0,2947,1603.0,0.0,360.0,1,Urban,0.0
|
||||
LP002534,Female,No,0,0,0.0,4350,0.0,154.0,360.0,1,Rural,1.0
|
||||
LP002536,Male,Yes,3+,0,0.0,3095,0.0,113.0,360.0,1,Rural,1.0
|
||||
LP002537,Male,Yes,0,1,0.0,2083,3150.0,128.0,360.0,1,Semiurban,1.0
|
||||
LP002541,Male,Yes,0,1,0.0,10833,0.0,234.0,360.0,1,Semiurban,1.0
|
||||
LP002543,Male,Yes,2,1,0.0,8333,0.0,246.0,360.0,1,Semiurban,1.0
|
||||
LP002544,Male,Yes,1,0,0.0,1958,2436.0,131.0,360.0,1,Rural,1.0
|
||||
LP002545,Male,No,2,1,0.0,3547,0.0,80.0,360.0,0,Rural,0.0
|
||||
LP002547,Male,Yes,1,1,0.0,18333,0.0,500.0,360.0,1,Urban,0.0
|
||||
LP002555,Male,Yes,2,1,1.0,4583,2083.0,160.0,360.0,1,Semiurban,1.0
|
||||
LP002556,Male,No,0,1,0.0,2435,0.0,75.0,360.0,1,Urban,0.0
|
||||
LP002560,Male,No,0,0,0.0,2699,2785.0,96.0,360.0,0,Semiurban,1.0
|
||||
LP002562,Male,Yes,1,0,0.0,5333,1131.0,186.0,360.0,0,Urban,1.0
|
||||
LP002571,Male,No,0,0,0.0,3691,0.0,110.0,360.0,1,Rural,1.0
|
||||
LP002582,Female,No,0,0,1.0,17263,0.0,225.0,360.0,1,Semiurban,1.0
|
||||
LP002585,Male,Yes,0,1,0.0,3597,2157.0,119.0,360.0,0,Rural,0.0
|
||||
LP002586,Female,Yes,1,1,0.0,3326,913.0,105.0,84.0,1,Semiurban,1.0
|
||||
LP002587,Male,Yes,0,0,0.0,2600,1700.0,107.0,360.0,1,Rural,1.0
|
||||
LP002588,Male,Yes,0,1,0.0,4625,2857.0,111.0,12.0,0,Urban,1.0
|
||||
LP002600,Male,Yes,1,1,1.0,2895,0.0,95.0,360.0,1,Semiurban,1.0
|
||||
LP002602,Male,No,0,1,0.0,6283,4416.0,209.0,360.0,0,Rural,0.0
|
||||
LP002603,Female,No,0,1,0.0,645,3683.0,113.0,480.0,1,Rural,1.0
|
||||
LP002606,Female,No,0,1,0.0,3159,0.0,100.0,360.0,1,Semiurban,1.0
|
||||
LP002615,Male,Yes,2,1,0.0,4865,5624.0,208.0,360.0,1,Semiurban,1.0
|
||||
LP002618,Male,Yes,1,0,0.0,4050,5302.0,138.0,360.0,0,Rural,0.0
|
||||
LP002619,Male,Yes,0,0,0.0,3814,1483.0,124.0,300.0,1,Semiurban,1.0
|
||||
LP002622,Male,Yes,2,1,0.0,3510,4416.0,243.0,360.0,1,Rural,1.0
|
||||
LP002624,Male,Yes,0,1,0.0,20833,6667.0,480.0,360.0,0,Urban,1.0
|
||||
LP002625,,No,0,1,0.0,3583,0.0,96.0,360.0,1,Urban,0.0
|
||||
LP002626,Male,Yes,0,1,1.0,2479,3013.0,188.0,360.0,1,Urban,1.0
|
||||
LP002634,Female,No,1,1,0.0,13262,0.0,40.0,360.0,1,Urban,1.0
|
||||
LP002637,Male,No,0,0,0.0,3598,1287.0,100.0,360.0,1,Rural,0.0
|
||||
LP002640,Male,Yes,1,1,0.0,6065,2004.0,250.0,360.0,1,Semiurban,1.0
|
||||
LP002643,Male,Yes,2,1,0.0,3283,2035.0,148.0,360.0,1,Urban,1.0
|
||||
LP002648,Male,Yes,0,1,0.0,2130,6666.0,70.0,180.0,1,Semiurban,0.0
|
||||
LP002652,Male,No,0,1,0.0,5815,3666.0,311.0,360.0,1,Rural,0.0
|
||||
LP002659,Male,Yes,3+,1,0.0,3466,3428.0,150.0,360.0,1,Rural,1.0
|
||||
LP002670,Female,Yes,2,1,0.0,2031,1632.0,113.0,480.0,1,Semiurban,1.0
|
||||
LP002682,Male,Yes,,0,0.0,3074,1800.0,123.0,360.0,0,Semiurban,0.0
|
||||
LP002683,Male,No,0,1,0.0,4683,1915.0,185.0,360.0,1,Semiurban,0.0
|
||||
LP002684,Female,No,0,0,0.0,3400,0.0,95.0,360.0,1,Rural,0.0
|
||||
LP002689,Male,Yes,2,0,0.0,2192,1742.0,45.0,360.0,1,Semiurban,1.0
|
||||
LP002690,Male,No,0,1,0.0,2500,0.0,55.0,360.0,1,Semiurban,1.0
|
||||
LP002692,Male,Yes,3+,1,1.0,5677,1424.0,100.0,360.0,1,Rural,1.0
|
||||
LP002693,Male,Yes,2,1,1.0,7948,7166.0,480.0,360.0,1,Rural,1.0
|
||||
LP002697,Male,No,0,1,0.0,4680,2087.0,0.0,360.0,1,Semiurban,0.0
|
||||
LP002699,Male,Yes,2,1,1.0,17500,0.0,400.0,360.0,1,Rural,1.0
|
||||
LP002705,Male,Yes,0,1,0.0,3775,0.0,110.0,360.0,1,Semiurban,1.0
|
||||
LP002706,Male,Yes,1,0,0.0,5285,1430.0,161.0,360.0,0,Semiurban,1.0
|
||||
LP002714,Male,No,1,0,0.0,2679,1302.0,94.0,360.0,1,Semiurban,1.0
|
||||
LP002716,Male,No,0,0,0.0,6783,0.0,130.0,360.0,1,Semiurban,1.0
|
||||
LP002717,Male,Yes,0,1,0.0,1025,5500.0,216.0,360.0,0,Rural,1.0
|
||||
LP002720,Male,Yes,3+,1,0.0,4281,0.0,100.0,360.0,1,Urban,1.0
|
||||
LP002723,Male,No,2,1,0.0,3588,0.0,110.0,360.0,0,Rural,0.0
|
||||
LP002729,Male,No,1,1,0.0,11250,0.0,196.0,360.0,0,Semiurban,0.0
|
||||
LP002731,Female,No,0,0,1.0,18165,0.0,125.0,360.0,1,Urban,1.0
|
||||
LP002732,Male,No,0,0,0.0,2550,2042.0,126.0,360.0,1,Rural,1.0
|
||||
LP002734,Male,Yes,0,1,0.0,6133,3906.0,324.0,360.0,1,Urban,1.0
|
||||
LP002738,Male,No,2,1,0.0,3617,0.0,107.0,360.0,1,Semiurban,1.0
|
||||
LP002739,Male,Yes,0,0,0.0,2917,536.0,66.0,360.0,1,Rural,0.0
|
||||
LP002740,Male,Yes,3+,1,0.0,6417,0.0,157.0,180.0,1,Rural,1.0
|
||||
LP002741,Female,Yes,1,1,0.0,4608,2845.0,140.0,180.0,1,Semiurban,1.0
|
||||
LP002743,Female,No,0,1,0.0,2138,0.0,99.0,360.0,0,Semiurban,0.0
|
||||
LP002753,Female,No,1,1,0.0,3652,0.0,95.0,360.0,1,Semiurban,1.0
|
||||
LP002755,Male,Yes,1,0,0.0,2239,2524.0,128.0,360.0,1,Urban,1.0
|
||||
LP002757,Female,Yes,0,0,0.0,3017,663.0,102.0,360.0,0,Semiurban,1.0
|
||||
LP002767,Male,Yes,0,1,0.0,2768,1950.0,155.0,360.0,1,Rural,1.0
|
||||
LP002768,Male,No,0,0,0.0,3358,0.0,80.0,36.0,1,Semiurban,0.0
|
||||
LP002772,Male,No,0,1,0.0,2526,1783.0,145.0,360.0,1,Rural,1.0
|
||||
LP002776,Female,No,0,1,0.0,5000,0.0,103.0,360.0,0,Semiurban,0.0
|
||||
LP002777,Male,Yes,0,1,0.0,2785,2016.0,110.0,360.0,1,Rural,1.0
|
||||
LP002778,Male,Yes,2,1,1.0,6633,0.0,0.0,360.0,0,Rural,0.0
|
||||
LP002784,Male,Yes,1,0,0.0,2492,2375.0,0.0,360.0,1,Rural,1.0
|
||||
LP002785,Male,Yes,1,1,0.0,3333,3250.0,158.0,360.0,1,Urban,1.0
|
||||
LP002788,Male,Yes,0,0,0.0,2454,2333.0,181.0,360.0,0,Urban,0.0
|
||||
LP002789,Male,Yes,0,1,0.0,3593,4266.0,132.0,180.0,0,Rural,0.0
|
||||
LP002792,Male,Yes,1,1,0.0,5468,1032.0,26.0,360.0,1,Semiurban,1.0
|
||||
LP002794,Female,No,0,1,0.0,2667,1625.0,84.0,360.0,0,Urban,1.0
|
||||
LP002795,Male,Yes,3+,1,1.0,10139,0.0,260.0,360.0,1,Semiurban,1.0
|
||||
LP002798,Male,Yes,0,1,0.0,3887,2669.0,162.0,360.0,1,Semiurban,1.0
|
||||
LP002804,Female,Yes,0,1,0.0,4180,2306.0,182.0,360.0,1,Semiurban,1.0
|
||||
LP002807,Male,Yes,2,0,0.0,3675,242.0,108.0,360.0,1,Semiurban,1.0
|
||||
LP002813,Female,Yes,1,1,1.0,19484,0.0,600.0,360.0,1,Semiurban,1.0
|
||||
LP002820,Male,Yes,0,1,0.0,5923,2054.0,211.0,360.0,1,Rural,1.0
|
||||
LP002821,Male,No,0,0,1.0,5800,0.0,132.0,360.0,1,Semiurban,1.0
|
||||
LP002832,Male,Yes,2,1,0.0,8799,0.0,258.0,360.0,0,Urban,0.0
|
||||
LP002833,Male,Yes,0,0,0.0,4467,0.0,120.0,360.0,0,Rural,1.0
|
||||
LP002836,Male,No,0,1,0.0,3333,0.0,70.0,360.0,1,Urban,1.0
|
||||
LP002837,Male,Yes,3+,1,0.0,3400,2500.0,123.0,360.0,0,Rural,0.0
|
||||
LP002840,Female,No,0,1,0.0,2378,0.0,9.0,360.0,1,Urban,0.0
|
||||
LP002841,Male,Yes,0,1,0.0,3166,2064.0,104.0,360.0,0,Urban,0.0
|
||||
LP002842,Male,Yes,1,1,0.0,3417,1750.0,186.0,360.0,1,Urban,1.0
|
||||
LP002847,Male,Yes,,1,0.0,5116,1451.0,165.0,360.0,0,Urban,0.0
|
||||
LP002855,Male,Yes,2,1,0.0,16666,0.0,275.0,360.0,1,Urban,1.0
|
||||
LP002862,Male,Yes,2,0,0.0,6125,1625.0,187.0,480.0,1,Semiurban,0.0
|
||||
LP002863,Male,Yes,3+,1,0.0,6406,0.0,150.0,360.0,1,Semiurban,0.0
|
||||
LP002868,Male,Yes,2,1,0.0,3159,461.0,108.0,84.0,1,Urban,1.0
|
||||
LP002872,,Yes,0,1,0.0,3087,2210.0,136.0,360.0,0,Semiurban,0.0
|
||||
LP002874,Male,No,0,1,0.0,3229,2739.0,110.0,360.0,1,Urban,1.0
|
||||
LP002877,Male,Yes,1,1,0.0,1782,2232.0,107.0,360.0,1,Rural,1.0
|
||||
LP002888,Male,No,0,1,0.0,3182,2917.0,161.0,360.0,1,Urban,1.0
|
||||
LP002892,Male,Yes,2,1,0.0,6540,0.0,205.0,360.0,1,Semiurban,1.0
|
||||
LP002893,Male,No,0,1,0.0,1836,33837.0,90.0,360.0,1,Urban,0.0
|
||||
LP002894,Female,Yes,0,1,0.0,3166,0.0,36.0,360.0,1,Semiurban,1.0
|
||||
LP002898,Male,Yes,1,1,0.0,1880,0.0,61.0,360.0,0,Rural,0.0
|
||||
LP002911,Male,Yes,1,1,0.0,2787,1917.0,146.0,360.0,0,Rural,0.0
|
||||
LP002912,Male,Yes,1,1,0.0,4283,3000.0,172.0,84.0,1,Rural,0.0
|
||||
LP002916,Male,Yes,0,1,0.0,2297,1522.0,104.0,360.0,1,Urban,1.0
|
||||
LP002917,Female,No,0,0,0.0,2165,0.0,70.0,360.0,1,Semiurban,1.0
|
||||
LP002925,,No,0,1,0.0,4750,0.0,94.0,360.0,1,Semiurban,1.0
|
||||
LP002926,Male,Yes,2,1,1.0,2726,0.0,106.0,360.0,0,Semiurban,0.0
|
||||
LP002928,Male,Yes,0,1,0.0,3000,3416.0,56.0,180.0,1,Semiurban,1.0
|
||||
LP002931,Male,Yes,2,1,1.0,6000,0.0,205.0,240.0,1,Semiurban,0.0
|
||||
LP002933,,No,3+,1,1.0,9357,0.0,292.0,360.0,1,Semiurban,1.0
|
||||
LP002936,Male,Yes,0,1,0.0,3859,3300.0,142.0,180.0,1,Rural,1.0
|
||||
LP002938,Male,Yes,0,1,1.0,16120,0.0,260.0,360.0,1,Urban,1.0
|
||||
LP002940,Male,No,0,0,0.0,3833,0.0,110.0,360.0,1,Rural,1.0
|
||||
LP002941,Male,Yes,2,0,1.0,6383,1000.0,187.0,360.0,1,Rural,0.0
|
||||
LP002943,Male,No,,1,0.0,2987,0.0,88.0,360.0,0,Semiurban,0.0
|
||||
LP002945,Male,Yes,0,1,1.0,9963,0.0,180.0,360.0,1,Rural,1.0
|
||||
LP002948,Male,Yes,2,1,0.0,5780,0.0,192.0,360.0,1,Urban,1.0
|
||||
LP002949,Female,No,3+,1,0.0,416,41667.0,350.0,180.0,0,Urban,0.0
|
||||
LP002950,Male,Yes,0,0,0.0,2894,2792.0,155.0,360.0,1,Rural,1.0
|
||||
LP002953,Male,Yes,3+,1,0.0,5703,0.0,128.0,360.0,1,Urban,1.0
|
||||
LP002958,Male,No,0,1,0.0,3676,4301.0,172.0,360.0,1,Rural,1.0
|
||||
LP002959,Female,Yes,1,1,0.0,12000,0.0,496.0,360.0,1,Semiurban,1.0
|
||||
LP002960,Male,Yes,0,0,0.0,2400,3800.0,0.0,180.0,1,Urban,0.0
|
||||
LP002961,Male,Yes,1,1,0.0,3400,2500.0,173.0,360.0,1,Semiurban,1.0
|
||||
LP002964,Male,Yes,2,0,0.0,3987,1411.0,157.0,360.0,1,Rural,1.0
|
||||
LP002974,Male,Yes,0,1,0.0,3232,1950.0,108.0,360.0,1,Rural,1.0
|
||||
LP002978,Female,No,0,1,0.0,2900,0.0,71.0,360.0,1,Rural,1.0
|
||||
LP002979,Male,Yes,3+,1,0.0,4106,0.0,40.0,180.0,1,Rural,1.0
|
||||
LP002983,Male,Yes,1,1,0.0,8072,240.0,253.0,360.0,1,Urban,1.0
|
||||
LP002984,Male,Yes,2,1,0.0,7583,0.0,187.0,360.0,1,Urban,1.0
|
||||
LP002990,Female,No,0,1,1.0,4583,0.0,133.0,360.0,0,Semiurban,0.0
|
||||
|
BIN
abanin_daniil_lab_4/result.png
Normal file
|
After Width: | Height: | Size: 92 KiB |
38
abanin_daniil_lab_5/README.md
Normal file
@@ -0,0 +1,38 @@
## Laboratory work No. 5

### Polynomial regression

## PIbd-41 Abanin Daniil

### How to run the laboratory work:

* install Python and the pandas, matplotlib and sklearn packages
* run the project (entry point: lab5.py)

### Technologies used:

* The `Python` programming language with the pandas, matplotlib and sklearn libraries
* The `PyCharm` IDE

### What the laboratory work does:

The program solves a regression problem using polynomial regression.
The goal is to predict the loan amount (LoanAmount) from the available features: ApplicantIncome (the applicant's income), Credit_History (whether the credit history meets the bank's standards), Education (whether the applicant has an education), Married (whether the applicant is married, Yes/No), Self_Employed (whether the applicant is self-employed, Yes/No).

### Test

Green markers on the plot are the actual test values.
Red markers on the plot are the predicted values.

With a small amount of test data the algorithm shows fairly good results:





As the amount of data grows, however, the algorithm loses its effectiveness:





Conclusion: on small amounts of data the algorithm performs well, but for larger volumes other methods should be used for this data set.
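The behaviour described above can be checked directly with a few extra lines. The sketch below is not part of lab5.py; it only assumes the same loan.csv file and column names used there, refits the same degree-3 pipeline for several test-set sizes and prints the R² score for each split.

```python
# Hedged sketch: how the held-out score changes with the size of the test split.
# Assumes the loan.csv layout used by lab5.py.
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.metrics import r2_score
from sklearn.model_selection import train_test_split
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures

data = pd.read_csv('loan.csv')
x = data[['ApplicantIncome', 'Credit_History', 'Education', 'Married', 'Self_Employed']]
y = data['LoanAmount']

for test_size in (0.1, 0.25, 0.5):
    x_tr, x_te, y_tr, y_te = train_test_split(x, y, test_size=test_size, random_state=42)
    model = Pipeline([('poly', PolynomialFeatures(degree=3)),
                      ('linear', LinearRegression())])
    model.fit(x_tr, y_tr)
    print(test_size, round(r2_score(y_te, model.predict(x_te)), 3))
```

A falling score for the larger test_size values would support the conclusion drawn above.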
||||
BIN
abanin_daniil_lab_5/grade_1.png
Normal file
|
After Width: | Height: | Size: 13 KiB |
BIN
abanin_daniil_lab_5/grade_2.png
Normal file
|
After Width: | Height: | Size: 10 KiB |
33
abanin_daniil_lab_5/lab5.py
Normal file
@@ -0,0 +1,33 @@
from matplotlib import pyplot as plt
from sklearn import metrics
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import PolynomialFeatures
from sklearn.pipeline import Pipeline
import pandas as pd


def start():
    # Load the loan data set and pick the features used for prediction.
    data = pd.read_csv('loan.csv')
    x = data[['ApplicantIncome', 'Credit_History', 'Education', 'Married', 'Self_Employed']]
    y = data[['LoanAmount']]

    # Hold out 10% of the rows for testing.
    x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.1, random_state=42)

    # Degree-3 polynomial regression: expand the features, then fit a linear model.
    poly = Pipeline([('poly', PolynomialFeatures(degree=3)),
                     ('linear', LinearRegression())])
    poly.fit(x_train, y_train)

    y_predicted = poly.predict(x_test)

    # R^2 score on the held-out data.
    print('Training score:')
    print(metrics.r2_score(y_test, y_predicted))

    # Green points: actual test values, red points: predictions.
    plt.figure(1, figsize=(16, 9))
    plt.title('Comparison of the training results')
    plt.scatter(x=[i for i in range(len(x_test))], y=y_test, c='green', s=5)
    plt.scatter(x=[i for i in range(len(x_test))], y=y_predicted, c='red', s=5)
    plt.show()


start()
||||
615
abanin_daniil_lab_5/loan.csv
Normal file
@@ -0,0 +1,615 @@
|
||||
Loan_ID,Gender,Married,Dependents,Education,Self_Employed,ApplicantIncome,CoapplicantIncome,LoanAmount,Loan_Amount_Term,Credit_History,Property_Area,Loan_Status
|
||||
LP001002,Male,0.0,0,1,0.0,5849,0.0,360.0,1.0,0,Y,0.0
|
||||
LP001003,Male,1.0,1,1,0.0,4583,1508.0,128.0,360.0,1,Rural,0.0
|
||||
LP001005,Male,1.0,0,1,1.0,3000,0.0,66.0,360.0,1,Urban,1.0
|
||||
LP001006,Male,1.0,0,0,0.0,2583,2358.0,120.0,360.0,1,Urban,1.0
|
||||
LP001008,Male,0.0,0,1,0.0,6000,0.0,141.0,360.0,1,Urban,1.0
|
||||
LP001011,Male,1.0,2,1,1.0,5417,4196.0,267.0,360.0,1,Urban,1.0
|
||||
LP001013,Male,1.0,0,0,0.0,2333,1516.0,95.0,360.0,1,Urban,1.0
|
||||
LP001014,Male,1.0,3+,1,0.0,3036,2504.0,158.0,360.0,0,Semiurban,0.0
|
||||
LP001018,Male,1.0,2,1,0.0,4006,1526.0,168.0,360.0,1,Urban,1.0
|
||||
LP001020,Male,1.0,1,1,0.0,12841,10968.0,349.0,360.0,1,Semiurban,0.0
|
||||
LP001024,Male,1.0,2,1,0.0,3200,700.0,70.0,360.0,1,Urban,1.0
|
||||
LP001027,Male,1.0,2,1,0.0,2500,1840.0,109.0,360.0,1,Urban,1.0
|
||||
LP001028,Male,1.0,2,1,0.0,3073,8106.0,200.0,360.0,1,Urban,1.0
|
||||
LP001029,Male,0.0,0,1,0.0,1853,2840.0,114.0,360.0,1,Rural,0.0
|
||||
LP001030,Male,1.0,2,1,0.0,1299,1086.0,17.0,120.0,1,Urban,1.0
|
||||
LP001032,Male,0.0,0,1,0.0,4950,0.0,125.0,360.0,1,Urban,1.0
|
||||
LP001034,Male,0.0,1,0,0.0,3596,0.0,100.0,240.0,0,Urban,1.0
|
||||
LP001036,Female,0.0,0,1,0.0,3510,0.0,76.0,360.0,0,Urban,0.0
|
||||
LP001038,Male,1.0,0,0,0.0,4887,0.0,133.0,360.0,1,Rural,0.0
|
||||
LP001041,Male,1.0,0,1,0.0,2600,3500.0,115.0,,1,Urban,1.0
|
||||
LP001043,Male,1.0,0,0,0.0,7660,0.0,104.0,360.0,0,Urban,0.0
|
||||
LP001046,Male,1.0,1,1,0.0,5955,5625.0,315.0,360.0,1,Urban,1.0
|
||||
LP001047,Male,1.0,0,0,0.0,2600,1911.0,116.0,360.0,0,Semiurban,0.0
|
||||
LP001050,,1.0,2,0,0.0,3365,1917.0,112.0,360.0,0,Rural,0.0
|
||||
LP001052,Male,1.0,1,1,0.0,3717,2925.0,151.0,360.0,0,Semiurban,0.0
|
||||
LP001066,Male,1.0,0,1,1.0,9560,0.0,191.0,360.0,1,Semiurban,1.0
|
||||
LP001068,Male,1.0,0,1,0.0,2799,2253.0,122.0,360.0,1,Semiurban,1.0
|
||||
LP001073,Male,1.0,2,0,0.0,4226,1040.0,110.0,360.0,1,Urban,1.0
|
||||
LP001086,Male,0.0,0,0,0.0,1442,0.0,35.0,360.0,1,Urban,0.0
|
||||
LP001087,Female,0.0,2,1,0.0,3750,2083.0,120.0,360.0,1,Semiurban,1.0
|
||||
LP001091,Male,1.0,1,1,0.0,4166,3369.0,201.0,360.0,0,Urban,0.0
|
||||
LP001095,Male,0.0,0,1,0.0,3167,0.0,74.0,360.0,1,Urban,0.0
|
||||
LP001097,Male,0.0,1,1,1.0,4692,0.0,106.0,360.0,1,Rural,0.0
|
||||
LP001098,Male,1.0,0,1,0.0,3500,1667.0,114.0,360.0,1,Semiurban,1.0
|
||||
LP001100,Male,0.0,3+,1,0.0,12500,3000.0,320.0,360.0,1,Rural,0.0
|
||||
LP001106,Male,1.0,0,1,0.0,2275,2067.0,0.0,360.0,1,Urban,1.0
|
||||
LP001109,Male,1.0,0,1,0.0,1828,1330.0,100.0,,0,Urban,0.0
|
||||
LP001112,Female,1.0,0,1,0.0,3667,1459.0,144.0,360.0,1,Semiurban,1.0
|
||||
LP001114,Male,0.0,0,1,0.0,4166,7210.0,184.0,360.0,1,Urban,1.0
|
||||
LP001116,Male,0.0,0,0,0.0,3748,1668.0,110.0,360.0,1,Semiurban,1.0
|
||||
LP001119,Male,0.0,0,1,0.0,3600,0.0,80.0,360.0,1,Urban,0.0
|
||||
LP001120,Male,0.0,0,1,0.0,1800,1213.0,47.0,360.0,1,Urban,1.0
|
||||
LP001123,Male,1.0,0,1,0.0,2400,0.0,75.0,360.0,0,Urban,1.0
|
||||
LP001131,Male,1.0,0,1,0.0,3941,2336.0,134.0,360.0,1,Semiurban,1.0
|
||||
LP001136,Male,1.0,0,0,1.0,4695,0.0,96.0,,1,Urban,1.0
|
||||
LP001137,Female,0.0,0,1,0.0,3410,0.0,88.0,,1,Urban,1.0
|
||||
LP001138,Male,1.0,1,1,0.0,5649,0.0,44.0,360.0,1,Urban,1.0
|
||||
LP001144,Male,1.0,0,1,0.0,5821,0.0,144.0,360.0,1,Urban,1.0
|
||||
LP001146,Female,1.0,0,1,0.0,2645,3440.0,120.0,360.0,0,Urban,0.0
|
||||
LP001151,Female,0.0,0,1,0.0,4000,2275.0,144.0,360.0,1,Semiurban,1.0
|
||||
LP001155,Female,1.0,0,0,0.0,1928,1644.0,100.0,360.0,1,Semiurban,1.0
|
||||
LP001157,Female,0.0,0,1,0.0,3086,0.0,120.0,360.0,1,Semiurban,1.0
|
||||
LP001164,Female,0.0,0,1,0.0,4230,0.0,112.0,360.0,1,Semiurban,0.0
|
||||
LP001179,Male,1.0,2,1,0.0,4616,0.0,134.0,360.0,1,Urban,0.0
|
||||
LP001186,Female,1.0,1,1,1.0,11500,0.0,286.0,360.0,0,Urban,0.0
|
||||
LP001194,Male,1.0,2,1,0.0,2708,1167.0,97.0,360.0,1,Semiurban,1.0
|
||||
LP001195,Male,1.0,0,1,0.0,2132,1591.0,96.0,360.0,1,Semiurban,1.0
|
||||
LP001197,Male,1.0,0,1,0.0,3366,2200.0,135.0,360.0,1,Rural,0.0
|
||||
LP001198,Male,1.0,1,1,0.0,8080,2250.0,180.0,360.0,1,Urban,1.0
|
||||
LP001199,Male,1.0,2,0,0.0,3357,2859.0,144.0,360.0,1,Urban,1.0
|
||||
LP001205,Male,1.0,0,1,0.0,2500,3796.0,120.0,360.0,1,Urban,1.0
|
||||
LP001206,Male,1.0,3+,1,0.0,3029,0.0,99.0,360.0,1,Urban,1.0
|
||||
LP001207,Male,1.0,0,0,1.0,2609,3449.0,165.0,180.0,0,Rural,0.0
|
||||
LP001213,Male,1.0,1,1,0.0,4945,0.0,0.0,360.0,0,Rural,0.0
|
||||
LP001222,Female,0.0,0,1,0.0,4166,0.0,116.0,360.0,0,Semiurban,0.0
|
||||
LP001225,Male,1.0,0,1,0.0,5726,4595.0,258.0,360.0,1,Semiurban,0.0
|
||||
LP001228,Male,0.0,0,0,0.0,3200,2254.0,126.0,180.0,0,Urban,0.0
|
||||
LP001233,Male,1.0,1,1,0.0,10750,0.0,312.0,360.0,1,Urban,1.0
|
||||
LP001238,Male,1.0,3+,0,1.0,7100,0.0,125.0,60.0,1,Urban,1.0
|
||||
LP001241,Female,0.0,0,1,0.0,4300,0.0,136.0,360.0,0,Semiurban,0.0
|
||||
LP001243,Male,1.0,0,1,0.0,3208,3066.0,172.0,360.0,1,Urban,1.0
|
||||
LP001245,Male,1.0,2,0,1.0,1875,1875.0,97.0,360.0,1,Semiurban,1.0
|
||||
LP001248,Male,0.0,0,1,0.0,3500,0.0,81.0,300.0,1,Semiurban,1.0
|
||||
LP001250,Male,1.0,3+,0,0.0,4755,0.0,95.0,,0,Semiurban,0.0
|
||||
LP001253,Male,1.0,3+,1,1.0,5266,1774.0,187.0,360.0,1,Semiurban,1.0
|
||||
LP001255,Male,0.0,0,1,0.0,3750,0.0,113.0,480.0,1,Urban,0.0
|
||||
LP001256,Male,0.0,0,1,0.0,3750,4750.0,176.0,360.0,1,Urban,0.0
|
||||
LP001259,Male,1.0,1,1,1.0,1000,3022.0,110.0,360.0,1,Urban,0.0
|
||||
LP001263,Male,1.0,3+,1,0.0,3167,4000.0,180.0,300.0,0,Semiurban,0.0
|
||||
LP001264,Male,1.0,3+,0,1.0,3333,2166.0,130.0,360.0,0,Semiurban,1.0
|
||||
LP001265,Female,0.0,0,1,0.0,3846,0.0,111.0,360.0,1,Semiurban,1.0
|
||||
LP001266,Male,1.0,1,1,1.0,2395,0.0,0.0,360.0,1,Semiurban,1.0
|
||||
LP001267,Female,1.0,2,1,0.0,1378,1881.0,167.0,360.0,1,Urban,0.0
|
||||
LP001273,Male,1.0,0,1,0.0,6000,2250.0,265.0,360.0,0,Semiurban,0.0
|
||||
LP001275,Male,1.0,1,1,0.0,3988,0.0,50.0,240.0,1,Urban,1.0
|
||||
LP001279,Male,0.0,0,1,0.0,2366,2531.0,136.0,360.0,1,Semiurban,1.0
|
||||
LP001280,Male,1.0,2,0,0.0,3333,2000.0,99.0,360.0,0,Semiurban,1.0
|
||||
LP001282,Male,1.0,0,1,0.0,2500,2118.0,104.0,360.0,1,Semiurban,1.0
|
||||
LP001289,Male,0.0,0,1,0.0,8566,0.0,210.0,360.0,1,Urban,1.0
|
||||
LP001310,Male,1.0,0,1,0.0,5695,4167.0,175.0,360.0,1,Semiurban,1.0
|
||||
LP001316,Male,1.0,0,1,0.0,2958,2900.0,131.0,360.0,1,Semiurban,1.0
|
||||
LP001318,Male,1.0,2,1,0.0,6250,5654.0,188.0,180.0,1,Semiurban,1.0
|
||||
LP001319,Male,1.0,2,0,0.0,3273,1820.0,81.0,360.0,1,Urban,1.0
|
||||
LP001322,Male,0.0,0,1,0.0,4133,0.0,122.0,360.0,1,Semiurban,1.0
|
||||
LP001325,Male,0.0,0,0,0.0,3620,0.0,25.0,120.0,1,Semiurban,1.0
|
||||
LP001326,Male,0.0,0,1,0.0,6782,0.0,0.0,360.0,0,Urban,0.0
|
||||
LP001327,Female,1.0,0,1,0.0,2484,2302.0,137.0,360.0,1,Semiurban,1.0
|
||||
LP001333,Male,1.0,0,1,0.0,1977,997.0,50.0,360.0,1,Semiurban,1.0
|
||||
LP001334,Male,1.0,0,0,0.0,4188,0.0,115.0,180.0,1,Semiurban,1.0
|
||||
LP001343,Male,1.0,0,1,0.0,1759,3541.0,131.0,360.0,1,Semiurban,1.0
|
||||
LP001345,Male,1.0,2,0,0.0,4288,3263.0,133.0,180.0,1,Urban,1.0
|
||||
LP001349,Male,0.0,0,1,0.0,4843,3806.0,151.0,360.0,1,Semiurban,1.0
|
||||
LP001350,Male,1.0,,1,0.0,13650,0.0,0.0,360.0,1,Urban,1.0
|
||||
LP001356,Male,1.0,0,1,0.0,4652,3583.0,0.0,360.0,1,Semiurban,1.0
|
||||
LP001357,Male,0.0,,1,0.0,3816,754.0,160.0,360.0,1,Urban,1.0
|
||||
LP001367,Male,1.0,1,1,0.0,3052,1030.0,100.0,360.0,1,Urban,1.0
|
||||
LP001369,Male,1.0,2,1,0.0,11417,1126.0,225.0,360.0,1,Urban,1.0
|
||||
LP001370,Male,0.0,0,0,0.0,7333,0.0,120.0,360.0,1,Rural,0.0
|
||||
LP001379,Male,1.0,2,1,0.0,3800,3600.0,216.0,360.0,0,Urban,0.0
|
||||
LP001384,Male,1.0,3+,0,0.0,2071,754.0,94.0,480.0,1,Semiurban,1.0
|
||||
LP001385,Male,0.0,0,1,0.0,5316,0.0,136.0,360.0,1,Urban,1.0
|
||||
LP001387,Female,1.0,0,1,0.0,2929,2333.0,139.0,360.0,1,Semiurban,1.0
|
||||
LP001391,Male,1.0,0,0,0.0,3572,4114.0,152.0,,0,Rural,0.0
|
||||
LP001392,Female,0.0,1,1,1.0,7451,0.0,0.0,360.0,1,Semiurban,1.0
|
||||
LP001398,Male,0.0,0,1,0.0,5050,0.0,118.0,360.0,1,Semiurban,1.0
|
||||
LP001401,Male,1.0,1,1,0.0,14583,0.0,185.0,180.0,1,Rural,1.0
|
||||
LP001404,Female,1.0,0,1,0.0,3167,2283.0,154.0,360.0,1,Semiurban,1.0
|
||||
LP001405,Male,1.0,1,1,0.0,2214,1398.0,85.0,360.0,0,Urban,1.0
|
||||
LP001421,Male,1.0,0,1,0.0,5568,2142.0,175.0,360.0,1,Rural,0.0
|
||||
LP001422,Female,0.0,0,1,0.0,10408,0.0,259.0,360.0,1,Urban,1.0
|
||||
LP001426,Male,1.0,,1,0.0,5667,2667.0,180.0,360.0,1,Rural,1.0
|
||||
LP001430,Female,0.0,0,1,0.0,4166,0.0,44.0,360.0,1,Semiurban,1.0
|
||||
LP001431,Female,0.0,0,1,0.0,2137,8980.0,137.0,360.0,0,Semiurban,1.0
|
||||
LP001432,Male,1.0,2,1,0.0,2957,0.0,81.0,360.0,1,Semiurban,1.0
|
||||
LP001439,Male,1.0,0,0,0.0,4300,2014.0,194.0,360.0,1,Rural,1.0
|
||||
LP001443,Female,0.0,0,1,0.0,3692,0.0,93.0,360.0,0,Rural,1.0
|
||||
LP001448,,1.0,3+,1,0.0,23803,0.0,370.0,360.0,1,Rural,1.0
|
||||
LP001449,Male,0.0,0,1,0.0,3865,1640.0,0.0,360.0,1,Rural,1.0
|
||||
LP001451,Male,1.0,1,1,1.0,10513,3850.0,160.0,180.0,0,Urban,0.0
|
||||
LP001465,Male,1.0,0,1,0.0,6080,2569.0,182.0,360.0,0,Rural,0.0
|
||||
LP001469,Male,0.0,0,1,1.0,20166,0.0,650.0,480.0,0,Urban,1.0
|
||||
LP001473,Male,0.0,0,1,0.0,2014,1929.0,74.0,360.0,1,Urban,1.0
|
||||
LP001478,Male,0.0,0,1,0.0,2718,0.0,70.0,360.0,1,Semiurban,1.0
|
||||
LP001482,Male,1.0,0,1,1.0,3459,0.0,25.0,120.0,1,Semiurban,1.0
|
||||
LP001487,Male,0.0,0,1,0.0,4895,0.0,102.0,360.0,1,Semiurban,1.0
|
||||
LP001488,Male,1.0,3+,1,0.0,4000,7750.0,290.0,360.0,1,Semiurban,0.0
|
||||
LP001489,Female,1.0,0,1,0.0,4583,0.0,84.0,360.0,1,Rural,0.0
|
||||
LP001491,Male,1.0,2,1,1.0,3316,3500.0,88.0,360.0,1,Urban,1.0
|
||||
LP001492,Male,0.0,0,1,0.0,14999,0.0,242.0,360.0,0,Semiurban,0.0
|
||||
LP001493,Male,1.0,2,0,0.0,4200,1430.0,129.0,360.0,1,Rural,0.0
|
||||
LP001497,Male,1.0,2,1,0.0,5042,2083.0,185.0,360.0,1,Rural,0.0
|
||||
LP001498,Male,0.0,0,1,0.0,5417,0.0,168.0,360.0,1,Urban,1.0
|
||||
LP001504,Male,0.0,0,1,1.0,6950,0.0,175.0,180.0,1,Semiurban,1.0
|
||||
LP001507,Male,1.0,0,1,0.0,2698,2034.0,122.0,360.0,1,Semiurban,1.0
|
||||
LP001508,Male,1.0,2,1,0.0,11757,0.0,187.0,180.0,1,Urban,1.0
|
||||
LP001514,Female,1.0,0,1,0.0,2330,4486.0,100.0,360.0,1,Semiurban,1.0
|
||||
LP001516,Female,1.0,2,1,0.0,14866,0.0,70.0,360.0,1,Urban,1.0
|
||||
LP001518,Male,1.0,1,1,0.0,1538,1425.0,30.0,360.0,1,Urban,1.0
|
||||
LP001519,Female,0.0,0,1,0.0,10000,1666.0,225.0,360.0,1,Rural,0.0
|
||||
LP001520,Male,1.0,0,1,0.0,4860,830.0,125.0,360.0,1,Semiurban,1.0
|
||||
LP001528,Male,0.0,0,1,0.0,6277,0.0,118.0,360.0,0,Rural,0.0
|
||||
LP001529,Male,1.0,0,1,1.0,2577,3750.0,152.0,360.0,1,Rural,1.0
|
||||
LP001531,Male,0.0,0,1,0.0,9166,0.0,244.0,360.0,1,Urban,0.0
|
||||
LP001532,Male,1.0,2,0,0.0,2281,0.0,113.0,360.0,1,Rural,0.0
|
||||
LP001535,Male,0.0,0,1,0.0,3254,0.0,50.0,360.0,1,Urban,1.0
|
||||
LP001536,Male,1.0,3+,1,0.0,39999,0.0,600.0,180.0,0,Semiurban,1.0
|
||||
LP001541,Male,1.0,1,1,0.0,6000,0.0,160.0,360.0,0,Rural,1.0
|
||||
LP001543,Male,1.0,1,1,0.0,9538,0.0,187.0,360.0,1,Urban,1.0
|
||||
LP001546,Male,0.0,0,1,0.0,2980,2083.0,120.0,360.0,1,Rural,1.0
|
||||
LP001552,Male,1.0,0,1,0.0,4583,5625.0,255.0,360.0,1,Semiurban,1.0
|
||||
LP001560,Male,1.0,0,0,0.0,1863,1041.0,98.0,360.0,1,Semiurban,1.0
|
||||
LP001562,Male,1.0,0,1,0.0,7933,0.0,275.0,360.0,1,Urban,0.0
|
||||
LP001565,Male,1.0,1,1,0.0,3089,1280.0,121.0,360.0,0,Semiurban,0.0
|
||||
LP001570,Male,1.0,2,1,0.0,4167,1447.0,158.0,360.0,1,Rural,1.0
|
||||
LP001572,Male,1.0,0,1,0.0,9323,0.0,75.0,180.0,1,Urban,1.0
|
||||
LP001574,Male,1.0,0,1,0.0,3707,3166.0,182.0,,1,Rural,1.0
|
||||
LP001577,Female,1.0,0,1,0.0,4583,0.0,112.0,360.0,1,Rural,0.0
|
||||
LP001578,Male,1.0,0,1,0.0,2439,3333.0,129.0,360.0,1,Rural,1.0
|
||||
LP001579,Male,0.0,0,1,0.0,2237,0.0,63.0,480.0,0,Semiurban,0.0
|
||||
LP001580,Male,1.0,2,1,0.0,8000,0.0,200.0,360.0,1,Semiurban,1.0
|
||||
LP001581,Male,1.0,0,0,0.0,1820,1769.0,95.0,360.0,1,Rural,1.0
|
||||
LP001585,,1.0,3+,1,0.0,51763,0.0,700.0,300.0,1,Urban,1.0
|
||||
LP001586,Male,1.0,3+,0,0.0,3522,0.0,81.0,180.0,1,Rural,0.0
|
||||
LP001594,Male,1.0,0,1,0.0,5708,5625.0,187.0,360.0,1,Semiurban,1.0
|
||||
LP001603,Male,1.0,0,0,1.0,4344,736.0,87.0,360.0,1,Semiurban,0.0
|
||||
LP001606,Male,1.0,0,1,0.0,3497,1964.0,116.0,360.0,1,Rural,1.0
|
||||
LP001608,Male,1.0,2,1,0.0,2045,1619.0,101.0,360.0,1,Rural,1.0
|
||||
LP001610,Male,1.0,3+,1,0.0,5516,11300.0,495.0,360.0,0,Semiurban,0.0
|
||||
LP001616,Male,1.0,1,1,0.0,3750,0.0,116.0,360.0,1,Semiurban,1.0
|
||||
LP001630,Male,0.0,0,0,0.0,2333,1451.0,102.0,480.0,0,Urban,0.0
|
||||
LP001633,Male,1.0,1,1,0.0,6400,7250.0,180.0,360.0,0,Urban,0.0
|
||||
LP001634,Male,0.0,0,1,0.0,1916,5063.0,67.0,360.0,0,Rural,0.0
|
||||
LP001636,Male,1.0,0,1,0.0,4600,0.0,73.0,180.0,1,Semiurban,1.0
|
||||
LP001637,Male,1.0,1,1,0.0,33846,0.0,260.0,360.0,1,Semiurban,0.0
|
||||
LP001639,Female,1.0,0,1,0.0,3625,0.0,108.0,360.0,1,Semiurban,1.0
|
||||
LP001640,Male,1.0,0,1,1.0,39147,4750.0,120.0,360.0,1,Semiurban,1.0
|
||||
LP001641,Male,1.0,1,1,1.0,2178,0.0,66.0,300.0,0,Rural,0.0
|
||||
LP001643,Male,1.0,0,1,0.0,2383,2138.0,58.0,360.0,0,Rural,1.0
|
||||
LP001644,,1.0,0,1,1.0,674,5296.0,168.0,360.0,1,Rural,1.0
|
||||
LP001647,Male,1.0,0,1,0.0,9328,0.0,188.0,180.0,1,Rural,1.0
|
||||
LP001653,Male,0.0,0,0,0.0,4885,0.0,48.0,360.0,1,Rural,1.0
|
||||
LP001656,Male,0.0,0,1,0.0,12000,0.0,164.0,360.0,1,Semiurban,0.0
|
||||
LP001657,Male,1.0,0,0,0.0,6033,0.0,160.0,360.0,1,Urban,0.0
|
||||
LP001658,Male,0.0,0,1,0.0,3858,0.0,76.0,360.0,1,Semiurban,1.0
|
||||
LP001664,Male,0.0,0,1,0.0,4191,0.0,120.0,360.0,1,Rural,1.0
|
||||
LP001665,Male,1.0,1,1,0.0,3125,2583.0,170.0,360.0,1,Semiurban,0.0
|
||||
LP001666,Male,0.0,0,1,0.0,8333,3750.0,187.0,360.0,1,Rural,1.0
|
||||
LP001669,Female,0.0,0,0,0.0,1907,2365.0,120.0,,1,Urban,1.0
|
||||
LP001671,Female,1.0,0,1,0.0,3416,2816.0,113.0,360.0,0,Semiurban,1.0
|
||||
LP001673,Male,0.0,0,1,1.0,11000,0.0,83.0,360.0,1,Urban,0.0
|
||||
LP001674,Male,1.0,1,0,0.0,2600,2500.0,90.0,360.0,1,Semiurban,1.0
|
||||
LP001677,Male,0.0,2,1,0.0,4923,0.0,166.0,360.0,0,Semiurban,1.0
|
||||
LP001682,Male,1.0,3+,0,0.0,3992,0.0,0.0,180.0,1,Urban,0.0
|
||||
LP001688,Male,1.0,1,0,0.0,3500,1083.0,135.0,360.0,1,Urban,1.0
|
||||
LP001691,Male,1.0,2,0,0.0,3917,0.0,124.0,360.0,1,Semiurban,1.0
|
||||
LP001692,Female,0.0,0,0,0.0,4408,0.0,120.0,360.0,1,Semiurban,1.0
|
||||
LP001693,Female,0.0,0,1,0.0,3244,0.0,80.0,360.0,1,Urban,1.0
|
||||
LP001698,Male,0.0,0,0,0.0,3975,2531.0,55.0,360.0,1,Rural,1.0
|
||||
LP001699,Male,0.0,0,1,0.0,2479,0.0,59.0,360.0,1,Urban,1.0
|
||||
LP001702,Male,0.0,0,1,0.0,3418,0.0,127.0,360.0,1,Semiurban,0.0
|
||||
LP001708,Female,0.0,0,1,0.0,10000,0.0,214.0,360.0,1,Semiurban,0.0
|
||||
LP001711,Male,1.0,3+,1,0.0,3430,1250.0,128.0,360.0,0,Semiurban,0.0
|
||||
LP001713,Male,1.0,1,1,1.0,7787,0.0,240.0,360.0,1,Urban,1.0
|
||||
LP001715,Male,1.0,3+,0,1.0,5703,0.0,130.0,360.0,1,Rural,1.0
|
||||
LP001716,Male,1.0,0,1,0.0,3173,3021.0,137.0,360.0,1,Urban,1.0
|
||||
LP001720,Male,1.0,3+,0,0.0,3850,983.0,100.0,360.0,1,Semiurban,1.0
|
||||
LP001722,Male,1.0,0,1,0.0,150,1800.0,135.0,360.0,1,Rural,0.0
|
||||
LP001726,Male,1.0,0,1,0.0,3727,1775.0,131.0,360.0,1,Semiurban,1.0
|
||||
LP001732,Male,1.0,2,1,0.0,5000,0.0,72.0,360.0,0,Semiurban,0.0
|
||||
LP001734,Female,1.0,2,1,0.0,4283,2383.0,127.0,360.0,0,Semiurban,1.0
|
||||
LP001736,Male,1.0,0,1,0.0,2221,0.0,60.0,360.0,0,Urban,0.0
|
||||
LP001743,Male,1.0,2,1,0.0,4009,1717.0,116.0,360.0,1,Semiurban,1.0
|
||||
LP001744,Male,0.0,0,1,0.0,2971,2791.0,144.0,360.0,1,Semiurban,1.0
|
||||
LP001749,Male,1.0,0,1,0.0,7578,1010.0,175.0,,1,Semiurban,1.0
|
||||
LP001750,Male,1.0,0,1,0.0,6250,0.0,128.0,360.0,1,Semiurban,1.0
|
||||
LP001751,Male,1.0,0,1,0.0,3250,0.0,170.0,360.0,1,Rural,0.0
|
||||
LP001754,Male,1.0,,0,1.0,4735,0.0,138.0,360.0,1,Urban,0.0
|
||||
LP001758,Male,1.0,2,1,0.0,6250,1695.0,210.0,360.0,1,Semiurban,1.0
|
||||
LP001760,Male,0.0,,1,0.0,4758,0.0,158.0,480.0,1,Semiurban,1.0
|
||||
LP001761,Male,0.0,0,1,1.0,6400,0.0,200.0,360.0,1,Rural,1.0
|
||||
LP001765,Male,1.0,1,1,0.0,2491,2054.0,104.0,360.0,1,Semiurban,1.0
|
||||
LP001768,Male,1.0,0,1,0.0,3716,0.0,42.0,180.0,1,Rural,1.0
|
||||
LP001770,Male,0.0,0,0,0.0,3189,2598.0,120.0,,1,Rural,1.0
|
||||
LP001776,Female,0.0,0,1,0.0,8333,0.0,280.0,360.0,1,Semiurban,1.0
|
||||
LP001778,Male,1.0,1,1,0.0,3155,1779.0,140.0,360.0,1,Semiurban,1.0
|
||||
LP001784,Male,1.0,1,1,0.0,5500,1260.0,170.0,360.0,1,Rural,1.0
|
||||
LP001786,Male,1.0,0,1,0.0,5746,0.0,255.0,360.0,0,Urban,0.0
|
||||
LP001788,Female,0.0,0,1,1.0,3463,0.0,122.0,360.0,0,Urban,1.0
|
||||
LP001790,Female,0.0,1,1,0.0,3812,0.0,112.0,360.0,1,Rural,1.0
|
||||
LP001792,Male,1.0,1,1,0.0,3315,0.0,96.0,360.0,1,Semiurban,1.0
|
||||
LP001798,Male,1.0,2,1,0.0,5819,5000.0,120.0,360.0,1,Rural,1.0
|
||||
LP001800,Male,1.0,1,0,0.0,2510,1983.0,140.0,180.0,1,Urban,0.0
|
||||
LP001806,Male,0.0,0,1,0.0,2965,5701.0,155.0,60.0,1,Urban,1.0
|
||||
LP001807,Male,1.0,2,1,1.0,6250,1300.0,108.0,360.0,1,Rural,1.0
|
||||
LP001811,Male,1.0,0,0,0.0,3406,4417.0,123.0,360.0,1,Semiurban,1.0
|
||||
LP001813,Male,0.0,0,1,1.0,6050,4333.0,120.0,180.0,1,Urban,0.0
|
||||
LP001814,Male,1.0,2,1,0.0,9703,0.0,112.0,360.0,1,Urban,1.0
|
||||
LP001819,Male,1.0,1,0,0.0,6608,0.0,137.0,180.0,1,Urban,1.0
|
||||
LP001824,Male,1.0,1,1,0.0,2882,1843.0,123.0,480.0,1,Semiurban,1.0
|
||||
LP001825,Male,1.0,0,1,0.0,1809,1868.0,90.0,360.0,1,Urban,1.0
|
||||
LP001835,Male,1.0,0,0,0.0,1668,3890.0,201.0,360.0,0,Semiurban,0.0
|
||||
LP001836,Female,0.0,2,1,0.0,3427,0.0,138.0,360.0,1,Urban,0.0
|
||||
LP001841,Male,0.0,0,0,1.0,2583,2167.0,104.0,360.0,1,Rural,1.0
|
||||
LP001843,Male,1.0,1,0,0.0,2661,7101.0,279.0,180.0,1,Semiurban,1.0
|
||||
LP001844,Male,0.0,0,1,1.0,16250,0.0,192.0,360.0,0,Urban,0.0
|
||||
LP001846,Female,0.0,3+,1,0.0,3083,0.0,255.0,360.0,1,Rural,1.0
|
||||
LP001849,Male,0.0,0,0,0.0,6045,0.0,115.0,360.0,0,Rural,0.0
|
||||
LP001854,Male,1.0,3+,1,0.0,5250,0.0,94.0,360.0,1,Urban,0.0
|
||||
LP001859,Male,1.0,0,1,0.0,14683,2100.0,304.0,360.0,1,Rural,0.0
|
||||
LP001864,Male,1.0,3+,0,0.0,4931,0.0,128.0,360.0,0,Semiurban,0.0
|
||||
LP001865,Male,1.0,1,1,0.0,6083,4250.0,330.0,360.0,0,Urban,1.0
|
||||
LP001868,Male,0.0,0,1,0.0,2060,2209.0,134.0,360.0,1,Semiurban,1.0
|
||||
LP001870,Female,0.0,1,1,0.0,3481,0.0,155.0,36.0,1,Semiurban,0.0
|
||||
LP001871,Female,0.0,0,1,0.0,7200,0.0,120.0,360.0,1,Rural,1.0
|
||||
LP001872,Male,0.0,0,1,1.0,5166,0.0,128.0,360.0,1,Semiurban,1.0
|
||||
LP001875,Male,0.0,0,1,0.0,4095,3447.0,151.0,360.0,1,Rural,1.0
|
||||
LP001877,Male,1.0,2,1,0.0,4708,1387.0,150.0,360.0,1,Semiurban,1.0
|
||||
LP001882,Male,1.0,3+,1,0.0,4333,1811.0,160.0,360.0,0,Urban,1.0
|
||||
LP001883,Female,0.0,0,1,0.0,3418,0.0,135.0,360.0,1,Rural,0.0
|
||||
LP001884,Female,0.0,1,1,0.0,2876,1560.0,90.0,360.0,1,Urban,1.0
|
||||
LP001888,Female,0.0,0,1,0.0,3237,0.0,30.0,360.0,1,Urban,1.0
|
||||
LP001891,Male,1.0,0,1,0.0,11146,0.0,136.0,360.0,1,Urban,1.0
|
||||
LP001892,Male,0.0,0,1,0.0,2833,1857.0,126.0,360.0,1,Rural,1.0
|
||||
LP001894,Male,1.0,0,1,0.0,2620,2223.0,150.0,360.0,1,Semiurban,1.0
|
||||
LP001896,Male,1.0,2,1,0.0,3900,0.0,90.0,360.0,1,Semiurban,1.0
|
||||
LP001900,Male,1.0,1,1,0.0,2750,1842.0,115.0,360.0,1,Semiurban,1.0
|
||||
LP001903,Male,1.0,0,1,0.0,3993,3274.0,207.0,360.0,1,Semiurban,1.0
|
||||
LP001904,Male,1.0,0,1,0.0,3103,1300.0,80.0,360.0,1,Urban,1.0
|
||||
LP001907,Male,1.0,0,1,0.0,14583,0.0,436.0,360.0,1,Semiurban,1.0
|
||||
LP001908,Female,1.0,0,0,0.0,4100,0.0,124.0,360.0,0,Rural,1.0
|
||||
LP001910,Male,0.0,1,0,1.0,4053,2426.0,158.0,360.0,0,Urban,0.0
|
||||
LP001914,Male,1.0,0,1,0.0,3927,800.0,112.0,360.0,1,Semiurban,1.0
|
||||
LP001915,Male,1.0,2,1,0.0,2301,985.7999878,78.0,180.0,1,Urban,1.0
|
||||
LP001917,Female,0.0,0,1,0.0,1811,1666.0,54.0,360.0,1,Urban,1.0
|
||||
LP001922,Male,1.0,0,1,0.0,20667,0.0,0.0,360.0,1,Rural,0.0
|
||||
LP001924,Male,0.0,0,1,0.0,3158,3053.0,89.0,360.0,1,Rural,1.0
|
||||
LP001925,Female,0.0,0,1,1.0,2600,1717.0,99.0,300.0,1,Semiurban,0.0
|
||||
LP001926,Male,1.0,0,1,0.0,3704,2000.0,120.0,360.0,1,Rural,1.0
|
||||
LP001931,Female,0.0,0,1,0.0,4124,0.0,115.0,360.0,1,Semiurban,1.0
|
||||
LP001935,Male,0.0,0,1,0.0,9508,0.0,187.0,360.0,1,Rural,1.0
|
||||
LP001936,Male,1.0,0,1,0.0,3075,2416.0,139.0,360.0,1,Rural,1.0
|
||||
LP001938,Male,1.0,2,1,0.0,4400,0.0,127.0,360.0,0,Semiurban,0.0
|
||||
LP001940,Male,1.0,2,1,0.0,3153,1560.0,134.0,360.0,1,Urban,1.0
|
||||
LP001945,Female,0.0,,1,0.0,5417,0.0,143.0,480.0,0,Urban,0.0
|
||||
LP001947,Male,1.0,0,1,0.0,2383,3334.0,172.0,360.0,1,Semiurban,1.0
|
||||
LP001949,Male,1.0,3+,1,0.0,4416,1250.0,110.0,360.0,1,Urban,1.0
|
||||
LP001953,Male,1.0,1,1,0.0,6875,0.0,200.0,360.0,1,Semiurban,1.0
|
||||
LP001954,Female,1.0,1,1,0.0,4666,0.0,135.0,360.0,1,Urban,1.0
|
||||
LP001955,Female,0.0,0,1,0.0,5000,2541.0,151.0,480.0,1,Rural,0.0
|
||||
LP001963,Male,1.0,1,1,0.0,2014,2925.0,113.0,360.0,1,Urban,0.0
|
||||
LP001964,Male,1.0,0,0,0.0,1800,2934.0,93.0,360.0,0,Urban,0.0
|
||||
LP001972,Male,1.0,,0,0.0,2875,1750.0,105.0,360.0,1,Semiurban,1.0
|
||||
LP001974,Female,0.0,0,1,0.0,5000,0.0,132.0,360.0,1,Rural,1.0
|
||||
LP001977,Male,1.0,1,1,0.0,1625,1803.0,96.0,360.0,1,Urban,1.0
|
||||
LP001978,Male,0.0,0,1,0.0,4000,2500.0,140.0,360.0,1,Rural,1.0
|
||||
LP001990,Male,0.0,0,0,0.0,2000,0.0,0.0,360.0,1,Urban,0.0
|
||||
LP001993,Female,0.0,0,1,0.0,3762,1666.0,135.0,360.0,1,Rural,1.0
|
||||
LP001994,Female,0.0,0,1,0.0,2400,1863.0,104.0,360.0,0,Urban,0.0
|
||||
LP001996,Male,0.0,0,1,0.0,20233,0.0,480.0,360.0,1,Rural,0.0
|
||||
LP001998,Male,1.0,2,0,0.0,7667,0.0,185.0,360.0,0,Rural,1.0
|
||||
LP002002,Female,0.0,0,1,0.0,2917,0.0,84.0,360.0,1,Semiurban,1.0
|
||||
LP002004,Male,0.0,0,0,0.0,2927,2405.0,111.0,360.0,1,Semiurban,1.0
|
||||
LP002006,Female,0.0,0,1,0.0,2507,0.0,56.0,360.0,1,Rural,1.0
|
||||
LP002008,Male,1.0,2,1,1.0,5746,0.0,144.0,84.0,0,Rural,1.0
|
||||
LP002024,,1.0,0,1,0.0,2473,1843.0,159.0,360.0,1,Rural,0.0
|
||||
LP002031,Male,1.0,1,0,0.0,3399,1640.0,111.0,180.0,1,Urban,1.0
|
||||
LP002035,Male,1.0,2,1,0.0,3717,0.0,120.0,360.0,1,Semiurban,1.0
|
||||
LP002036,Male,1.0,0,1,0.0,2058,2134.0,88.0,360.0,0,Urban,1.0
|
||||
LP002043,Female,0.0,1,1,0.0,3541,0.0,112.0,360.0,0,Semiurban,1.0
|
||||
LP002050,Male,1.0,1,1,1.0,10000,0.0,155.0,360.0,1,Rural,0.0
|
||||
LP002051,Male,1.0,0,1,0.0,2400,2167.0,115.0,360.0,1,Semiurban,1.0
|
||||
LP002053,Male,1.0,3+,1,0.0,4342,189.0,124.0,360.0,1,Semiurban,1.0
|
||||
LP002054,Male,1.0,2,0,0.0,3601,1590.0,0.0,360.0,1,Rural,1.0
|
||||
LP002055,Female,0.0,0,1,0.0,3166,2985.0,132.0,360.0,0,Rural,1.0
|
||||
LP002065,Male,1.0,3+,1,0.0,15000,0.0,300.0,360.0,1,Rural,1.0
|
||||
LP002067,Male,1.0,1,1,1.0,8666,4983.0,376.0,360.0,0,Rural,0.0
|
||||
LP002068,Male,0.0,0,1,0.0,4917,0.0,130.0,360.0,0,Rural,1.0
|
||||
LP002082,Male,1.0,0,1,1.0,5818,2160.0,184.0,360.0,1,Semiurban,1.0
|
||||
LP002086,Female,1.0,0,1,0.0,4333,2451.0,110.0,360.0,1,Urban,0.0
|
||||
LP002087,Female,0.0,0,1,0.0,2500,0.0,67.0,360.0,1,Urban,1.0
|
||||
LP002097,Male,0.0,1,1,0.0,4384,1793.0,117.0,360.0,1,Urban,1.0
|
||||
LP002098,Male,0.0,0,1,0.0,2935,0.0,98.0,360.0,1,Semiurban,1.0
|
||||
LP002100,Male,0.0,,1,0.0,2833,0.0,71.0,360.0,1,Urban,1.0
|
||||
LP002101,Male,1.0,0,1,0.0,63337,0.0,490.0,180.0,1,Urban,1.0
|
||||
LP002103,,1.0,1,1,1.0,9833,1833.0,182.0,180.0,1,Urban,1.0
|
||||
LP002106,Male,1.0,,1,1.0,5503,4490.0,70.0,,1,Semiurban,1.0
|
||||
LP002110,Male,1.0,1,1,0.0,5250,688.0,160.0,360.0,1,Rural,1.0
|
||||
LP002112,Male,1.0,2,1,1.0,2500,4600.0,176.0,360.0,1,Rural,1.0
|
||||
LP002113,Female,0.0,3+,0,0.0,1830,0.0,0.0,360.0,0,Urban,0.0
|
||||
LP002114,Female,0.0,0,1,0.0,4160,0.0,71.0,360.0,1,Semiurban,1.0
|
||||
LP002115,Male,1.0,3+,0,0.0,2647,1587.0,173.0,360.0,1,Rural,0.0
|
||||
LP002116,Female,0.0,0,1,0.0,2378,0.0,46.0,360.0,1,Rural,0.0
|
||||
LP002119,Male,1.0,1,0,0.0,4554,1229.0,158.0,360.0,1,Urban,1.0
|
||||
LP002126,Male,1.0,3+,0,0.0,3173,0.0,74.0,360.0,1,Semiurban,1.0
|
||||
LP002128,Male,1.0,2,1,0.0,2583,2330.0,125.0,360.0,1,Rural,1.0
|
||||
LP002129,Male,1.0,0,1,0.0,2499,2458.0,160.0,360.0,1,Semiurban,1.0
|
||||
LP002130,Male,1.0,,0,0.0,3523,3230.0,152.0,360.0,0,Rural,0.0
|
||||
LP002131,Male,1.0,2,0,0.0,3083,2168.0,126.0,360.0,1,Urban,1.0
|
||||
LP002137,Male,1.0,0,1,0.0,6333,4583.0,259.0,360.0,0,Semiurban,1.0
|
||||
LP002138,Male,1.0,0,1,0.0,2625,6250.0,187.0,360.0,1,Rural,1.0
|
||||
LP002139,Male,1.0,0,1,0.0,9083,0.0,228.0,360.0,1,Semiurban,1.0
|
||||
LP002140,Male,0.0,0,1,0.0,8750,4167.0,308.0,360.0,1,Rural,0.0
|
||||
LP002141,Male,1.0,3+,1,0.0,2666,2083.0,95.0,360.0,1,Rural,1.0
|
||||
LP002142,Female,1.0,0,1,1.0,5500,0.0,105.0,360.0,0,Rural,0.0
|
||||
LP002143,Female,1.0,0,1,0.0,2423,505.0,130.0,360.0,1,Semiurban,1.0
|
||||
LP002144,Female,0.0,,1,0.0,3813,0.0,116.0,180.0,1,Urban,1.0
|
||||
LP002149,Male,1.0,2,1,0.0,8333,3167.0,165.0,360.0,1,Rural,1.0
|
||||
LP002151,Male,1.0,1,1,0.0,3875,0.0,67.0,360.0,1,Urban,0.0
|
||||
LP002158,Male,1.0,0,0,0.0,3000,1666.0,100.0,480.0,0,Urban,0.0
|
||||
LP002160,Male,1.0,3+,1,0.0,5167,3167.0,200.0,360.0,1,Semiurban,1.0
|
||||
LP002161,Female,0.0,1,1,0.0,4723,0.0,81.0,360.0,1,Semiurban,0.0
|
||||
LP002170,Male,1.0,2,1,0.0,5000,3667.0,236.0,360.0,1,Semiurban,1.0
|
||||
LP002175,Male,1.0,0,1,0.0,4750,2333.0,130.0,360.0,1,Urban,1.0
|
||||
LP002178,Male,1.0,0,1,0.0,3013,3033.0,95.0,300.0,0,Urban,1.0
|
||||
LP002180,Male,0.0,0,1,1.0,6822,0.0,141.0,360.0,1,Rural,1.0
|
||||
LP002181,Male,0.0,0,0,0.0,6216,0.0,133.0,360.0,1,Rural,0.0
|
||||
LP002187,Male,0.0,0,1,0.0,2500,0.0,96.0,480.0,1,Semiurban,0.0
|
||||
LP002188,Male,0.0,0,1,0.0,5124,0.0,124.0,,0,Rural,0.0
|
||||
LP002190,Male,1.0,1,1,0.0,6325,0.0,175.0,360.0,1,Semiurban,1.0
|
||||
LP002191,Male,1.0,0,1,0.0,19730,5266.0,570.0,360.0,1,Rural,0.0
|
||||
LP002194,Female,0.0,0,1,1.0,15759,0.0,55.0,360.0,1,Semiurban,1.0
|
||||
LP002197,Male,1.0,2,1,0.0,5185,0.0,155.0,360.0,1,Semiurban,1.0
|
||||
LP002201,Male,1.0,2,1,1.0,9323,7873.0,380.0,300.0,1,Rural,1.0
|
||||
LP002205,Male,0.0,1,1,0.0,3062,1987.0,111.0,180.0,0,Urban,0.0
|
||||
LP002209,Female,0.0,0,1,0.0,2764,1459.0,110.0,360.0,1,Urban,1.0
|
||||
LP002211,Male,1.0,0,1,0.0,4817,923.0,120.0,180.0,1,Urban,1.0
|
||||
LP002219,Male,1.0,3+,1,0.0,8750,4996.0,130.0,360.0,1,Rural,1.0
|
||||
LP002223,Male,1.0,0,1,0.0,4310,0.0,130.0,360.0,0,Semiurban,1.0
|
||||
LP002224,Male,0.0,0,1,0.0,3069,0.0,71.0,480.0,1,Urban,0.0
|
||||
LP002225,Male,1.0,2,1,0.0,5391,0.0,130.0,360.0,1,Urban,1.0
|
||||
LP002226,Male,1.0,0,1,0.0,3333,2500.0,128.0,360.0,1,Semiurban,1.0
|
||||
LP002229,Male,0.0,0,1,0.0,5941,4232.0,296.0,360.0,1,Semiurban,1.0
|
||||
LP002231,Female,0.0,0,1,0.0,6000,0.0,156.0,360.0,1,Urban,1.0
|
||||
LP002234,Male,0.0,0,1,1.0,7167,0.0,128.0,360.0,1,Urban,1.0
|
||||
LP002236,Male,1.0,2,1,0.0,4566,0.0,100.0,360.0,1,Urban,0.0
|
||||
LP002237,Male,0.0,1,1,0.0,3667,0.0,113.0,180.0,1,Urban,1.0
|
||||
LP002239,Male,0.0,0,0,0.0,2346,1600.0,132.0,360.0,1,Semiurban,1.0
|
||||
LP002243,Male,1.0,0,0,0.0,3010,3136.0,0.0,360.0,0,Urban,0.0
|
||||
LP002244,Male,1.0,0,1,0.0,2333,2417.0,136.0,360.0,1,Urban,1.0
|
||||
LP002250,Male,1.0,0,1,0.0,5488,0.0,125.0,360.0,1,Rural,1.0
|
||||
LP002255,Male,0.0,3+,1,0.0,9167,0.0,185.0,360.0,1,Rural,1.0
|
||||
LP002262,Male,1.0,3+,1,0.0,9504,0.0,275.0,360.0,1,Rural,1.0
|
||||
LP002263,Male,1.0,0,1,0.0,2583,2115.0,120.0,360.0,0,Urban,1.0
|
||||
LP002265,Male,1.0,2,0,0.0,1993,1625.0,113.0,180.0,1,Semiurban,1.0
|
||||
LP002266,Male,1.0,2,1,0.0,3100,1400.0,113.0,360.0,1,Urban,1.0
|
||||
LP002272,Male,1.0,2,1,0.0,3276,484.0,135.0,360.0,0,Semiurban,1.0
|
||||
LP002277,Female,0.0,0,1,0.0,3180,0.0,71.0,360.0,0,Urban,0.0
|
||||
LP002281,Male,1.0,0,1,0.0,3033,1459.0,95.0,360.0,1,Urban,1.0
|
||||
LP002284,Male,0.0,0,0,0.0,3902,1666.0,109.0,360.0,1,Rural,1.0
|
||||
LP002287,Female,0.0,0,1,0.0,1500,1800.0,103.0,360.0,0,Semiurban,0.0
|
||||
LP002288,Male,1.0,2,0,0.0,2889,0.0,45.0,180.0,0,Urban,0.0
|
||||
LP002296,Male,0.0,0,0,0.0,2755,0.0,65.0,300.0,1,Rural,0.0
|
||||
LP002297,Male,0.0,0,1,0.0,2500,20000.0,103.0,360.0,1,Semiurban,1.0
|
||||
LP002300,Female,0.0,0,0,0.0,1963,0.0,53.0,360.0,1,Semiurban,1.0
|
||||
LP002301,Female,0.0,0,1,1.0,7441,0.0,194.0,360.0,1,Rural,0.0
|
||||
LP002305,Female,0.0,0,1,0.0,4547,0.0,115.0,360.0,1,Semiurban,1.0
|
||||
LP002308,Male,1.0,0,0,0.0,2167,2400.0,115.0,360.0,1,Urban,1.0
|
||||
LP002314,Female,0.0,0,0,0.0,2213,0.0,66.0,360.0,1,Rural,1.0
|
||||
LP002315,Male,1.0,1,1,0.0,8300,0.0,152.0,300.0,0,Semiurban,0.0
|
||||
LP002317,Male,1.0,3+,1,0.0,81000,0.0,360.0,360.0,0,Rural,0.0
|
||||
LP002318,Female,0.0,1,0,1.0,3867,0.0,62.0,360.0,1,Semiurban,0.0
|
||||
LP002319,Male,1.0,0,1,0.0,6256,0.0,160.0,360.0,0,Urban,1.0
|
||||
LP002328,Male,1.0,0,0,0.0,6096,0.0,218.0,360.0,0,Rural,0.0
|
||||
LP002332,Male,1.0,0,0,0.0,2253,2033.0,110.0,360.0,1,Rural,1.0
|
||||
LP002335,Female,1.0,0,0,0.0,2149,3237.0,178.0,360.0,0,Semiurban,0.0
|
||||
LP002337,Female,0.0,0,1,0.0,2995,0.0,60.0,360.0,1,Urban,1.0
|
||||
LP002341,Female,0.0,1,1,0.0,2600,0.0,160.0,360.0,1,Urban,0.0
|
||||
LP002342,Male,1.0,2,1,1.0,1600,20000.0,239.0,360.0,1,Urban,0.0
|
||||
LP002345,Male,1.0,0,1,0.0,1025,2773.0,112.0,360.0,1,Rural,1.0
|
||||
LP002347,Male,1.0,0,1,0.0,3246,1417.0,138.0,360.0,1,Semiurban,1.0
|
||||
LP002348,Male,1.0,0,1,0.0,5829,0.0,138.0,360.0,1,Rural,1.0
|
||||
LP002357,Female,0.0,0,0,0.0,2720,0.0,80.0,,0,Urban,0.0
|
||||
LP002361,Male,1.0,0,1,0.0,1820,1719.0,100.0,360.0,1,Urban,1.0
|
||||
LP002362,Male,1.0,1,1,0.0,7250,1667.0,110.0,,0,Urban,0.0
|
||||
LP002364,Male,1.0,0,1,0.0,14880,0.0,96.0,360.0,1,Semiurban,1.0
|
||||
LP002366,Male,1.0,0,1,0.0,2666,4300.0,121.0,360.0,1,Rural,1.0
|
||||
LP002367,Female,0.0,1,0,0.0,4606,0.0,81.0,360.0,1,Rural,0.0
|
||||
LP002368,Male,1.0,2,1,0.0,5935,0.0,133.0,360.0,1,Semiurban,1.0
|
||||
LP002369,Male,1.0,0,1,0.0,2920,16.12000084,87.0,360.0,1,Rural,1.0
|
||||
LP002370,Male,0.0,0,0,0.0,2717,0.0,60.0,180.0,1,Urban,1.0
|
||||
LP002377,Female,0.0,1,1,1.0,8624,0.0,150.0,360.0,1,Semiurban,1.0
|
||||
LP002379,Male,0.0,0,1,0.0,6500,0.0,105.0,360.0,0,Rural,0.0
|
||||
LP002386,Male,0.0,0,1,0.0,12876,0.0,405.0,360.0,1,Semiurban,1.0
|
||||
LP002387,Male,1.0,0,1,0.0,2425,2340.0,143.0,360.0,1,Semiurban,1.0
|
||||
LP002390,Male,0.0,0,1,0.0,3750,0.0,100.0,360.0,1,Urban,1.0
|
||||
LP002393,Female,0.0,,1,0.0,10047,0.0,0.0,240.0,1,Semiurban,1.0
|
||||
LP002398,Male,0.0,0,1,0.0,1926,1851.0,50.0,360.0,1,Semiurban,1.0
|
||||
LP002401,Male,1.0,0,1,0.0,2213,1125.0,0.0,360.0,1,Urban,1.0
|
||||
LP002403,Male,0.0,0,1,1.0,10416,0.0,187.0,360.0,0,Urban,0.0
|
||||
LP002407,Female,1.0,0,0,1.0,7142,0.0,138.0,360.0,1,Rural,1.0
|
||||
LP002408,Male,0.0,0,1,0.0,3660,5064.0,187.0,360.0,1,Semiurban,1.0
|
||||
LP002409,Male,1.0,0,1,0.0,7901,1833.0,180.0,360.0,1,Rural,1.0
|
||||
LP002418,Male,0.0,3+,0,0.0,4707,1993.0,148.0,360.0,1,Semiurban,1.0
|
||||
LP002422,Male,0.0,1,1,0.0,37719,0.0,152.0,360.0,1,Semiurban,1.0
|
||||
LP002424,Male,1.0,0,1,0.0,7333,8333.0,175.0,300.0,0,Rural,1.0
|
||||
LP002429,Male,1.0,1,1,1.0,3466,1210.0,130.0,360.0,1,Rural,1.0
|
||||
LP002434,Male,1.0,2,0,0.0,4652,0.0,110.0,360.0,1,Rural,1.0
|
||||
LP002435,Male,1.0,0,1,0.0,3539,1376.0,55.0,360.0,1,Rural,0.0
|
||||
LP002443,Male,1.0,2,1,0.0,3340,1710.0,150.0,360.0,0,Rural,0.0
|
||||
LP002444,Male,0.0,1,0,1.0,2769,1542.0,190.0,360.0,0,Semiurban,0.0
|
||||
LP002446,Male,1.0,2,0,0.0,2309,1255.0,125.0,360.0,0,Rural,0.0
|
||||
LP002447,Male,1.0,2,0,0.0,1958,1456.0,60.0,300.0,0,Urban,1.0
|
||||
LP002448,Male,1.0,0,1,0.0,3948,1733.0,149.0,360.0,0,Rural,0.0
|
||||
LP002449,Male,1.0,0,1,0.0,2483,2466.0,90.0,180.0,0,Rural,1.0
|
||||
LP002453,Male,0.0,0,1,1.0,7085,0.0,84.0,360.0,1,Semiurban,1.0
|
||||
LP002455,Male,1.0,2,1,0.0,3859,0.0,96.0,360.0,1,Semiurban,1.0
|
||||
LP002459,Male,1.0,0,1,0.0,4301,0.0,118.0,360.0,1,Urban,1.0
|
||||
LP002467,Male,1.0,0,1,0.0,3708,2569.0,173.0,360.0,1,Urban,0.0
|
||||
LP002472,Male,0.0,2,1,0.0,4354,0.0,136.0,360.0,1,Rural,1.0
|
||||
LP002473,Male,1.0,0,1,0.0,8334,0.0,160.0,360.0,1,Semiurban,0.0
|
||||
LP002478,,1.0,0,1,1.0,2083,4083.0,160.0,360.0,0,Semiurban,1.0
|
||||
LP002484,Male,1.0,3+,1,0.0,7740,0.0,128.0,180.0,1,Urban,1.0
|
||||
LP002487,Male,1.0,0,1,0.0,3015,2188.0,153.0,360.0,1,Rural,1.0
|
||||
LP002489,Female,0.0,1,0,0.0,5191,0.0,132.0,360.0,1,Semiurban,1.0
|
||||
LP002493,Male,0.0,0,1,0.0,4166,0.0,98.0,360.0,0,Semiurban,0.0
|
||||
LP002494,Male,0.0,0,1,0.0,6000,0.0,140.0,360.0,1,Rural,1.0
|
||||
LP002500,Male,1.0,3+,0,0.0,2947,1664.0,70.0,180.0,0,Urban,0.0
|
||||
LP002501,,1.0,0,1,0.0,16692,0.0,110.0,360.0,1,Semiurban,1.0
|
||||
LP002502,Female,1.0,2,0,0.0,210,2917.0,98.0,360.0,1,Semiurban,1.0
|
||||
LP002505,Male,1.0,0,1,0.0,4333,2451.0,110.0,360.0,1,Urban,0.0
|
||||
LP002515,Male,1.0,1,1,1.0,3450,2079.0,162.0,360.0,1,Semiurban,1.0
|
||||
LP002517,Male,1.0,1,0,0.0,2653,1500.0,113.0,180.0,0,Rural,0.0
|
||||
LP002519,Male,1.0,3+,1,0.0,4691,0.0,100.0,360.0,1,Semiurban,1.0
|
||||
LP002522,Female,0.0,0,1,1.0,2500,0.0,93.0,360.0,0,Urban,1.0
|
||||
LP002524,Male,0.0,2,1,0.0,5532,4648.0,162.0,360.0,1,Rural,1.0
|
||||
LP002527,Male,1.0,2,1,1.0,16525,1014.0,150.0,360.0,1,Rural,1.0
|
||||
LP002529,Male,1.0,2,1,0.0,6700,1750.0,230.0,300.0,1,Semiurban,1.0
|
||||
LP002530,,1.0,2,1,0.0,2873,1872.0,132.0,360.0,0,Semiurban,0.0
|
||||
LP002531,Male,1.0,1,1,1.0,16667,2250.0,86.0,360.0,1,Semiurban,1.0
|
||||
LP002533,Male,1.0,2,1,0.0,2947,1603.0,0.0,360.0,1,Urban,0.0
|
||||
LP002534,Female,0.0,0,0,0.0,4350,0.0,154.0,360.0,1,Rural,1.0
|
||||
LP002536,Male,1.0,3+,0,0.0,3095,0.0,113.0,360.0,1,Rural,1.0
|
||||
LP002537,Male,1.0,0,1,0.0,2083,3150.0,128.0,360.0,1,Semiurban,1.0
|
||||
LP002541,Male,1.0,0,1,0.0,10833,0.0,234.0,360.0,1,Semiurban,1.0
|
||||
LP002543,Male,1.0,2,1,0.0,8333,0.0,246.0,360.0,1,Semiurban,1.0
|
||||
LP002544,Male,1.0,1,0,0.0,1958,2436.0,131.0,360.0,1,Rural,1.0
|
||||
LP002545,Male,0.0,2,1,0.0,3547,0.0,80.0,360.0,0,Rural,0.0
|
||||
LP002547,Male,1.0,1,1,0.0,18333,0.0,500.0,360.0,1,Urban,0.0
|
||||
LP002555,Male,1.0,2,1,1.0,4583,2083.0,160.0,360.0,1,Semiurban,1.0
|
||||
LP002556,Male,0.0,0,1,0.0,2435,0.0,75.0,360.0,1,Urban,0.0
|
||||
LP002560,Male,0.0,0,0,0.0,2699,2785.0,96.0,360.0,0,Semiurban,1.0
|
||||
LP002562,Male,1.0,1,0,0.0,5333,1131.0,186.0,360.0,0,Urban,1.0
|
||||
LP002571,Male,0.0,0,0,0.0,3691,0.0,110.0,360.0,1,Rural,1.0
|
||||
LP002582,Female,0.0,0,0,1.0,17263,0.0,225.0,360.0,1,Semiurban,1.0
|
||||
LP002585,Male,1.0,0,1,0.0,3597,2157.0,119.0,360.0,0,Rural,0.0
|
||||
LP002586,Female,1.0,1,1,0.0,3326,913.0,105.0,84.0,1,Semiurban,1.0
|
||||
LP002587,Male,1.0,0,0,0.0,2600,1700.0,107.0,360.0,1,Rural,1.0
|
||||
LP002588,Male,1.0,0,1,0.0,4625,2857.0,111.0,12.0,0,Urban,1.0
|
||||
LP002600,Male,1.0,1,1,1.0,2895,0.0,95.0,360.0,1,Semiurban,1.0
|
||||
LP002602,Male,0.0,0,1,0.0,6283,4416.0,209.0,360.0,0,Rural,0.0
|
||||
LP002603,Female,0.0,0,1,0.0,645,3683.0,113.0,480.0,1,Rural,1.0
|
||||
LP002606,Female,0.0,0,1,0.0,3159,0.0,100.0,360.0,1,Semiurban,1.0
|
||||
LP002615,Male,1.0,2,1,0.0,4865,5624.0,208.0,360.0,1,Semiurban,1.0
|
||||
LP002618,Male,1.0,1,0,0.0,4050,5302.0,138.0,360.0,0,Rural,0.0
|
||||
LP002619,Male,1.0,0,0,0.0,3814,1483.0,124.0,300.0,1,Semiurban,1.0
|
||||
LP002622,Male,1.0,2,1,0.0,3510,4416.0,243.0,360.0,1,Rural,1.0
|
||||
LP002624,Male,1.0,0,1,0.0,20833,6667.0,480.0,360.0,0,Urban,1.0
|
||||
LP002625,,0.0,0,1,0.0,3583,0.0,96.0,360.0,1,Urban,0.0
|
||||
LP002626,Male,1.0,0,1,1.0,2479,3013.0,188.0,360.0,1,Urban,1.0
|
||||
LP002634,Female,0.0,1,1,0.0,13262,0.0,40.0,360.0,1,Urban,1.0
|
||||
LP002637,Male,0.0,0,0,0.0,3598,1287.0,100.0,360.0,1,Rural,0.0
|
||||
LP002640,Male,1.0,1,1,0.0,6065,2004.0,250.0,360.0,1,Semiurban,1.0
|
||||
LP002643,Male,1.0,2,1,0.0,3283,2035.0,148.0,360.0,1,Urban,1.0
|
||||
LP002648,Male,1.0,0,1,0.0,2130,6666.0,70.0,180.0,1,Semiurban,0.0
|
||||
LP002652,Male,0.0,0,1,0.0,5815,3666.0,311.0,360.0,1,Rural,0.0
|
||||
LP002659,Male,1.0,3+,1,0.0,3466,3428.0,150.0,360.0,1,Rural,1.0
|
||||
LP002670,Female,1.0,2,1,0.0,2031,1632.0,113.0,480.0,1,Semiurban,1.0
|
||||
LP002682,Male,1.0,,0,0.0,3074,1800.0,123.0,360.0,0,Semiurban,0.0
|
||||
LP002683,Male,0.0,0,1,0.0,4683,1915.0,185.0,360.0,1,Semiurban,0.0
|
||||
LP002684,Female,0.0,0,0,0.0,3400,0.0,95.0,360.0,1,Rural,0.0
|
||||
LP002689,Male,1.0,2,0,0.0,2192,1742.0,45.0,360.0,1,Semiurban,1.0
|
||||
LP002690,Male,0.0,0,1,0.0,2500,0.0,55.0,360.0,1,Semiurban,1.0
|
||||
LP002692,Male,1.0,3+,1,1.0,5677,1424.0,100.0,360.0,1,Rural,1.0
|
||||
LP002693,Male,1.0,2,1,1.0,7948,7166.0,480.0,360.0,1,Rural,1.0
|
||||
LP002697,Male,0.0,0,1,0.0,4680,2087.0,0.0,360.0,1,Semiurban,0.0
|
||||
LP002699,Male,1.0,2,1,1.0,17500,0.0,400.0,360.0,1,Rural,1.0
|
||||
LP002705,Male,1.0,0,1,0.0,3775,0.0,110.0,360.0,1,Semiurban,1.0
|
||||
LP002706,Male,1.0,1,0,0.0,5285,1430.0,161.0,360.0,0,Semiurban,1.0
|
||||
LP002714,Male,0.0,1,0,0.0,2679,1302.0,94.0,360.0,1,Semiurban,1.0
|
||||
LP002716,Male,0.0,0,0,0.0,6783,0.0,130.0,360.0,1,Semiurban,1.0
|
||||
LP002717,Male,1.0,0,1,0.0,1025,5500.0,216.0,360.0,0,Rural,1.0
|
||||
LP002720,Male,1.0,3+,1,0.0,4281,0.0,100.0,360.0,1,Urban,1.0
|
||||
LP002723,Male,0.0,2,1,0.0,3588,0.0,110.0,360.0,0,Rural,0.0
|
||||
LP002729,Male,0.0,1,1,0.0,11250,0.0,196.0,360.0,0,Semiurban,0.0
|
||||
LP002731,Female,0.0,0,0,1.0,18165,0.0,125.0,360.0,1,Urban,1.0
|
||||
LP002732,Male,0.0,0,0,0.0,2550,2042.0,126.0,360.0,1,Rural,1.0
|
||||
LP002734,Male,1.0,0,1,0.0,6133,3906.0,324.0,360.0,1,Urban,1.0
|
||||
LP002738,Male,0.0,2,1,0.0,3617,0.0,107.0,360.0,1,Semiurban,1.0
|
||||
LP002739,Male,1.0,0,0,0.0,2917,536.0,66.0,360.0,1,Rural,0.0
|
||||
LP002740,Male,1.0,3+,1,0.0,6417,0.0,157.0,180.0,1,Rural,1.0
|
||||
LP002741,Female,1.0,1,1,0.0,4608,2845.0,140.0,180.0,1,Semiurban,1.0
|
||||
LP002743,Female,0.0,0,1,0.0,2138,0.0,99.0,360.0,0,Semiurban,0.0
|
||||
LP002753,Female,0.0,1,1,0.0,3652,0.0,95.0,360.0,1,Semiurban,1.0
|
||||
LP002755,Male,1.0,1,0,0.0,2239,2524.0,128.0,360.0,1,Urban,1.0
|
||||
LP002757,Female,1.0,0,0,0.0,3017,663.0,102.0,360.0,0,Semiurban,1.0
|
||||
LP002767,Male,1.0,0,1,0.0,2768,1950.0,155.0,360.0,1,Rural,1.0
|
||||
LP002768,Male,0.0,0,0,0.0,3358,0.0,80.0,36.0,1,Semiurban,0.0
|
||||
LP002772,Male,0.0,0,1,0.0,2526,1783.0,145.0,360.0,1,Rural,1.0
|
||||
LP002776,Female,0.0,0,1,0.0,5000,0.0,103.0,360.0,0,Semiurban,0.0
|
||||
LP002777,Male,1.0,0,1,0.0,2785,2016.0,110.0,360.0,1,Rural,1.0
|
||||
LP002778,Male,1.0,2,1,1.0,6633,0.0,0.0,360.0,0,Rural,0.0
|
||||
LP002784,Male,1.0,1,0,0.0,2492,2375.0,0.0,360.0,1,Rural,1.0
|
||||
LP002785,Male,1.0,1,1,0.0,3333,3250.0,158.0,360.0,1,Urban,1.0
|
||||
LP002788,Male,1.0,0,0,0.0,2454,2333.0,181.0,360.0,0,Urban,0.0
|
||||
LP002789,Male,1.0,0,1,0.0,3593,4266.0,132.0,180.0,0,Rural,0.0
|
||||
LP002792,Male,1.0,1,1,0.0,5468,1032.0,26.0,360.0,1,Semiurban,1.0
|
||||
LP002794,Female,0.0,0,1,0.0,2667,1625.0,84.0,360.0,0,Urban,1.0
|
||||
LP002795,Male,1.0,3+,1,1.0,10139,0.0,260.0,360.0,1,Semiurban,1.0
|
||||
LP002798,Male,1.0,0,1,0.0,3887,2669.0,162.0,360.0,1,Semiurban,1.0
|
||||
LP002804,Female,1.0,0,1,0.0,4180,2306.0,182.0,360.0,1,Semiurban,1.0
|
||||
LP002807,Male,1.0,2,0,0.0,3675,242.0,108.0,360.0,1,Semiurban,1.0
|
||||
LP002813,Female,1.0,1,1,1.0,19484,0.0,600.0,360.0,1,Semiurban,1.0
|
||||
LP002820,Male,1.0,0,1,0.0,5923,2054.0,211.0,360.0,1,Rural,1.0
|
||||
LP002821,Male,0.0,0,0,1.0,5800,0.0,132.0,360.0,1,Semiurban,1.0
|
||||
LP002832,Male,1.0,2,1,0.0,8799,0.0,258.0,360.0,0,Urban,0.0
|
||||
LP002833,Male,1.0,0,0,0.0,4467,0.0,120.0,360.0,0,Rural,1.0
|
||||
LP002836,Male,0.0,0,1,0.0,3333,0.0,70.0,360.0,1,Urban,1.0
|
||||
LP002837,Male,1.0,3+,1,0.0,3400,2500.0,123.0,360.0,0,Rural,0.0
|
||||
LP002840,Female,0.0,0,1,0.0,2378,0.0,9.0,360.0,1,Urban,0.0
|
||||
LP002841,Male,1.0,0,1,0.0,3166,2064.0,104.0,360.0,0,Urban,0.0
|
||||
LP002842,Male,1.0,1,1,0.0,3417,1750.0,186.0,360.0,1,Urban,1.0
|
||||
LP002847,Male,1.0,,1,0.0,5116,1451.0,165.0,360.0,0,Urban,0.0
|
||||
LP002855,Male,1.0,2,1,0.0,16666,0.0,275.0,360.0,1,Urban,1.0
|
||||
LP002862,Male,1.0,2,0,0.0,6125,1625.0,187.0,480.0,1,Semiurban,0.0
|
||||
LP002863,Male,1.0,3+,1,0.0,6406,0.0,150.0,360.0,1,Semiurban,0.0
|
||||
LP002868,Male,1.0,2,1,0.0,3159,461.0,108.0,84.0,1,Urban,1.0
|
||||
LP002872,,1.0,0,1,0.0,3087,2210.0,136.0,360.0,0,Semiurban,0.0
|
||||
LP002874,Male,0.0,0,1,0.0,3229,2739.0,110.0,360.0,1,Urban,1.0
|
||||
LP002877,Male,1.0,1,1,0.0,1782,2232.0,107.0,360.0,1,Rural,1.0
|
||||
LP002888,Male,0.0,0,1,0.0,3182,2917.0,161.0,360.0,1,Urban,1.0
|
||||
LP002892,Male,1.0,2,1,0.0,6540,0.0,205.0,360.0,1,Semiurban,1.0
|
||||
LP002893,Male,0.0,0,1,0.0,1836,33837.0,90.0,360.0,1,Urban,0.0
|
||||
LP002894,Female,1.0,0,1,0.0,3166,0.0,36.0,360.0,1,Semiurban,1.0
|
||||
LP002898,Male,1.0,1,1,0.0,1880,0.0,61.0,360.0,0,Rural,0.0
|
||||
LP002911,Male,1.0,1,1,0.0,2787,1917.0,146.0,360.0,0,Rural,0.0
|
||||
LP002912,Male,1.0,1,1,0.0,4283,3000.0,172.0,84.0,1,Rural,0.0
|
||||
LP002916,Male,1.0,0,1,0.0,2297,1522.0,104.0,360.0,1,Urban,1.0
|
||||
LP002917,Female,0.0,0,0,0.0,2165,0.0,70.0,360.0,1,Semiurban,1.0
|
||||
LP002925,,0.0,0,1,0.0,4750,0.0,94.0,360.0,1,Semiurban,1.0
|
||||
LP002926,Male,1.0,2,1,1.0,2726,0.0,106.0,360.0,0,Semiurban,0.0
|
||||
LP002928,Male,1.0,0,1,0.0,3000,3416.0,56.0,180.0,1,Semiurban,1.0
|
||||
LP002931,Male,1.0,2,1,1.0,6000,0.0,205.0,240.0,1,Semiurban,0.0
|
||||
LP002933,,0.0,3+,1,1.0,9357,0.0,292.0,360.0,1,Semiurban,1.0
|
||||
LP002936,Male,1.0,0,1,0.0,3859,3300.0,142.0,180.0,1,Rural,1.0
|
||||
LP002938,Male,1.0,0,1,1.0,16120,0.0,260.0,360.0,1,Urban,1.0
|
||||
LP002940,Male,0.0,0,0,0.0,3833,0.0,110.0,360.0,1,Rural,1.0
|
||||
LP002941,Male,1.0,2,0,1.0,6383,1000.0,187.0,360.0,1,Rural,0.0
|
||||
LP002943,Male,0.0,,1,0.0,2987,0.0,88.0,360.0,0,Semiurban,0.0
|
||||
LP002945,Male,1.0,0,1,1.0,9963,0.0,180.0,360.0,1,Rural,1.0
|
||||
LP002948,Male,1.0,2,1,0.0,5780,0.0,192.0,360.0,1,Urban,1.0
|
||||
LP002949,Female,0.0,3+,1,0.0,416,41667.0,350.0,180.0,0,Urban,0.0
|
||||
LP002950,Male,1.0,0,0,0.0,2894,2792.0,155.0,360.0,1,Rural,1.0
|
||||
LP002953,Male,1.0,3+,1,0.0,5703,0.0,128.0,360.0,1,Urban,1.0
|
||||
LP002958,Male,0.0,0,1,0.0,3676,4301.0,172.0,360.0,1,Rural,1.0
|
||||
LP002959,Female,1.0,1,1,0.0,12000,0.0,496.0,360.0,1,Semiurban,1.0
|
||||
LP002960,Male,1.0,0,0,0.0,2400,3800.0,0.0,180.0,1,Urban,0.0
|
||||
LP002961,Male,1.0,1,1,0.0,3400,2500.0,173.0,360.0,1,Semiurban,1.0
|
||||
LP002964,Male,1.0,2,0,0.0,3987,1411.0,157.0,360.0,1,Rural,1.0
|
||||
LP002974,Male,1.0,0,1,0.0,3232,1950.0,108.0,360.0,1,Rural,1.0
|
||||
LP002978,Female,0.0,0,1,0.0,2900,0.0,71.0,360.0,1,Rural,1.0
|
||||
LP002979,Male,1.0,3+,1,0.0,4106,0.0,40.0,180.0,1,Rural,1.0
|
||||
LP002983,Male,1.0,1,1,0.0,8072,240.0,253.0,360.0,1,Urban,1.0
|
||||
LP002984,Male,1.0,2,1,0.0,7583,0.0,187.0,360.0,1,Urban,1.0
|
||||
LP002990,Female,0.0,0,1,1.0,4583,0.0,133.0,360.0,0,Semiurban,0.0
|
||||
|
BIN
abanin_daniil_lab_5/result_1.png
Normal file
|
After Width: | Height: | Size: 66 KiB |
BIN
abanin_daniil_lab_5/result_2.png
Normal file
|
After Width: | Height: | Size: 22 KiB |
56
alexandrov_dmitrii_lab_1/lab1.py
Normal file
@@ -0,0 +1,56 @@
import random
from matplotlib import pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn.datasets import make_moons
from sklearn.linear_model import LinearRegression, Ridge
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import PolynomialFeatures
from sklearn.pipeline import Pipeline

# Pick one of 50 possible data sets at random.
rs = random.randrange(50)

# Two noisy interleaved half-moons, split 60/40 into train and test.
X, y = make_moons(n_samples=250, noise=0.3, random_state=rs)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4, random_state=42)

# One column of subplots per model: training data, test data, predictions, curves.
figure = plt.figure(1, figsize=(16, 9))
axis = figure.subplots(4, 3)
cm = ListedColormap(['#FF0000', "#0000FF"])


def test(col, model):
    global axis
    global X_test
    global X_train
    global y_train
    global y_test

    # Fit the model and print its score on the test set.
    model.fit(X_train, y_train)
    res_y = model.predict(X_test)
    print(model.score(X_test, y_test))

    # Row 0: training points, row 1: test points,
    # row 2: test points overlaid with predictions,
    # row 3: test targets (green) vs. predictions (red).
    axis[0, col].scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm)
    axis[1, col].scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm)
    axis[2, col].scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm)
    axis[2, col].scatter(X_test[:, 0], X_test[:, 1], c=res_y, cmap=cm)
    axis[3, col].plot([i for i in range(len(res_y))], y_test, c="g")
    axis[3, col].plot([i for i in range(len(res_y))], res_y, c="r")


def start():
    # The three models from the assignment: linear, polynomial (degree 3)
    # and ridge polynomial regression (degree 3, alpha = 1.0).
    lin = LinearRegression()
    poly = Pipeline([('poly', PolynomialFeatures(degree=3)),
                     ('linear', LinearRegression())])
    ridge = Pipeline([('poly', PolynomialFeatures(degree=3)),
                      ('ridge', Ridge(alpha=1.0))])

    test(0, lin)
    test(1, poly)
    test(2, ridge)

    plt.show()


start()
||||
46
alexandrov_dmitrii_lab_1/readme.md
Normal file
@@ -0,0 +1,46 @@
## Assignment

Generate a particular type of data and compare 3 models on it. Plot the results, report the quality of the models and explain the outcome.

Variant 1.

Data: make_moons (noise=0.3, random_state=rs)

Models:

* Linear regression
* Polynomial regression (degree 3)
* Ridge polynomial regression (degree 3, alpha = 1.0)

### Running the program

The file lab1.py both contains and runs the program; it ~~probably~~ requires no arguments or configuration.

### Program description

The program generates one of 50 possible data sets, shows a window with the plots and prints the score of each model required by the assignment.
It uses the matplotlib library to display the plots and sklearn to build and apply the models.

### Test results

For different values of rs the results are as follows:

| rs value | linear | polynomial | ridge polynomial |
|----------|--------|------------|------------------|
| 1 | 0.54 | 0.08 | 0.35 |
| 2 | 0.62 | 0.58 | 0.63 |
| 3 | 0.6 | 0.67 | 0.65 |
| 4 | 0.52 | 0.46 | 0.5 |
| 5 | 0.4 | 0.42 | 0.44 |

From these results one can conclude that there is no clear dependence of the accuracy on the chosen model.

However, after that I increased the number of samples in the data generator to 500. The results became more deterministic:

| rs value | linear | polynomial | ridge polynomial |
|----------|--------|------------|------------------|
| 1 | 0.54 | 0.63 | 0.63 |
| 2 | 0.52 | 0.63 | 0.62 |
| 3 | 0.56 | 0.64 | 0.64 |
| 4 | 0.5 | 0.63 | 0.62 |
| 5 | 0.5 | 0.52 | 0.53 |

From these results one can conclude that, in general, linear regression is inferior to the polynomial models. Ridge polynomial regression was usually slightly behind plain polynomial regression and only occasionally scored higher, but in every case the results were close, so it is safe to assume that they are essentially identical and differ only because of training noise.

After this investigation the number of samples in the generator was set to 250, because the plots were becoming unreadable:

| rs value | linear | polynomial | ridge polynomial |
|----------|--------|------------|------------------|
| 1 | 0.48 | 0.54 | 0.54 |
| 2 | 0.5 | 0.56 | 0.56 |
| 3 | 0.57 | 0.6 | 0.6 |
| 4 | 0.57 | 0.66 | 0.68 |
| 5 | 0.49 | 0.54 | 0.55 |

These results show that in most cases ridge polynomial regression now gives the best accuracy.

The results can be explained as follows:
linear regression, being a straight line mathematically, captures complex functions and nonlinear dependencies poorly, while polynomial regression can follow bends and dependencies whose strength changes with the magnitude of the values. The ridge polynomial model came out practically identical to the plain polynomial one because of identical settings: both use degree 3 as required by the assignment, and the ridge regression has too small an alpha, which results in a weak effect of the ridge penalty.
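The last point (alpha = 1.0 is too weak for the ridge penalty to matter at degree 3) can be illustrated with a small experiment. This is only a sketch, not part of lab1.py; the data parameters mirror the assignment, and the alpha values are chosen purely for illustration.

```python
# Sketch: how the ridge penalty strength affects the degree-3 model on make_moons.
from sklearn.datasets import make_moons
from sklearn.linear_model import Ridge
from sklearn.model_selection import train_test_split
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures

X, y = make_moons(n_samples=250, noise=0.3, random_state=0)
X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.4, random_state=42)

for alpha in (0.001, 1.0, 10.0, 100.0):
    model = Pipeline([('poly', PolynomialFeatures(degree=3)),
                      ('ridge', Ridge(alpha=alpha))])
    model.fit(X_tr, y_tr)
    # For very small alphas the score should coincide with plain polynomial regression.
    print(alpha, round(model.score(X_te, y_te), 3))
```

If the scores for the small alpha values coincide with the plain polynomial result, that confirms the explanation above.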
||||
82
alexandrov_dmitrii_lab_2/lab2.py
Normal file
@@ -0,0 +1,82 @@
from sklearn.linear_model import LinearRegression, RandomizedLasso  # RandomizedLasso needs an old scikit-learn release (see readme)
from sklearn.feature_selection import RFE
from sklearn.preprocessing import MinMaxScaler
from matplotlib import pyplot as plt
import numpy as np
import random as rand


# One row of four bar charts: one per ranking method plus the mean.
figure = plt.figure(1, figsize=(16, 9))
axis = figure.subplots(1, 4)
col = 0
y = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]  # bar positions, one per feature


def rank_to_dict(ranks, names, n_features):
    # Scale the absolute scores of one method to [0, 1] and map them to feature names.
    ranks = np.abs(ranks)
    minmax = MinMaxScaler()
    ranks = minmax.fit_transform(np.array(ranks).reshape(n_features, 1)).ravel()
    ranks = map(lambda x: round(x, 2), ranks)
    return dict(zip(names, ranks))


def createView(key, val):
    # Draw one bar chart with the scores of a single method.
    global figure
    global axis
    global col
    global y

    axis[col].bar(y, list(val.values()), label=key)
    axis[col].set_title(key)

    col = col + 1


def start():
    np.random.seed(rand.randint(0, 50))
    size = 750
    n_features = 14
    X = np.random.uniform(0, 1, (size, n_features))

    # Friedman-style target: only x1..x5 enter the formula (plus a single shared noise draw).
    Y = (10 * np.sin(np.pi * X[:, 0] * X[:, 1]) + 20 * (X[:, 2] - .5) ** 2 +
         10 * X[:, 3] + 5 * X[:, 4] ** 5 + np.random.normal(0, 1))
    # Features x11..x14 become noisy copies of x1..x4, so 9 features are relevant in total.
    X[:, 10:] = X[:, :4] + np.random.normal(0, .025, (size, 4))

    lr = LinearRegression()
    rl = RandomizedLasso()
    rfe = RFE(estimator=LinearRegression(), n_features_to_select=1)
    lr.fit(X, Y)
    rl.fit(X, Y)
    rfe.fit(X, Y)

    names = ["x%s" % i for i in range(1, n_features + 1)]
    # RFE returns 1 for the best feature and 14 for the worst; invert so that larger means more important.
    rfe_res = rfe.ranking_
    for i in range(rfe_res.size):
        rfe_res[i] = 14 - rfe_res[i]
    ranks = {"Linear regression": rank_to_dict(lr.coef_, names, n_features),
             "Random lasso": rank_to_dict(rl.scores_, names, n_features),
             "RFE": rank_to_dict(rfe_res, names, n_features)}

    # Average the normalized scores of all methods per feature.
    mean = {}

    for key, value in ranks.items():
        for item in value.items():
            if item[0] not in mean:
                mean[item[0]] = 0
            mean[item[0]] += item[1]

    for key, value in mean.items():
        res = value / len(ranks)
        mean[key] = round(res, 2)

    ranks["Mean"] = mean

    # Plot every ranking and print it sorted by decreasing importance.
    for key, value in ranks.items():
        createView(key, value)
        ranks[key] = sorted(value.items(), key=lambda y: y[1], reverse=True)
    for key, value in ranks.items():
        print(key)
        print(value)


start()
plt.show()
||||
50
alexandrov_dmitrii_lab_2/readme.md
Normal file
@@ -0,0 +1,50 @@
### Task

Perform feature ranking with the models specified by the assignment variant. Display the resulting score of each feature for each method/model together with the mean score. Analyse the results. Determine which four features turned out to be the most important by mean score.

Variant 1.

Models:

* Linear regression (LinearRegression)

* Randomized Lasso (RandomizedLasso)

* Recursive feature elimination (RFE)

### Running the program

The program runs on Python 3.7, since only there can the required older scikit-learn release, which still contains RandomizedLasso, be installed.

The lab2.py file contains and runs the program; it requires no arguments or configuration.
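For convenience, here is a minimal, hypothetical guard (not part of lab2.py) that fails with a clear message when the installed scikit-learn no longer ships RandomizedLasso; the version pin named below is only an example of a pre-removal release.

```python
# Hypothetical import guard: RandomizedLasso was removed from scikit-learn in
# 0.21, so only older releases can run lab2.py as written.
try:
    from sklearn.linear_model import RandomizedLasso
except ImportError as exc:
    raise SystemExit(
        "RandomizedLasso is missing from this scikit-learn build; "
        "install a pre-0.21 release, e.g. scikit-learn==0.20.4."
    ) from exc
```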
### Program description

The lab2.py file contains the program itself.

The program generates a dataset with 14 features to be ranked and processes it with the three models from the variant.

It draws bar charts showing how the feature importance scores are distributed, and prints the features to the console sorted by decreasing importance.

This makes it easy to identify the most important features.

### Test results

The test runs show the following:

* linear regression performs well and picks out all 9 significant features;

* randomized lasso does worse than the other models, sometimes promoting noise features to significant ones and demoting significant features to noise;

* recursive feature elimination performs well, correctly identifying the 9 most significant features;

* although linear regression and recursive feature elimination both identify the significant features correctly, they estimate the significance itself differently;

* the mean score identifies the truly significant features with good confidence.

In summary: if plain ranking is all that is needed, RFE is sufficient; if the features have to be analysed through coefficients, i.e. with an actual measure, linear regression is the right choice. Randomized lasso is best avoided.

Example console output:

>Linear regression

>[('x1', 1.0), ('x4', 0.69), ('x2', 0.61), ('x11', 0.59), ('x3', 0.51), ('x13', 0.48), ('x5', 0.19), ('x12', 0.19), ('x14', 0.12), ('x8', 0.03), ('x6', 0.02), ('x10', 0.01), ('x7', 0.0), ('x9', 0.0)]

>Random lasso

>[('x5', 1.0), ('x4', 0.76), ('x2', 0.74), ('x1', 0.72), ('x14', 0.44), ('x12', 0.32), ('x11', 0.28), ('x8', 0.22), ('x6', 0.17), ('x3', 0.08), ('x7', 0.02), ('x13', 0.02), ('x9', 0.01), ('x10', 0.0)]

>RFE

>[('x4', 1.0), ('x1', 0.92), ('x11', 0.85), ('x2', 0.77), ('x3', 0.69), ('x13', 0.62), ('x5', 0.54), ('x12', 0.46), ('x14', 0.38), ('x8', 0.31), ('x6', 0.23), ('x10', 0.15), ('x7', 0.08), ('x9', 0.0)]

>Mean

>[('x1', 0.88), ('x4', 0.82), ('x2', 0.71), ('x5', 0.58), ('x11', 0.57), ('x3', 0.43), ('x13', 0.37), ('x12', 0.32), ('x14', 0.31), ('x8', 0.19), ('x6', 0.14), ('x10', 0.05), ('x7', 0.03), ('x9', 0.0)]

From these results, the most influential features in descending order are x1, x4, x2, x5.
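For completeness, a tiny sketch of how the top four features are read off the averaged scores; the dict literal just copies part of the sample Mean output above rather than recomputing it.

```python
# Illustrative only: values copied from the sample "Mean" output above.
mean = {'x1': 0.88, 'x4': 0.82, 'x2': 0.71, 'x5': 0.58, 'x11': 0.57, 'x3': 0.43}
top4 = sorted(mean, key=mean.get, reverse=True)[:4]
print(top4)  # ['x1', 'x4', 'x2', 'x5']
```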
126
alexandrov_dmitrii_lab_3/lab3.py
Normal file
@@ -0,0 +1,126 @@
from sklearn.impute import SimpleImputer, MissingIndicator
from sklearn.pipeline import FeatureUnion, make_pipeline
from sklearn.preprocessing import MinMaxScaler
from sklearn.tree import DecisionTreeClassifier
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.model_selection import train_test_split
import pandas as pd
import random as rand
import numpy as np
from matplotlib import pyplot as plt


def rank_to_dict(ranks, names, n_features):
    # Same normalisation helper as in lab 2; not used below.
    ranks = np.abs(ranks)
    minmax = MinMaxScaler()
    ranks = minmax.fit_transform(np.array(ranks).reshape(len(ranks), 1)).ravel()
    ranks = map(lambda x: round(x, 2), ranks)
    return dict(zip(names, ranks))


def part_one():
    print('Titanic data analysis\n')
    data = pd.read_csv('titanic_data.csv', index_col='PassengerId')
    x = data[['Pclass', 'Name', 'Sex']]
    y = data[['Survived']]

    # Encode each name as the per-row sum of its Tfidf weights (all columns but the first).
    names = pd.DataFrame(TfidfVectorizer().fit_transform(x['Name']).toarray())
    col_names = names[names.columns[1:]].apply(lambda el: sum(el.dropna().astype(float)), axis=1)
    col_names.index = np.arange(1, len(col_names) + 1)
    col_sexes = []

    # Encode sex: male -> 1, female -> 0.
    for index, row in x.iterrows():
        if row['Sex'] == 'male':
            col_sexes.append(1)
        else:
            col_sexes.append(0)

    x = x.drop(columns=['Sex', 'Name'])
    x['Sex'] = col_sexes
    x['Name'] = col_names

    dtc = DecisionTreeClassifier(random_state=rand.randint(0, 250))
    x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.05, random_state=rand.randint(0, 250))
    dtc.fit(x_train, y_train)
    print('model score: ' + str(dtc.score(x_test, y_test)))
    res = dict(zip(['Pclass', 'Sex', 'Name'], dtc.feature_importances_))
    print('feature importances: ' + str(res))


def part_two():
    print('\n---------------------------------------------------------------------------\nSberbank data analysis\n')
    data = pd.read_csv('sberbank_data.csv', index_col='id')
    x = data.drop(columns='price_doc')
    y = data[['price_doc']]

    # Map categorical string values to numbers and fill gaps with zeros.
    x = x.replace(
        ['NA', 'no', 'yes', 'Investment', 'OwnerOccupier', 'poor', 'satisfactory', 'no data', 'good', 'excellent'],
        [0, 0, 1, 0, 1, -1, 0, 0, 1, 2])
    x.fillna(0, inplace=True)

    # Encode district names as the per-row sum of their Tfidf weights
    # and keep only the year part of the timestamp.
    names = pd.DataFrame(TfidfVectorizer().fit_transform(x['sub_area']).toarray())
    col_area = names[names.columns[1:]].apply(lambda el: sum(el.dropna().astype(float)), axis=1)
    col_area.index = np.arange(1, len(col_area) + 1)
    col_date = []

    for val in x['timestamp']:
        col_date.append(val.split('-', 1)[0])

    x = x.drop(columns=['sub_area', 'timestamp'])
    x['sub_area'] = col_area
    x['timestamp'] = col_date

    # Turn the target price into categorical classes.
    col_price = []
    for val in y['price_doc']:
        if val < 1500000:
            col_price.append('low')
        elif val < 3000000:
            col_price.append('medium')
        elif val < 5500000:
            col_price.append('high')
        elif val < 10000000:
            col_price.append('premium')
        else:
            col_price.append('oligarch')

    y = pd.DataFrame(col_price)

    transformer = FeatureUnion(
        transformer_list=[
            ('features', SimpleImputer(strategy='mean')),
            ('indicators', MissingIndicator())])

    dtr = make_pipeline(transformer, DecisionTreeClassifier(random_state=rand.randint(0, 250)))
    x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.01, random_state=rand.randint(0, 250))
    dtr.fit(x_train, y_train)

    features = list(x.columns)
    print('model score: ' + str(dtr.score(x_test, y_test)))

    res = sorted(dict(zip(features, dtr.steps[-1][1].feature_importances_)).items(),
                 key=lambda el: el[1], reverse=True)

    view_y = []
    view_x = []

    # Print and plot only the eight most important features.
    flag = 0
    print('feature importances:')
    for val in res:
        if flag == 8:
            break
        print(val[0]+" - "+str(val[1]))
        view_y.append(val[0])
        view_x.append(val[1])
        flag = flag + 1

    plt.figure(1, figsize=(16, 9))
    plt.bar(view_y, view_x)
    plt.show()


def start():
    part_one()
    part_two()


start()
60
alexandrov_dmitrii_lab_3/readme.md
Normal file
@@ -0,0 +1,60 @@
### Task

1. Using the Titanic passenger data, solve a classification task with a decision tree: based on various passenger characteristics, find the two most important of the three considered features for the surviving passengers.

Variant 1: Pclass, Name, Sex.

2. Using the course-project data, solve the chosen task with a decision tree: classification, i.e. the dependence of the price category on all other factors, evaluation of the result, and selection of the most significant features.

### Running the program

The lab3.py file contains and runs the program; it requires no arguments or configuration.

### Program description

The program consists of two parts:

1. It reads the file with the Titanic passenger data: the class, name, and sex features plus a record of whether the passenger survived. The data are preprocessed: sex is encoded (female = 0, male = 1) and the name is encoded with Tfidf. A decision tree is then trained on the data and the results are printed to the console.

2. It reads the file with Sberbank real-estate market data. The data are preprocessed: district names are encoded (Tfidf), non-numeric values are converted to numbers, missing values are filled in, and the records are split into price classes. A decision tree is then trained on the data and the result is printed to the console and shown as a chart. Since there are too many features, the output is limited to the eight most significant ones.

### Test results

The test runs show the following:

For the first task:

* the decision tree performs reasonably well, around 70-75% accuracy;

* however, the feature importance estimate is completely wrong: the passenger's name is rated the most significant feature. This means the chosen encoding is unsuitable for this data. Possible fixes: zero the feature out, or exclude it altogether as analytically obviously insignificant (see the sketch after this list);

* aside from the misjudged role of the name, sex is rated more than twice as significant as class. The actual statistics (among the rescued passengers, 74% women and children (many of the children were also male) and 26% men; 60% of first class, 44% of second, 25% of third) rather support this conclusion.
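A minimal sketch of the exclusion option mentioned in the list above (hypothetical, not part of lab3.py): retrain the tree on Pclass and Sex only, reusing the same sex encoding; the fixed random_state is an illustrative choice.

```python
# Hypothetical variant of part_one: drop the Name feature and retrain on
# Pclass and Sex only (male -> 1, female -> 0, as in lab3.py).
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier

data = pd.read_csv('titanic_data.csv', index_col='PassengerId')
x = pd.DataFrame({'Pclass': data['Pclass'],
                  'Sex': (data['Sex'] == 'male').astype(int)})
y = data['Survived']

x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.05, random_state=0)
tree = DecisionTreeClassifier(random_state=0).fit(x_train, y_train)
print('score without Name:', tree.score(x_test, y_test))
print('importances:', dict(zip(x.columns, tree.feature_importances_)))
```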
For the second task:

* the decision tree performs reasonably well, around 70-75% accuracy;

* the feature importance estimate puts the floor area of the property first, which is most likely correct;

* the area is followed, by a small margin, by the number of sport facilities in the district. This is wrong, if only because the data contain correlated features such as the living area, and information about the real ranking is available;

* the remaining estimates mix correct and incorrect features: the floor, the number of floors in the building, and the district are genuinely significant, but they are interleaved with insignificant ones.

In summary: the decision tree gives decent classification results, but it is not suitable for regression-style tasks, since it misidentifies the significant features. The data should also be preprocessed more carefully, especially low-information text columns: the encodings used here proved ineffective on the lab data.
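One possible direction for the more careful preprocessing mentioned above (a swapped-in technique, not what lab3.py does): encode sub_area as one integer code per district with pandas factorize, instead of summing its Tfidf weights.

```python
# Hypothetical alternative encoding for sub_area: one integer code per district
# (instead of the summed Tfidf weights used in lab3.py).
import pandas as pd

data = pd.read_csv('sberbank_data.csv', index_col='id')
codes, districts = pd.factorize(data['sub_area'])
data['sub_area'] = codes
print(f"{len(districts)} distinct districts encoded as 0..{len(districts) - 1}")
```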
Example console output:

>Titanic data analysis

>model score: 0.7777777777777778

>feature importances: {'Pclass': 0.1287795817634186, 'Sex': 0.3381642167551354, 'Name': 0.533056201481446}

>Sberbank data analysis

>model score: 0.7162629757785467

>feature importances:

>full_sq - 0.1801327274709341

>sport_count_3000 - 0.14881362533480907

>floor - 0.03169232872469085

>power_transmission_line_km - 0.027978416524911377

>timestamp - 0.020092007662845194

>max_floor - 0.019985442431576052

>cafe_count_5000_price_2500 - 0.019397048405749438

>sub_area - 0.017477163456413432
28896
alexandrov_dmitrii_lab_3/sberbank_data.csv
Normal file
892
alexandrov_dmitrii_lab_3/titanic_data.csv
Normal file
@@ -0,0 +1,892 @@
|
||||
PassengerId,Survived,Pclass,Name,Sex,Age,SibSp,Parch,Ticket,Fare,Cabin,Embarked
|
||||
1,0,3,"Braund, Mr. Owen Harris",male,22,1,0,A/5 21171,7.25,,S
|
||||
2,1,1,"Cumings, Mrs. John Bradley (Florence Briggs Thayer)",female,38,1,0,PC 17599,71.2833,C85,C
|
||||
3,1,3,"Heikkinen, Miss. Laina",female,26,0,0,STON/O2. 3101282,7.925,,S
|
||||
4,1,1,"Futrelle, Mrs. Jacques Heath (Lily May Peel)",female,35,1,0,113803,53.1,C123,S
|
||||
5,0,3,"Allen, Mr. William Henry",male,35,0,0,373450,8.05,,S
|
||||
6,0,3,"Moran, Mr. James",male,,0,0,330877,8.4583,,Q
|
||||
7,0,1,"McCarthy, Mr. Timothy J",male,54,0,0,17463,51.8625,E46,S
|
||||
8,0,3,"Palsson, Master. Gosta Leonard",male,2,3,1,349909,21.075,,S
|
||||
9,1,3,"Johnson, Mrs. Oscar W (Elisabeth Vilhelmina Berg)",female,27,0,2,347742,11.1333,,S
|
||||
10,1,2,"Nasser, Mrs. Nicholas (Adele Achem)",female,14,1,0,237736,30.0708,,C
|
||||
11,1,3,"Sandstrom, Miss. Marguerite Rut",female,4,1,1,PP 9549,16.7,G6,S
|
||||
12,1,1,"Bonnell, Miss. Elizabeth",female,58,0,0,113783,26.55,C103,S
|
||||
13,0,3,"Saundercock, Mr. William Henry",male,20,0,0,A/5. 2151,8.05,,S
|
||||
14,0,3,"Andersson, Mr. Anders Johan",male,39,1,5,347082,31.275,,S
|
||||
15,0,3,"Vestrom, Miss. Hulda Amanda Adolfina",female,14,0,0,350406,7.8542,,S
|
||||
16,1,2,"Hewlett, Mrs. (Mary D Kingcome) ",female,55,0,0,248706,16,,S
|
||||
17,0,3,"Rice, Master. Eugene",male,2,4,1,382652,29.125,,Q
|
||||
18,1,2,"Williams, Mr. Charles Eugene",male,,0,0,244373,13,,S
|
||||
19,0,3,"Vander Planke, Mrs. Julius (Emelia Maria Vandemoortele)",female,31,1,0,345763,18,,S
|
||||
20,1,3,"Masselmani, Mrs. Fatima",female,,0,0,2649,7.225,,C
|
||||
21,0,2,"Fynney, Mr. Joseph J",male,35,0,0,239865,26,,S
|
||||
22,1,2,"Beesley, Mr. Lawrence",male,34,0,0,248698,13,D56,S
|
||||
23,1,3,"McGowan, Miss. Anna ""Annie""",female,15,0,0,330923,8.0292,,Q
|
||||
24,1,1,"Sloper, Mr. William Thompson",male,28,0,0,113788,35.5,A6,S
|
||||
25,0,3,"Palsson, Miss. Torborg Danira",female,8,3,1,349909,21.075,,S
|
||||
26,1,3,"Asplund, Mrs. Carl Oscar (Selma Augusta Emilia Johansson)",female,38,1,5,347077,31.3875,,S
|
||||
27,0,3,"Emir, Mr. Farred Chehab",male,,0,0,2631,7.225,,C
|
||||
28,0,1,"Fortune, Mr. Charles Alexander",male,19,3,2,19950,263,C23 C25 C27,S
|
||||
29,1,3,"O'Dwyer, Miss. Ellen ""Nellie""",female,,0,0,330959,7.8792,,Q
|
||||
30,0,3,"Todoroff, Mr. Lalio",male,,0,0,349216,7.8958,,S
|
||||
31,0,1,"Uruchurtu, Don. Manuel E",male,40,0,0,PC 17601,27.7208,,C
|
||||
32,1,1,"Spencer, Mrs. William Augustus (Marie Eugenie)",female,,1,0,PC 17569,146.5208,B78,C
|
||||
33,1,3,"Glynn, Miss. Mary Agatha",female,,0,0,335677,7.75,,Q
|
||||
34,0,2,"Wheadon, Mr. Edward H",male,66,0,0,C.A. 24579,10.5,,S
|
||||
35,0,1,"Meyer, Mr. Edgar Joseph",male,28,1,0,PC 17604,82.1708,,C
|
||||
36,0,1,"Holverson, Mr. Alexander Oskar",male,42,1,0,113789,52,,S
|
||||
37,1,3,"Mamee, Mr. Hanna",male,,0,0,2677,7.2292,,C
|
||||
38,0,3,"Cann, Mr. Ernest Charles",male,21,0,0,A./5. 2152,8.05,,S
|
||||
39,0,3,"Vander Planke, Miss. Augusta Maria",female,18,2,0,345764,18,,S
|
||||
40,1,3,"Nicola-Yarred, Miss. Jamila",female,14,1,0,2651,11.2417,,C
|
||||
41,0,3,"Ahlin, Mrs. Johan (Johanna Persdotter Larsson)",female,40,1,0,7546,9.475,,S
|
||||
42,0,2,"Turpin, Mrs. William John Robert (Dorothy Ann Wonnacott)",female,27,1,0,11668,21,,S
|
||||
43,0,3,"Kraeff, Mr. Theodor",male,,0,0,349253,7.8958,,C
|
||||
44,1,2,"Laroche, Miss. Simonne Marie Anne Andree",female,3,1,2,SC/Paris 2123,41.5792,,C
|
||||
45,1,3,"Devaney, Miss. Margaret Delia",female,19,0,0,330958,7.8792,,Q
|
||||
46,0,3,"Rogers, Mr. William John",male,,0,0,S.C./A.4. 23567,8.05,,S
|
||||
47,0,3,"Lennon, Mr. Denis",male,,1,0,370371,15.5,,Q
|
||||
48,1,3,"O'Driscoll, Miss. Bridget",female,,0,0,14311,7.75,,Q
|
||||
49,0,3,"Samaan, Mr. Youssef",male,,2,0,2662,21.6792,,C
|
||||
50,0,3,"Arnold-Franchi, Mrs. Josef (Josefine Franchi)",female,18,1,0,349237,17.8,,S
|
||||
51,0,3,"Panula, Master. Juha Niilo",male,7,4,1,3101295,39.6875,,S
|
||||
52,0,3,"Nosworthy, Mr. Richard Cater",male,21,0,0,A/4. 39886,7.8,,S
|
||||
53,1,1,"Harper, Mrs. Henry Sleeper (Myna Haxtun)",female,49,1,0,PC 17572,76.7292,D33,C
|
||||
54,1,2,"Faunthorpe, Mrs. Lizzie (Elizabeth Anne Wilkinson)",female,29,1,0,2926,26,,S
|
||||
55,0,1,"Ostby, Mr. Engelhart Cornelius",male,65,0,1,113509,61.9792,B30,C
|
||||
56,1,1,"Woolner, Mr. Hugh",male,,0,0,19947,35.5,C52,S
|
||||
57,1,2,"Rugg, Miss. Emily",female,21,0,0,C.A. 31026,10.5,,S
|
||||
58,0,3,"Novel, Mr. Mansouer",male,28.5,0,0,2697,7.2292,,C
|
||||
59,1,2,"West, Miss. Constance Mirium",female,5,1,2,C.A. 34651,27.75,,S
|
||||
60,0,3,"Goodwin, Master. William Frederick",male,11,5,2,CA 2144,46.9,,S
|
||||
61,0,3,"Sirayanian, Mr. Orsen",male,22,0,0,2669,7.2292,,C
|
||||
62,1,1,"Icard, Miss. Amelie",female,38,0,0,113572,80,B28,
|
||||
63,0,1,"Harris, Mr. Henry Birkhardt",male,45,1,0,36973,83.475,C83,S
|
||||
64,0,3,"Skoog, Master. Harald",male,4,3,2,347088,27.9,,S
|
||||
65,0,1,"Stewart, Mr. Albert A",male,,0,0,PC 17605,27.7208,,C
|
||||
66,1,3,"Moubarek, Master. Gerios",male,,1,1,2661,15.2458,,C
|
||||
67,1,2,"Nye, Mrs. (Elizabeth Ramell)",female,29,0,0,C.A. 29395,10.5,F33,S
|
||||
68,0,3,"Crease, Mr. Ernest James",male,19,0,0,S.P. 3464,8.1583,,S
|
||||
69,1,3,"Andersson, Miss. Erna Alexandra",female,17,4,2,3101281,7.925,,S
|
||||
70,0,3,"Kink, Mr. Vincenz",male,26,2,0,315151,8.6625,,S
|
||||
71,0,2,"Jenkin, Mr. Stephen Curnow",male,32,0,0,C.A. 33111,10.5,,S
|
||||
72,0,3,"Goodwin, Miss. Lillian Amy",female,16,5,2,CA 2144,46.9,,S
|
||||
73,0,2,"Hood, Mr. Ambrose Jr",male,21,0,0,S.O.C. 14879,73.5,,S
|
||||
74,0,3,"Chronopoulos, Mr. Apostolos",male,26,1,0,2680,14.4542,,C
|
||||
75,1,3,"Bing, Mr. Lee",male,32,0,0,1601,56.4958,,S
|
||||
76,0,3,"Moen, Mr. Sigurd Hansen",male,25,0,0,348123,7.65,F G73,S
|
||||
77,0,3,"Staneff, Mr. Ivan",male,,0,0,349208,7.8958,,S
|
||||
78,0,3,"Moutal, Mr. Rahamin Haim",male,,0,0,374746,8.05,,S
|
||||
79,1,2,"Caldwell, Master. Alden Gates",male,0.83,0,2,248738,29,,S
|
||||
80,1,3,"Dowdell, Miss. Elizabeth",female,30,0,0,364516,12.475,,S
|
||||
81,0,3,"Waelens, Mr. Achille",male,22,0,0,345767,9,,S
|
||||
82,1,3,"Sheerlinck, Mr. Jan Baptist",male,29,0,0,345779,9.5,,S
|
||||
83,1,3,"McDermott, Miss. Brigdet Delia",female,,0,0,330932,7.7875,,Q
|
||||
84,0,1,"Carrau, Mr. Francisco M",male,28,0,0,113059,47.1,,S
|
||||
85,1,2,"Ilett, Miss. Bertha",female,17,0,0,SO/C 14885,10.5,,S
|
||||
86,1,3,"Backstrom, Mrs. Karl Alfred (Maria Mathilda Gustafsson)",female,33,3,0,3101278,15.85,,S
|
||||
87,0,3,"Ford, Mr. William Neal",male,16,1,3,W./C. 6608,34.375,,S
|
||||
88,0,3,"Slocovski, Mr. Selman Francis",male,,0,0,SOTON/OQ 392086,8.05,,S
|
||||
89,1,1,"Fortune, Miss. Mabel Helen",female,23,3,2,19950,263,C23 C25 C27,S
|
||||
90,0,3,"Celotti, Mr. Francesco",male,24,0,0,343275,8.05,,S
|
||||
91,0,3,"Christmann, Mr. Emil",male,29,0,0,343276,8.05,,S
|
||||
92,0,3,"Andreasson, Mr. Paul Edvin",male,20,0,0,347466,7.8542,,S
|
||||
93,0,1,"Chaffee, Mr. Herbert Fuller",male,46,1,0,W.E.P. 5734,61.175,E31,S
|
||||
94,0,3,"Dean, Mr. Bertram Frank",male,26,1,2,C.A. 2315,20.575,,S
|
||||
95,0,3,"Coxon, Mr. Daniel",male,59,0,0,364500,7.25,,S
|
||||
96,0,3,"Shorney, Mr. Charles Joseph",male,,0,0,374910,8.05,,S
|
||||
97,0,1,"Goldschmidt, Mr. George B",male,71,0,0,PC 17754,34.6542,A5,C
|
||||
98,1,1,"Greenfield, Mr. William Bertram",male,23,0,1,PC 17759,63.3583,D10 D12,C
|
||||
99,1,2,"Doling, Mrs. John T (Ada Julia Bone)",female,34,0,1,231919,23,,S
|
||||
100,0,2,"Kantor, Mr. Sinai",male,34,1,0,244367,26,,S
|
||||
101,0,3,"Petranec, Miss. Matilda",female,28,0,0,349245,7.8958,,S
|
||||
102,0,3,"Petroff, Mr. Pastcho (""Pentcho"")",male,,0,0,349215,7.8958,,S
|
||||
103,0,1,"White, Mr. Richard Frasar",male,21,0,1,35281,77.2875,D26,S
|
||||
104,0,3,"Johansson, Mr. Gustaf Joel",male,33,0,0,7540,8.6542,,S
|
||||
105,0,3,"Gustafsson, Mr. Anders Vilhelm",male,37,2,0,3101276,7.925,,S
|
||||
106,0,3,"Mionoff, Mr. Stoytcho",male,28,0,0,349207,7.8958,,S
|
||||
107,1,3,"Salkjelsvik, Miss. Anna Kristine",female,21,0,0,343120,7.65,,S
|
||||
108,1,3,"Moss, Mr. Albert Johan",male,,0,0,312991,7.775,,S
|
||||
109,0,3,"Rekic, Mr. Tido",male,38,0,0,349249,7.8958,,S
|
||||
110,1,3,"Moran, Miss. Bertha",female,,1,0,371110,24.15,,Q
|
||||
111,0,1,"Porter, Mr. Walter Chamberlain",male,47,0,0,110465,52,C110,S
|
||||
112,0,3,"Zabour, Miss. Hileni",female,14.5,1,0,2665,14.4542,,C
|
||||
113,0,3,"Barton, Mr. David John",male,22,0,0,324669,8.05,,S
|
||||
114,0,3,"Jussila, Miss. Katriina",female,20,1,0,4136,9.825,,S
|
||||
115,0,3,"Attalah, Miss. Malake",female,17,0,0,2627,14.4583,,C
|
||||
116,0,3,"Pekoniemi, Mr. Edvard",male,21,0,0,STON/O 2. 3101294,7.925,,S
|
||||
117,0,3,"Connors, Mr. Patrick",male,70.5,0,0,370369,7.75,,Q
|
||||
118,0,2,"Turpin, Mr. William John Robert",male,29,1,0,11668,21,,S
|
||||
119,0,1,"Baxter, Mr. Quigg Edmond",male,24,0,1,PC 17558,247.5208,B58 B60,C
|
||||
120,0,3,"Andersson, Miss. Ellis Anna Maria",female,2,4,2,347082,31.275,,S
|
||||
121,0,2,"Hickman, Mr. Stanley George",male,21,2,0,S.O.C. 14879,73.5,,S
|
||||
122,0,3,"Moore, Mr. Leonard Charles",male,,0,0,A4. 54510,8.05,,S
|
||||
123,0,2,"Nasser, Mr. Nicholas",male,32.5,1,0,237736,30.0708,,C
|
||||
124,1,2,"Webber, Miss. Susan",female,32.5,0,0,27267,13,E101,S
|
||||
125,0,1,"White, Mr. Percival Wayland",male,54,0,1,35281,77.2875,D26,S
|
||||
126,1,3,"Nicola-Yarred, Master. Elias",male,12,1,0,2651,11.2417,,C
|
||||
127,0,3,"McMahon, Mr. Martin",male,,0,0,370372,7.75,,Q
|
||||
128,1,3,"Madsen, Mr. Fridtjof Arne",male,24,0,0,C 17369,7.1417,,S
|
||||
129,1,3,"Peter, Miss. Anna",female,,1,1,2668,22.3583,F E69,C
|
||||
130,0,3,"Ekstrom, Mr. Johan",male,45,0,0,347061,6.975,,S
|
||||
131,0,3,"Drazenoic, Mr. Jozef",male,33,0,0,349241,7.8958,,C
|
||||
132,0,3,"Coelho, Mr. Domingos Fernandeo",male,20,0,0,SOTON/O.Q. 3101307,7.05,,S
|
||||
133,0,3,"Robins, Mrs. Alexander A (Grace Charity Laury)",female,47,1,0,A/5. 3337,14.5,,S
|
||||
134,1,2,"Weisz, Mrs. Leopold (Mathilde Francoise Pede)",female,29,1,0,228414,26,,S
|
||||
135,0,2,"Sobey, Mr. Samuel James Hayden",male,25,0,0,C.A. 29178,13,,S
|
||||
136,0,2,"Richard, Mr. Emile",male,23,0,0,SC/PARIS 2133,15.0458,,C
|
||||
137,1,1,"Newsom, Miss. Helen Monypeny",female,19,0,2,11752,26.2833,D47,S
|
||||
138,0,1,"Futrelle, Mr. Jacques Heath",male,37,1,0,113803,53.1,C123,S
|
||||
139,0,3,"Osen, Mr. Olaf Elon",male,16,0,0,7534,9.2167,,S
|
||||
140,0,1,"Giglio, Mr. Victor",male,24,0,0,PC 17593,79.2,B86,C
|
||||
141,0,3,"Boulos, Mrs. Joseph (Sultana)",female,,0,2,2678,15.2458,,C
|
||||
142,1,3,"Nysten, Miss. Anna Sofia",female,22,0,0,347081,7.75,,S
|
||||
143,1,3,"Hakkarainen, Mrs. Pekka Pietari (Elin Matilda Dolck)",female,24,1,0,STON/O2. 3101279,15.85,,S
|
||||
144,0,3,"Burke, Mr. Jeremiah",male,19,0,0,365222,6.75,,Q
|
||||
145,0,2,"Andrew, Mr. Edgardo Samuel",male,18,0,0,231945,11.5,,S
|
||||
146,0,2,"Nicholls, Mr. Joseph Charles",male,19,1,1,C.A. 33112,36.75,,S
|
||||
147,1,3,"Andersson, Mr. August Edvard (""Wennerstrom"")",male,27,0,0,350043,7.7958,,S
|
||||
148,0,3,"Ford, Miss. Robina Maggie ""Ruby""",female,9,2,2,W./C. 6608,34.375,,S
|
||||
149,0,2,"Navratil, Mr. Michel (""Louis M Hoffman"")",male,36.5,0,2,230080,26,F2,S
|
||||
150,0,2,"Byles, Rev. Thomas Roussel Davids",male,42,0,0,244310,13,,S
|
||||
151,0,2,"Bateman, Rev. Robert James",male,51,0,0,S.O.P. 1166,12.525,,S
|
||||
152,1,1,"Pears, Mrs. Thomas (Edith Wearne)",female,22,1,0,113776,66.6,C2,S
|
||||
153,0,3,"Meo, Mr. Alfonzo",male,55.5,0,0,A.5. 11206,8.05,,S
|
||||
154,0,3,"van Billiard, Mr. Austin Blyler",male,40.5,0,2,A/5. 851,14.5,,S
|
||||
155,0,3,"Olsen, Mr. Ole Martin",male,,0,0,Fa 265302,7.3125,,S
|
||||
156,0,1,"Williams, Mr. Charles Duane",male,51,0,1,PC 17597,61.3792,,C
|
||||
157,1,3,"Gilnagh, Miss. Katherine ""Katie""",female,16,0,0,35851,7.7333,,Q
|
||||
158,0,3,"Corn, Mr. Harry",male,30,0,0,SOTON/OQ 392090,8.05,,S
|
||||
159,0,3,"Smiljanic, Mr. Mile",male,,0,0,315037,8.6625,,S
|
||||
160,0,3,"Sage, Master. Thomas Henry",male,,8,2,CA. 2343,69.55,,S
|
||||
161,0,3,"Cribb, Mr. John Hatfield",male,44,0,1,371362,16.1,,S
|
||||
162,1,2,"Watt, Mrs. James (Elizabeth ""Bessie"" Inglis Milne)",female,40,0,0,C.A. 33595,15.75,,S
|
||||
163,0,3,"Bengtsson, Mr. John Viktor",male,26,0,0,347068,7.775,,S
|
||||
164,0,3,"Calic, Mr. Jovo",male,17,0,0,315093,8.6625,,S
|
||||
165,0,3,"Panula, Master. Eino Viljami",male,1,4,1,3101295,39.6875,,S
|
||||
166,1,3,"Goldsmith, Master. Frank John William ""Frankie""",male,9,0,2,363291,20.525,,S
|
||||
167,1,1,"Chibnall, Mrs. (Edith Martha Bowerman)",female,,0,1,113505,55,E33,S
|
||||
168,0,3,"Skoog, Mrs. William (Anna Bernhardina Karlsson)",female,45,1,4,347088,27.9,,S
|
||||
169,0,1,"Baumann, Mr. John D",male,,0,0,PC 17318,25.925,,S
|
||||
170,0,3,"Ling, Mr. Lee",male,28,0,0,1601,56.4958,,S
|
||||
171,0,1,"Van der hoef, Mr. Wyckoff",male,61,0,0,111240,33.5,B19,S
|
||||
172,0,3,"Rice, Master. Arthur",male,4,4,1,382652,29.125,,Q
|
||||
173,1,3,"Johnson, Miss. Eleanor Ileen",female,1,1,1,347742,11.1333,,S
|
||||
174,0,3,"Sivola, Mr. Antti Wilhelm",male,21,0,0,STON/O 2. 3101280,7.925,,S
|
||||
175,0,1,"Smith, Mr. James Clinch",male,56,0,0,17764,30.6958,A7,C
|
||||
176,0,3,"Klasen, Mr. Klas Albin",male,18,1,1,350404,7.8542,,S
|
||||
177,0,3,"Lefebre, Master. Henry Forbes",male,,3,1,4133,25.4667,,S
|
||||
178,0,1,"Isham, Miss. Ann Elizabeth",female,50,0,0,PC 17595,28.7125,C49,C
|
||||
179,0,2,"Hale, Mr. Reginald",male,30,0,0,250653,13,,S
|
||||
180,0,3,"Leonard, Mr. Lionel",male,36,0,0,LINE,0,,S
|
||||
181,0,3,"Sage, Miss. Constance Gladys",female,,8,2,CA. 2343,69.55,,S
|
||||
182,0,2,"Pernot, Mr. Rene",male,,0,0,SC/PARIS 2131,15.05,,C
|
||||
183,0,3,"Asplund, Master. Clarence Gustaf Hugo",male,9,4,2,347077,31.3875,,S
|
||||
184,1,2,"Becker, Master. Richard F",male,1,2,1,230136,39,F4,S
|
||||
185,1,3,"Kink-Heilmann, Miss. Luise Gretchen",female,4,0,2,315153,22.025,,S
|
||||
186,0,1,"Rood, Mr. Hugh Roscoe",male,,0,0,113767,50,A32,S
|
||||
187,1,3,"O'Brien, Mrs. Thomas (Johanna ""Hannah"" Godfrey)",female,,1,0,370365,15.5,,Q
|
||||
188,1,1,"Romaine, Mr. Charles Hallace (""Mr C Rolmane"")",male,45,0,0,111428,26.55,,S
|
||||
189,0,3,"Bourke, Mr. John",male,40,1,1,364849,15.5,,Q
|
||||
190,0,3,"Turcin, Mr. Stjepan",male,36,0,0,349247,7.8958,,S
|
||||
191,1,2,"Pinsky, Mrs. (Rosa)",female,32,0,0,234604,13,,S
|
||||
192,0,2,"Carbines, Mr. William",male,19,0,0,28424,13,,S
|
||||
193,1,3,"Andersen-Jensen, Miss. Carla Christine Nielsine",female,19,1,0,350046,7.8542,,S
|
||||
194,1,2,"Navratil, Master. Michel M",male,3,1,1,230080,26,F2,S
|
||||
195,1,1,"Brown, Mrs. James Joseph (Margaret Tobin)",female,44,0,0,PC 17610,27.7208,B4,C
|
||||
196,1,1,"Lurette, Miss. Elise",female,58,0,0,PC 17569,146.5208,B80,C
|
||||
197,0,3,"Mernagh, Mr. Robert",male,,0,0,368703,7.75,,Q
|
||||
198,0,3,"Olsen, Mr. Karl Siegwart Andreas",male,42,0,1,4579,8.4042,,S
|
||||
199,1,3,"Madigan, Miss. Margaret ""Maggie""",female,,0,0,370370,7.75,,Q
|
||||
200,0,2,"Yrois, Miss. Henriette (""Mrs Harbeck"")",female,24,0,0,248747,13,,S
|
||||
201,0,3,"Vande Walle, Mr. Nestor Cyriel",male,28,0,0,345770,9.5,,S
|
||||
202,0,3,"Sage, Mr. Frederick",male,,8,2,CA. 2343,69.55,,S
|
||||
203,0,3,"Johanson, Mr. Jakob Alfred",male,34,0,0,3101264,6.4958,,S
|
||||
204,0,3,"Youseff, Mr. Gerious",male,45.5,0,0,2628,7.225,,C
|
||||
205,1,3,"Cohen, Mr. Gurshon ""Gus""",male,18,0,0,A/5 3540,8.05,,S
|
||||
206,0,3,"Strom, Miss. Telma Matilda",female,2,0,1,347054,10.4625,G6,S
|
||||
207,0,3,"Backstrom, Mr. Karl Alfred",male,32,1,0,3101278,15.85,,S
|
||||
208,1,3,"Albimona, Mr. Nassef Cassem",male,26,0,0,2699,18.7875,,C
|
||||
209,1,3,"Carr, Miss. Helen ""Ellen""",female,16,0,0,367231,7.75,,Q
|
||||
210,1,1,"Blank, Mr. Henry",male,40,0,0,112277,31,A31,C
|
||||
211,0,3,"Ali, Mr. Ahmed",male,24,0,0,SOTON/O.Q. 3101311,7.05,,S
|
||||
212,1,2,"Cameron, Miss. Clear Annie",female,35,0,0,F.C.C. 13528,21,,S
|
||||
213,0,3,"Perkin, Mr. John Henry",male,22,0,0,A/5 21174,7.25,,S
|
||||
214,0,2,"Givard, Mr. Hans Kristensen",male,30,0,0,250646,13,,S
|
||||
215,0,3,"Kiernan, Mr. Philip",male,,1,0,367229,7.75,,Q
|
||||
216,1,1,"Newell, Miss. Madeleine",female,31,1,0,35273,113.275,D36,C
|
||||
217,1,3,"Honkanen, Miss. Eliina",female,27,0,0,STON/O2. 3101283,7.925,,S
|
||||
218,0,2,"Jacobsohn, Mr. Sidney Samuel",male,42,1,0,243847,27,,S
|
||||
219,1,1,"Bazzani, Miss. Albina",female,32,0,0,11813,76.2917,D15,C
|
||||
220,0,2,"Harris, Mr. Walter",male,30,0,0,W/C 14208,10.5,,S
|
||||
221,1,3,"Sunderland, Mr. Victor Francis",male,16,0,0,SOTON/OQ 392089,8.05,,S
|
||||
222,0,2,"Bracken, Mr. James H",male,27,0,0,220367,13,,S
|
||||
223,0,3,"Green, Mr. George Henry",male,51,0,0,21440,8.05,,S
|
||||
224,0,3,"Nenkoff, Mr. Christo",male,,0,0,349234,7.8958,,S
|
||||
225,1,1,"Hoyt, Mr. Frederick Maxfield",male,38,1,0,19943,90,C93,S
|
||||
226,0,3,"Berglund, Mr. Karl Ivar Sven",male,22,0,0,PP 4348,9.35,,S
|
||||
227,1,2,"Mellors, Mr. William John",male,19,0,0,SW/PP 751,10.5,,S
|
||||
228,0,3,"Lovell, Mr. John Hall (""Henry"")",male,20.5,0,0,A/5 21173,7.25,,S
|
||||
229,0,2,"Fahlstrom, Mr. Arne Jonas",male,18,0,0,236171,13,,S
|
||||
230,0,3,"Lefebre, Miss. Mathilde",female,,3,1,4133,25.4667,,S
|
||||
231,1,1,"Harris, Mrs. Henry Birkhardt (Irene Wallach)",female,35,1,0,36973,83.475,C83,S
|
||||
232,0,3,"Larsson, Mr. Bengt Edvin",male,29,0,0,347067,7.775,,S
|
||||
233,0,2,"Sjostedt, Mr. Ernst Adolf",male,59,0,0,237442,13.5,,S
|
||||
234,1,3,"Asplund, Miss. Lillian Gertrud",female,5,4,2,347077,31.3875,,S
|
||||
235,0,2,"Leyson, Mr. Robert William Norman",male,24,0,0,C.A. 29566,10.5,,S
|
||||
236,0,3,"Harknett, Miss. Alice Phoebe",female,,0,0,W./C. 6609,7.55,,S
|
||||
237,0,2,"Hold, Mr. Stephen",male,44,1,0,26707,26,,S
|
||||
238,1,2,"Collyer, Miss. Marjorie ""Lottie""",female,8,0,2,C.A. 31921,26.25,,S
|
||||
239,0,2,"Pengelly, Mr. Frederick William",male,19,0,0,28665,10.5,,S
|
||||
240,0,2,"Hunt, Mr. George Henry",male,33,0,0,SCO/W 1585,12.275,,S
|
||||
241,0,3,"Zabour, Miss. Thamine",female,,1,0,2665,14.4542,,C
|
||||
242,1,3,"Murphy, Miss. Katherine ""Kate""",female,,1,0,367230,15.5,,Q
|
||||
243,0,2,"Coleridge, Mr. Reginald Charles",male,29,0,0,W./C. 14263,10.5,,S
|
||||
244,0,3,"Maenpaa, Mr. Matti Alexanteri",male,22,0,0,STON/O 2. 3101275,7.125,,S
|
||||
245,0,3,"Attalah, Mr. Sleiman",male,30,0,0,2694,7.225,,C
|
||||
246,0,1,"Minahan, Dr. William Edward",male,44,2,0,19928,90,C78,Q
|
||||
247,0,3,"Lindahl, Miss. Agda Thorilda Viktoria",female,25,0,0,347071,7.775,,S
|
||||
248,1,2,"Hamalainen, Mrs. William (Anna)",female,24,0,2,250649,14.5,,S
|
||||
249,1,1,"Beckwith, Mr. Richard Leonard",male,37,1,1,11751,52.5542,D35,S
|
||||
250,0,2,"Carter, Rev. Ernest Courtenay",male,54,1,0,244252,26,,S
|
||||
251,0,3,"Reed, Mr. James George",male,,0,0,362316,7.25,,S
|
||||
252,0,3,"Strom, Mrs. Wilhelm (Elna Matilda Persson)",female,29,1,1,347054,10.4625,G6,S
|
||||
253,0,1,"Stead, Mr. William Thomas",male,62,0,0,113514,26.55,C87,S
|
||||
254,0,3,"Lobb, Mr. William Arthur",male,30,1,0,A/5. 3336,16.1,,S
|
||||
255,0,3,"Rosblom, Mrs. Viktor (Helena Wilhelmina)",female,41,0,2,370129,20.2125,,S
|
||||
256,1,3,"Touma, Mrs. Darwis (Hanne Youssef Razi)",female,29,0,2,2650,15.2458,,C
|
||||
257,1,1,"Thorne, Mrs. Gertrude Maybelle",female,,0,0,PC 17585,79.2,,C
|
||||
258,1,1,"Cherry, Miss. Gladys",female,30,0,0,110152,86.5,B77,S
|
||||
259,1,1,"Ward, Miss. Anna",female,35,0,0,PC 17755,512.3292,,C
|
||||
260,1,2,"Parrish, Mrs. (Lutie Davis)",female,50,0,1,230433,26,,S
|
||||
261,0,3,"Smith, Mr. Thomas",male,,0,0,384461,7.75,,Q
|
||||
262,1,3,"Asplund, Master. Edvin Rojj Felix",male,3,4,2,347077,31.3875,,S
|
||||
263,0,1,"Taussig, Mr. Emil",male,52,1,1,110413,79.65,E67,S
|
||||
264,0,1,"Harrison, Mr. William",male,40,0,0,112059,0,B94,S
|
||||
265,0,3,"Henry, Miss. Delia",female,,0,0,382649,7.75,,Q
|
||||
266,0,2,"Reeves, Mr. David",male,36,0,0,C.A. 17248,10.5,,S
|
||||
267,0,3,"Panula, Mr. Ernesti Arvid",male,16,4,1,3101295,39.6875,,S
|
||||
268,1,3,"Persson, Mr. Ernst Ulrik",male,25,1,0,347083,7.775,,S
|
||||
269,1,1,"Graham, Mrs. William Thompson (Edith Junkins)",female,58,0,1,PC 17582,153.4625,C125,S
|
||||
270,1,1,"Bissette, Miss. Amelia",female,35,0,0,PC 17760,135.6333,C99,S
|
||||
271,0,1,"Cairns, Mr. Alexander",male,,0,0,113798,31,,S
|
||||
272,1,3,"Tornquist, Mr. William Henry",male,25,0,0,LINE,0,,S
|
||||
273,1,2,"Mellinger, Mrs. (Elizabeth Anne Maidment)",female,41,0,1,250644,19.5,,S
|
||||
274,0,1,"Natsch, Mr. Charles H",male,37,0,1,PC 17596,29.7,C118,C
|
||||
275,1,3,"Healy, Miss. Hanora ""Nora""",female,,0,0,370375,7.75,,Q
|
||||
276,1,1,"Andrews, Miss. Kornelia Theodosia",female,63,1,0,13502,77.9583,D7,S
|
||||
277,0,3,"Lindblom, Miss. Augusta Charlotta",female,45,0,0,347073,7.75,,S
|
||||
278,0,2,"Parkes, Mr. Francis ""Frank""",male,,0,0,239853,0,,S
|
||||
279,0,3,"Rice, Master. Eric",male,7,4,1,382652,29.125,,Q
|
||||
280,1,3,"Abbott, Mrs. Stanton (Rosa Hunt)",female,35,1,1,C.A. 2673,20.25,,S
|
||||
281,0,3,"Duane, Mr. Frank",male,65,0,0,336439,7.75,,Q
|
||||
282,0,3,"Olsson, Mr. Nils Johan Goransson",male,28,0,0,347464,7.8542,,S
|
||||
283,0,3,"de Pelsmaeker, Mr. Alfons",male,16,0,0,345778,9.5,,S
|
||||
284,1,3,"Dorking, Mr. Edward Arthur",male,19,0,0,A/5. 10482,8.05,,S
|
||||
285,0,1,"Smith, Mr. Richard William",male,,0,0,113056,26,A19,S
|
||||
286,0,3,"Stankovic, Mr. Ivan",male,33,0,0,349239,8.6625,,C
|
||||
287,1,3,"de Mulder, Mr. Theodore",male,30,0,0,345774,9.5,,S
|
||||
288,0,3,"Naidenoff, Mr. Penko",male,22,0,0,349206,7.8958,,S
|
||||
289,1,2,"Hosono, Mr. Masabumi",male,42,0,0,237798,13,,S
|
||||
290,1,3,"Connolly, Miss. Kate",female,22,0,0,370373,7.75,,Q
|
||||
291,1,1,"Barber, Miss. Ellen ""Nellie""",female,26,0,0,19877,78.85,,S
|
||||
292,1,1,"Bishop, Mrs. Dickinson H (Helen Walton)",female,19,1,0,11967,91.0792,B49,C
|
||||
293,0,2,"Levy, Mr. Rene Jacques",male,36,0,0,SC/Paris 2163,12.875,D,C
|
||||
294,0,3,"Haas, Miss. Aloisia",female,24,0,0,349236,8.85,,S
|
||||
295,0,3,"Mineff, Mr. Ivan",male,24,0,0,349233,7.8958,,S
|
||||
296,0,1,"Lewy, Mr. Ervin G",male,,0,0,PC 17612,27.7208,,C
|
||||
297,0,3,"Hanna, Mr. Mansour",male,23.5,0,0,2693,7.2292,,C
|
||||
298,0,1,"Allison, Miss. Helen Loraine",female,2,1,2,113781,151.55,C22 C26,S
|
||||
299,1,1,"Saalfeld, Mr. Adolphe",male,,0,0,19988,30.5,C106,S
|
||||
300,1,1,"Baxter, Mrs. James (Helene DeLaudeniere Chaput)",female,50,0,1,PC 17558,247.5208,B58 B60,C
|
||||
301,1,3,"Kelly, Miss. Anna Katherine ""Annie Kate""",female,,0,0,9234,7.75,,Q
|
||||
302,1,3,"McCoy, Mr. Bernard",male,,2,0,367226,23.25,,Q
|
||||
303,0,3,"Johnson, Mr. William Cahoone Jr",male,19,0,0,LINE,0,,S
|
||||
304,1,2,"Keane, Miss. Nora A",female,,0,0,226593,12.35,E101,Q
|
||||
305,0,3,"Williams, Mr. Howard Hugh ""Harry""",male,,0,0,A/5 2466,8.05,,S
|
||||
306,1,1,"Allison, Master. Hudson Trevor",male,0.92,1,2,113781,151.55,C22 C26,S
|
||||
307,1,1,"Fleming, Miss. Margaret",female,,0,0,17421,110.8833,,C
|
||||
308,1,1,"Penasco y Castellana, Mrs. Victor de Satode (Maria Josefa Perez de Soto y Vallejo)",female,17,1,0,PC 17758,108.9,C65,C
|
||||
309,0,2,"Abelson, Mr. Samuel",male,30,1,0,P/PP 3381,24,,C
|
||||
310,1,1,"Francatelli, Miss. Laura Mabel",female,30,0,0,PC 17485,56.9292,E36,C
|
||||
311,1,1,"Hays, Miss. Margaret Bechstein",female,24,0,0,11767,83.1583,C54,C
|
||||
312,1,1,"Ryerson, Miss. Emily Borie",female,18,2,2,PC 17608,262.375,B57 B59 B63 B66,C
|
||||
313,0,2,"Lahtinen, Mrs. William (Anna Sylfven)",female,26,1,1,250651,26,,S
|
||||
314,0,3,"Hendekovic, Mr. Ignjac",male,28,0,0,349243,7.8958,,S
|
||||
315,0,2,"Hart, Mr. Benjamin",male,43,1,1,F.C.C. 13529,26.25,,S
|
||||
316,1,3,"Nilsson, Miss. Helmina Josefina",female,26,0,0,347470,7.8542,,S
|
||||
317,1,2,"Kantor, Mrs. Sinai (Miriam Sternin)",female,24,1,0,244367,26,,S
|
||||
318,0,2,"Moraweck, Dr. Ernest",male,54,0,0,29011,14,,S
|
||||
319,1,1,"Wick, Miss. Mary Natalie",female,31,0,2,36928,164.8667,C7,S
|
||||
320,1,1,"Spedden, Mrs. Frederic Oakley (Margaretta Corning Stone)",female,40,1,1,16966,134.5,E34,C
|
||||
321,0,3,"Dennis, Mr. Samuel",male,22,0,0,A/5 21172,7.25,,S
|
||||
322,0,3,"Danoff, Mr. Yoto",male,27,0,0,349219,7.8958,,S
|
||||
323,1,2,"Slayter, Miss. Hilda Mary",female,30,0,0,234818,12.35,,Q
|
||||
324,1,2,"Caldwell, Mrs. Albert Francis (Sylvia Mae Harbaugh)",female,22,1,1,248738,29,,S
|
||||
325,0,3,"Sage, Mr. George John Jr",male,,8,2,CA. 2343,69.55,,S
|
||||
326,1,1,"Young, Miss. Marie Grice",female,36,0,0,PC 17760,135.6333,C32,C
|
||||
327,0,3,"Nysveen, Mr. Johan Hansen",male,61,0,0,345364,6.2375,,S
|
||||
328,1,2,"Ball, Mrs. (Ada E Hall)",female,36,0,0,28551,13,D,S
|
||||
329,1,3,"Goldsmith, Mrs. Frank John (Emily Alice Brown)",female,31,1,1,363291,20.525,,S
|
||||
330,1,1,"Hippach, Miss. Jean Gertrude",female,16,0,1,111361,57.9792,B18,C
|
||||
331,1,3,"McCoy, Miss. Agnes",female,,2,0,367226,23.25,,Q
|
||||
332,0,1,"Partner, Mr. Austen",male,45.5,0,0,113043,28.5,C124,S
|
||||
333,0,1,"Graham, Mr. George Edward",male,38,0,1,PC 17582,153.4625,C91,S
|
||||
334,0,3,"Vander Planke, Mr. Leo Edmondus",male,16,2,0,345764,18,,S
|
||||
335,1,1,"Frauenthal, Mrs. Henry William (Clara Heinsheimer)",female,,1,0,PC 17611,133.65,,S
|
||||
336,0,3,"Denkoff, Mr. Mitto",male,,0,0,349225,7.8958,,S
|
||||
337,0,1,"Pears, Mr. Thomas Clinton",male,29,1,0,113776,66.6,C2,S
|
||||
338,1,1,"Burns, Miss. Elizabeth Margaret",female,41,0,0,16966,134.5,E40,C
|
||||
339,1,3,"Dahl, Mr. Karl Edwart",male,45,0,0,7598,8.05,,S
|
||||
340,0,1,"Blackwell, Mr. Stephen Weart",male,45,0,0,113784,35.5,T,S
|
||||
341,1,2,"Navratil, Master. Edmond Roger",male,2,1,1,230080,26,F2,S
|
||||
342,1,1,"Fortune, Miss. Alice Elizabeth",female,24,3,2,19950,263,C23 C25 C27,S
|
||||
343,0,2,"Collander, Mr. Erik Gustaf",male,28,0,0,248740,13,,S
|
||||
344,0,2,"Sedgwick, Mr. Charles Frederick Waddington",male,25,0,0,244361,13,,S
|
||||
345,0,2,"Fox, Mr. Stanley Hubert",male,36,0,0,229236,13,,S
|
||||
346,1,2,"Brown, Miss. Amelia ""Mildred""",female,24,0,0,248733,13,F33,S
|
||||
347,1,2,"Smith, Miss. Marion Elsie",female,40,0,0,31418,13,,S
|
||||
348,1,3,"Davison, Mrs. Thomas Henry (Mary E Finck)",female,,1,0,386525,16.1,,S
|
||||
349,1,3,"Coutts, Master. William Loch ""William""",male,3,1,1,C.A. 37671,15.9,,S
|
||||
350,0,3,"Dimic, Mr. Jovan",male,42,0,0,315088,8.6625,,S
|
||||
351,0,3,"Odahl, Mr. Nils Martin",male,23,0,0,7267,9.225,,S
|
||||
352,0,1,"Williams-Lambert, Mr. Fletcher Fellows",male,,0,0,113510,35,C128,S
|
||||
353,0,3,"Elias, Mr. Tannous",male,15,1,1,2695,7.2292,,C
|
||||
354,0,3,"Arnold-Franchi, Mr. Josef",male,25,1,0,349237,17.8,,S
|
||||
355,0,3,"Yousif, Mr. Wazli",male,,0,0,2647,7.225,,C
|
||||
356,0,3,"Vanden Steen, Mr. Leo Peter",male,28,0,0,345783,9.5,,S
|
||||
357,1,1,"Bowerman, Miss. Elsie Edith",female,22,0,1,113505,55,E33,S
|
||||
358,0,2,"Funk, Miss. Annie Clemmer",female,38,0,0,237671,13,,S
|
||||
359,1,3,"McGovern, Miss. Mary",female,,0,0,330931,7.8792,,Q
|
||||
360,1,3,"Mockler, Miss. Helen Mary ""Ellie""",female,,0,0,330980,7.8792,,Q
|
||||
361,0,3,"Skoog, Mr. Wilhelm",male,40,1,4,347088,27.9,,S
|
||||
362,0,2,"del Carlo, Mr. Sebastiano",male,29,1,0,SC/PARIS 2167,27.7208,,C
|
||||
363,0,3,"Barbara, Mrs. (Catherine David)",female,45,0,1,2691,14.4542,,C
|
||||
364,0,3,"Asim, Mr. Adola",male,35,0,0,SOTON/O.Q. 3101310,7.05,,S
|
||||
365,0,3,"O'Brien, Mr. Thomas",male,,1,0,370365,15.5,,Q
|
||||
366,0,3,"Adahl, Mr. Mauritz Nils Martin",male,30,0,0,C 7076,7.25,,S
|
||||
367,1,1,"Warren, Mrs. Frank Manley (Anna Sophia Atkinson)",female,60,1,0,110813,75.25,D37,C
|
||||
368,1,3,"Moussa, Mrs. (Mantoura Boulos)",female,,0,0,2626,7.2292,,C
|
||||
369,1,3,"Jermyn, Miss. Annie",female,,0,0,14313,7.75,,Q
|
||||
370,1,1,"Aubart, Mme. Leontine Pauline",female,24,0,0,PC 17477,69.3,B35,C
|
||||
371,1,1,"Harder, Mr. George Achilles",male,25,1,0,11765,55.4417,E50,C
|
||||
372,0,3,"Wiklund, Mr. Jakob Alfred",male,18,1,0,3101267,6.4958,,S
|
||||
373,0,3,"Beavan, Mr. William Thomas",male,19,0,0,323951,8.05,,S
|
||||
374,0,1,"Ringhini, Mr. Sante",male,22,0,0,PC 17760,135.6333,,C
|
||||
375,0,3,"Palsson, Miss. Stina Viola",female,3,3,1,349909,21.075,,S
|
||||
376,1,1,"Meyer, Mrs. Edgar Joseph (Leila Saks)",female,,1,0,PC 17604,82.1708,,C
|
||||
377,1,3,"Landergren, Miss. Aurora Adelia",female,22,0,0,C 7077,7.25,,S
|
||||
378,0,1,"Widener, Mr. Harry Elkins",male,27,0,2,113503,211.5,C82,C
|
||||
379,0,3,"Betros, Mr. Tannous",male,20,0,0,2648,4.0125,,C
|
||||
380,0,3,"Gustafsson, Mr. Karl Gideon",male,19,0,0,347069,7.775,,S
|
||||
381,1,1,"Bidois, Miss. Rosalie",female,42,0,0,PC 17757,227.525,,C
|
||||
382,1,3,"Nakid, Miss. Maria (""Mary"")",female,1,0,2,2653,15.7417,,C
|
||||
383,0,3,"Tikkanen, Mr. Juho",male,32,0,0,STON/O 2. 3101293,7.925,,S
|
||||
384,1,1,"Holverson, Mrs. Alexander Oskar (Mary Aline Towner)",female,35,1,0,113789,52,,S
|
||||
385,0,3,"Plotcharsky, Mr. Vasil",male,,0,0,349227,7.8958,,S
|
||||
386,0,2,"Davies, Mr. Charles Henry",male,18,0,0,S.O.C. 14879,73.5,,S
|
||||
387,0,3,"Goodwin, Master. Sidney Leonard",male,1,5,2,CA 2144,46.9,,S
|
||||
388,1,2,"Buss, Miss. Kate",female,36,0,0,27849,13,,S
|
||||
389,0,3,"Sadlier, Mr. Matthew",male,,0,0,367655,7.7292,,Q
|
||||
390,1,2,"Lehmann, Miss. Bertha",female,17,0,0,SC 1748,12,,C
|
||||
391,1,1,"Carter, Mr. William Ernest",male,36,1,2,113760,120,B96 B98,S
|
||||
392,1,3,"Jansson, Mr. Carl Olof",male,21,0,0,350034,7.7958,,S
|
||||
393,0,3,"Gustafsson, Mr. Johan Birger",male,28,2,0,3101277,7.925,,S
|
||||
394,1,1,"Newell, Miss. Marjorie",female,23,1,0,35273,113.275,D36,C
|
||||
395,1,3,"Sandstrom, Mrs. Hjalmar (Agnes Charlotta Bengtsson)",female,24,0,2,PP 9549,16.7,G6,S
|
||||
396,0,3,"Johansson, Mr. Erik",male,22,0,0,350052,7.7958,,S
|
||||
397,0,3,"Olsson, Miss. Elina",female,31,0,0,350407,7.8542,,S
|
||||
398,0,2,"McKane, Mr. Peter David",male,46,0,0,28403,26,,S
|
||||
399,0,2,"Pain, Dr. Alfred",male,23,0,0,244278,10.5,,S
|
||||
400,1,2,"Trout, Mrs. William H (Jessie L)",female,28,0,0,240929,12.65,,S
|
||||
401,1,3,"Niskanen, Mr. Juha",male,39,0,0,STON/O 2. 3101289,7.925,,S
|
||||
402,0,3,"Adams, Mr. John",male,26,0,0,341826,8.05,,S
|
||||
403,0,3,"Jussila, Miss. Mari Aina",female,21,1,0,4137,9.825,,S
|
||||
404,0,3,"Hakkarainen, Mr. Pekka Pietari",male,28,1,0,STON/O2. 3101279,15.85,,S
|
||||
405,0,3,"Oreskovic, Miss. Marija",female,20,0,0,315096,8.6625,,S
|
||||
406,0,2,"Gale, Mr. Shadrach",male,34,1,0,28664,21,,S
|
||||
407,0,3,"Widegren, Mr. Carl/Charles Peter",male,51,0,0,347064,7.75,,S
|
||||
408,1,2,"Richards, Master. William Rowe",male,3,1,1,29106,18.75,,S
|
||||
409,0,3,"Birkeland, Mr. Hans Martin Monsen",male,21,0,0,312992,7.775,,S
|
||||
410,0,3,"Lefebre, Miss. Ida",female,,3,1,4133,25.4667,,S
|
||||
411,0,3,"Sdycoff, Mr. Todor",male,,0,0,349222,7.8958,,S
|
||||
412,0,3,"Hart, Mr. Henry",male,,0,0,394140,6.8583,,Q
|
||||
413,1,1,"Minahan, Miss. Daisy E",female,33,1,0,19928,90,C78,Q
|
||||
414,0,2,"Cunningham, Mr. Alfred Fleming",male,,0,0,239853,0,,S
|
||||
415,1,3,"Sundman, Mr. Johan Julian",male,44,0,0,STON/O 2. 3101269,7.925,,S
|
||||
416,0,3,"Meek, Mrs. Thomas (Annie Louise Rowley)",female,,0,0,343095,8.05,,S
|
||||
417,1,2,"Drew, Mrs. James Vivian (Lulu Thorne Christian)",female,34,1,1,28220,32.5,,S
|
||||
418,1,2,"Silven, Miss. Lyyli Karoliina",female,18,0,2,250652,13,,S
|
||||
419,0,2,"Matthews, Mr. William John",male,30,0,0,28228,13,,S
|
||||
420,0,3,"Van Impe, Miss. Catharina",female,10,0,2,345773,24.15,,S
|
||||
421,0,3,"Gheorgheff, Mr. Stanio",male,,0,0,349254,7.8958,,C
|
||||
422,0,3,"Charters, Mr. David",male,21,0,0,A/5. 13032,7.7333,,Q
|
||||
423,0,3,"Zimmerman, Mr. Leo",male,29,0,0,315082,7.875,,S
|
||||
424,0,3,"Danbom, Mrs. Ernst Gilbert (Anna Sigrid Maria Brogren)",female,28,1,1,347080,14.4,,S
|
||||
425,0,3,"Rosblom, Mr. Viktor Richard",male,18,1,1,370129,20.2125,,S
|
||||
426,0,3,"Wiseman, Mr. Phillippe",male,,0,0,A/4. 34244,7.25,,S
|
||||
427,1,2,"Clarke, Mrs. Charles V (Ada Maria Winfield)",female,28,1,0,2003,26,,S
|
||||
428,1,2,"Phillips, Miss. Kate Florence (""Mrs Kate Louise Phillips Marshall"")",female,19,0,0,250655,26,,S
|
||||
429,0,3,"Flynn, Mr. James",male,,0,0,364851,7.75,,Q
|
||||
430,1,3,"Pickard, Mr. Berk (Berk Trembisky)",male,32,0,0,SOTON/O.Q. 392078,8.05,E10,S
|
||||
431,1,1,"Bjornstrom-Steffansson, Mr. Mauritz Hakan",male,28,0,0,110564,26.55,C52,S
|
||||
432,1,3,"Thorneycroft, Mrs. Percival (Florence Kate White)",female,,1,0,376564,16.1,,S
|
||||
433,1,2,"Louch, Mrs. Charles Alexander (Alice Adelaide Slow)",female,42,1,0,SC/AH 3085,26,,S
|
||||
434,0,3,"Kallio, Mr. Nikolai Erland",male,17,0,0,STON/O 2. 3101274,7.125,,S
|
||||
435,0,1,"Silvey, Mr. William Baird",male,50,1,0,13507,55.9,E44,S
|
||||
436,1,1,"Carter, Miss. Lucile Polk",female,14,1,2,113760,120,B96 B98,S
|
||||
437,0,3,"Ford, Miss. Doolina Margaret ""Daisy""",female,21,2,2,W./C. 6608,34.375,,S
|
||||
438,1,2,"Richards, Mrs. Sidney (Emily Hocking)",female,24,2,3,29106,18.75,,S
|
||||
439,0,1,"Fortune, Mr. Mark",male,64,1,4,19950,263,C23 C25 C27,S
|
||||
440,0,2,"Kvillner, Mr. Johan Henrik Johannesson",male,31,0,0,C.A. 18723,10.5,,S
|
||||
441,1,2,"Hart, Mrs. Benjamin (Esther Ada Bloomfield)",female,45,1,1,F.C.C. 13529,26.25,,S
|
||||
442,0,3,"Hampe, Mr. Leon",male,20,0,0,345769,9.5,,S
|
||||
443,0,3,"Petterson, Mr. Johan Emil",male,25,1,0,347076,7.775,,S
|
||||
444,1,2,"Reynaldo, Ms. Encarnacion",female,28,0,0,230434,13,,S
|
||||
445,1,3,"Johannesen-Bratthammer, Mr. Bernt",male,,0,0,65306,8.1125,,S
|
||||
446,1,1,"Dodge, Master. Washington",male,4,0,2,33638,81.8583,A34,S
|
||||
447,1,2,"Mellinger, Miss. Madeleine Violet",female,13,0,1,250644,19.5,,S
|
||||
448,1,1,"Seward, Mr. Frederic Kimber",male,34,0,0,113794,26.55,,S
|
||||
449,1,3,"Baclini, Miss. Marie Catherine",female,5,2,1,2666,19.2583,,C
|
||||
450,1,1,"Peuchen, Major. Arthur Godfrey",male,52,0,0,113786,30.5,C104,S
|
||||
451,0,2,"West, Mr. Edwy Arthur",male,36,1,2,C.A. 34651,27.75,,S
|
||||
452,0,3,"Hagland, Mr. Ingvald Olai Olsen",male,,1,0,65303,19.9667,,S
|
||||
453,0,1,"Foreman, Mr. Benjamin Laventall",male,30,0,0,113051,27.75,C111,C
|
||||
454,1,1,"Goldenberg, Mr. Samuel L",male,49,1,0,17453,89.1042,C92,C
|
||||
455,0,3,"Peduzzi, Mr. Joseph",male,,0,0,A/5 2817,8.05,,S
|
||||
456,1,3,"Jalsevac, Mr. Ivan",male,29,0,0,349240,7.8958,,C
|
||||
457,0,1,"Millet, Mr. Francis Davis",male,65,0,0,13509,26.55,E38,S
|
||||
458,1,1,"Kenyon, Mrs. Frederick R (Marion)",female,,1,0,17464,51.8625,D21,S
|
||||
459,1,2,"Toomey, Miss. Ellen",female,50,0,0,F.C.C. 13531,10.5,,S
|
||||
460,0,3,"O'Connor, Mr. Maurice",male,,0,0,371060,7.75,,Q
|
||||
461,1,1,"Anderson, Mr. Harry",male,48,0,0,19952,26.55,E12,S
|
||||
462,0,3,"Morley, Mr. William",male,34,0,0,364506,8.05,,S
|
||||
463,0,1,"Gee, Mr. Arthur H",male,47,0,0,111320,38.5,E63,S
|
||||
464,0,2,"Milling, Mr. Jacob Christian",male,48,0,0,234360,13,,S
|
||||
465,0,3,"Maisner, Mr. Simon",male,,0,0,A/S 2816,8.05,,S
|
||||
466,0,3,"Goncalves, Mr. Manuel Estanslas",male,38,0,0,SOTON/O.Q. 3101306,7.05,,S
|
||||
467,0,2,"Campbell, Mr. William",male,,0,0,239853,0,,S
|
||||
468,0,1,"Smart, Mr. John Montgomery",male,56,0,0,113792,26.55,,S
|
||||
469,0,3,"Scanlan, Mr. James",male,,0,0,36209,7.725,,Q
|
||||
470,1,3,"Baclini, Miss. Helene Barbara",female,0.75,2,1,2666,19.2583,,C
|
||||
471,0,3,"Keefe, Mr. Arthur",male,,0,0,323592,7.25,,S
|
||||
472,0,3,"Cacic, Mr. Luka",male,38,0,0,315089,8.6625,,S
|
||||
473,1,2,"West, Mrs. Edwy Arthur (Ada Mary Worth)",female,33,1,2,C.A. 34651,27.75,,S
|
||||
474,1,2,"Jerwan, Mrs. Amin S (Marie Marthe Thuillard)",female,23,0,0,SC/AH Basle 541,13.7917,D,C
|
||||
475,0,3,"Strandberg, Miss. Ida Sofia",female,22,0,0,7553,9.8375,,S
|
||||
476,0,1,"Clifford, Mr. George Quincy",male,,0,0,110465,52,A14,S
|
||||
477,0,2,"Renouf, Mr. Peter Henry",male,34,1,0,31027,21,,S
|
||||
478,0,3,"Braund, Mr. Lewis Richard",male,29,1,0,3460,7.0458,,S
|
||||
479,0,3,"Karlsson, Mr. Nils August",male,22,0,0,350060,7.5208,,S
|
||||
480,1,3,"Hirvonen, Miss. Hildur E",female,2,0,1,3101298,12.2875,,S
|
||||
481,0,3,"Goodwin, Master. Harold Victor",male,9,5,2,CA 2144,46.9,,S
|
||||
482,0,2,"Frost, Mr. Anthony Wood ""Archie""",male,,0,0,239854,0,,S
|
||||
483,0,3,"Rouse, Mr. Richard Henry",male,50,0,0,A/5 3594,8.05,,S
|
||||
484,1,3,"Turkula, Mrs. (Hedwig)",female,63,0,0,4134,9.5875,,S
|
||||
485,1,1,"Bishop, Mr. Dickinson H",male,25,1,0,11967,91.0792,B49,C
|
||||
486,0,3,"Lefebre, Miss. Jeannie",female,,3,1,4133,25.4667,,S
|
||||
487,1,1,"Hoyt, Mrs. Frederick Maxfield (Jane Anne Forby)",female,35,1,0,19943,90,C93,S
|
||||
488,0,1,"Kent, Mr. Edward Austin",male,58,0,0,11771,29.7,B37,C
|
||||
489,0,3,"Somerton, Mr. Francis William",male,30,0,0,A.5. 18509,8.05,,S
|
||||
490,1,3,"Coutts, Master. Eden Leslie ""Neville""",male,9,1,1,C.A. 37671,15.9,,S
|
||||
491,0,3,"Hagland, Mr. Konrad Mathias Reiersen",male,,1,0,65304,19.9667,,S
|
||||
492,0,3,"Windelov, Mr. Einar",male,21,0,0,SOTON/OQ 3101317,7.25,,S
|
||||
493,0,1,"Molson, Mr. Harry Markland",male,55,0,0,113787,30.5,C30,S
|
||||
494,0,1,"Artagaveytia, Mr. Ramon",male,71,0,0,PC 17609,49.5042,,C
|
||||
495,0,3,"Stanley, Mr. Edward Roland",male,21,0,0,A/4 45380,8.05,,S
|
||||
496,0,3,"Yousseff, Mr. Gerious",male,,0,0,2627,14.4583,,C
|
||||
497,1,1,"Eustis, Miss. Elizabeth Mussey",female,54,1,0,36947,78.2667,D20,C
|
||||
498,0,3,"Shellard, Mr. Frederick William",male,,0,0,C.A. 6212,15.1,,S
|
||||
499,0,1,"Allison, Mrs. Hudson J C (Bessie Waldo Daniels)",female,25,1,2,113781,151.55,C22 C26,S
|
||||
500,0,3,"Svensson, Mr. Olof",male,24,0,0,350035,7.7958,,S
|
||||
501,0,3,"Calic, Mr. Petar",male,17,0,0,315086,8.6625,,S
|
||||
502,0,3,"Canavan, Miss. Mary",female,21,0,0,364846,7.75,,Q
|
||||
503,0,3,"O'Sullivan, Miss. Bridget Mary",female,,0,0,330909,7.6292,,Q
|
||||
504,0,3,"Laitinen, Miss. Kristina Sofia",female,37,0,0,4135,9.5875,,S
|
||||
505,1,1,"Maioni, Miss. Roberta",female,16,0,0,110152,86.5,B79,S
|
||||
506,0,1,"Penasco y Castellana, Mr. Victor de Satode",male,18,1,0,PC 17758,108.9,C65,C
|
||||
507,1,2,"Quick, Mrs. Frederick Charles (Jane Richards)",female,33,0,2,26360,26,,S
|
||||
508,1,1,"Bradley, Mr. George (""George Arthur Brayton"")",male,,0,0,111427,26.55,,S
|
||||
509,0,3,"Olsen, Mr. Henry Margido",male,28,0,0,C 4001,22.525,,S
|
||||
510,1,3,"Lang, Mr. Fang",male,26,0,0,1601,56.4958,,S
|
||||
511,1,3,"Daly, Mr. Eugene Patrick",male,29,0,0,382651,7.75,,Q
|
||||
512,0,3,"Webber, Mr. James",male,,0,0,SOTON/OQ 3101316,8.05,,S
|
||||
513,1,1,"McGough, Mr. James Robert",male,36,0,0,PC 17473,26.2875,E25,S
|
||||
514,1,1,"Rothschild, Mrs. Martin (Elizabeth L. Barrett)",female,54,1,0,PC 17603,59.4,,C
|
||||
515,0,3,"Coleff, Mr. Satio",male,24,0,0,349209,7.4958,,S
|
||||
516,0,1,"Walker, Mr. William Anderson",male,47,0,0,36967,34.0208,D46,S
|
||||
517,1,2,"Lemore, Mrs. (Amelia Milley)",female,34,0,0,C.A. 34260,10.5,F33,S
|
||||
518,0,3,"Ryan, Mr. Patrick",male,,0,0,371110,24.15,,Q
|
||||
519,1,2,"Angle, Mrs. William A (Florence ""Mary"" Agnes Hughes)",female,36,1,0,226875,26,,S
|
||||
520,0,3,"Pavlovic, Mr. Stefo",male,32,0,0,349242,7.8958,,S
|
||||
521,1,1,"Perreault, Miss. Anne",female,30,0,0,12749,93.5,B73,S
|
||||
522,0,3,"Vovk, Mr. Janko",male,22,0,0,349252,7.8958,,S
|
||||
523,0,3,"Lahoud, Mr. Sarkis",male,,0,0,2624,7.225,,C
|
||||
524,1,1,"Hippach, Mrs. Louis Albert (Ida Sophia Fischer)",female,44,0,1,111361,57.9792,B18,C
|
||||
525,0,3,"Kassem, Mr. Fared",male,,0,0,2700,7.2292,,C
|
||||
526,0,3,"Farrell, Mr. James",male,40.5,0,0,367232,7.75,,Q
|
||||
527,1,2,"Ridsdale, Miss. Lucy",female,50,0,0,W./C. 14258,10.5,,S
|
||||
528,0,1,"Farthing, Mr. John",male,,0,0,PC 17483,221.7792,C95,S
|
||||
529,0,3,"Salonen, Mr. Johan Werner",male,39,0,0,3101296,7.925,,S
|
||||
530,0,2,"Hocking, Mr. Richard George",male,23,2,1,29104,11.5,,S
|
||||
531,1,2,"Quick, Miss. Phyllis May",female,2,1,1,26360,26,,S
|
||||
532,0,3,"Toufik, Mr. Nakli",male,,0,0,2641,7.2292,,C
|
||||
533,0,3,"Elias, Mr. Joseph Jr",male,17,1,1,2690,7.2292,,C
|
||||
534,1,3,"Peter, Mrs. Catherine (Catherine Rizk)",female,,0,2,2668,22.3583,,C
|
||||
535,0,3,"Cacic, Miss. Marija",female,30,0,0,315084,8.6625,,S
|
||||
536,1,2,"Hart, Miss. Eva Miriam",female,7,0,2,F.C.C. 13529,26.25,,S
|
||||
537,0,1,"Butt, Major. Archibald Willingham",male,45,0,0,113050,26.55,B38,S
|
||||
538,1,1,"LeRoy, Miss. Bertha",female,30,0,0,PC 17761,106.425,,C
|
||||
539,0,3,"Risien, Mr. Samuel Beard",male,,0,0,364498,14.5,,S
|
||||
540,1,1,"Frolicher, Miss. Hedwig Margaritha",female,22,0,2,13568,49.5,B39,C
|
||||
541,1,1,"Crosby, Miss. Harriet R",female,36,0,2,WE/P 5735,71,B22,S
|
||||
542,0,3,"Andersson, Miss. Ingeborg Constanzia",female,9,4,2,347082,31.275,,S
|
||||
543,0,3,"Andersson, Miss. Sigrid Elisabeth",female,11,4,2,347082,31.275,,S
|
||||
544,1,2,"Beane, Mr. Edward",male,32,1,0,2908,26,,S
|
||||
545,0,1,"Douglas, Mr. Walter Donald",male,50,1,0,PC 17761,106.425,C86,C
|
||||
546,0,1,"Nicholson, Mr. Arthur Ernest",male,64,0,0,693,26,,S
|
||||
547,1,2,"Beane, Mrs. Edward (Ethel Clarke)",female,19,1,0,2908,26,,S
|
||||
548,1,2,"Padro y Manent, Mr. Julian",male,,0,0,SC/PARIS 2146,13.8625,,C
|
||||
549,0,3,"Goldsmith, Mr. Frank John",male,33,1,1,363291,20.525,,S
|
||||
550,1,2,"Davies, Master. John Morgan Jr",male,8,1,1,C.A. 33112,36.75,,S
|
||||
551,1,1,"Thayer, Mr. John Borland Jr",male,17,0,2,17421,110.8833,C70,C
|
||||
552,0,2,"Sharp, Mr. Percival James R",male,27,0,0,244358,26,,S
|
||||
553,0,3,"O'Brien, Mr. Timothy",male,,0,0,330979,7.8292,,Q
|
||||
554,1,3,"Leeni, Mr. Fahim (""Philip Zenni"")",male,22,0,0,2620,7.225,,C
|
||||
555,1,3,"Ohman, Miss. Velin",female,22,0,0,347085,7.775,,S
|
||||
556,0,1,"Wright, Mr. George",male,62,0,0,113807,26.55,,S
|
||||
557,1,1,"Duff Gordon, Lady. (Lucille Christiana Sutherland) (""Mrs Morgan"")",female,48,1,0,11755,39.6,A16,C
|
||||
558,0,1,"Robbins, Mr. Victor",male,,0,0,PC 17757,227.525,,C
|
||||
559,1,1,"Taussig, Mrs. Emil (Tillie Mandelbaum)",female,39,1,1,110413,79.65,E67,S
|
||||
560,1,3,"de Messemaeker, Mrs. Guillaume Joseph (Emma)",female,36,1,0,345572,17.4,,S
|
||||
561,0,3,"Morrow, Mr. Thomas Rowan",male,,0,0,372622,7.75,,Q
|
||||
562,0,3,"Sivic, Mr. Husein",male,40,0,0,349251,7.8958,,S
|
||||
563,0,2,"Norman, Mr. Robert Douglas",male,28,0,0,218629,13.5,,S
|
||||
564,0,3,"Simmons, Mr. John",male,,0,0,SOTON/OQ 392082,8.05,,S
|
||||
565,0,3,"Meanwell, Miss. (Marion Ogden)",female,,0,0,SOTON/O.Q. 392087,8.05,,S
|
||||
566,0,3,"Davies, Mr. Alfred J",male,24,2,0,A/4 48871,24.15,,S
|
||||
567,0,3,"Stoytcheff, Mr. Ilia",male,19,0,0,349205,7.8958,,S
|
||||
568,0,3,"Palsson, Mrs. Nils (Alma Cornelia Berglund)",female,29,0,4,349909,21.075,,S
|
||||
569,0,3,"Doharr, Mr. Tannous",male,,0,0,2686,7.2292,,C
|
||||
570,1,3,"Jonsson, Mr. Carl",male,32,0,0,350417,7.8542,,S
|
||||
571,1,2,"Harris, Mr. George",male,62,0,0,S.W./PP 752,10.5,,S
|
||||
572,1,1,"Appleton, Mrs. Edward Dale (Charlotte Lamson)",female,53,2,0,11769,51.4792,C101,S
|
||||
573,1,1,"Flynn, Mr. John Irwin (""Irving"")",male,36,0,0,PC 17474,26.3875,E25,S
|
||||
574,1,3,"Kelly, Miss. Mary",female,,0,0,14312,7.75,,Q
|
||||
575,0,3,"Rush, Mr. Alfred George John",male,16,0,0,A/4. 20589,8.05,,S
|
||||
576,0,3,"Patchett, Mr. George",male,19,0,0,358585,14.5,,S
|
||||
577,1,2,"Garside, Miss. Ethel",female,34,0,0,243880,13,,S
|
||||
578,1,1,"Silvey, Mrs. William Baird (Alice Munger)",female,39,1,0,13507,55.9,E44,S
|
||||
579,0,3,"Caram, Mrs. Joseph (Maria Elias)",female,,1,0,2689,14.4583,,C
|
||||
580,1,3,"Jussila, Mr. Eiriik",male,32,0,0,STON/O 2. 3101286,7.925,,S
|
||||
581,1,2,"Christy, Miss. Julie Rachel",female,25,1,1,237789,30,,S
|
||||
582,1,1,"Thayer, Mrs. John Borland (Marian Longstreth Morris)",female,39,1,1,17421,110.8833,C68,C
|
||||
583,0,2,"Downton, Mr. William James",male,54,0,0,28403,26,,S
|
||||
584,0,1,"Ross, Mr. John Hugo",male,36,0,0,13049,40.125,A10,C
|
||||
585,0,3,"Paulner, Mr. Uscher",male,,0,0,3411,8.7125,,C
|
||||
586,1,1,"Taussig, Miss. Ruth",female,18,0,2,110413,79.65,E68,S
|
||||
587,0,2,"Jarvis, Mr. John Denzil",male,47,0,0,237565,15,,S
|
||||
588,1,1,"Frolicher-Stehli, Mr. Maxmillian",male,60,1,1,13567,79.2,B41,C
|
||||
589,0,3,"Gilinski, Mr. Eliezer",male,22,0,0,14973,8.05,,S
|
||||
590,0,3,"Murdlin, Mr. Joseph",male,,0,0,A./5. 3235,8.05,,S
|
||||
591,0,3,"Rintamaki, Mr. Matti",male,35,0,0,STON/O 2. 3101273,7.125,,S
|
||||
592,1,1,"Stephenson, Mrs. Walter Bertram (Martha Eustis)",female,52,1,0,36947,78.2667,D20,C
|
||||
593,0,3,"Elsbury, Mr. William James",male,47,0,0,A/5 3902,7.25,,S
|
||||
594,0,3,"Bourke, Miss. Mary",female,,0,2,364848,7.75,,Q
|
||||
595,0,2,"Chapman, Mr. John Henry",male,37,1,0,SC/AH 29037,26,,S
|
||||
596,0,3,"Van Impe, Mr. Jean Baptiste",male,36,1,1,345773,24.15,,S
|
||||
597,1,2,"Leitch, Miss. Jessie Wills",female,,0,0,248727,33,,S
|
||||
598,0,3,"Johnson, Mr. Alfred",male,49,0,0,LINE,0,,S
|
||||
599,0,3,"Boulos, Mr. Hanna",male,,0,0,2664,7.225,,C
|
||||
600,1,1,"Duff Gordon, Sir. Cosmo Edmund (""Mr Morgan"")",male,49,1,0,PC 17485,56.9292,A20,C
|
||||
601,1,2,"Jacobsohn, Mrs. Sidney Samuel (Amy Frances Christy)",female,24,2,1,243847,27,,S
|
||||
602,0,3,"Slabenoff, Mr. Petco",male,,0,0,349214,7.8958,,S
|
||||
603,0,1,"Harrington, Mr. Charles H",male,,0,0,113796,42.4,,S
|
||||
604,0,3,"Torber, Mr. Ernst William",male,44,0,0,364511,8.05,,S
|
||||
605,1,1,"Homer, Mr. Harry (""Mr E Haven"")",male,35,0,0,111426,26.55,,C
|
||||
606,0,3,"Lindell, Mr. Edvard Bengtsson",male,36,1,0,349910,15.55,,S
|
||||
607,0,3,"Karaic, Mr. Milan",male,30,0,0,349246,7.8958,,S
|
||||
608,1,1,"Daniel, Mr. Robert Williams",male,27,0,0,113804,30.5,,S
|
||||
609,1,2,"Laroche, Mrs. Joseph (Juliette Marie Louise Lafargue)",female,22,1,2,SC/Paris 2123,41.5792,,C
|
||||
610,1,1,"Shutes, Miss. Elizabeth W",female,40,0,0,PC 17582,153.4625,C125,S
|
||||
611,0,3,"Andersson, Mrs. Anders Johan (Alfrida Konstantia Brogren)",female,39,1,5,347082,31.275,,S
|
||||
612,0,3,"Jardin, Mr. Jose Neto",male,,0,0,SOTON/O.Q. 3101305,7.05,,S
|
||||
613,1,3,"Murphy, Miss. Margaret Jane",female,,1,0,367230,15.5,,Q
|
||||
614,0,3,"Horgan, Mr. John",male,,0,0,370377,7.75,,Q
|
||||
615,0,3,"Brocklebank, Mr. William Alfred",male,35,0,0,364512,8.05,,S
|
||||
616,1,2,"Herman, Miss. Alice",female,24,1,2,220845,65,,S
|
||||
617,0,3,"Danbom, Mr. Ernst Gilbert",male,34,1,1,347080,14.4,,S
|
||||
618,0,3,"Lobb, Mrs. William Arthur (Cordelia K Stanlick)",female,26,1,0,A/5. 3336,16.1,,S
|
||||
619,1,2,"Becker, Miss. Marion Louise",female,4,2,1,230136,39,F4,S
|
||||
620,0,2,"Gavey, Mr. Lawrence",male,26,0,0,31028,10.5,,S
|
||||
621,0,3,"Yasbeck, Mr. Antoni",male,27,1,0,2659,14.4542,,C
|
||||
622,1,1,"Kimball, Mr. Edwin Nelson Jr",male,42,1,0,11753,52.5542,D19,S
|
||||
623,1,3,"Nakid, Mr. Sahid",male,20,1,1,2653,15.7417,,C
|
||||
624,0,3,"Hansen, Mr. Henry Damsgaard",male,21,0,0,350029,7.8542,,S
|
||||
625,0,3,"Bowen, Mr. David John ""Dai""",male,21,0,0,54636,16.1,,S
|
||||
626,0,1,"Sutton, Mr. Frederick",male,61,0,0,36963,32.3208,D50,S
|
||||
627,0,2,"Kirkland, Rev. Charles Leonard",male,57,0,0,219533,12.35,,Q
|
||||
628,1,1,"Longley, Miss. Gretchen Fiske",female,21,0,0,13502,77.9583,D9,S
|
||||
629,0,3,"Bostandyeff, Mr. Guentcho",male,26,0,0,349224,7.8958,,S
|
||||
630,0,3,"O'Connell, Mr. Patrick D",male,,0,0,334912,7.7333,,Q
|
||||
631,1,1,"Barkworth, Mr. Algernon Henry Wilson",male,80,0,0,27042,30,A23,S
|
||||
632,0,3,"Lundahl, Mr. Johan Svensson",male,51,0,0,347743,7.0542,,S
|
||||
633,1,1,"Stahelin-Maeglin, Dr. Max",male,32,0,0,13214,30.5,B50,C
|
||||
634,0,1,"Parr, Mr. William Henry Marsh",male,,0,0,112052,0,,S
|
||||
635,0,3,"Skoog, Miss. Mabel",female,9,3,2,347088,27.9,,S
|
||||
636,1,2,"Davis, Miss. Mary",female,28,0,0,237668,13,,S
|
||||
637,0,3,"Leinonen, Mr. Antti Gustaf",male,32,0,0,STON/O 2. 3101292,7.925,,S
|
||||
638,0,2,"Collyer, Mr. Harvey",male,31,1,1,C.A. 31921,26.25,,S
|
||||
639,0,3,"Panula, Mrs. Juha (Maria Emilia Ojala)",female,41,0,5,3101295,39.6875,,S
|
||||
640,0,3,"Thorneycroft, Mr. Percival",male,,1,0,376564,16.1,,S
|
||||
641,0,3,"Jensen, Mr. Hans Peder",male,20,0,0,350050,7.8542,,S
|
||||
642,1,1,"Sagesser, Mlle. Emma",female,24,0,0,PC 17477,69.3,B35,C
|
||||
643,0,3,"Skoog, Miss. Margit Elizabeth",female,2,3,2,347088,27.9,,S
|
||||
644,1,3,"Foo, Mr. Choong",male,,0,0,1601,56.4958,,S
|
||||
645,1,3,"Baclini, Miss. Eugenie",female,0.75,2,1,2666,19.2583,,C
|
||||
646,1,1,"Harper, Mr. Henry Sleeper",male,48,1,0,PC 17572,76.7292,D33,C
|
||||
647,0,3,"Cor, Mr. Liudevit",male,19,0,0,349231,7.8958,,S
|
||||
648,1,1,"Simonius-Blumer, Col. Oberst Alfons",male,56,0,0,13213,35.5,A26,C
|
||||
649,0,3,"Willey, Mr. Edward",male,,0,0,S.O./P.P. 751,7.55,,S
|
||||
650,1,3,"Stanley, Miss. Amy Zillah Elsie",female,23,0,0,CA. 2314,7.55,,S
|
||||
651,0,3,"Mitkoff, Mr. Mito",male,,0,0,349221,7.8958,,S
|
||||
652,1,2,"Doling, Miss. Elsie",female,18,0,1,231919,23,,S
|
||||
653,0,3,"Kalvik, Mr. Johannes Halvorsen",male,21,0,0,8475,8.4333,,S
|
||||
654,1,3,"O'Leary, Miss. Hanora ""Norah""",female,,0,0,330919,7.8292,,Q
|
||||
655,0,3,"Hegarty, Miss. Hanora ""Nora""",female,18,0,0,365226,6.75,,Q
|
||||
656,0,2,"Hickman, Mr. Leonard Mark",male,24,2,0,S.O.C. 14879,73.5,,S
|
||||
657,0,3,"Radeff, Mr. Alexander",male,,0,0,349223,7.8958,,S
|
||||
658,0,3,"Bourke, Mrs. John (Catherine)",female,32,1,1,364849,15.5,,Q
|
||||
659,0,2,"Eitemiller, Mr. George Floyd",male,23,0,0,29751,13,,S
|
||||
660,0,1,"Newell, Mr. Arthur Webster",male,58,0,2,35273,113.275,D48,C
|
||||
661,1,1,"Frauenthal, Dr. Henry William",male,50,2,0,PC 17611,133.65,,S
|
||||
662,0,3,"Badt, Mr. Mohamed",male,40,0,0,2623,7.225,,C
|
||||
663,0,1,"Colley, Mr. Edward Pomeroy",male,47,0,0,5727,25.5875,E58,S
|
||||
664,0,3,"Coleff, Mr. Peju",male,36,0,0,349210,7.4958,,S
|
||||
665,1,3,"Lindqvist, Mr. Eino William",male,20,1,0,STON/O 2. 3101285,7.925,,S
|
||||
666,0,2,"Hickman, Mr. Lewis",male,32,2,0,S.O.C. 14879,73.5,,S
|
||||
667,0,2,"Butler, Mr. Reginald Fenton",male,25,0,0,234686,13,,S
|
||||
668,0,3,"Rommetvedt, Mr. Knud Paust",male,,0,0,312993,7.775,,S
|
||||
669,0,3,"Cook, Mr. Jacob",male,43,0,0,A/5 3536,8.05,,S
|
||||
670,1,1,"Taylor, Mrs. Elmer Zebley (Juliet Cummins Wright)",female,,1,0,19996,52,C126,S
|
||||
671,1,2,"Brown, Mrs. Thomas William Solomon (Elizabeth Catherine Ford)",female,40,1,1,29750,39,,S
|
||||
672,0,1,"Davidson, Mr. Thornton",male,31,1,0,F.C. 12750,52,B71,S
|
||||
673,0,2,"Mitchell, Mr. Henry Michael",male,70,0,0,C.A. 24580,10.5,,S
|
||||
674,1,2,"Wilhelms, Mr. Charles",male,31,0,0,244270,13,,S
|
||||
675,0,2,"Watson, Mr. Ennis Hastings",male,,0,0,239856,0,,S
|
||||
676,0,3,"Edvardsson, Mr. Gustaf Hjalmar",male,18,0,0,349912,7.775,,S
|
||||
677,0,3,"Sawyer, Mr. Frederick Charles",male,24.5,0,0,342826,8.05,,S
|
||||
678,1,3,"Turja, Miss. Anna Sofia",female,18,0,0,4138,9.8417,,S
|
||||
679,0,3,"Goodwin, Mrs. Frederick (Augusta Tyler)",female,43,1,6,CA 2144,46.9,,S
|
||||
680,1,1,"Cardeza, Mr. Thomas Drake Martinez",male,36,0,1,PC 17755,512.3292,B51 B53 B55,C
|
||||
681,0,3,"Peters, Miss. Katie",female,,0,0,330935,8.1375,,Q
|
||||
682,1,1,"Hassab, Mr. Hammad",male,27,0,0,PC 17572,76.7292,D49,C
|
||||
683,0,3,"Olsvigen, Mr. Thor Anderson",male,20,0,0,6563,9.225,,S
|
||||
684,0,3,"Goodwin, Mr. Charles Edward",male,14,5,2,CA 2144,46.9,,S
|
||||
685,0,2,"Brown, Mr. Thomas William Solomon",male,60,1,1,29750,39,,S
|
||||
686,0,2,"Laroche, Mr. Joseph Philippe Lemercier",male,25,1,2,SC/Paris 2123,41.5792,,C
|
||||
687,0,3,"Panula, Mr. Jaako Arnold",male,14,4,1,3101295,39.6875,,S
|
||||
688,0,3,"Dakic, Mr. Branko",male,19,0,0,349228,10.1708,,S
|
||||
689,0,3,"Fischer, Mr. Eberhard Thelander",male,18,0,0,350036,7.7958,,S
|
||||
690,1,1,"Madill, Miss. Georgette Alexandra",female,15,0,1,24160,211.3375,B5,S
|
||||
691,1,1,"Dick, Mr. Albert Adrian",male,31,1,0,17474,57,B20,S
|
||||
692,1,3,"Karun, Miss. Manca",female,4,0,1,349256,13.4167,,C
|
||||
693,1,3,"Lam, Mr. Ali",male,,0,0,1601,56.4958,,S
|
||||
694,0,3,"Saad, Mr. Khalil",male,25,0,0,2672,7.225,,C
|
||||
695,0,1,"Weir, Col. John",male,60,0,0,113800,26.55,,S
|
||||
696,0,2,"Chapman, Mr. Charles Henry",male,52,0,0,248731,13.5,,S
|
||||
697,0,3,"Kelly, Mr. James",male,44,0,0,363592,8.05,,S
|
||||
698,1,3,"Mullens, Miss. Katherine ""Katie""",female,,0,0,35852,7.7333,,Q
|
||||
699,0,1,"Thayer, Mr. John Borland",male,49,1,1,17421,110.8833,C68,C
|
||||
700,0,3,"Humblen, Mr. Adolf Mathias Nicolai Olsen",male,42,0,0,348121,7.65,F G63,S
|
||||
701,1,1,"Astor, Mrs. John Jacob (Madeleine Talmadge Force)",female,18,1,0,PC 17757,227.525,C62 C64,C
|
||||
702,1,1,"Silverthorne, Mr. Spencer Victor",male,35,0,0,PC 17475,26.2875,E24,S
|
||||
703,0,3,"Barbara, Miss. Saiide",female,18,0,1,2691,14.4542,,C
|
||||
704,0,3,"Gallagher, Mr. Martin",male,25,0,0,36864,7.7417,,Q
|
||||
705,0,3,"Hansen, Mr. Henrik Juul",male,26,1,0,350025,7.8542,,S
|
||||
706,0,2,"Morley, Mr. Henry Samuel (""Mr Henry Marshall"")",male,39,0,0,250655,26,,S
|
||||
707,1,2,"Kelly, Mrs. Florence ""Fannie""",female,45,0,0,223596,13.5,,S
|
||||
708,1,1,"Calderhead, Mr. Edward Pennington",male,42,0,0,PC 17476,26.2875,E24,S
|
||||
709,1,1,"Cleaver, Miss. Alice",female,22,0,0,113781,151.55,,S
|
||||
710,1,3,"Moubarek, Master. Halim Gonios (""William George"")",male,,1,1,2661,15.2458,,C
|
||||
711,1,1,"Mayne, Mlle. Berthe Antonine (""Mrs de Villiers"")",female,24,0,0,PC 17482,49.5042,C90,C
|
||||
712,0,1,"Klaber, Mr. Herman",male,,0,0,113028,26.55,C124,S
|
||||
713,1,1,"Taylor, Mr. Elmer Zebley",male,48,1,0,19996,52,C126,S
|
||||
714,0,3,"Larsson, Mr. August Viktor",male,29,0,0,7545,9.4833,,S
|
||||
715,0,2,"Greenberg, Mr. Samuel",male,52,0,0,250647,13,,S
|
||||
716,0,3,"Soholt, Mr. Peter Andreas Lauritz Andersen",male,19,0,0,348124,7.65,F G73,S
|
||||
717,1,1,"Endres, Miss. Caroline Louise",female,38,0,0,PC 17757,227.525,C45,C
|
||||
718,1,2,"Troutt, Miss. Edwina Celia ""Winnie""",female,27,0,0,34218,10.5,E101,S
|
||||
719,0,3,"McEvoy, Mr. Michael",male,,0,0,36568,15.5,,Q
|
||||
720,0,3,"Johnson, Mr. Malkolm Joackim",male,33,0,0,347062,7.775,,S
|
||||
721,1,2,"Harper, Miss. Annie Jessie ""Nina""",female,6,0,1,248727,33,,S
|
||||
722,0,3,"Jensen, Mr. Svend Lauritz",male,17,1,0,350048,7.0542,,S
|
||||
723,0,2,"Gillespie, Mr. William Henry",male,34,0,0,12233,13,,S
|
||||
724,0,2,"Hodges, Mr. Henry Price",male,50,0,0,250643,13,,S
|
||||
725,1,1,"Chambers, Mr. Norman Campbell",male,27,1,0,113806,53.1,E8,S
|
||||
726,0,3,"Oreskovic, Mr. Luka",male,20,0,0,315094,8.6625,,S
|
||||
727,1,2,"Renouf, Mrs. Peter Henry (Lillian Jefferys)",female,30,3,0,31027,21,,S
|
||||
728,1,3,"Mannion, Miss. Margareth",female,,0,0,36866,7.7375,,Q
|
||||
729,0,2,"Bryhl, Mr. Kurt Arnold Gottfrid",male,25,1,0,236853,26,,S
|
||||
730,0,3,"Ilmakangas, Miss. Pieta Sofia",female,25,1,0,STON/O2. 3101271,7.925,,S
|
||||
731,1,1,"Allen, Miss. Elisabeth Walton",female,29,0,0,24160,211.3375,B5,S
|
||||
732,0,3,"Hassan, Mr. Houssein G N",male,11,0,0,2699,18.7875,,C
|
||||
733,0,2,"Knight, Mr. Robert J",male,,0,0,239855,0,,S
|
||||
734,0,2,"Berriman, Mr. William John",male,23,0,0,28425,13,,S
|
||||
735,0,2,"Troupiansky, Mr. Moses Aaron",male,23,0,0,233639,13,,S
|
||||
736,0,3,"Williams, Mr. Leslie",male,28.5,0,0,54636,16.1,,S
|
||||
737,0,3,"Ford, Mrs. Edward (Margaret Ann Watson)",female,48,1,3,W./C. 6608,34.375,,S
|
||||
738,1,1,"Lesurer, Mr. Gustave J",male,35,0,0,PC 17755,512.3292,B101,C
|
||||
739,0,3,"Ivanoff, Mr. Kanio",male,,0,0,349201,7.8958,,S
|
||||
740,0,3,"Nankoff, Mr. Minko",male,,0,0,349218,7.8958,,S
|
||||
741,1,1,"Hawksford, Mr. Walter James",male,,0,0,16988,30,D45,S
|
||||
742,0,1,"Cavendish, Mr. Tyrell William",male,36,1,0,19877,78.85,C46,S
|
||||
743,1,1,"Ryerson, Miss. Susan Parker ""Suzette""",female,21,2,2,PC 17608,262.375,B57 B59 B63 B66,C
|
||||
744,0,3,"McNamee, Mr. Neal",male,24,1,0,376566,16.1,,S
|
||||
745,1,3,"Stranden, Mr. Juho",male,31,0,0,STON/O 2. 3101288,7.925,,S
|
||||
746,0,1,"Crosby, Capt. Edward Gifford",male,70,1,1,WE/P 5735,71,B22,S
|
||||
747,0,3,"Abbott, Mr. Rossmore Edward",male,16,1,1,C.A. 2673,20.25,,S
|
||||
748,1,2,"Sinkkonen, Miss. Anna",female,30,0,0,250648,13,,S
|
||||
749,0,1,"Marvin, Mr. Daniel Warner",male,19,1,0,113773,53.1,D30,S
|
||||
750,0,3,"Connaghton, Mr. Michael",male,31,0,0,335097,7.75,,Q
|
||||
751,1,2,"Wells, Miss. Joan",female,4,1,1,29103,23,,S
|
||||
752,1,3,"Moor, Master. Meier",male,6,0,1,392096,12.475,E121,S
|
||||
753,0,3,"Vande Velde, Mr. Johannes Joseph",male,33,0,0,345780,9.5,,S
|
||||
754,0,3,"Jonkoff, Mr. Lalio",male,23,0,0,349204,7.8958,,S
|
||||
755,1,2,"Herman, Mrs. Samuel (Jane Laver)",female,48,1,2,220845,65,,S
|
||||
756,1,2,"Hamalainen, Master. Viljo",male,0.67,1,1,250649,14.5,,S
|
||||
757,0,3,"Carlsson, Mr. August Sigfrid",male,28,0,0,350042,7.7958,,S
|
||||
758,0,2,"Bailey, Mr. Percy Andrew",male,18,0,0,29108,11.5,,S
|
||||
759,0,3,"Theobald, Mr. Thomas Leonard",male,34,0,0,363294,8.05,,S
|
||||
760,1,1,"Rothes, the Countess. of (Lucy Noel Martha Dyer-Edwards)",female,33,0,0,110152,86.5,B77,S
|
||||
761,0,3,"Garfirth, Mr. John",male,,0,0,358585,14.5,,S
|
||||
762,0,3,"Nirva, Mr. Iisakki Antino Aijo",male,41,0,0,SOTON/O2 3101272,7.125,,S
|
||||
763,1,3,"Barah, Mr. Hanna Assi",male,20,0,0,2663,7.2292,,C
|
||||
764,1,1,"Carter, Mrs. William Ernest (Lucile Polk)",female,36,1,2,113760,120,B96 B98,S
|
||||
765,0,3,"Eklund, Mr. Hans Linus",male,16,0,0,347074,7.775,,S
|
||||
766,1,1,"Hogeboom, Mrs. John C (Anna Andrews)",female,51,1,0,13502,77.9583,D11,S
|
||||
767,0,1,"Brewe, Dr. Arthur Jackson",male,,0,0,112379,39.6,,C
|
||||
768,0,3,"Mangan, Miss. Mary",female,30.5,0,0,364850,7.75,,Q
|
||||
769,0,3,"Moran, Mr. Daniel J",male,,1,0,371110,24.15,,Q
|
||||
770,0,3,"Gronnestad, Mr. Daniel Danielsen",male,32,0,0,8471,8.3625,,S
|
||||
771,0,3,"Lievens, Mr. Rene Aime",male,24,0,0,345781,9.5,,S
|
||||
772,0,3,"Jensen, Mr. Niels Peder",male,48,0,0,350047,7.8542,,S
|
||||
773,0,2,"Mack, Mrs. (Mary)",female,57,0,0,S.O./P.P. 3,10.5,E77,S
|
||||
774,0,3,"Elias, Mr. Dibo",male,,0,0,2674,7.225,,C
|
||||
775,1,2,"Hocking, Mrs. Elizabeth (Eliza Needs)",female,54,1,3,29105,23,,S
|
||||
776,0,3,"Myhrman, Mr. Pehr Fabian Oliver Malkolm",male,18,0,0,347078,7.75,,S
|
||||
777,0,3,"Tobin, Mr. Roger",male,,0,0,383121,7.75,F38,Q
|
||||
778,1,3,"Emanuel, Miss. Virginia Ethel",female,5,0,0,364516,12.475,,S
|
||||
779,0,3,"Kilgannon, Mr. Thomas J",male,,0,0,36865,7.7375,,Q
|
||||
780,1,1,"Robert, Mrs. Edward Scott (Elisabeth Walton McMillan)",female,43,0,1,24160,211.3375,B3,S
|
||||
781,1,3,"Ayoub, Miss. Banoura",female,13,0,0,2687,7.2292,,C
|
||||
782,1,1,"Dick, Mrs. Albert Adrian (Vera Gillespie)",female,17,1,0,17474,57,B20,S
|
||||
783,0,1,"Long, Mr. Milton Clyde",male,29,0,0,113501,30,D6,S
|
||||
784,0,3,"Johnston, Mr. Andrew G",male,,1,2,W./C. 6607,23.45,,S
|
||||
785,0,3,"Ali, Mr. William",male,25,0,0,SOTON/O.Q. 3101312,7.05,,S
|
||||
786,0,3,"Harmer, Mr. Abraham (David Lishin)",male,25,0,0,374887,7.25,,S
|
||||
787,1,3,"Sjoblom, Miss. Anna Sofia",female,18,0,0,3101265,7.4958,,S
|
||||
788,0,3,"Rice, Master. George Hugh",male,8,4,1,382652,29.125,,Q
|
||||
789,1,3,"Dean, Master. Bertram Vere",male,1,1,2,C.A. 2315,20.575,,S
|
||||
790,0,1,"Guggenheim, Mr. Benjamin",male,46,0,0,PC 17593,79.2,B82 B84,C
|
||||
791,0,3,"Keane, Mr. Andrew ""Andy""",male,,0,0,12460,7.75,,Q
|
||||
792,0,2,"Gaskell, Mr. Alfred",male,16,0,0,239865,26,,S
|
||||
793,0,3,"Sage, Miss. Stella Anna",female,,8,2,CA. 2343,69.55,,S
|
||||
794,0,1,"Hoyt, Mr. William Fisher",male,,0,0,PC 17600,30.6958,,C
|
||||
795,0,3,"Dantcheff, Mr. Ristiu",male,25,0,0,349203,7.8958,,S
|
||||
796,0,2,"Otter, Mr. Richard",male,39,0,0,28213,13,,S
|
||||
797,1,1,"Leader, Dr. Alice (Farnham)",female,49,0,0,17465,25.9292,D17,S
|
||||
798,1,3,"Osman, Mrs. Mara",female,31,0,0,349244,8.6833,,S
|
||||
799,0,3,"Ibrahim Shawah, Mr. Yousseff",male,30,0,0,2685,7.2292,,C
|
||||
800,0,3,"Van Impe, Mrs. Jean Baptiste (Rosalie Paula Govaert)",female,30,1,1,345773,24.15,,S
|
||||
801,0,2,"Ponesell, Mr. Martin",male,34,0,0,250647,13,,S
|
||||
802,1,2,"Collyer, Mrs. Harvey (Charlotte Annie Tate)",female,31,1,1,C.A. 31921,26.25,,S
|
||||
803,1,1,"Carter, Master. William Thornton II",male,11,1,2,113760,120,B96 B98,S
|
||||
804,1,3,"Thomas, Master. Assad Alexander",male,0.42,0,1,2625,8.5167,,C
|
||||
805,1,3,"Hedman, Mr. Oskar Arvid",male,27,0,0,347089,6.975,,S
|
||||
806,0,3,"Johansson, Mr. Karl Johan",male,31,0,0,347063,7.775,,S
|
||||
807,0,1,"Andrews, Mr. Thomas Jr",male,39,0,0,112050,0,A36,S
|
||||
808,0,3,"Pettersson, Miss. Ellen Natalia",female,18,0,0,347087,7.775,,S
|
||||
809,0,2,"Meyer, Mr. August",male,39,0,0,248723,13,,S
|
||||
810,1,1,"Chambers, Mrs. Norman Campbell (Bertha Griggs)",female,33,1,0,113806,53.1,E8,S
|
||||
811,0,3,"Alexander, Mr. William",male,26,0,0,3474,7.8875,,S
|
||||
812,0,3,"Lester, Mr. James",male,39,0,0,A/4 48871,24.15,,S
|
||||
813,0,2,"Slemen, Mr. Richard James",male,35,0,0,28206,10.5,,S
|
||||
814,0,3,"Andersson, Miss. Ebba Iris Alfrida",female,6,4,2,347082,31.275,,S
|
||||
815,0,3,"Tomlin, Mr. Ernest Portage",male,30.5,0,0,364499,8.05,,S
|
||||
816,0,1,"Fry, Mr. Richard",male,,0,0,112058,0,B102,S
|
||||
817,0,3,"Heininen, Miss. Wendla Maria",female,23,0,0,STON/O2. 3101290,7.925,,S
|
||||
818,0,2,"Mallet, Mr. Albert",male,31,1,1,S.C./PARIS 2079,37.0042,,C
|
||||
819,0,3,"Holm, Mr. John Fredrik Alexander",male,43,0,0,C 7075,6.45,,S
|
||||
820,0,3,"Skoog, Master. Karl Thorsten",male,10,3,2,347088,27.9,,S
|
||||
821,1,1,"Hays, Mrs. Charles Melville (Clara Jennings Gregg)",female,52,1,1,12749,93.5,B69,S
|
||||
822,1,3,"Lulic, Mr. Nikola",male,27,0,0,315098,8.6625,,S
|
||||
823,0,1,"Reuchlin, Jonkheer. John George",male,38,0,0,19972,0,,S
|
||||
824,1,3,"Moor, Mrs. (Beila)",female,27,0,1,392096,12.475,E121,S
|
||||
825,0,3,"Panula, Master. Urho Abraham",male,2,4,1,3101295,39.6875,,S
|
||||
826,0,3,"Flynn, Mr. John",male,,0,0,368323,6.95,,Q
|
||||
827,0,3,"Lam, Mr. Len",male,,0,0,1601,56.4958,,S
|
||||
828,1,2,"Mallet, Master. Andre",male,1,0,2,S.C./PARIS 2079,37.0042,,C
|
||||
829,1,3,"McCormack, Mr. Thomas Joseph",male,,0,0,367228,7.75,,Q
|
||||
830,1,1,"Stone, Mrs. George Nelson (Martha Evelyn)",female,62,0,0,113572,80,B28,
|
||||
831,1,3,"Yasbeck, Mrs. Antoni (Selini Alexander)",female,15,1,0,2659,14.4542,,C
|
||||
832,1,2,"Richards, Master. George Sibley",male,0.83,1,1,29106,18.75,,S
|
||||
833,0,3,"Saad, Mr. Amin",male,,0,0,2671,7.2292,,C
|
||||
834,0,3,"Augustsson, Mr. Albert",male,23,0,0,347468,7.8542,,S
|
||||
835,0,3,"Allum, Mr. Owen George",male,18,0,0,2223,8.3,,S
|
||||
836,1,1,"Compton, Miss. Sara Rebecca",female,39,1,1,PC 17756,83.1583,E49,C
|
||||
837,0,3,"Pasic, Mr. Jakob",male,21,0,0,315097,8.6625,,S
|
||||
838,0,3,"Sirota, Mr. Maurice",male,,0,0,392092,8.05,,S
|
||||
839,1,3,"Chip, Mr. Chang",male,32,0,0,1601,56.4958,,S
|
||||
840,1,1,"Marechal, Mr. Pierre",male,,0,0,11774,29.7,C47,C
|
||||
841,0,3,"Alhomaki, Mr. Ilmari Rudolf",male,20,0,0,SOTON/O2 3101287,7.925,,S
|
||||
842,0,2,"Mudd, Mr. Thomas Charles",male,16,0,0,S.O./P.P. 3,10.5,,S
|
||||
843,1,1,"Serepeca, Miss. Augusta",female,30,0,0,113798,31,,C
|
||||
844,0,3,"Lemberopolous, Mr. Peter L",male,34.5,0,0,2683,6.4375,,C
|
||||
845,0,3,"Culumovic, Mr. Jeso",male,17,0,0,315090,8.6625,,S
|
||||
846,0,3,"Abbing, Mr. Anthony",male,42,0,0,C.A. 5547,7.55,,S
|
||||
847,0,3,"Sage, Mr. Douglas Bullen",male,,8,2,CA. 2343,69.55,,S
|
||||
848,0,3,"Markoff, Mr. Marin",male,35,0,0,349213,7.8958,,C
|
||||
849,0,2,"Harper, Rev. John",male,28,0,1,248727,33,,S
|
||||
850,1,1,"Goldenberg, Mrs. Samuel L (Edwiga Grabowska)",female,,1,0,17453,89.1042,C92,C
|
||||
851,0,3,"Andersson, Master. Sigvard Harald Elias",male,4,4,2,347082,31.275,,S
|
||||
852,0,3,"Svensson, Mr. Johan",male,74,0,0,347060,7.775,,S
|
||||
853,0,3,"Boulos, Miss. Nourelain",female,9,1,1,2678,15.2458,,C
|
||||
854,1,1,"Lines, Miss. Mary Conover",female,16,0,1,PC 17592,39.4,D28,S
|
||||
855,0,2,"Carter, Mrs. Ernest Courtenay (Lilian Hughes)",female,44,1,0,244252,26,,S
|
||||
856,1,3,"Aks, Mrs. Sam (Leah Rosen)",female,18,0,1,392091,9.35,,S
|
||||
857,1,1,"Wick, Mrs. George Dennick (Mary Hitchcock)",female,45,1,1,36928,164.8667,,S
|
||||
858,1,1,"Daly, Mr. Peter Denis ",male,51,0,0,113055,26.55,E17,S
|
||||
859,1,3,"Baclini, Mrs. Solomon (Latifa Qurban)",female,24,0,3,2666,19.2583,,C
|
||||
860,0,3,"Razi, Mr. Raihed",male,,0,0,2629,7.2292,,C
|
||||
861,0,3,"Hansen, Mr. Claus Peter",male,41,2,0,350026,14.1083,,S
|
||||
862,0,2,"Giles, Mr. Frederick Edward",male,21,1,0,28134,11.5,,S
|
||||
863,1,1,"Swift, Mrs. Frederick Joel (Margaret Welles Barron)",female,48,0,0,17466,25.9292,D17,S
|
||||
864,0,3,"Sage, Miss. Dorothy Edith ""Dolly""",female,,8,2,CA. 2343,69.55,,S
|
||||
865,0,2,"Gill, Mr. John William",male,24,0,0,233866,13,,S
|
||||
866,1,2,"Bystrom, Mrs. (Karolina)",female,42,0,0,236852,13,,S
|
||||
867,1,2,"Duran y More, Miss. Asuncion",female,27,1,0,SC/PARIS 2149,13.8583,,C
|
||||
868,0,1,"Roebling, Mr. Washington Augustus II",male,31,0,0,PC 17590,50.4958,A24,S
|
||||
869,0,3,"van Melkebeke, Mr. Philemon",male,,0,0,345777,9.5,,S
|
||||
870,1,3,"Johnson, Master. Harold Theodor",male,4,1,1,347742,11.1333,,S
|
||||
871,0,3,"Balkic, Mr. Cerin",male,26,0,0,349248,7.8958,,S
|
||||
872,1,1,"Beckwith, Mrs. Richard Leonard (Sallie Monypeny)",female,47,1,1,11751,52.5542,D35,S
|
||||
873,0,1,"Carlsson, Mr. Frans Olof",male,33,0,0,695,5,B51 B53 B55,S
|
||||
874,0,3,"Vander Cruyssen, Mr. Victor",male,47,0,0,345765,9,,S
|
||||
875,1,2,"Abelson, Mrs. Samuel (Hannah Wizosky)",female,28,1,0,P/PP 3381,24,,C
|
||||
876,1,3,"Najib, Miss. Adele Kiamie ""Jane""",female,15,0,0,2667,7.225,,C
|
||||
877,0,3,"Gustafsson, Mr. Alfred Ossian",male,20,0,0,7534,9.8458,,S
|
||||
878,0,3,"Petroff, Mr. Nedelio",male,19,0,0,349212,7.8958,,S
|
||||
879,0,3,"Laleff, Mr. Kristo",male,,0,0,349217,7.8958,,S
|
||||
880,1,1,"Potter, Mrs. Thomas Jr (Lily Alexenia Wilson)",female,56,0,1,11767,83.1583,C50,C
|
||||
881,1,2,"Shelley, Mrs. William (Imanita Parrish Hall)",female,25,0,1,230433,26,,S
|
||||
882,0,3,"Markun, Mr. Johann",male,33,0,0,349257,7.8958,,S
|
||||
883,0,3,"Dahlberg, Miss. Gerda Ulrika",female,22,0,0,7552,10.5167,,S
|
||||
884,0,2,"Banfield, Mr. Frederick James",male,28,0,0,C.A./SOTON 34068,10.5,,S
|
||||
885,0,3,"Sutehall, Mr. Henry Jr",male,25,0,0,SOTON/OQ 392076,7.05,,S
|
||||
886,0,3,"Rice, Mrs. William (Margaret Norton)",female,39,0,5,382652,29.125,,Q
|
||||
887,0,2,"Montvila, Rev. Juozas",male,27,0,0,211536,13,,S
|
||||
888,1,1,"Graham, Miss. Margaret Edith",female,19,0,0,112053,30,B42,S
|
||||
889,0,3,"Johnston, Miss. Catherine Helen ""Carrie""",female,,1,2,W./C. 6607,23.45,,S
|
||||
890,1,1,"Behr, Mr. Karl Howell",male,26,0,0,111369,30,C148,C
|
||||
891,0,3,"Dooley, Mr. Patrick",male,32,0,0,370376,7.75,,Q
|
||||
|
40
alexandrov_dmitrii_lab_4/lab4.py
Normal file
@@ -0,0 +1,40 @@
from scipy.cluster import hierarchy
import pandas as pd
from matplotlib import pyplot as plt


def start():
    # Read the Sberbank housing data, keeping only floor area and sale price
    data = pd.read_csv('sberbank_data.csv', index_col='id')
    x = data[['full_sq', 'price_doc']]
    plt.figure(1, figsize=(16, 9))
    plt.title('Dendrogram of price clustering')

    # Manual binning of price per square metre, used as a reference for the clustering
    prices = [0, 0, 0, 0]
    for ind, row in x.iterrows():
        price_per_sq = row['price_doc'] / row['full_sq']
        if price_per_sq < 100000:
            prices[0] += 1
        elif price_per_sq < 300000:
            prices[1] += 1
        elif price_per_sq < 500000:
            prices[2] += 1
        else:
            prices[3] += 1
    print('Manual price distribution:')
    print('low prices: ' + str(prices[0]))
    print('medium prices: ' + str(prices[1]))
    print('high prices: ' + str(prices[2]))
    print('premium prices: ' + str(prices[3]))

    # Single-linkage hierarchical clustering; the dendrogram is truncated to the last 15 merges
    hierarchy.dendrogram(hierarchy.linkage(x, method='single'),
                         truncate_mode='lastp',
                         p=15,
                         orientation='top',
                         leaf_rotation=90,
                         leaf_font_size=8,
                         show_contracted=True)

    plt.show()


start()
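The readme below compares this machine clustering with manually counted price classes. One way to make that comparison concrete is to cut the linkage tree into flat clusters; this is a hedged sketch, not part of the original lab, and the choice of 4 clusters is an assumption made only to mirror the four manual bins.

```python
from scipy.cluster import hierarchy
import pandas as pd

data = pd.read_csv('sberbank_data.csv', index_col='id')
x = data[['full_sq', 'price_doc']]

# Cut the single-linkage tree into at most 4 flat clusters and count their sizes
linkage_matrix = hierarchy.linkage(x, method='single')
labels = hierarchy.fcluster(linkage_matrix, t=4, criterion='maxclust')
print(pd.Series(labels).value_counts())  # cluster sizes, to compare with the manual bins
```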
27
alexandrov_dmitrii_lab_4/readme.md
Normal file
@@ -0,0 +1,27 @@
### Task
Use the clustering method from your variant on the data chosen for your variant, formulating the task yourself.
Interpret the results and assess how well the method suits the task you formulated.

Variant 1: dendrogram

The task was formulated as follows: the records must be split into clusters by price and floor area.

### Running the program
The file lab4.py contains and runs the program; it requires no arguments or configuration.

### Program description
The program reads prices and floor areas from the Sberbank real-estate market statistics file.
Since the assignment requires evaluating the machine clustering, for comparison the program also counts and prints the number of records in each manually defined price class.
The program then clusters the data with the nearest-point (single-linkage) algorithm (there is not enough memory for the other linkage methods) and draws a dendrogram based on the clustering.
The displayed dendrogram is limited to the 15 last (topmost) merges.

### Test results
Based on the tests, the following can be said:
* The last merges in the dendrogram join outliers to the 'main' cluster, i.e. 10-20 records are merged with a cluster of more than 28000 records.
* This matches reality: the manual classification shows that there are only about 20 premium (abnormally high) prices, while the rest fall into the other classes.
* Since the data have no upper bound on price, abnormally high-price outliers form separate clusters under this algorithm, which hurts readability.
* Capping the prices did not give positive results either (see the sketch after this section): a 'main' cluster formed again, and individual values were merged into it last.
* So the algorithm itself is not effective here.

Summary: the nearest-point (single-linkage) algorithm is too sensitive to outliers, so it can be considered ineffective for raw data. As a visualization tool, the dendrogram is, if anything, less clear than a scatter plot.
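The price-capping experiment mentioned in the results above is not included in lab4.py. The following is a hedged sketch of how such a cap might look; the threshold of 500000 roubles per square metre and the filtering step are illustrative assumptions, not the author's code.

```python
import pandas as pd
from scipy.cluster import hierarchy
from matplotlib import pyplot as plt

data = pd.read_csv('sberbank_data.csv', index_col='id')
x = data[['full_sq', 'price_doc']]

# Assumed cap: drop records whose price per square metre exceeds 500000
capped = x[x['price_doc'] / x['full_sq'] < 500000]

hierarchy.dendrogram(hierarchy.linkage(capped, method='single'),
                     truncate_mode='lastp', p=15)
plt.show()
```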
28896
alexandrov_dmitrii_lab_4/sberbank_data.csv
Normal file
48
alexandrov_dmitrii_lab_5/lab5.py
Normal file
@@ -0,0 +1,48 @@
from matplotlib import pyplot as plt
from sklearn import metrics
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import PolynomialFeatures
from sklearn.pipeline import Pipeline
import pandas as pd


def start():
    # Selected input features and the target price column
    data = pd.read_csv('sberbank_data.csv', index_col='id')
    x = data[['timestamp', 'full_sq', 'floor', 'max_floor', 'build_year', 'num_room', 'material', 'kremlin_km']]
    y = data[['price_doc']]

    # Replace missing values with zeros
    x = x.replace('NA', 0)
    x.fillna(0, inplace=True)

    # Keep only the year part of the 'YYYY-MM-DD' timestamp
    col_date = []
    for val in x['timestamp']:
        col_date.append(val.split('-', 1)[0])

    x = x.drop(columns='timestamp')
    x['timestamp'] = col_date

    # 99% training data, 1% test data
    x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.01, random_state=42)

    # Polynomial regression of degree 3
    poly = Pipeline([('poly', PolynomialFeatures(degree=3)),
                     ('linear', LinearRegression())])
    poly.fit(x_train, y_train)

    # Replace obviously wrong (e.g. negative) predictions with the mean price
    y_mean = y['price_doc'].mean()
    y_predicted = poly.predict(x_test)
    for i, n in enumerate(y_predicted):
        if n < 10000:
            y_predicted[i] = y_mean

    print('Training score (R2):')
    print(metrics.r2_score(y_test, y_predicted))

    # Actual prices in green, predicted prices in red
    plt.figure(1, figsize=(16, 9))
    plt.title('Comparison of training results')
    plt.scatter(x=[i for i in range(len(y_test))], y=y_test, c='g', s=5)
    plt.scatter(x=[i for i in range(len(y_test))], y=y_predicted, c='r', s=5)
    plt.show()


start()
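A brief side note on the year extraction above: the same result can usually be obtained with pandas' datetime accessor instead of splitting the string by hand. This is an equivalent sketch under the assumption that the 'timestamp' column holds ISO-style dates; it is not the author's code.

```python
import pandas as pd

x = pd.DataFrame({'timestamp': ['2014-06-30', '2015-01-12']})
# Equivalent to the manual split('-') loop above: keep only the year
x['timestamp'] = pd.to_datetime(x['timestamp']).dt.year
print(x)
```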
36
alexandrov_dmitrii_lab_5/readme.md
Normal file
@@ -0,0 +1,36 @@
### Task
Use the regression from your variant on the data chosen for your variant, formulating the task yourself.
Interpret the results and assess how well it suits the task you formulated.

Variant 1: polynomial regression

The task was formulated as follows: predict the housing price from the selected features using regression.

### Running the program
The file lab5.py contains and runs the program; it requires no arguments or configuration.

### Program description
The program reads housing prices as the target and the following features as inputs: year the listing was posted, floor area, floor, number of floors, year of construction, number of rooms, building material, and distance to the Kremlin (the nominal city centre).
It then preprocesses the data (converts missing values to numbers) and keeps only the year of the listing.

After preprocessing, the program splits the data into 99% training and 1% test material and fits a polynomial regression model of degree 3.
The model then generates predictions from the test inputs. These predictions are post-processed: negative prices are replaced.

Finally, the program evaluates the predictions with the coefficient of determination and prints the result to the console. It also shows scatter plots of the actual (green dots) and predicted (red dots) prices.

### Test results
Based on the tests, the following can be said:
* The algorithm handles the full feature set poorly, so the most significant features had to be selected by hand.
* Depending on the data, different regression degrees give different results. In general, plain linear regression gave a coefficient of about 0.3. With higher degrees, the polynomial regression produced outlier prices, e.g. -300 million, which hurt the score.
* So that clearly outlying predictions would not ruin the score (the coefficient otherwise dropped to around -1000), these outliers were replaced with the mean value.
* It was found empirically that degree 3 gives the best result (a coefficient of 0.54); a sketch of such a degree sweep follows this section.
* Even the best result of 0.54 can be called unacceptable: only in about half of the cases is the predicted price even roughly close to the actual one.
* The problem might be solved by including more features or using other models (linear regression, for example, produced no outliers).

Example console output:
>Training score (R2):
>
>0.5390648784908953

Summary: the algorithm can be tuned to some effectiveness, but it does not suit these particular data. It would be better to look for a different regression model.
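The degree selection described above was done by hand. Below is a hedged sketch of how such a sweep over polynomial degrees might be automated; the degree range 1-3, the function name and the reuse of lab5's train/test split are assumptions for illustration only.

```python
from sklearn.linear_model import LinearRegression
from sklearn.metrics import r2_score
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures


def sweep_degrees(x_train, x_test, y_train, y_test, degrees=(1, 2, 3)):
    """Fit one polynomial model per degree and report its R^2 on the test set."""
    for degree in degrees:
        model = Pipeline([('poly', PolynomialFeatures(degree=degree)),
                          ('linear', LinearRegression())])
        model.fit(x_train, y_train)
        print(degree, r2_score(y_test, model.predict(x_test)))
```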
28896
alexandrov_dmitrii_lab_5/sberbank_data.csv
Normal file
76
alexandrov_dmitrii_lab_6/lab6.py
Normal file
@@ -0,0 +1,76 @@
from matplotlib import pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
import pandas as pd
import numpy as np

# Selected input features
data = pd.read_csv('sberbank_data.csv', index_col='id')
x = data[['timestamp', 'full_sq', 'floor', 'max_floor', 'build_year', 'num_room', 'material', 'kremlin_km']]

# Replace missing values with zeros
x = x.replace('NA', 0)
x.fillna(0, inplace=True)

# Keep only the year part of the 'YYYY-MM-DD' timestamp
col_date = []
for val in x['timestamp']:
    col_date.append(val.split('-', 1)[0])

x = x.drop(columns='timestamp')
x['timestamp'] = col_date

# Target: price bucketed into five classes
y = []
for val in data['price_doc']:
    if val < 1500000:
        y.append('low')
    elif val < 3000000:
        y.append('medium')
    elif val < 5500000:
        y.append('high')
    elif val < 10000000:
        y.append('premium')
    else:
        y.append('oligarch')

# 99% training data, 1% test data
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.01, random_state=42)

min_scores = []
med_scores = []
max_scores = []


def do_test(iters_num):
    global x_train, x_test, y_train, y_test, min_scores, med_scores, max_scores

    print("Testing iterations number " + str(iters_num) + ":")
    scores = []

    # Train 10 identical classifiers with the given iteration limit
    for i in range(10):
        neuro = MLPClassifier(max_iter=iters_num)
        neuro.fit(x_train, y_train)
        scr = neuro.score(x_test, y_test)
        print("res" + str(i + 1) + ": " + str(scr))
        scores.append(scr)

    print("Medium result: " + str(np.mean(scores)))

    min_scores.append(np.min(scores))
    med_scores.append(np.mean(scores))
    max_scores.append(np.max(scores))


def start():
    global min_scores, med_scores, max_scores

    iter_nums = [200, 400, 600, 800, 1000]

    for num in iter_nums:
        do_test(num)

    # Minimum scores in red, mean and maximum scores in blue
    plt.figure(1, figsize=(16, 9))
    plt.plot(iter_nums, min_scores, c='r')
    plt.plot(iter_nums, med_scores, c='b')
    plt.plot(iter_nums, max_scores, c='b')
    plt.show()


start()
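The plot above draws the mean and maximum curves in the same colour and without labels, which makes them hard to tell apart. A hedged, labelled variant is sketched below; the colours and legend are my own choice, and the score values are rounded from the console output quoted in the readme rather than recomputed.

```python
from matplotlib import pyplot as plt

iter_nums = [200, 400, 600, 800, 1000]
# Rounded from the readme's console output, for illustration only
min_scores = [0.06, 0.32, 0.26, 0.21, 0.33]
med_scores = [0.44, 0.46, 0.46, 0.39, 0.46]
max_scores = [0.62, 0.61, 0.58, 0.60, 0.60]

plt.plot(iter_nums, min_scores, c='r', label='min')
plt.plot(iter_nums, med_scores, c='b', label='mean')
plt.plot(iter_nums, max_scores, c='g', label='max')
plt.legend()
plt.show()
```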
149
alexandrov_dmitrii_lab_6/readme.md
Normal file
@@ -0,0 +1,149 @@
### Task
Use the neural network from your variant on the data chosen for your variant, formulating the task yourself.
Interpret the results and assess how well it suits the task you formulated.

Variant 1: MLPClassifier

The task was formulated as follows: classify housing by price, based on the selected features, using a neural network.

### Running the program
The file lab6.py contains and runs the program; it requires no arguments or configuration.

### Program description
The program reads housing prices as the target and the following features as inputs: year the listing was posted, floor area, floor, number of floors, year of construction, number of rooms, building material, and distance to the Kremlin (the nominal city centre).
It then preprocesses the data (converts missing values to numbers) and keeps only the year of the listing. Prices are distributed over five classes.

After preprocessing, the program splits the data into 99% training and 1% test material.
These data are processed 10 times each by identical neural-network models using the "adam" gradient-descent method, with different settings of the maximum number of training iterations: 200, 400, 600, 800, 1000.
The model score is computed; for each setting the minimum, maximum and mean results are recorded, and all results are printed to the console.
Finally, the program plots how the results depend on the model's maximum number of iterations.

### Test results
Based on the tests, the following can be said:
* Overall, the model gives a mean accuracy of about 40-50%, which is not enough.
* Increasing the maximum number of iterations mostly affects the minimum scores, narrowing the spread of accuracy.
* It cannot be said that increasing the maximum number of iterations improves the model much: by at most about 10 percentage points of accuracy.

Example console output:
>Testing iterations number 200:
|
||||
>
|
||||
>res1: 0.3806228373702422
|
||||
>
|
||||
>res2: 0.6055363321799307
|
||||
>
|
||||
>res3: 0.4809688581314879
|
||||
>
|
||||
>res4: 0.4913494809688581
|
||||
>
|
||||
>res5: 0.4844290657439446
|
||||
>
|
||||
>res6: 0.2975778546712803
|
||||
>
|
||||
>res7: 0.48788927335640137
|
||||
>
|
||||
>res8: 0.06228373702422145
|
||||
>
|
||||
>res9: 0.6193771626297578
|
||||
>
|
||||
>res10: 0.47750865051903113
|
||||
>
|
||||
>Medium result: 0.4387543252595155
|
||||
>
|
||||
>Testing iterations number 400:
|
||||
>
|
||||
>res1: 0.6124567474048442
|
||||
>
|
||||
>res2: 0.4290657439446367
|
||||
>
|
||||
>res3: 0.3217993079584775
|
||||
>
|
||||
>res4: 0.5467128027681661
|
||||
>
|
||||
>res5: 0.48788927335640137
|
||||
>
|
||||
>res6: 0.40484429065743943
|
||||
>
|
||||
>res7: 0.6020761245674741
|
||||
>
|
||||
>res8: 0.4186851211072664
|
||||
>
|
||||
>res9: 0.42214532871972316
|
||||
>
|
||||
>res10: 0.370242214532872
|
||||
>
|
||||
>Medium result: 0.46159169550173
|
||||
>
|
||||
>Testing iterations number 600:
|
||||
>
|
||||
>res1: 0.4359861591695502
|
||||
>
|
||||
>res2: 0.2560553633217993
|
||||
>
|
||||
>res3: 0.5363321799307958
|
||||
>
|
||||
>res4: 0.5778546712802768
|
||||
>
|
||||
>res5: 0.35986159169550175
|
||||
>
|
||||
>res6: 0.356401384083045
|
||||
>
|
||||
>res7: 0.49480968858131485
|
||||
>
|
||||
>res8: 0.5121107266435986
|
||||
>
|
||||
>res9: 0.5224913494809689
|
||||
>
|
||||
>res10: 0.5190311418685121
|
||||
>
|
||||
>Medium result: 0.4570934256055363
|
||||
>
|
||||
>Testing iterations number 800:
|
||||
>
|
||||
>res1: 0.25951557093425603
|
||||
>
|
||||
>res2: 0.4083044982698962
|
||||
>
|
||||
>res3: 0.5224913494809689
|
||||
>
|
||||
>res4: 0.5986159169550173
|
||||
>
|
||||
>res5: 0.24567474048442905
|
||||
>
|
||||
>res6: 0.4013840830449827
|
||||
>
|
||||
>res7: 0.21453287197231835
|
||||
>
|
||||
>res8: 0.4671280276816609
|
||||
>
|
||||
>res9: 0.40484429065743943
|
||||
>
|
||||
>res10: 0.38408304498269896
|
||||
>
|
||||
>Medium result: 0.3906574394463667
|
||||
>
|
||||
>Testing iterations number 1000:
|
||||
>
|
||||
>res1: 0.4186851211072664
|
||||
>
|
||||
>res2: 0.5017301038062284
|
||||
>
|
||||
>res3: 0.5121107266435986
|
||||
>
|
||||
>res4: 0.3806228373702422
|
||||
>
|
||||
>res5: 0.44982698961937717
|
||||
>
|
||||
>res6: 0.5986159169550173
|
||||
>
|
||||
>res7: 0.5570934256055363
|
||||
>
|
||||
>res8: 0.4290657439446367
|
||||
>
|
||||
>res9: 0.32525951557093424
|
||||
>
|
||||
>res10: 0.41522491349480967
|
||||
>
|
||||
>Medium result: 0.4588235294117647

Summary: for the selected data, the neural model with the "adam" gradient-descent method did not perform well. Other optimization methods may give better results (a hedged sketch follows below), or a more extensive modification of the model is needed.
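The summary above suggests trying other optimization methods. Below is a hedged sketch of such a comparison; the solver names ('lbfgs', 'sgd', 'adam') are the ones scikit-learn's MLPClassifier accepts, while everything else (a single run per solver, reusing lab6's split, the iteration limit) is an assumption for illustration.

```python
from sklearn.neural_network import MLPClassifier


def compare_solvers(x_train, x_test, y_train, y_test):
    # One run per solver; lab6 would average over several runs instead
    for solver in ('lbfgs', 'sgd', 'adam'):
        clf = MLPClassifier(solver=solver, max_iter=600)
        clf.fit(x_train, y_train)
        print(solver, clf.score(x_test, y_test))
```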
28896
alexandrov_dmitrii_lab_6/sberbank_data.csv
Normal file
2795
alexandrov_dmitrii_lab_7/data.txt
Normal file
96
alexandrov_dmitrii_lab_7/lab7.py
Normal file
@@ -0,0 +1,96 @@
import numpy as np
from keras_preprocessing.sequence import pad_sequences
from keras_preprocessing.text import Tokenizer
from keras.models import Sequential
from keras.layers import Dense, LSTM, Embedding, Dropout
from keras.callbacks import ModelCheckpoint


def recreate_model(predictors, labels, model, filepath, epoch_num):
    # Compile a freshly built model and train it from scratch
    model.compile(loss='sparse_categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
    append_epochs(predictors, labels, model, filepath, epoch_num)


def append_epochs(predictors, labels, model, filepath, epoch_num):
    # Train for the given number of epochs, checkpointing the best weights by loss
    checkpoint = ModelCheckpoint(filepath, monitor='loss', verbose=1, save_best_only=True, mode='min')
    desired_callbacks = [checkpoint]
    model.fit(predictors, labels, epochs=epoch_num, verbose=1, callbacks=desired_callbacks)


def generate_text(tokenizer, seed_text, next_words, model, max_seq_length):
    # Repeatedly predict the next word and append it to the seed text
    for _ in range(next_words):
        token_list = tokenizer.texts_to_sequences([seed_text])[0]
        token_list = pad_sequences([token_list], maxlen=max_seq_length - 1, padding='pre')
        predicted = np.argmax(model.predict(token_list), axis=-1)
        output_word = ""
        for word, index in tokenizer.word_index.items():
            if index == predicted:
                output_word = word
                break
        seed_text += " " + output_word
    return seed_text


def start():
    flag = -1
    while flag < 1 or flag > 2:
        flag = int(input("Select model and text (1 - eng, 2 - ru): "))

    if flag == 1:
        file = open("data.txt").read()
        filepath = "model_eng.hdf5"
    elif flag == 2:
        file = open("rus_data.txt").read()
        filepath = "model_rus.hdf5"
    else:
        exit(1)

    tokenizer = Tokenizer()
    tokenizer.fit_on_texts([file])
    words_count = len(tokenizer.word_index) + 1

    # Build n-gram training sequences: every prefix of a tokenized line is one sample
    input_sequences = []
    for line in file.split('\n'):
        token_list = tokenizer.texts_to_sequences([line])[0]
        for i in range(1, len(token_list)):
            n_gram_sequence = token_list[:i + 1]
            input_sequences.append(n_gram_sequence)

    max_seq_length = max([len(x) for x in input_sequences])
    input_sequences = pad_sequences(input_sequences, maxlen=max_seq_length, padding='pre')

    # All tokens but the last are the predictors, the last token is the label
    predictors, labels = input_sequences[:, :-1], input_sequences[:, -1]

    model = Sequential()
    model.add(Embedding(words_count, 100, input_length=max_seq_length - 1))
    model.add(LSTM(150))
    model.add(Dropout(0.15))
    model.add(Dense(words_count, activation='softmax'))

    flag = input("Do you want to recreate the model ? (print yes): ")
    if flag == 'yes':
        flag = input("Are you sure? (print yes): ")
        if flag == 'yes':
            num = int(input("Select number of epoch: "))
            if 0 < num < 100:
                recreate_model(predictors, labels, model, filepath, num)

    model.load_weights(filepath)
    model.compile(loss='sparse_categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

    flag = input("Do you want to train the model ? (print yes): ")
    if flag == 'yes':
        flag = input("Are you sure? (print yes): ")
        if flag == 'yes':
            num = int(input("Select number of epoch: "))
            if 0 < num < 100:
                append_epochs(predictors, labels, model, filepath, num)

    flag = 'y'
    while flag == 'y':
        seed = input("Enter seed: ")
        print(generate_text(tokenizer, seed, 25, model, max_seq_length))
        flag = input("Continue? (print 'y'): ")


start()
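To make the n-gram construction in start() above more concrete, here is a small worked example. The sentence and the token indices are made up purely for illustration: each prefix of a tokenized line becomes one training sample, where all tokens but the last are the predictors and the last token is the label.

```python
from keras_preprocessing.sequence import pad_sequences
from keras_preprocessing.text import Tokenizer

tokenizer = Tokenizer()
tokenizer.fit_on_texts(["the ship sailed at dawn"])

line = "the ship sailed at dawn"
tokens = tokenizer.texts_to_sequences([line])[0]            # e.g. [1, 2, 3, 4, 5]
sequences = [tokens[:i + 1] for i in range(1, len(tokens))] # prefixes of length 2..5
padded = pad_sequences(sequences, maxlen=len(tokens), padding='pre')
predictors, labels = padded[:, :-1], padded[:, -1]          # [1] -> 2, [1, 2] -> 3, ...
print(predictors, labels)
```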
BIN
alexandrov_dmitrii_lab_7/model_eng.hdf5
Normal file
BIN
alexandrov_dmitrii_lab_7/model_rus.hdf5
Normal file
49
alexandrov_dmitrii_lab_7/readme.md
Normal file
@@ -0,0 +1,49 @@
### Task
Choose a literary text (even variants: Russian, odd variants: English) and train a recurrent neural network on it for a text-generation task. Tune the architecture and parameters so as to get as close as possible to a meaningful result. Then pair up (even with odd variant), exchange the networks you built and check how your partner's architecture copes with your text.

Variant 1: primarily an English text. There was, however, nobody to pair up with.

### Running the program
The file lab7.py contains and runs the program; it requires no arguments or configuration.

### Program description
The program is a console tool for working with the models. It can create and train models of the same architecture for different texts.
Two texts are stored in files: the English data.txt (Treasure Island) and the Russian rus_data.txt (The Hobbit). Two saved trained models are stored there as well:
* model_eng - the model trained on the English text; currently 27 training epochs.
* model_rus - the model trained on the Russian text; currently 12 training epochs.
Training took one day.

In the program you choose which text to load together with the corresponding model; at the moment the Russian-language model is selected.

The program contains methods for recreating the model and for additional training (the model and the number of extra epochs are passed in). Both methods are disabled and can be re-enabled when needed.

After the optional recreation and additional training, the program asks for a seed text, which the model must continue by generating its own text.

The model itself has the following architecture:
* a layer that turns words into dense vectors: Embedding, with an input equal to the number of words, an output of 100, and an input length equal to the maximum sequence length.
* a layer of long short-term memory blocks forming the recurrent part: LSTM with 150 units.
* a layer that sets the fraction of dropped connections between adjacent layers: Dropout with a drop rate of 15%.
* a weighted-sum layer: Dense, with as many neurons as there are words in the text and the 'softmax' activation function.

### Test results
Based on one day of training, the following can be said:

The model successfully generates meaningless word sequences, which either consist of fragments of phrases or accidentally (but fairly often) fall into meaningful word combinations, but nothing more.

Generation examples (the first word is the generation seed):

Model trained on 'Treasure Island', 27 training epochs:
>ship that he said with the buccaneers a gentleman and neither can read and figure but what is it anyway ah 'deposed' that's it is a
>
>chest said the doctor touching the black spot mind by the arm who is the ship there's long john now you are the first that were
>
>silver said the doctor if you can get the treasure you can find the ship there's been a man that has lost his score out he

Model trained on 'The Hobbit', 12 training epochs (samples left in Russian, as produced by the model):
>дракон и тут они услыхали про смога он понял что он стал видел и разозлился как слоны у гэндальфа хороши но все это было бы он
>
>поле он не мог сообразить что он делал то в живых и слышал бильбо как раз доедал пуще прежнего а бильбо все таки уж не мог
>
>паук направился к нему толстому из свертков они добрались до рассвета и даже дальше не останавливаясь а именно что гоблины обидело бильбо они не мог ничего

Summary: even such a simple model with so few training epochs can sometimes generate something meaningful. However, generating normal text would require long training and a more complex model made of several LSTM layers with a Dropout after each of them (a hedged sketch of such a stack follows below), which in turn would require computing power I do not have. Otherwise a very small text should be used.
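The summary above mentions a deeper model with several LSTM layers, each followed by Dropout. A hedged sketch of such a stack is given below; the layer sizes and drop rates are illustrative assumptions, and `return_sequences=True` is needed on every LSTM layer except the last so that the layers can be chained.

```python
from keras.models import Sequential
from keras.layers import Dense, LSTM, Embedding, Dropout


def build_stacked_model(words_count, max_seq_length):
    model = Sequential()
    model.add(Embedding(words_count, 100, input_length=max_seq_length - 1))
    model.add(LSTM(150, return_sequences=True))  # pass full sequences to the next LSTM
    model.add(Dropout(0.15))
    model.add(LSTM(150))
    model.add(Dropout(0.15))
    model.add(Dense(words_count, activation='softmax'))
    model.compile(loss='sparse_categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
    return model
```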
6838
alexandrov_dmitrii_lab_7/rus_data.txt
Normal file
53
almukhammetov_bulat_lab_1/README.md
Normal file
@@ -0,0 +1,53 @@
Variant 2

Task:
Using the code from the section "Regularization and a feed-forward network" in [1] (p. 228), generate the specified type of data and compare 3 models on it (per your variant). Plot the graphs, report the quality of the models, and explain the results.

Data:
make_circles (noise=0.2, factor=0.5, random_state=rs). Models: linear regression, polynomial regression (degree 3), ridge polynomial regression (degree 3, alpha = 1.0).

Running:
Run the file lab1.py

Program description:
1. Generates a dataset with scikit-learn's make_circles function. This dataset has two classes, where the points of one class surround the points of the other, with added noise.
2. Splits the data into training and test sets with train_test_split.
3. Creates three different models for classifying the data:
4. Linear regression (implemented as LogisticRegression).
5. Polynomial regression of degree 3 (Polynomial Regression).
6. Ridge polynomial regression of degree 3 with regularization and alpha equal to one (Ridge Polynomial Regression).
7. Trains each of these models on the training set and evaluates their accuracy on the test set.
8. Prints the accuracy of each model.
9. Draws the decision regions of the models (decision boundaries).
10. Draws the test and training points coloured by class (red and blue).

Results:

<p>
<div>Accuracy</div>
<img src="Рисунок1.png">
</p>

<p>
<div>Regression plots</div>
<img src="Рисунок2.png">
<img src="Рисунок3.png">
<img src="Рисунок4.png">
</p>

Judging by the resulting plots and accuracy, with this type of data generation the most accurate of the three models were the polynomial regression (degree 3) and the ridge polynomial regression (degree 3, alpha = 1.0). They are also identical to each other. To verify this claim I ran an additional test and wrote a script that computes the accuracy of the three models for 10 different random_state values (2-11); a sketch of such a script is shown after this README.

Results:

Accuracy values for each model:
Linear regression: 0.40 0.52 0.44 0.56 0.48 0.49 0.50 0.49 0.46 0.40
Polynomial regression (degree 3): 0.63 0.67 0.74 0.64 0.80 0.73 0.64 0.81 0.46 0.62
Ridge polynomial regression (degree 3, alpha = 1.0): 0.63 0.67 0.74 0.64 0.80 0.73 0.64 0.81 0.46 0.62

Mean accuracy values:
Linear regression - mean accuracy: 0.47
Polynomial regression (degree 3) - mean accuracy: 0.68
Ridge polynomial regression (degree 3, alpha = 1.0) - mean accuracy: 0.68

The claim was confirmed.
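The additional script mentioned in the README (accuracy of the three models for random_state values 2-11) is not included in this commit. Below is a hedged sketch of what such a script might look like: the model definitions mirror lab1.py, while the loop structure and function name are my own assumptions.

```python
import numpy as np
from sklearn.datasets import make_circles
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import PolynomialFeatures, StandardScaler


def build_models(rs):
    # Same three models as in lab1.py
    return [
        ("Linear regression", LogisticRegression(random_state=rs)),
        ("Polynomial regression (degree 3)",
         make_pipeline(PolynomialFeatures(degree=3), StandardScaler(),
                       LogisticRegression(random_state=rs))),
        ("Ridge polynomial regression (degree 3, alpha = 1.0)",
         make_pipeline(PolynomialFeatures(degree=3), StandardScaler(),
                       LogisticRegression(penalty='l2', C=1.0, random_state=rs))),
    ]


scores = {name: [] for name, _ in build_models(0)}
for rs in range(2, 12):  # random_state values 2-11
    X, y = make_circles(noise=0.2, factor=0.5, random_state=rs)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.9, random_state=rs)
    for name, model in build_models(rs):
        model.fit(X_train, y_train)
        scores[name].append(accuracy_score(y_test, model.predict(X_test)))

for name, vals in scores.items():
    print(name, np.round(vals, 2), "mean:", round(float(np.mean(vals)), 2))
```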
83
almukhammetov_bulat_lab_1/lab1.py
Normal file
@@ -0,0 +1,83 @@
import numpy as np
from matplotlib import pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn.datasets import make_circles
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.preprocessing import StandardScaler

# Using the code from the section "Regularization and a feed-forward network" in [1] (p. 228),
# generate the specified type of data and compare 3 models on it (per the variant).
# Plot the graphs, report the quality of the models, explain the results.

# Models
# Linear regression
# Polynomial regression (degree 3)
# Ridge polynomial regression (degree 3, alpha = 1.0)

# Data
# make_circles (noise=0.2, factor=0.5, random_state=rs)

random_state = np.random.RandomState(2)

# Generate the dataset
circles_dataset = make_circles(noise=0.2, factor=0.5, random_state=random_state)

X, y = circles_dataset
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.9, random_state=random_state)

# Create the models
models = []

# Linear regression
linear_model = LogisticRegression(random_state=random_state)
models.append(("Linear regression", linear_model))

# Polynomial regression (degree 3)
poly_model = make_pipeline(PolynomialFeatures(degree=3), StandardScaler(),
                           LogisticRegression(random_state=random_state))
models.append(("Polynomial regression (degree 3)", poly_model))

# Ridge polynomial regression (degree 3, alpha = 1.0)
ridge_poly_model = make_pipeline(PolynomialFeatures(degree=3), StandardScaler(),
                                 LogisticRegression(penalty='l2', C=1.0, random_state=random_state))
models.append(("Ridge polynomial regression (degree 3, alpha = 1.0)", ridge_poly_model))

# Train and evaluate the models
results = []

for name, model in models:
    model.fit(X_train, y_train)  # train
    y_pred = model.predict(X_test)  # predict
    accuracy = accuracy_score(y_test, y_pred)  # measure accuracy
    results.append((name, accuracy))

# Print the results
for name, accuracy in results:
    print(f"{name} - Accuracy: {accuracy:.2f}")

# Plot the graphs
cmap_background = ListedColormap(['#FFAAAA', '#AAAAFF'])
cmap_points = ListedColormap(['#FF0000', '#0000FF'])

plt.figure(figsize=(15, 5))
for i, (name, model) in enumerate(models):
    plt.subplot(1, 3, i + 1)
    xx, yy = np.meshgrid(np.linspace(X[:, 0].min() - 1, X[:, 0].max() + 1, 100),
                         np.linspace(X[:, 1].min() - 1, X[:, 1].max() + 1, 100))
    Z = model.predict(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    plt.contourf(xx, yy, Z, cmap=cmap_background, alpha=0.5)
    plt.scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cmap_points, marker='o', label='Test points')
    plt.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cmap_points, marker='x', label='Training points')
    plt.legend()
    plt.title(name)

plt.text(0.5, -1.2, 'Red class', color='r', fontsize=12)
plt.text(0.5, -1.7, 'Blue class', color='b', fontsize=12)

plt.tight_layout()
plt.show()
BIN
almukhammetov_bulat_lab_1/Рисунок1.png
Normal file
|
After Width: | Height: | Size: 33 KiB |
BIN
almukhammetov_bulat_lab_1/Рисунок2.png
Normal file
|
After Width: | Height: | Size: 66 KiB |
BIN
almukhammetov_bulat_lab_1/Рисунок3.png
Normal file
|
After Width: | Height: | Size: 46 KiB |
BIN
almukhammetov_bulat_lab_1/Рисунок4.png
Normal file
|
After Width: | Height: | Size: 81 KiB |
40
almukhammetov_bulat_lab_2/README.md
Normal file
@@ -0,0 +1,40 @@
Variant 2

Task:
Using the code from [1] (section "Solving the feature-ranking problem", p. 205), rank the features with the models specified in your variant. Report the scores each method/model assigns to each feature and the mean score. Analyse the results. Which four features turned out to be the most important by mean score? (The feature names/indices are the answer to the task.)

Models:
Linear regression (LinearRegression)
Recursive Feature Elimination (RFE)
Feature ranking with random forests (Random Forest Regressor)

Running:
Run the file lab2.py

Program description:
1. Generates random regression data with make_regression, producing a feature matrix X and a target vector y.
2. Builds a DataFrame data whose columns are the features, with the target variable as the last column.
3. Splits the data into the feature matrix X and the target vector y.
4. Creates the list of models for feature ranking: linear regression, recursive feature elimination, and random-forest feature ranking.
5. Creates a model_scores dictionary to store each model's scores.
6. Trains and evaluates each model on the data:
7. Computes the feature ranks and normalizes them to the range from 0 to 1.
8. Prints each model's feature scores and their mean scores.
9. Finds the four most important features by mean score and prints their indices and values.

Results:

(Result screenshots: image.png and image-1.png through image-4.png.)

Conclusions:

The four most important features determined from the mean scores are Feature 6, Feature 1, Feature 2 and Feature 5. These features have the highest mean importance among all features.
BIN
almukhammetov_bulat_lab_2/image-1.png
Normal file
|
After Width: | Height: | Size: 21 KiB |
BIN
almukhammetov_bulat_lab_2/image-2.png
Normal file
|
After Width: | Height: | Size: 22 KiB |
BIN
almukhammetov_bulat_lab_2/image-3.png
Normal file
|
After Width: | Height: | Size: 9.8 KiB |
BIN
almukhammetov_bulat_lab_2/image-4.png
Normal file
|
After Width: | Height: | Size: 6.8 KiB |
BIN
almukhammetov_bulat_lab_2/image.png
Normal file
|
After Width: | Height: | Size: 9.7 KiB |
75
almukhammetov_bulat_lab_2/lab2.py
Normal file
@@ -0,0 +1,75 @@
import numpy as np
import pandas as pd
from sklearn.datasets import make_regression
from sklearn.linear_model import LinearRegression
from sklearn.feature_selection import RFE
from sklearn.ensemble import RandomForestRegressor
from sklearn.preprocessing import MinMaxScaler

# Using the code from [1] (section "Solving the feature-ranking problem", p. 205), rank the features
# with the models specified in the variant. Report the scores each method/model assigns to each feature
# and the mean score. Analyse the results. Which four features turned out to be the most important
# by mean score? (The feature names/indices are the answer to the task.)

# Linear regression (LinearRegression), Recursive Feature Elimination (RFE),
# feature ranking with random forests (Random Forest Regressor)
random_state = np.random.RandomState(2)

# Generate random regression data
X, y = make_regression(n_samples=100, n_features=10, noise=0.1, random_state=random_state)

# Build a DataFrame for the data
data = pd.DataFrame(X, columns=[f'feature_{i}' for i in range(X.shape[1])])
data['target'] = y

# Split the data into features (X) and the target variable (y)
X = data.drop('target', axis=1)
y = data['target']

# Create the models
models = [
    ("Linear regression", LinearRegression()),
    ("Recursive feature elimination", RFE(LinearRegression(), n_features_to_select=1)),
    ("Random forest feature ranking", RandomForestRegressor())
]

# Dictionary to store each model's scores
model_scores = {}

# Train and evaluate the models
for name, model in models:
    model.fit(X, y)
    if name == "Recursive feature elimination":
        # RFE returns a ranking of the features
        rankings = model.ranking_
        # Normalize the ranks so they lie in the range from 0 to 1
        normalized_rankings = 1 - (rankings - 1) / (np.max(rankings) - 1)
        model_scores[name] = normalized_rankings
    elif name == "Random forest feature ranking":
        # Feature importances from RandomForestRegressor
        feature_importances = model.feature_importances_
        # Normalize the importances to the range from 0 to 1
        normalized_importances = MinMaxScaler().fit_transform(feature_importances.reshape(-1, 1))
        model_scores[name] = normalized_importances.flatten()
    elif name == "Linear regression":
        # Feature coefficients from LinearRegression
        coefficients = model.coef_
        # Normalize the absolute coefficients to the range from 0 to 1
        normalized_coefficients = MinMaxScaler().fit_transform(np.abs(coefficients).reshape(-1, 1))
        model_scores[name] = normalized_coefficients.flatten()

# Print each model's scores
for name, scores in model_scores.items():
    print(f"{name} feature scores:")
    for feature, score in enumerate(scores, start=1):
        print(f"Feature {feature}: {score:.2f}")
    print(f"Mean score: {np.mean(scores):.2f}")
    print()

# Find the four most important features by mean score
all_feature_scores = np.mean(list(model_scores.values()), axis=0)
sorted_features = sorted(enumerate(all_feature_scores, start=1), key=lambda x: x[1], reverse=True)
top_features = sorted_features[:4]
print("The four most important features:")
for feature, score in top_features:
    print(f"Feature {feature}: {score:.2f}")
97
antonov_dmitry_lab_1/README.md
Normal file
@@ -0,0 +1,97 @@
# Lab 1

Working with typical datasets and various models

# Variant 3

Data: make_classification (n_samples=500, n_features=2,
n_redundant=0, n_informative=2, random_state=rs, n_clusters_per_class=1)

# Run

Run the script file (prints to the console and draws the plots).

# Models:

1. Linear regression
1. Polynomial regression (degree 3)
1. Ridge polynomial regression (degree 3, alpha = 1.0)

# Plots

<div>
The quality of each model can be judged by its mean squared error (MSE):
a lower MSE indicates a better fit to the data.
The choice of model, however, depends on the dataset and on the underlying relationship between the features and the target variable.

Linear regression assumes a linear relationship between the features and the target variable.
It works well when the relationship is linear and the noise in the dataset is minimal.
It performed best on the moons dataset and worst on the circles dataset.
On the linear dataset it performed on a par with the other models.

Polynomial and ridge regression performed roughly the same on all datasets.

Polynomial regression (degree=3):
Polynomial regression provides a more flexible fit by using a higher-order polynomial (a cubic curve).
It can capture more complex relationships between the features and the target variable.
It can outperform linear regression when the true relationship is nonlinear.

Ridge regression (degree=3, alpha=1.0):
In polynomial regression with regularization (alpha=1.0) the model adds a regularization term
to control the complexity of the fit. Regularization helps prevent overfitting when the dataset
is noisy or small. A compact sketch of this comparison is given below.
</div>
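The comparison above boils down to fitting three estimators on the same split and reading off their MSE. A minimal, self-contained sketch of that procedure (synthetic moons data, default settings); it mirrors the approach of lab1.py below rather than reproducing it exactly:

```python
from sklearn.datasets import make_moons
from sklearn.linear_model import LinearRegression, Ridge
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import PolynomialFeatures

X, y = make_moons(noise=0.3, random_state=0)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4, random_state=42)

models = {
    "linear": LinearRegression(),
    "poly (degree=3)": make_pipeline(PolynomialFeatures(degree=3), LinearRegression()),
    "ridge (degree=3, alpha=1.0)": make_pipeline(PolynomialFeatures(degree=3), Ridge(alpha=1.0)),
}
for name, model in models.items():
    # Fit on the training split and score on the held-out split
    model.fit(X_train, y_train)
    mse = mean_squared_error(y_test, model.predict(X_test))
    print(f"{name}: MSE = {mse:.4f}")
```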
<p>
<div>Moons dataset (moon_dataset)</div>
<img src="screens/myplot1.png" width="650" title="dataset 1">
</p>
<p>
<div>Regression plots</div>
<img src="screens/myplot2.png" width="450" title="linear model">
<img src="screens/myplot3.png" width="450" title="polynomial model">
<img src="screens/myplot4.png" width="450" title="ridge model">
<div>
Linear MSE: 0.0936
Polynomial (degree=3) MSE: 0.0674
Ridge (degree=3, alpha=1.0) MSE: 0.0682
</div>
</p>

<p>
<div>Circles dataset (circles_dataset)</div>
<img src="screens/myplot5.png" width="650" title="dataset 2">
</p>
<p>
<div>Regression plots</div>
<img src="screens/myplot6.png" width="450" title="linear model">
<img src="screens/myplot7.png" width="450" title="polynomial model">
<img src="screens/myplot8.png" width="450" title="ridge model">
<div>
Linear MSE: 0.2684
Polynomial (degree=3) MSE: 0.1341
Ridge (degree=3, alpha=1.0) MSE: 0.1312
</div>
</p>

<p>
<div>Linear dataset (linearly_dataset)</div>
<img src="screens/myplot9.png" width="650" title="dataset 3">
</p>
<p>
<div>Regression plots</div>
<img src="screens/myplot10.png" width="450" title="linear model">
<img src="screens/myplot11.png" width="450" title="polynomial model">
<img src="screens/myplot12.png" width="450" title="ridge model">
<div>
Linear MSE: 0.1101
Polynomial (degree=3) MSE: 0.1045
Ridge (degree=3, alpha=1.0) MSE: 0.1078
</div>
</p>

<div>
The final model should be chosen with the dependency in the data in mind:
polynomial regression generally fits better, while the regularization coefficient in ridge regression
helps avoid overfitting.
</div>
97
antonov_dmitry_lab_1/lab1.py
Normal file
@@ -0,0 +1,97 @@
import numpy as np
from matplotlib import pyplot as plt
from sklearn.metrics import mean_squared_error
from sklearn.datasets import make_moons, make_circles, make_classification
from sklearn.linear_model import LinearRegression, Ridge
from sklearn.model_selection import train_test_split
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler, PolynomialFeatures

X, y = make_classification(
    n_features=2,
    n_redundant=0,
    n_informative=2,
    random_state=0,
    n_clusters_per_class=1
)

rng = np.random.RandomState(2)
X += 2 * rng.uniform(size=X.shape)
linearly_dataset = (X, y)
moon_dataset = make_moons(noise=0.3, random_state=0)
circles_dataset = make_circles(noise=0.2, factor=0.5, random_state=1)
datasets = [moon_dataset, circles_dataset, linearly_dataset]

"""
Data:
· moon_dataset
· circles_dataset
· linearly_dataset
"""
for ds_cnt, ds in enumerate(datasets):
    X, y = ds
    X = StandardScaler().fit_transform(X)
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=.4, random_state=42
    )
    """
    Models:
    · linear regression
    · polynomial regression (degree 3)
    · ridge polynomial regression (degree 3, alpha = 1.0)
    """

    # Linear regression
    linear_regression = LinearRegression()
    linear_regression.fit(X_train, y_train)
    linear_predictions = linear_regression.predict(X_test)
    linear_mse = mean_squared_error(y_test, linear_predictions)

    # Polynomial regression (degree=3)
    poly_regression = make_pipeline(PolynomialFeatures(degree=3), LinearRegression())
    poly_regression.fit(X_train, y_train)
    poly_predictions = poly_regression.predict(X_test)
    poly_mse = mean_squared_error(y_test, poly_predictions)

    # Ridge regression (degree=3, alpha=1.0)
    poly_regression_alpha = make_pipeline(PolynomialFeatures(degree=3), Ridge(alpha=1.0))
    poly_regression_alpha.fit(X_train, y_train)
    poly_alpha_predictions = poly_regression_alpha.predict(X_test)
    poly_alpha_mse = mean_squared_error(y_test, poly_alpha_predictions)

    # Plot the data
    plt.figure(figsize=(10, 6))
    plt.scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap='coolwarm')
    plt.title('Датасет №' + str(ds_cnt))
    plt.xlabel('X')
    plt.ylabel('Y')

    # Plot the linear model
    plt.figure(figsize=(10, 6))
    plt.scatter(X_test[:, 0], X_test[:, 1], c=linear_predictions, cmap='coolwarm')
    plt.title('Линейная ds' + str(ds_cnt))
    plt.xlabel('X')
    plt.ylabel('Y')
    plt.show()

    # Plot the polynomial model (degree=3)
    plt.figure(figsize=(10, 6))
    plt.scatter(X_test[:, 0], X_test[:, 1], c=poly_predictions, cmap='coolwarm')
    plt.title('Полиномиальная (degree=3) ds' + str(ds_cnt))
    plt.xlabel('X')
    plt.ylabel('Y')
    plt.show()

    # Plot the ridge model (degree=3, alpha=1.0)
    plt.figure(figsize=(10, 6))
    plt.scatter(X_test[:, 0], X_test[:, 1], c=poly_alpha_predictions, cmap='coolwarm')
    plt.title('Гребневая (degree=3, alpha=1.0) ds' + str(ds_cnt))
    plt.xlabel('X')
    plt.ylabel('Y')
    plt.show()

    # Compare the quality
    print('Линейная MSE:', linear_mse)
    print('Полиномиальная (degree=3) MSE:', poly_mse)
    print('Гребневая (degree=3, alpha=1.0) MSE:', poly_alpha_mse)
BIN
antonov_dmitry_lab_1/screens/myplot1.png
Normal file
After Width: | Height: | Size: 18 KiB |
BIN
antonov_dmitry_lab_1/screens/myplot10.png
Normal file
After Width: | Height: | Size: 18 KiB |
BIN
antonov_dmitry_lab_1/screens/myplot11.png
Normal file
After Width: | Height: | Size: 21 KiB |
BIN
antonov_dmitry_lab_1/screens/myplot12.png
Normal file
After Width: | Height: | Size: 21 KiB |
BIN
antonov_dmitry_lab_1/screens/myplot2.png
Normal file
After Width: | Height: | Size: 18 KiB |
BIN
antonov_dmitry_lab_1/screens/myplot3.png
Normal file
After Width: | Height: | Size: 19 KiB |
BIN
antonov_dmitry_lab_1/screens/myplot4.png
Normal file
After Width: | Height: | Size: 20 KiB |
BIN
antonov_dmitry_lab_1/screens/myplot5.png
Normal file
After Width: | Height: | Size: 20 KiB |
BIN
antonov_dmitry_lab_1/screens/myplot6.png
Normal file
After Width: | Height: | Size: 19 KiB |
BIN
antonov_dmitry_lab_1/screens/myplot7.png
Normal file
After Width: | Height: | Size: 22 KiB |
BIN
antonov_dmitry_lab_1/screens/myplot8.png
Normal file
After Width: | Height: | Size: 22 KiB |
BIN
antonov_dmitry_lab_1/screens/myplot9.png
Normal file
After Width: | Height: | Size: 19 KiB |
84
antonov_dmitry_lab_2/README.md
Normal file
@@ -0,0 +1,84 @@
# Lab 2

Feature ranking

Rank the features with the models specified for your variant.
Display the resulting values/scores of each feature for every method/model together with the average score.
Analyze the results.
Which four features turned out to be the most important by average score?
(The feature names/indices are the answer to the task.)

# Variant 3

Linear regression (LinearRegression), feature selection with
random trees (Random Forest Regressor), linear correlation
(f_regression)

I used the dataset Predict students' dropout and academic success
https://www.kaggle.com/datasets/thedevastator/higher-education-predictors-of-student-retention
It is the same dataset I use for my course project assignment.

# Run

Run the script file (prints to the console).

# Models:

1. Linear regression (LinearRegression)
1. Feature selection with random trees (Random Forest Regressor)
1. Linear correlation (f_regression)

# Notes

<div>
The choice of the most suitable feature-ranking method depends on the specifics of the dataset and the requirements
placed on the model.

Linear regression is a simple and transparent method that can be used for prediction.
It works well when the relationship between the variables is linear.
However, if the data contains complex nonlinear dependencies, linear regression may
turn out to be not very effective.

Feature selection with random trees (Random Forest Regressor) is a powerful method
that can handle complex relationships in the data, even nonlinear ones.
It is based on building an ensemble of decision trees, each of which casts a vote for
the most suitable answer. Random forests usually give good results and are robust
to overfitting.

Linear correlation, or f_regression, is a statistical method used to measure
the strength of the relationship between two variables. It can help determine whether a relationship exists at all,
but on its own it is less suitable for ranking features.
</div>
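To make the three kinds of scores comparable before averaging, the lab rescales each of them to the range [0, 1]. A minimal sketch of the f_regression step and that rescaling, on synthetic data rather than the course dataset:

```python
import numpy as np
from sklearn.datasets import make_regression
from sklearn.feature_selection import f_regression
from sklearn.preprocessing import MinMaxScaler

X, y = make_regression(n_samples=200, n_features=5, noise=0.5, random_state=0)

# f_regression returns an F-statistic and a p-value per feature;
# a larger F means a stronger univariate linear relationship with the target
f_scores, p_values = f_regression(X, y)
print("F-scores:", np.round(f_scores, 2))

# F-scores live on an arbitrary scale, so rescale them to [0, 1]
# before averaging them with scores produced by other methods
f_scaled = MinMaxScaler().fit_transform(f_scores.reshape(-1, 1)).flatten()
print("scaled:", np.round(f_scaled, 2))
```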
### 4 most important features on average:
1. Feature: Curricular units 2nd sem (approved), score: 0.8428
2. Feature: Tuition fees up to date, score: 0.4797
3. Feature: Curricular units 1st sem (approved), score: 0.2986
4. Feature: Curricular units 2nd sem (grade), score: 0.2778

### 4 most important for lr_scores (linear regression):
1. 0.3917 'Tuition fees up to date'
2. 0.2791 'International'
3. 0.2075 'Curricular units 2nd sem (approved)'
4. 0.1481 'Debtor'

### 4 most important for rf_scores (random forests):
1. 0.4928 'Curricular units 2nd sem (approved)'
2. 0.061 'Tuition fees up to date'
3. 0.0458 'Curricular units 2nd sem (grade)'
4. 0.0308 'Curricular units 1st sem (grade)'

### 4 most important for f_regression:
1. 2822.104 'Curricular units 2nd sem (approved)'
2. 2093.3315 'Curricular units 2nd sem (grade)'
3. 1719.4229 'Curricular units 1st sem (approved)'
4. 1361.6144 'Curricular units 1st sem (grade)'

### Explanation:
<div>
In general, the choice between linear regression and random forests depends on the characteristics of the data.
If the data has a linear dependency, linear regression is preferable.
If the data contains complex, possibly nonlinear relationships, a random forest may be the better choice.
In any case, it is important to explore the data beforehand and to test several models
in order to choose the most suitable one.
</div>
4425
antonov_dmitry_lab_2/dataset.csv
Normal file
106
antonov_dmitry_lab_2/lab2.py
Normal file
@@ -0,0 +1,106 @@
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.ensemble import RandomForestRegressor
from sklearn.feature_selection import f_regression
from sklearn.preprocessing import MinMaxScaler

# Load the dataset
data = pd.read_csv('dataset.csv')

# Split the dataset into features and the target variable
X = data.drop(['Target'], axis=1)
y = data['Target']

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Train the models
# Linear regression
lr = LinearRegression()
lr.fit(X_train, y_train)

# Feature selection with random trees via Random Forest Regressor
rf = RandomForestRegressor()
rf.fit(X_train, y_train)

# Rank the features with each model/method
# Use the absolute values of the coefficients as feature importance scores
lr_scores = abs(lr.coef_)

# Feature importances from the Random Forest Regressor model
rf_scores = rf.feature_importances_

# Display the resulting scores for each column
feature_names = X.columns.tolist()

# Show the rank scores from the linear regression model
print("оценки линейной регрессии:")
for feature, score in zip(feature_names, lr_scores):
    print(f"{feature}: {round(score, 4)}")

# Scores from the random forest method
print("\nоценки Random Forest:")
for feature, score in zip(feature_names, rf_scores):
    print(f"{feature}: {round(score, 4)}")

# Compute the scores for f_regression
f_scores, p_values = f_regression(X, y)

# f_regression scores
print("\nоценки f_regression:")
for feature, score in zip(feature_names, f_scores):
    print(f"{feature}: {round(score, 4)}")

# Use MinMaxScaler so that the scores are comparable before averaging
scaler = MinMaxScaler()
lr_scores_scaled = scaler.fit_transform(lr_scores.reshape(-1, 1)).flatten()
rf_scores_scaled = scaler.fit_transform(rf_scores.reshape(-1, 1)).flatten()
f_scores_scaled = scaler.fit_transform(f_scores.reshape(-1, 1)).flatten()

# Compute the average score for each feature
average_scores = {}
for feature in feature_names:
    average_scores[feature] = (lr_scores_scaled[feature_names.index(feature)] +
                               rf_scores_scaled[feature_names.index(feature)] +
                               f_scores_scaled[feature_names.index(feature)]) / 3

# Sort the features by average score
sorted_features = sorted(average_scores.items(), key=lambda x: x[1], reverse=True)

# Take the most important features
top_4_features = sorted_features[:4]

# Display the 4 most important ones
print("\n4 самых важных признака в среднем:")
for feature, score in top_4_features:
    print(f"Признак: {feature}, Оценка: {round(score, 4)}")


# Display the most important features for each method/model
top_lr_indices = np.argsort(lr_scores)[-4:][::-1]
top_rf_indices = np.argsort(rf_scores)[-4:][::-1]
top_f_indices = np.argsort(f_scores)[-4:][::-1]

top_lr_features = [feature_names[i] for i in top_lr_indices]
top_rf_features = [feature_names[i] for i in top_rf_indices]
top_f_features = [feature_names[i] for i in top_f_indices]

top_lr_features_score = [lr_scores[i] for i in top_lr_indices]
top_rf_features_score = [rf_scores[i] for i in top_rf_indices]
top_f_features_score = [f_scores[i] for i in top_f_indices]

print("\n4 самых важных для lr_scores:")
print(top_lr_features)
for i in top_lr_features_score:
    print(round(i, 4))

print("\n4 самых важных для rf_scores:")
print(top_rf_features)
for i in top_rf_features_score:
    print(round(i, 4))

print("\n4 самых важных для f_scores:")
print(top_f_features)
for i in top_f_features_score:
    print(round(i, 4))
85
antonov_dmitry_lab_3/README.md
Normal file
@@ -0,0 +1,85 @@
# Lab 3

Decision trees

Part 1. Using the Titanic passenger data, solve a classification task
(with a decision tree) in which, based on various passenger characteristics,
you need to find the two most important of the three features considered
(by variant) for surviving passengers. An example solution can be found
in [1] (p. 188). The data can be downloaded here:
https://www.kaggle.com/datasets/heptapod/titanic

Part 2. Using the library implementation of a decision tree, solve
the task from the lab "Decision Tree web service" of the course
"Artificial Intelligence Methods" on 99% of your data. Check
how the model performs on the remaining percent and draw a conclusion.

# Variant 3

Features: Sex, Age, SibSp

# Run

Run the script file (prints to the console).

# Model description:

DecisionTreeClassifier is a machine learning algorithm used for classification and regression tasks.
It is a decision tree in which each node decides which question to ask next
(i.e. which feature to split the data on further), while the leaves contain the final answers.

# Results

On the Titanic data the model identifies the important features with an accuracy of 75% (it excludes 'sibsp').
The two remaining features are the statistically important ones.
<p>
<div>Titanic</div>
<img src="screens/titanic.png" width="650" title="Titanic 1">
</p>

On my own dataset the model reaches 52.768% accuracy when the candidate input features
are ['Gender', 'Debtor', 'International'] (it excludes 'International').

<p>
<div>My dataset 1</div>
<img src="screens/mydataset1.png" width="650" title="My dataset 1">
</p>

And 70.961% when the inputs are ['Gender', 'Debtor', 'Curricular units 2nd sem (approved)']
(it excludes 'Gender').

<p>
<div>My dataset 2</div>
<img src="screens/mydataset2.png" width="650" title="My dataset 2">
</p>

This result can be explained by the greater importance of the feature 'Curricular units 2nd sem (approved)'
compared with 'International' (as was shown in the previous lab).

Because we took a statistically more significant feature, the model gives us higher accuracy.

An accuracy of 52.768% suggests the model performs close to chance level, i.e. it does
little better than random guessing. There may be several reasons for this:

1. All the features carry little information: the features supplied for comparison are statistically insignificant.

2. Not enough data: the dataset may not contain enough information or examples for the model to learn from.
If the dataset is small or unrepresentative, the model may fail to generalize to new data.

3. Imbalanced classes: if the classes of the target variable are imbalanced
(for example, there are far more non-dropout cases than dropout cases), the model may
be biased towards predicting the majority class (a quick check of this baseline is shown in the sketch after this list).

4. Overfitting: the model may be overfitted to the training data, meaning that it learns the noise
in the data rather than the underlying patterns. This can happen if the model is too complex
relative to the amount of available data.

5. Underfitting: conversely, the model may be too simple to capture
the relationships in the data. It is important to choose an appropriate level of model complexity.
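A quick way to tell whether a low accuracy is simply explained by class imbalance (point 3 above) is to compare it with the majority-class baseline. A minimal sketch, assuming the same dataset.csv and 'Target' column used by the lab scripts:

```python
import pandas as pd

# Assumes the same dataset.csv with a 'Target' column that the lab scripts use
data = pd.read_csv('dataset.csv')

# Share of each class in the target variable
class_shares = data['Target'].value_counts(normalize=True)
print(class_shares)

# Accuracy of always predicting the majority class; if the model's accuracy
# is close to this number, it has learned little beyond the class imbalance
print("majority-class baseline:", round(class_shares.max(), 4))
```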
<div>
When selecting features, their statistical significance should be taken into account, computed in several ways
(for example with linear regression, a Random Forest Regressor, the linear correlation f_regression, or other methods).
There should also be enough data, and overfitting of the model should be kept to a minimum.
</div>
4425
antonov_dmitry_lab_3/dataset.csv
Normal file
35
antonov_dmitry_lab_3/lab3.py
Normal file
@@ -0,0 +1,35 @@
import pandas as pd
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier

# Read the dataset
data = pd.read_csv('dataset.csv')

# Define the features
# The target variable is Target
X = data[['Gender', 'Debtor', 'Curricular units 2nd sem (approved)']]
y = data['Target']

# Split the data into training and test sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Create the decision tree classifier
dt_classifier = DecisionTreeClassifier(random_state=42)
dt_classifier.fit(X_train, y_train)

# Get the two most important features from the model
feature_importances = dt_classifier.feature_importances_

top_features_indices = feature_importances.argsort()[-2:][::-1]
top_features = X.columns[top_features_indices]

# Print the result
print("2 самых важных признака:", top_features)

# Get the model predictions to check the accuracy
predictions = dt_classifier.predict(X_test)

# Compute the model accuracy
accuracy = accuracy_score(y_test, predictions)
print("точность модели:", accuracy)
BIN
antonov_dmitry_lab_3/screens/mydataset1.png
Normal file
After Width: | Height: | Size: 14 KiB |
BIN
antonov_dmitry_lab_3/screens/mydataset2.png
Normal file
After Width: | Height: | Size: 16 KiB |
BIN
antonov_dmitry_lab_3/screens/titanic.png
Normal file
After Width: | Height: | Size: 11 KiB |
40
antonov_dmitry_lab_3/titanic.py
Normal file
@@ -0,0 +1,40 @@
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score

# Read the dataset
data = pd.read_csv("titanic_data.csv")

# Define the features
features = ['Sex', 'Age', 'sibsp']

# The target variable is survival
target = 'Survived'

# Split the data into training and test sets
train_data, test_data, train_labels, test_labels = train_test_split(
    data[features],
    data[target],
    test_size=0.2,
    random_state=42
)

# Create the decision tree classifier
model = DecisionTreeClassifier()

# Train the model
model.fit(train_data, train_labels)

# Get the model predictions to check the accuracy
predictions = model.predict(test_data)

# Compute the model accuracy
accuracy = accuracy_score(test_labels, predictions)
print("точность модели:", accuracy)

# Find the two most important features
importances = model.feature_importances_
indices = (-importances).argsort()[:2]
important_features = [features[i] for i in indices]
print("два самых важных признака:", important_features)
1310
antonov_dmitry_lab_3/titanic_data.csv
Normal file
78
antonov_dmitry_lab_4/README.md
Normal file
@@ -0,0 +1,78 @@
# Lab 4 Clustering

Use the clustering method specified for your variant on the data from the course-project dataset
Predict students' dropout and academic success (student dropout), formulating the task yourself.
Interpret the results and assess how well the method suits
the task you formulated.

# Variant 3

t-SNE

# Run

Run the script file (prints to the console).

# Model description:

T-Distributed Stochastic Neighbor Embedding (t-SNE) is a visualization and dimensionality-reduction method
used to display high-dimensional data as a two- or three-dimensional plot.

The result of t-SNE is a visualization in which points that are close in the original space end up
close to each other, while distant points end up far apart. This lets researchers study the structure of the data and
find clusters and patterns that may not be visible by inspecting the original high-dimensional space directly.

# Clustering task

Given a dataset containing information about students, including their gender, international status and GDP,
the goal is to group these students into distinct clusters based on these features.
The aim is to reveal natural patterns or subgroups of students that may
share characteristics in terms of gender, international status and economic background.
Such clustering can help tailor educational programmes, support services or interventions
to specific groups of students in order to improve academic outcomes and retention.
The purpose of the analysis is to obtain meaningful insights that can be used to improve the overall educational experience
and the success rates of different groups of students.

# Results

To apply the t-SNE dimensionality-reduction method, the features "Gender", "International" and "GDP" were used.
The data is projected onto a two-dimensional plane while preserving the local structure of the data.

How to interpret the results on the plot:

1. Gender:
   - Since "Gender" is a binary categorical variable ("Male" or "Female"),
     distinct clusters or separations are expected on the plot. Each point represents a student,
     and students of the same gender should be grouped together.

2. International:
   - "International" is also a binary categorical variable (e.g. "Yes" or "No", indicating
     whether the student is international), so a separation between international and non-international students may be visible.
     This can produce two distinct clusters.

3. GDP:
   - "GDP" is a continuous variable, and its values are represented by the positions of the points. Depending on
     the distribution of GDP values, a gradient or pattern may be observed in the data.

Looking at the plot, points are scattered across the two-dimensional space. Similar points
lie close to each other, while dissimilar ones lie further apart.

- Results:
  - Clear clusters are visible, which suggests that these features are good indicators for splitting
    the students into groups.
  - The dominant features are "Gender" and "International": two distinct clusters can be seen, one of which,
    for example, contains mostly local male students, while the other contains international female students.
  - "GDP" has a strong influence: a gradient of points can be seen, indicating a correlation between GDP and
    some underlying pattern in the data.

The specific interpretation depends on the actual distribution and characteristics of the data.
It is also important to note that t-SNE is a stochastic algorithm, so running it repeatedly with the same
parameters can produce somewhat different results. It is therefore recommended to examine the plots from several runs
to get a clear picture of the structure of the data (a small sketch of such a comparison is shown below).
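A minimal sketch of such a multi-run comparison is given below. It assumes, as in lab4.py, that dataset.csv is available, that the three selected columns are numeric, and that the 'Target' column can be used for coloring; these are assumptions, not part of the original code:

```python
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.manifold import TSNE

# Assumes the same dataset.csv and feature columns used by lab4.py
data = pd.read_csv('dataset.csv')
X = data[['Gender', 'International', 'GDP']]

# Run t-SNE with several random seeds and compare the embeddings side by side;
# cluster structure that is stable across runs is more likely to be real
fig, axes = plt.subplots(1, 3, figsize=(15, 4))
for ax, seed in zip(axes, (0, 1, 2)):
    emb = TSNE(n_components=2, random_state=seed).fit_transform(X)
    ax.scatter(emb[:, 0], emb[:, 1], c=data['Target'], cmap='viridis', s=10)
    ax.set_title(f'random_state={seed}')
plt.show()
```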
<p>
<div>Plot</div>
<img src="screens/myplot.png" width="650" title="Plot">
</p>
4425
antonov_dmitry_lab_4/dataset.csv
Normal file
22
antonov_dmitry_lab_4/lab4.py
Normal file
@@ -0,0 +1,22 @@
import pandas as pd
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt

# Load the dataset
data = pd.read_csv('dataset.csv')

# Select the required features
X = data[['Gender', 'International', 'GDP']]

# Apply t-SNE for dimensionality reduction
tsne = TSNE(n_components=2, random_state=42)
X_tsne = tsne.fit_transform(X)

# Visualize the data
plt.scatter(X_tsne[:, 0], X_tsne[:, 1], c=data['Target'], cmap='viridis')
plt.colorbar()
plt.xlabel('t-SNE х')
plt.ylabel('t-SNE у')
plt.title('t-SNE визуализация')
plt.show()
BIN
antonov_dmitry_lab_4/screens/myplot.png
Normal file
After Width: | Height: | Size: 37 KiB |
42
antonov_dmitry_lab_5/README.md
Normal file
@@ -0,0 +1,42 @@
# Lab 5 Regression

Use the regression method specified for your variant on the data from the course-project dataset
Predict students' dropout and academic success (student dropout),
formulating the task yourself. Assess how well it suits
the task you formulated.

# Variant 3

Lasso regression

# Run

Run the script file (prints to the console).

# Model description:

Lasso is a regression analysis method used in statistics and
machine learning to predict the value of a dependent variable.

Lasso regression uses L1 regularization, adding a penalty equal to the absolute
values of the coefficients. This shrinks some coefficients and sets others exactly to 0,
performing automatic feature selection. Ordinary regression has no regularization.
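For reference, scikit-learn's Lasso minimizes the following objective (this is the library's standard formulation, not something specific to this lab):

$$\min_{w}\; \frac{1}{2\,n_{\text{samples}}}\,\lVert y - Xw \rVert_2^2 \;+\; \alpha\,\lVert w \rVert_1$$

Here α is the alpha parameter referred to in the results below: the larger it is, the stronger the shrinkage and the more coefficients are driven exactly to zero.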
# Regression task

For the students' dropout and academic success dataset, predict dropout
using Lasso regression on the following features (a minimal example setup is sketched after this section):
'Curricular units 2nd sem (approved)'
'Curricular units 2nd sem (grade)'
'Tuition fees up to date'
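No script for this lab is included in this diff, so the following is only a minimal sketch of how the task above could be set up with scikit-learn. It assumes the same dataset.csv layout as the other labs, with a numeric 'Target' column; the split and alpha value are illustrative assumptions, not the author's exact code:

```python
import pandas as pd
from sklearn.linear_model import Lasso
from sklearn.model_selection import train_test_split

# Assumed layout: dataset.csv with a numeric 'Target' column, as in the other labs
data = pd.read_csv('dataset.csv')
features = ['Curricular units 2nd sem (approved)',
            'Curricular units 2nd sem (grade)',
            'Tuition fees up to date']
X, y = data[features], data['Target']
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Lasso with a small regularization strength; coefficients that are
# driven to zero indicate features the model effectively discards
lasso = Lasso(alpha=0.01)
lasso.fit(X_train, y_train)
print("coefficients:", dict(zip(features, lasso.coef_.round(4))))
print("R^2 on the test set:", round(lasso.score(X_test, y_test), 4))
```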
# Results

The regression quality for the features listed above was 0.6256 (alpha = 0.01).
When the regularization coefficient is varied in the range from 0.01 to 1.5, the quality of the model
only degrades, so for these parameters a plain linear regression model is more appropriate, since,
judging by the results, the relationship in these features is linear.
For these features the Lasso regression model is a poor fit, and another model should be sought.

<p>
<div>Plot</div>
<img src="screens/myplot.png" width="650" title="Plot">
</p>