Compare commits
47 Commits
zhukova_al ... kozlov_ale
| Author | SHA1 | Date |
|---|---|---|
| | 0b1489dd30 | |
| | 1cd312ba98 | |
| | ea025d0b4a | |
| | 43ec6863e4 | |
| | e88d1e7fcd | |
| | 915ec905c6 | |
| | 4bdc8ea733 | |
| | 236483abf1 | |
| | 8673873a52 | |
| | 5035ca66da | |
| | b9503c8388 | |
| | 7c2ddd98f6 | |
| | be4ae51c11 | |
| | 5a83f61bd4 | |
| | 2efc882a02 | |
| | 676080d48d | |
| | 96b55c6711 | |
| | 4498fb5531 | |
| | fdda9659eb | |
| | 099679a413 | |
| | dfa7f803fd | |
| | c527892559 | |
| | d915c4d712 | |
| | b5fc91cfdb | |
| | ac68008d93 | |
| | 75e614aa55 | |
| | 45dc8c70ea | |
| | 86d0b82b5a | |
| | 23e62553d2 | |
| | 7d5463198a | |
| | b04582b80e | |
| | d8470fb939 | |
| | 8fae6c78f8 | |
| | 0ce611b443 | |
| | fc5942cdb1 | |
| | 68d1b445a2 | |
| | eb27f1410a | |
| | 4bec95e80f | |
| | d0c010c491 | |
| | 790641d82f | |
| | ccc3352aa2 | |
| | fece83fa1a | |
| | ba4a6f1402 | |
| | 71b16e78b7 | |
| | 97493ae413 | |
| | b58da2aab4 | |
| | 4f479043f1 | |
143
.idea/workspace.xml
generated
@@ -4,10 +4,9 @@
<option name="autoReloadType" value="SELECTIVE" />
</component>
<component name="ChangeListManager">
<list default="true" id="0ceb130e-88da-4a20-aad6-17f5ab4226ac" name="Changes" comment="">
<change beforePath="$PROJECT_DIR$/.idea/IIS_2023_1.iml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/IIS_2023_1.iml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/.idea/misc.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/misc.xml" afterDir="false" />
<list default="true" id="0ceb130e-88da-4a20-aad6-17f5ab4226ac" name="Changes" comment="commit 3">
<change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/istyukov_timofey_lab1/lab1.py" beforeDir="false" afterPath="$PROJECT_DIR$/istyukov_timofey_lab1/lab1.py" afterDir="false" />
</list>
<option name="SHOW_DIALOG" value="false" />
<option name="HIGHLIGHT_CONFLICTS" value="true" />
@@ -43,29 +42,50 @@
<option name="hideEmptyMiddlePackages" value="true" />
<option name="showLibraryContents" value="true" />
</component>
<component name="PropertiesComponent">{
"keyToString": {
"RunOnceActivity.OpenProjectViewOnStart": "true",
"RunOnceActivity.ShowReadmeOnStart": "true",
"WebServerToolWindowFactoryState": "false",
"git-widget-placeholder": "senkin__alexander__lab__1",
"last_opened_file_path": "D:/ulstukek/Course4/IIS/labs",
"node.js.detected.package.eslint": "true",
"node.js.detected.package.tslint": "true",
"node.js.selected.package.eslint": "(autodetect)",
"node.js.selected.package.tslint": "(autodetect)",
"nodejs_package_manager_path": "npm",
"settings.editor.selected.configurable": "reference.settings.ide.settings.new.ui",
"vue.rearranger.settings.migration": "true"
<component name="PropertiesComponent"><![CDATA[{
"keyToString": {
"RunOnceActivity.OpenProjectViewOnStart": "true",
"RunOnceActivity.ShowReadmeOnStart": "true",
"WebServerToolWindowFactoryState": "false",
"git-widget-placeholder": "senkin__alexander__lab__1",
"last_opened_file_path": "D:/ulstukek/Course4/IIS/labs",
"node.js.detected.package.eslint": "true",
"node.js.detected.package.tslint": "true",
"node.js.selected.package.eslint": "(autodetect)",
"node.js.selected.package.tslint": "(autodetect)",
"nodejs_package_manager_path": "npm",
"settings.editor.selected.configurable": "preferences.sourceCode",
"vue.rearranger.settings.migration": "true"
}
}</component>
}]]></component>
<component name="RecentsManager">
<key name="CopyFile.RECENT_KEYS">
<recent name="D:\ulstukek\Course4\IIS\IISLabs\IIS_2023_1\zavrazhnova_svetlana_lab_3" />
<recent name="D:\ulstukek\Course4\IIS\IISLabs\IIS_2023_1\zavrazhnova_svetlana_lab_1" />
</key>
</component>
<component name="RunManager">
<component name="RunManager" selected="Python.lab1">
<configuration name="lab1" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
<module name="IIS_2023_1" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
<envs>
<env name="PYTHONUNBUFFERED" value="1" />
</envs>
<option name="SDK_HOME" value="E:\Programms\Python\python.exe" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/istyukov_timofey_lab1" />
<option name="IS_MODULE_SDK" value="false" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/istyukov_timofey_lab1/lab1.py" />
<option name="PARAMETERS" value="" />
<option name="SHOW_COMMAND_LINE" value="false" />
<option name="EMULATE_TERMINAL" value="false" />
<option name="MODULE_MODE" value="false" />
<option name="REDIRECT_INPUT" value="false" />
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
<configuration name="zavrazhnova_svetlana_lab3_2" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
<module name="IIS_2023_1" />
<option name="INTERPRETER_OPTIONS" value="" />
@@ -132,13 +152,19 @@
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
<list>
<item itemvalue="Python.lab1" />
<item itemvalue="Python.zavrazhnova_svetlana_lab3_2" />
<item itemvalue="Python.zavrazhnova_svetlana_lab_2" />
<item itemvalue="Python.zavrazhnova_svetlana_lab_3_1" />
</list>
<recent_temporary>
<list>
<item itemvalue="Python.lab1" />
<item itemvalue="Python.zavrazhnova_svetlana_lab_3_1" />
<item itemvalue="Python.zavrazhnova_svetlana_lab_2" />
<item itemvalue="Python.zavrazhnova_svetlana_lab3_2" />
<item itemvalue="Python.zavrazhnova_svetlana_lab3_2" />
<item itemvalue="Python.zavrazhnova_svetlana_lab_3_1" />
</list>
</recent_temporary>
</component>
@@ -153,6 +179,35 @@
<workItem from="1697735437405" duration="1706000" />
<workItem from="1697740229646" duration="3802000" />
</task>
<task id="LOCAL-00001" summary="commit 1">
<created>1702203771661</created>
<option name="number" value="00001" />
<option name="presentableId" value="LOCAL-00001" />
<option name="project" value="LOCAL" />
<updated>1702203771661</updated>
</task>
<task id="LOCAL-00002" summary="commit 2">
<created>1702208133904</created>
<option name="number" value="00002" />
<option name="presentableId" value="LOCAL-00002" />
<option name="project" value="LOCAL" />
<updated>1702208133904</updated>
</task>
<task id="LOCAL-00003" summary="create README">
<created>1702208193675</created>
<option name="number" value="00003" />
<option name="presentableId" value="LOCAL-00003" />
<option name="project" value="LOCAL" />
<updated>1702208193675</updated>
</task>
<task id="LOCAL-00004" summary="commit 3">
<created>1702208529340</created>
<option name="number" value="00004" />
<option name="presentableId" value="LOCAL-00004" />
<option name="project" value="LOCAL" />
<updated>1702208529340</updated>
</task>
<option name="localTasksCounter" value="5" />
<servers />
</component>
<component name="TypeScriptGeneratedFilesManager">
@@ -169,7 +224,14 @@
<entry key="branch">
<value>
<list>
<option value="HEAD" />
<option value="istyukov_timofey_lab_1" />
</list>
</value>
</entry>
<entry key="user">
<value>
<list>
<option value="*" />
</list>
</value>
</entry>
@@ -180,8 +242,43 @@
</entry>
</map>
</option>
<option name="RECENT_FILTERS">
<map>
<entry key="Branch">
<value>
<list>
<RecentGroup>
<option name="FILTER_VALUES">
<option value="istyukov_timofey_lab_1" />
</option>
</RecentGroup>
<RecentGroup>
<option name="FILTER_VALUES">
<option value="HEAD" />
</option>
</RecentGroup>
</list>
</value>
</entry>
<entry key="User">
<value>
<list>
<RecentGroup>
<option name="FILTER_VALUES">
<option value="*" />
</option>
</RecentGroup>
</list>
</value>
</entry>
</map>
</option>
</component>
<component name="com.intellij.coverage.CoverageDataManagerImpl">
<SUITE FILE_PATH="coverage/PyCharmProjects$senkin_alexander_lab_1.coverage" NAME="senkin_alexander_lab_1 Coverage Results" MODIFIED="1697744262965" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$/senkin_alexander_lab_1" />
<component name="VcsManagerConfiguration">
<MESSAGE value="commit 1" />
<MESSAGE value="commit 2" />
<MESSAGE value="create README" />
<MESSAGE value="commit 3" />
<option name="LAST_COMMIT_MESSAGE" value="commit 3" />
</component>
</project>
46
arzamaskina_milana_lab_7/README.md
Normal file
@@ -0,0 +1,46 @@
# Laboratory work No. 7

## Recurrent neural network and the text generation task

#### PIbd-41 Arzamaskina Milana
#### Variant No. 2

### Technologies used:

Libraries used:
* numpy
* keras
* tensorflow

### How to run:

* install python, numpy, keras, tensorflow
* run the project (entry point - main.py)

### What the program does:

A recurrent neural network is trained on the selected literary texts to solve the text generation task.
The architecture and parameters have to be tuned to get as close as possible to a meaningful result.

* Reads the text from files (english.txt, russian.txt)
* Builds the input and output data (X, y), the vocabulary size and the tokenizer. Tokenizer is used with the char_level=True setting
* Creates a Sequential object (a sequential recurrent neural network) and adds two LSTM layers. Dropout is a regularization technique for neural networks and deep-learning models that counters overfitting. A Dense layer with a softmax activation is used to predict the next character
* Compiles the model
* Trains the model
* Generates text (a minimal sketch of this pipeline is shown below)
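Below is a minimal, illustrative sketch of the pipeline described above (char-level tokenization, sliding-window (X, y) pairs, two stacked LSTM layers with Dropout, and a softmax Dense output). It is not the exact code from main.py: the toy corpus, layer sizes, dropout rate and epoch count are assumptions for illustration, and it assumes a Keras version where keras.preprocessing.text.Tokenizer is still available (the same import main.py uses).

```python
import numpy as np
from keras.layers import LSTM, Dense, Dropout
from keras.models import Sequential
from keras.preprocessing.text import Tokenizer  # same import style as main.py

text = "It sparkled and smoked, surrounded by wisps of ashen clouds."  # toy corpus
seq_length = 10

# Char-level tokenization: every character becomes an integer index
tokenizer = Tokenizer(char_level=True)
tokenizer.fit_on_texts([text])
encoded = tokenizer.texts_to_sequences([text])[0]
vocab_size = len(tokenizer.word_index) + 1  # +1 because index 0 is reserved

# Sliding window: seq_length characters as input, the next character as target
X = np.array([encoded[i - seq_length:i] for i in range(seq_length, len(encoded))])
y = np.array([encoded[i] for i in range(seq_length, len(encoded))])
X = X[..., np.newaxis]  # LSTM expects (samples, timesteps, features)

# Two LSTM layers with Dropout for regularization, softmax over the vocabulary
model = Sequential([
    LSTM(128, input_shape=(seq_length, 1), return_sequences=True),
    Dropout(0.2),
    LSTM(64),
    Dense(vocab_size, activation="softmax"),
])
model.compile(loss="sparse_categorical_crossentropy", optimizer="adam", metrics=["accuracy"])
model.fit(X, y, epochs=5, verbose=0)  # a few epochs just to exercise the call

# One greedy generation step: predict the character that follows a seed window
seed = np.array([encoded[:seq_length]])[..., np.newaxis]
probs = model.predict(seed, verbose=0)[0]
next_index = int(np.argmax(probs[1:])) + 1  # skip index 0, which the tokenizer never assigns
print("next char:", tokenizer.index_word[next_index])
```

main.py follows the same structure, but trains on the full english.txt/russian.txt corpora with larger LSTM layers and more epochs.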
#### Generated texts:

Generation in Russian:

![result](img1.png)

Generation in English:

![result](img2.png)

### Conclusion:

The program is able to generate meaningful text in both cases.
8
arzamaskina_milana_lab_7/english.txt
Normal file
@@ -0,0 +1,8 @@
The cloud shuddered with blue flame. Thunder rumbled slowly.
It either intensified or almost died down. And the rain, obeying the thunder, began to fall harder at times and rustle widely through the leaves, then stopped.
Soon the sun broke through the clouds. The old Pushkin Park in Mikhailovskoye and the steep banks of Soroti were ablaze with red clay and wet grass.
A slender rainbow lit up across the cloudy distance. It sparkled and smoked, surrounded by wisps of ashen clouds.
The rainbow looked like an arch erected on the border of a protected land. Here, in Pushkin’s places, thoughts about the Russian language arose with particular force.
Here Pushkin wandered with his head uncovered, with his cold hair tangled by the autumn wind, listening to the wet hum of the pine tops, looking, squinting,
from where the autumn clouds rush, I rushed around the fairs. Here wonderful words overwhelmed him, oppressed his soul and, finally, were composed, one by one, with the stub of a goose feather, into ringing stanzas.
BIN
arzamaskina_milana_lab_7/img1.png
Normal file
After: Size 106 KiB
BIN
arzamaskina_milana_lab_7/img2.png
Normal file
After: Size 103 KiB
62
arzamaskina_milana_lab_7/main.py
Normal file
@@ -0,0 +1,62 @@
import numpy as np
from keras.layers import LSTM, Dense
from keras.models import Sequential
from keras.preprocessing.sequence import pad_sequences
from keras.preprocessing.text import Tokenizer

# Read the text from a file
# with open('russian.txt', 'r', encoding='utf-8') as file:
#     text = file.read()
with open('english.txt', 'r', encoding='utf-8') as file:
    text = file.read()

# Fit the Tokenizer on the text
tokenizer = Tokenizer(char_level=True)
tokenizer.fit_on_texts([text])
sequences = tokenizer.texts_to_sequences([text])[0]

# Build the x, y sequences
X_data, y_data = [], []
seq_length = 10
for i in range(seq_length, len(sequences)):
    sequence = sequences[i - seq_length:i]
    target = sequences[i]
    X_data.append(sequence)
    y_data.append(target)

# Convert to arrays
X_mass = pad_sequences(X_data, maxlen=seq_length)
y_mass = np.array(y_data)

# Build the model
vocab_size = len(tokenizer.word_index) + 1
model = Sequential()
model.add(LSTM(256, input_shape=(seq_length, 1), return_sequences=True))
model.add(LSTM(128, input_shape=(seq_length, 1)))
model.add(Dense(vocab_size, activation='softmax'))

# Compile
model.compile(loss='sparse_categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

# Train
model.fit(X_mass, y_mass, epochs=100, verbose=1)

# Text generation function
def generate_text(_text, gen_length):
    generated_text = _text
    for _ in range(gen_length):
        seq = tokenizer.texts_to_sequences([_text])[0]
        seq = pad_sequences([seq], maxlen=seq_length)
        prediction = model.predict(seq)[0]
        predicted_index = np.argmax(prediction)
        predicted_char = tokenizer.index_word[predicted_index]
        generated_text += predicted_char
        _text += predicted_char
        _text = _text[1:]
    return generated_text

# Generate text
# _text = "Она сверкала"
_text = "It sparkled and smoked"
generate_text = generate_text(_text, 250)
print(generate_text)
7
arzamaskina_milana_lab_7/russian.txt
Normal file
@@ -0,0 +1,7 @@
Тучу передернуло синим пламенем. Медленно загремел гром.
Он то усиливался, то почти затихал. И дождь, подчиняясь грому, начал временами идти сильнее и широко шуметь по листве, потом останавливался.
Вскоре сквозь тучи пробилось солнце. Старый пушкинский парк в Михайловском и крутые берега Сороти запылали рыжей глиной и мокрой травой.
Стройная радуга зажглась нал пасмурной далью. Она сверкала и дымилась, окруженная космами пепельных туч.
Радуга была похожа на арку, воздвигнутую на границе заповедной земли. С особенной силой здесь, в пушкинских местах, возникали мысли о русском языке.
Здесь Пушкин бродил с непокрытой головой, со спутанными осенним ветром холодными волосами, слушал влажный гул сосновых вершин, смотрел, прищурившись,
откуда несутся осенние тучи, толкался по ярмаркам. Здесь чудесные слова переполняли его, стесняли его душу и, наконец, слагались по огрызком гусиного пера в звенящие строфы.
136
basharin_sevastyan_lab_7/69209.txt
Normal file
@@ -0,0 +1,136 @@
Annotation
The Fellowship of the Ring is the first part of J.R.R.Tolkien's epic adventure, The Lord Of The Rings.
Sauron, the Dark Lord, has gathered to him all the Rings of Power - the means by which he intends to rule Middle-earth. All he lacks in his plans for dominion is the One Ring - the ring that rules them all - which has fallen into the hands of the hobbit Bilbo Baggins.
In a sleepy village in the Shire, young Frodo Baggins finds himself faced with an immense task, as his elderly cousin Bilbo entrusts the Ring to his care. Frodo must leave his home and make a perilous journey across Middle-earh to the Cracks of Doom, there to destroy the Ring and foil the Dark Lord in his evil purpose.
* * *
JRR Tolkien The Lord of the Ring 1 - The Fellowship of the Ring
Table of Contents
Foreward
This tale grew in the telling, until it became a history of the Great War of the Ring and included many glimpses of the yet more ancient history that preceded it. It was begun soon afterThe Hobbit was written and before its publication in 1937; but I did not go on with this sequel, for I wished first to complete and set in order the mythology and legends of the Elder Days, which had then been taking shape for some years. I desired to do this for my own satisfaction, and I had little hope that other people would be interested in this work, especially since it was primarily linguistic in inspiration and was begun in order to provide the necessary background of 'history' for Elvish tongues.
When those whose advice and opinion I sought correctedlittle hope tono hope, I went back to the sequel, encouraged by requests from readers for more information concerning hobbits and their adventures. But the story was drawn irresistibly towards the older world, and became an account, as it were, of its end and passing away before its beginning and middle had been told. The process had begun in the writing ofThe Hobbit, in which there were already some references to the older matter: Elrond, Gondolin, the High-elves, and the orcs, as well as glimpses that had arisen unbidden of things higher or deeper or darker than its surface: Durin, Moria, Gandalf, the Necromancer, the Ring. The discovery of the significance of these glimpses and of their relation to the ancient histories revealed the Third Age and its culmination in the War of the Ring.
Those who had asked for more information about hobbits eventually got it, but they had to wait a long time; for the composition ofThe Lord of the Rings went on at intervals during the years 1936 to 1949, a period in which I had many duties that I did not neglect, and many other interests as a learner and teacher that often absorbed me. The delay was, of course, also increased by the outbreak of war in 1939, by the end of which year the tale had not yet reached the end of Book One. In spite of the darkness of the next five years I found that the story could not now be wholly abandoned, and I plodded on, mostly by night, till I stood by Balin's tomb in Moria. There I halted for a long while. It was almost a year later when I went on and so came to Lothlorien and the Great River late in 1941. In the next year I wrote the first drafts of the matter that now stands as Book Three, and the beginnings of chapters I and III of Book Five; and there as the beacons flared in Anorien and Theoden came to Harrowdale I stopped. Foresight had failed and there was no time for thought.
It was during 1944 that, leaving the loose ends and perplexities of a war which it was my task to conduct, or at least to report, 1 forced myself to tackle the journey of Frodo to Mordor. These chapters, eventually to become Book Four, were written and sent out as a serial to my son, Christopher, then in South Africa with the RAF. Nonetheless it took another five years before the tale was brought to its present end; in that time I changed my house, my chair, and my college, and the days though less dark were no less laborious. Then when the 'end' had at last been reached the whole story had to be revised, and indeed largely re-written backwards. And it had to be typed, and re-typed: by me; the cost of professional typing by the ten-fingered was beyond my means.
The Lord of the Ringshas been read by many people since it finally appeared in print; and I should like to say something here with reference to the many opinions or guesses that I have received or have read concerning the motives and meaning of the tale. The prime motive was the desire of a tale-teller to try his hand at a really long story that would hold the attention of readers, amuse them, delight them, and at times maybe excite them or deeply move them. As a guide I had only my own feelings for what is appealing or moving, and for many the guide was inevitably often at fault. Some who have read the book, or at any rate have reviewed it, have found it boring, absurd, or contemptible; and I have no cause to complain, since I have similar opinions of their works, or of the kinds of writing that they evidently prefer. But even from the points of view of many who have enjoyed my story there is much that fails to please. It is perhaps not possible in a long tale to please everybody at all points, nor to displease everybody at the same points; for I find from the letters that I have received that the passages or chapters that are to some a blemish are all by others specially approved. The most critical reader of all, myself, now finds many defects, minor and major, but being fortunately under no obligation either to review the book or to write it again, he will pass over these in silence, except one that has been noted by others: the book is too short.
As for any inner meaning or 'message', it has in the intention of the author none. It is neither allegorical nor topical. As the story grew it put down roots (into the past) and threw out unexpected branches: but its main theme was settled from the outset by the inevitable choice of the Ring as the link between it andThe Hobbit. The crucial chapter, "The Shadow of the Past', is one of the oldest parts of the tale. It was written long before the foreshadow of 1939 had yet become a threat of inevitable disaster, and from that point the story would have developed along essentially the same lines, if that disaster had been averted. Its sources are things long before in mind, or in some cases already written, and little or nothing in it was modified by the war that began in 1939 or its sequels.
The real war does not resemble the legendary war in its process or its conclusion. If it had inspired or directed the development of the legend, then certainly the Ring would have been seized and used against Sauron; he would not have been annihilated but enslaved, and Barad-dur would not have been destroyed but occupied. Saruman, failing to get possession of the Ring, would m the confusion and treacheries of the time have found in Mordor the missing links in his own researches into Ring-lore, and before long he would have made a Great Ring of his own with which to challenge the self-styled Ruler of Middle-earth. In that conflict both sides would have held hobbits in hatred and contempt: they would not long have survived even as slaves.
Other arrangements could be devised according to the tastes or views of those who like allegory or topical reference. But I cordially dislike allegory in all its manifestations, and always have done so since I grew old and wary enough to detect its presence. I much prefer history, true or feigned, with its varied applicability to the thought and experience of readers. I think that many confuse 'applicability' with 'allegory'; but the one resides in the freedom of the reader, and the other in the purposed domination of the author.
An author cannot of course remain wholly unaffected by his experience, but the ways in which a story-germ uses the soil of experience are extremely complex, and attempts to define the process are at best guesses from evidence that is inadequate and ambiguous. It is also false, though naturally attractive, when the lives of an author and critic have overlapped, to suppose that the movements of thought or the events of times common to both were necessarily the most powerful influences. One has indeed personally to come under the shadow of war to feel fully its oppression; but as the years go by it seems now often forgotten that to be caught in youth by 1914 was no less hideous an experience than to be involved in 1939 and the following years. By 1918 all but one of my close friends were dead. Or to take a less grievous matter: it has been supposed by some that "The Scouring of the Shire' reflects the situation in England at the time when I was finishing my tale. It does not. It is an essential part of the plot, foreseen from the outset, though in the event modified by the character of Saruman as developed in the story without, need I say, any allegorical significance or contemporary political reference whatsoever. It has indeed some basis in experience, though slender (for the economic situation was entirely different), and much further back. The country in which I lived in childhood was being shabbily destroyed before I was ten, in days when motor-cars were rare objects (I had never seen one) and men were still building suburban railways. Recently I saw in a paper a picture of the last decrepitude of the once thriving corn-mill beside its pool that long ago seemed to me so important. I never liked the looks of the Young miller, but his father, the Old miller, had a black beard, and he was not named Sandyman.
The Lord of the Ringsis now issued in a new edition, and the opportunity has been taken of revising it. A number of errors and inconsistencies that still remained in the text have been corrected, and an attempt has been made to provide information on a few points which attentive readers have raised. I have considered all their comments and enquiries, and if some seem to have been passed over that may be because I have failed to keep my notes in order; but many enquiries could only be answered by additional appendices, or indeed by the production of an accessory volume containing much of the material that I did not include in the original edition, in particular more detailed linguistic information. In the meantime this edition offers this Foreword, an addition to the Prologue, some notes, and an index of the names of persons and places. This index is in intention complete in items but not in references, since for the present purpose it has been necessary to reduce its bulk. A complete index, making full use of the material prepared for me by Mrs. N. Smith, belongs rather to the accessory volume.
Prologue
This book is largely concerned with Hobbits, and from its pages a reader may discover much of their character and a little of their history. Further information will also be found in the selection from the Red Book of Westmarch that has already been published, under the title ofThe Hobbit . That story was derived from the earlier chapters of the Red Book, composed by Bilbo himself, the first Hobbit to become famous in the world at large, and called by himThere and Back Again, since they told of his journey into the East and his return: an adventure which later involved all the Hobbits in the great events of that Age that are here related.
Many, however, may wish to know more about this remarkable people from the outset, while some may not possess the earlier book. For such readers a few notes on the more important points are here collected from Hobbit-lore, and the first adventure is briefly recalled.
Hobbits are an unobtrusive but very ancient people, more numerous formerly than they are today; for they love peace and quiet and good tilled earth: a well-ordered and well-farmed countryside was their favourite haunt. They do not and did not understand or like machines more complicated than a forge-bellows, a water-mill, or a hand-loom, though they were skilful with tools. Even in ancient days they were, as a rule, shy of 'the Big Folk', as they call us, and now they avoid us with dismay and are becoming hard to find. They are quick of hearing and sharp-eyed, and though they are inclined to be fat and do not hurry unnecessarily, they are nonetheless nimble and deft in their movements. They possessed from the first the art of disappearing swiftly and silently, when large folk whom they do not wish to meet come blundering by; and this an they have developed until to Men it may seem magical. But Hobbits have never, in fact, studied magic of any kind, and their elusiveness is due solely to a professional skill that heredity and practice, and a close friendship with the earth, have rendered inimitable by bigger and clumsier races.
For they are a little people, smaller than Dwarves: less tout and stocky, that is, even when they are not actually much shorter. Their height is variable, ranging between two and four feet of our measure. They seldom now reach three feet; but they hive dwindled, they say, and in ancient days they were taller. According to the Red Book, Bandobras Took (Bullroarer), son of Isengrim the Second, was four foot five and able to ride a horse. He was surpassed in all Hobbit records only by two famous characters of old; but that curious matter is dealt with in this book.
As for the Hobbits of the Shire, with whom these tales are concerned, in the days of their peace and prosperity they were a merry folk. They dressed in bright colours, being notably fond of yellow and green; but they seldom wore shoes, since their feet had tough leathery soles and were clad in a thick curling hair, much like the hair of their heads, which was commonly brown. Thus, the only craft little practised among them was shoe-making; but they had long and skilful fingers and could make many other useful and comely things. Their faces were as a rule good-natured rather than beautiful, broad, bright-eyed, red-cheeked, with mouths apt to laughter, and to eating and drinking. And laugh they did, and eat, and drink, often and heartily, being fond of simple jests at all times, and of six meals a day (when they could get them). They were hospitable and delighted in parties, and in presents, which they gave away freely and eagerly accepted.
It is plain indeed that in spite of later estrangement Hobbits are relatives of ours: far nearer to us than Elves, or even than Dwarves. Of old they spoke the languages of Men, after their own fashion, and liked and disliked much the same things as Men did. But what exactly our relationship is can no longer be discovered. The beginning of Hobbits lies far back in the Elder Days that are now lost and forgotten. Only the Elves still preserve any records of that vanished time, and their traditions are concerned almost entirely with their own history, in which Men appear seldom and Hobbits are not mentioned at all. Yet it is clear that Hobbits had, in fact, lived quietly in Middle-earth for many long years before other folk became even aware of them. And the world being after all full of strange creatures beyond count, these little people seemed of very little importance. But in the days of Bilbo, and of Frodo his heir, they suddenly became, by no wish of their own, both important and renowned, and troubled the counsels of the Wise and the Great.
Those days, the Third Age of Middle-earth, are now long past, and the shape of all lands has been changed; but the regions in which Hobbits then lived were doubtless the same as those in which they still linger: the North-West of the Old World, east of the Sea. Of their original home the Hobbits in Bilbo's time preserved no knowledge. A love of learning (other than genealogical lore) was far from general among them, but there remained still a few in the older families who studied their own books, and even gathered reports of old times and distant lands from Elves, Dwarves, and Men. Their own records began only after the settlement of the Shire, and their most ancient legends hardly looked further back than their Wandering Days. It is clear, nonetheless, from these legends, and from the evidence of their peculiar words and customs, that like many other folk Hobbits had in the distant past moved westward. Their earliest tales seem to glimpse a time when they dwelt in the upper vales of Anduin, between the eaves of Greenwood the Great and the Misty Mountains. Why they later undertook the hard and perilous crossing of the mountains into Eriador is no longer certain. Their own accounts speak of the multiplying of Men in the land, and of a shadow that fell on the forest, so that it became darkened and its new name was Mirkwood.
Before the crossing of the mountains the Hobbits had already become divided into three somewhat different breeds: Harfoots, Stoors, and Fallohides. The Harfoots were browner of skin, smaller, and shorter, and they were beardless and bootless; their hands and feet were neat and nimble; and they preferred highlands and hillsides. The Stoors were broader, heavier in build; their feet and hands were larger, and they preferred flat lands and riversides. The Fallohides were fairer of skin and also of hair, and they were taller and slimmer than the others; they were lovers of trees and of woodlands.
The Harfoots had much to do with Dwarves in ancient times, and long lived in the foothills of the mountains. They moved westward early, and roamed over Eriador as far as Weathertop while the others were still in the Wilderland. They were the most normal and representative variety of Hobbit, and far the most numerous. They were the most inclined to settle in one place, and longest preserved their ancestral habit of living in tunnels and holes.
The Stoors lingered long by the banks of the Great River Anduin, and were less shy of Men. They came west after the Harfoots and followed the course of the Loudwater southwards; and there many of them long dwelt between Tharbad and the borders of Dunland before they moved north again.
The Fallohides, the least numerous, were a northerly branch. They were more friendly with Elves than the other Hobbits were, and had more skill in language and song than in handicrafts; and of old they preferred hunting to tilling. They crossed the mountains north of Rivendell and came down the River Hoarwell. In Eriador they soon mingled with the other kinds that had preceded them, but being somewhat bolder and more adventurous, they were often found as leaders or chieftains among clans of Harfoots or Stoors. Even in Bilbo's time the strong Fallohidish strain could still be noted among the greater families, such as the Tooks and the Masters of Buckland.
In the westlands of Eriador, between the Misty Mountains and the Mountains of Lune, the Hobbits found both Men and Elves. Indeed, a remnant still dwelt there of the Dunedain, the kings of Men that came over the Sea out of Westernesse; but they were dwindling fast and the lands of their North Kingdom were falling far and wide into waste. There was room and to spare for incomers, and ere long the Hobbits began to settle in ordered communities. Most of their earlier settlements had long disappeared and been forgotten in Bilbo's time; but one of the first to become important still endured, though reduced in size; this was at Bree and in the Chetwood that lay round about, some forty miles east of the Shire.
It was in these early days, doubtless, that the Hobbits learned their letters and began to write after the manner of the Dunedain, who had in their turn long before learned the art from the Elves. And in those days also they forgot whatever languages they had used before, and spoke ever after the Common Speech, the Westron as it was named, that was current through all the lands of the kings from Arnor to Gondor, and about all the coasts of the Sea from Belfalas to Lune. Yet they kept a few words of their own, as well as their own names of months and days, and a great store of personal names out of the past.
About this time legend among the Hobbits first becomes history with a reckoning of years. For it was in the one thousand six hundred and first year of the Third Age that the Fallohide brothers, Marcho and Blanco, set out from Bree; and having obtained permission from the high king at Fornost, they crossed the brown river Baranduin with a great following of Hobbits. They passed over the Bridge of Stonebows, that had been built in the days of the power of the North Kingdom, and they took ail the land beyond to dwell in, between the river and the Far Downs. All that was demanded of them was that they should keep the Great Bridge in repair, and all other bridges and roads, speed the king's messengers, and acknowledge his lordship.
Thus began theShire-reckoning, for the year of the crossing of the Brandywine (as the Hobbits turned the name) became Year One of the Shire, and all later dates were reckoned from it. At once the western Hobbits fell in love with their new land, and they remained there, and soon passed once more out of the history of Men and of Elves. While there was still a king they were in name his subjects, but they were, in fact, ruled by their own chieftains and meddled not at all with events in the world outside. To the last battle at Fornost with the Witch-lord of Angmar they sent some bowmen to the aid of the king, or so they maintained, though no tales of Men record it. But in that war the North Kingdom ended; and then the Hobbits took the land for their own, and they chose from their own chiefs a Thain to hold the authority of the king that was gone. There for a thousand years they were little troubled by wars, and they prospered and multiplied after the Dark Plague (S.R. 37) until the disaster of the Long Winter and the famine that followed it. Many thousands then perished, but the Days of Dearth (1158-60) were at the time of this tale long past and the Hobbits had again become accustomed to plenty. The land was rich and kindly, and though it had long been deserted when they entered it, it had before been well tilled, and there the king had once had many farms, cornlands, vineyards, and woods.
Forty leagues it stretched from the Far Downs to the Brandywine Bridge, and fifty from the northern moors to the marshes in the south. The Hobbits named it the Shire, as the region of the authority of their Thain, and a district of well-ordered business; and there in that pleasant comer of the world they plied their well-ordered business of living, and they heeded less and less the world outside where dark things moved, until they came to think that peace and plenty were the rule in Middle-earth and the right of all sensible folk. They forgot or ignored what little they had ever known of the Guardians, and of the labours of those that made possible the long peace of the Shire. They were, in fact, sheltered, but they had ceased to remember it.
At no time had Hobbits of any kind been warlike, and they had never fought among themselves. In olden days they had, of course, been often obliged to fight to maintain themselves in a hard world; but in Bilbo's time that was very ancient history. The last battle, before this story opens, and indeed the only one that had ever been fought within the borders of the Shire, was beyond living memory: the Battle of Greenfields, S.R. 1147, in which Bandobras Took routed an invasion of Orcs. Even the weathers had grown milder, and the wolves that had once come ravening out of the North in bitter white winters were now only a grandfather's tale. So, though there was still some store of weapons in the Shire, these were used mostly as trophies, hanging above hearths or on walls, or gathered into the museum at Michel Delving. The Mathom-house it was called; for anything that Hobbits had no immediate use for, but were unwilling to throw away, they called amathom . Their dwellings were apt to become rather crowded with mathoms, and many of the presents that passed from hand to hand were of that son.
Nonetheless, ease and peace had left this people still curiously tough. They were, if it came to it, difficult to daunt or to kill; and they were, perhaps, so unwearyingly fond of good things not least because they could, when put to it, do without them, and could survive rough handling by grief, foe, or weather in a way that astonished those who did not know them well and looked no further than their bellies and their well-fed faces. Though slow to quarrel, and for sport killing nothing that lived, they were doughty at bay, and at need could still handle arms. They shot well with the bow, for they were keen-eyed and sure at the mark. Not only with bows and arrows. If any Hobbit stooped for a stone, it was well to get quickly under cover, as all trespassing beasts knew very well.
All Hobbits had originally lived in holes in the ground, or so they believed, and in such dwellings they still felt most at home; but in the course of time they had been obliged to adopt other forms of abode. Actually in the Shire in Bilbo's days it was, as a rule, only the richest and the poorest Hobbits that maintained the old custom. The poorest went on living in burrows of the most primitive kind, mere holes indeed, with only one window or none; while the well-to-do still constructed more luxurious versions of the simple diggings of old. But suitable sites for these large and ramifying tunnels (orsmials as they called them) were not everywhere to be found; and in the flats and the low-lying districts the Hobbits, as they multiplied, began to build above ground. Indeed, even in the hilly regions and the older villages, such as Hobbiton or Tuckborough, or in the chief township of the Shire, Michel Delving on the White Downs, there were now many houses of wood, brick, or stone. These were specially favoured by millers, smiths, ropers, and cartwrights, and others of that sort; for even when they had holes to live in. Hobbits had long been accustomed to build sheds and workshops.
The habit of building farmhouses and barns was said to have begun among the inhabitants of the Marish down by the Brandywine. The Hobbits of that quarter, the Eastfarthing, were rather large and heavy-legged, and they wore dwarf-boots in muddy weather. But they were well known to be Stoors in a large part of their blood, as indeed was shown by the down that many grew on their chins. No Harfoot or Fallohide had any trace of a beard. Indeed, the folk of the Marish, and of Buckland, east of the River, which they afterwards occupied, came for the most part later into the Shire up from south-away; and they still had many peculiar names and strange words not found elsewhere in the Shire.
It is probable that the craft of building, as many other crafts beside, was derived from the Dunedain. But the Hobbits may have learned it direct from the Elves, the teachers of Men in their youth. For the Elves of the High Kindred had not yet forsaken Middle-earth, and they dwelt still at that time at the Grey Havens away to the west, and in other places within reach of the Shire. Three Elf-towers of immemorial age were still to be seen on the Tower Hills beyond the western marches. They shone far off in the moonlight. The tallest was furthest away, standing alone upon a green mound. The Hobbits of the Westfarthing said that one could see the Sea from the lop of that tower; but no Hobbit had ever been known to climb it. Indeed, few Hobbits had ever seen or sailed upon the Sea, and fewer still had ever returned to report it. Most Hobbits regarded even rivers and small boats with deep misgivings, and not many of them could swim. And as the days of the Shire lengthened they spoke less and less with the Elves, and grew afraid of them, and distrustful of those that had dealings with them; and the Sea became a word of fear among them, and a token of death, and they turned their faces away from the hills in the west.
The craft of building may have come from Elves or Men, but the Hobbits used it in their own fashion. They did not go in for towers. Their houses were usually long, low, and comfortable. The oldest kind were, indeed, no more than built imitations ofsmials, thatched with dry grass or straw, or roofed with turves, and having walls somewhat bulged. That stage, however, belonged to the early days of the Shire, and hobbit-building had long since been altered, improved by devices, learned from Dwarves, or discovered by themselves. A preference for round windows, and even round doors, was the chief remaining peculiarity of hobbit-architecture.
The houses and the holes of Shire-hobbits were often large, and inhabited by large families. (Bilbo and Frodo Baggins were as bachelors very exceptional, as they were also in many other ways, such as their friendship with the Elves.) Sometimes, as in the case of the Tooks of Great Smials, or the Brandybucks of Brandy Hall, many generations of relatives lived in (comparative) peace together in one ancestral and many-tunnelled mansion. All Hobbits were, in any case, clannish and reckoned up their relationships with great care. They drew long and elaborate family-trees with innumerable branches. In dealing with Hobbits it is important to remember who is related to whom, and in what degree. It would be impossible in this book to set out a family-tree that included even the more important members of the more important families at the time which these tales tell of. The genealogical trees at the end of the Red Book of Westmarch are a small book in themselves, and all but Hobbits would find them exceedingly dull. Hobbits delighted in such things, if they were accurate: they liked to have books filled with things that they already knew, set out fair and square with no contradictions.
There is another astonishing thing about Hobbits of old that must be mentioned, an astonishing habit: they imbibed or inhaled, through pipes of clay or wood, the smoke of the burning leaves of a herb, which they calledpipe-weed orleaf, a variety probably ofNicotiana. A great deal of mystery surrounds the origin of this peculiar custom, or 'art' as the Hobbits preferred to call it. All that could be discovered about it in antiquity was put together by Meriadoc Brandybuck (later Master of Buckland), and since he and the tobacco of the Southfarthing play a part in the history that follows, his remarks in the introduction to hisHerblore of the Shire may be quoted.
"This," he says, 'is the one art that we can certainly claim to be our own invention. When Hobbits first began to smoke is not known, all the legends and family histories take it for granted; for ages folk in the Shire smoked various herbs, some fouler, some sweeter. But all accounts agree that Tobold Hornblower of Longbottom in the Southfarthing first grew the true pipe-weed in his gardens in the days of Isengrim the Second, about the year 1070 of Shire-reckoning. The best home-grown still comes from that district, especially the varieties now known as Longbottom Leaf, Old Toby, and Southern Star.
"How Old Toby came by the plant is not recorded, for to his dying day he would not tell. He knew much about herbs, but he was no traveller. It is said that in his youth he went often to Bree, though he certainly never went further from the Shire than that. It is thus quite possible that he learned of this plant in Bree, where now, at any rate, it grows well on the south slopes of the hill. The Bree-hobbits claim to have been the first actual smokers of the pipe-weed. They claim, of course, to have done everything before the people of the Shire, whom they refer to as "colonists"; but in this case their claim is, I think, likely to be true. And certainly it was from Bree that the art of smoking the genuine weed spread in the recent centuries among Dwarves and such other folk, Rangers, Wizards, or wanderers, as still passed to and fro through that ancient road-meeting. The home and centre of the an is thus to be found in the old inn of Bree,The Prancing Pony, that has been kept by the family of Butterbur from time beyond record.
"All the same, observations that I have made on my own many journeys south have convinced me that the weed itself is not native to our parts of the world, but came northward from the lower Anduin, whither it was, I suspect, originally brought over Sea by the Men of Westernesse. It grows abundantly in Gondor, and there is richer and larger than in the North, where it is never found wild, and flourishes only in warm sheltered places like Longbottom. The Men of Gondor call itsweet galenas, and esteem it only for the fragrance of its flowers. From that land it must have been carried up the Greenway during the long centuries between the coming of Elendil and our own day. But even the Dunedain of Gondor allow us this credit: Hobbits first put it into pipes. Not even the Wizards first thought of that before we did. Though one Wizard that I knew took up the art long ago, and became as skilful in it as in all other things that he put his mind to."
The Shire was divided into four quarters, the Farthings already referred to. North, South, East, and West; and these again each into a number of folklands, which still bore the names of some of the old leading families, although by the time of this history these names were no longer found only in their proper folklands. Nearly all Tooks still lived in the Tookland, but that was not true of many other families, such as the Bagginses or the Boffins. Outside the Farthings were the East and West Marches: the Buckland (see beginning of Chapter V, Book I); and the Westmarch added to the Shire in S.R. 1462.
The Shire at this time had hardly any 'government'. Families for the most part managed their own affairs. Growing food and eating it occupied most of their time. In other matters they were, as a rule, generous and not greedy, but contented and moderate, so that estates, farms, workshops, and small trades tended to remain unchanged for generations.
There remained, of course, the ancient tradition concerning the high king at Fornost, or Norbury as they called it, away north of the Shire. But there had been no king for nearly a thousand years, and even the ruins of Kings' Norbury were covered with grass. Yet the Hobbits still said of wild folk and wicked things (such as trolls) that they had not heard of the king. For they attributed to the king of old all their essential laws; and usually they kept the laws of free will, because they were The Rules (as they said), both ancient and just.
It is true that the Took family had long been pre-eminent; for the office of Thain had passed to them (from the Oldbucks) some centuries before, and the chief Took had borne that title ever since. The Thain was the master of the Shire-moot, and captain of the Shire-muster and the Hobbitry-in-arms, but as muster and moot were only held in times of emergency, which no longer occurred, the Thainship had ceased to be more than a nominal dignity. The Took family was still, indeed, accorded a special respect, for it remained both numerous and exceedingly wealthy, and was liable to produce in every generation strong characters of peculiar habits and even adventurous temperament. The latter qualities, however, were now rather tolerated (in the rich) than generally approved. The custom endured, nonetheless, of referring to the head of the family as The Took, and of adding to his name, if required, a number: such as Isengrim the Second, for instance.
The only real official in the Shire at this date was the Mayor of Michel Delving (or of the Shire), who was elected every seven years at the Free Fair on the White Downs at the Lithe, that is at Midsummer. As mayor almost his only duty was to preside at banquets, given on the Shire-holidays, which occurred at frequent intervals. But the offices of Postmaster and First Shirriff were attached to the mayoralty, so that he managed both the Messenger Service and the Watch. These were the only Shire-services, and the Messengers were the most numerous, and much the busier of the two. By no means all Hobbits were lettered, but those who were wrote constantly to all their friends (and a selection of their relations) who lived further off than an afternoon's walk.
The Shirriffs was the name that the Hobbits gave to their police, or the nearest equivalent that they possessed. They had, of course, no uniforms (such things being quite unknown), only a feather in their caps; and they were in practice rather haywards than policemen, more concerned with the strayings of beasts than of people. There were in all the Shire only twelve of them, three in each Farthing, for Inside Work. A rather larger body, varying at need, was employed to 'beat the bounds', and to see that Outsiders of any kind, great or small, did not make themselves a nuisance.
At the time when this story begins the Bounders, as they were called, had been greatly increased. There were many reports and complaints of strange persons and creatures prowling about the borders, or over them: the first sign that all was not quite as it should be, and always had been except in tales and legends of long ago. Few heeded the sign, and not even Bilbo yet had any notion of what it portended. Sixty years had passed since he set out on his memorable journey, and he was old even for Hobbits, who reached a hundred as often as not; but much evidently still remained of the considerable wealth that he had brought back. How much or how little he revealed to no one, not even to Frodo his favourite 'nephew'. And he still kept secret the ring that he bad found.
As is told in The Hobbit, there came one day to Bilbo's door the great Wizard, Gandalf the Grey, and thirteen dwarves with him: none other, indeed, than Thorin Oakenshield, descendant of kings, and his twelve companions in exile. With them he set out, to his own lasting astonishment, on a morning of April, it being then the year 1341 Shire-reckoning, on a quest of great treasure, the dwarf-hoards of the Kings under the Mountain, beneath Erebor in Dale, far off in the East. The quest was successful, and the Dragon that guarded the hoard was destroyed. Yet, though before all was won the Battle of Five Armies was fought, and Thorin was slain, and many deeds of renown were done, the matter would scarcely have concerned later history, or earned more than a note in the long annals of the Third Age, but for an 'accident' by the way. The party was assailed by Orcs in a high pass of the Misty Mountains as they went towards Wilderland; and so it happened that Bilbo was lost for a while in the black orc-mines deep under the mountains, and there, as he groped in vain in the dark, he put his hand on a ring, lying on the floor of a tunnel. He put it in his pocket. It seemed then like mere luck.
Trying to find his way out. Bilbo went on down to the roots of the mountains, until he could go no further. At the bottom of the tunnel lay a cold lake far from the light, and on an island of rock in the water lived Gollum. He was a loathsome little creature: he paddled a small boat with his large flat feet, peering with pale luminous eyes and catching blind fish with his long fingers, and eating them raw. He ate any living thing, even orc, if he could catch it and strangle it without a struggle. He possessed a secret treasure that had come to him long ages ago, when he still lived in the light: a ring of gold that made its wearer invisible. It was the one thing he loved, his 'precious', and he talked to it, even when it was not with him. For he kept it hidden safe in a hole on his island, except when he was hunting or spying on the ores of the mines.
Maybe he would have attacked Bilbo at once, if the ring had been on him when they met; but it was not, and the hobbit held in his hand an Elvish knife, which served him as a sword. So to gain time Gollum challenged Bilbo to the Riddle-game, saying that if he asked a riddle which Bilbo could not guess, then he would kill him and eat him; but if Bilbo defeated him, then he would do as Bilbo wished: he would lead him to a way out of the tunnels.
|
||||
|
||||
Since he was lost in the dark without hope, and could neither go on nor back, Bilbo accepted the challenge; and they asked one another many riddles. In the end Bilbo won the game, more by luck (as it seemed) than by wits; for he was stumped at last for a riddle to ask, and cried out, as his hand came upon the ring he had picked up and forgotten: What have I got in my pocket? This Gollum failed to answer, though he demanded three guesses.
|
||||
|
||||
The Authorities, it is true, differ whether this last question was a mere 'question' and not a 'riddle' according to the strict rules of the Game; but all agree that, after accepting it and trying to guess the answer, Gollum was bound by his promise. And Bilbo pressed him to keep his word; for the thought came to him that this slimy creature might prove false, even though such promises were held sacred, and of old all but the wickedest things feared to break them. But after ages alone in the dark Gollum's heart was black, and treachery was in it. He slipped away, and returned to the island, of which Bilbo knew nothing, not far off in the dark water. There, he thought, lay his ring. He was hungry now, and angry, and once his 'precious' was with him he would not fear any weapon at all.
|
||||
|
||||
But the ring was not on the island; he had lost it, it was gone. His screech sent a shiver down Bilbo's back, though he did not yet understand what had happened. But Gollum had at last leaped to a guess, too late. What has it got in its pocketses? he cried. The light in his eyes was like a green flame as he sped back to murder the hobbit and recover his 'precious'. Just in time Bilbo saw his peril, and he fled blindly up the passage away from the water; and once more he was saved by his luck. For just as he ran he put his hand in his pocket, and the ring slipped quietly on to his finger. So it was that Gollum passed him without seeing him, and went to guard the way out, lest the 'thief' should escape. Warily Bilbo followed him, as he went along, cursing, and talking to himself about his 'precious'; from which talk at last even Bilbo guessed the truth, and hope came to him in the darkness: he himself had found the marvellous ring and a chance of escape from the orcs and from Gollum.
|
||||
|
||||
At length they came to a halt before an unseen opening that led to the lower gates of the mines, on the eastward side of the mountains. There Gollum crouched at bay, smelling and listening; and Bilbo was tempted to slay him with his sword. But pity stayed him, and though he kept the ring, in which his only hope lay, he would not use it to help him kill the wretched creature at a disadvantage. In the end, gathering his courage, he leaped over Gollum in the dark, and fled away down the passage, pursued by his enemy's cries of hate and despair: Thief, thief! Baggins! We hates it for ever!
|
||||
|
||||
Now it is a curious fact that this is not the story as Bilbo first told it to his companions. To them his account was that Gollum had promised to give him a present, if he won the game; but when Gollum went to fetch it from his island he found the treasure was gone: a magic ring, which had been given to him long ago on his birthday. Bilbo guessed that this was the very ring that he had found, and as he had won the game, it was already his by right. But being in a tight place, he said nothing about it, and made Gollum show him the way out, as a reward instead of a present. This account Bilbo set down in his memoirs, and he seems never to have altered it himself, not even after the Council of Elrond. Evidently it still appeared in the original Red Book, as it did in several of the copies and abstracts. But many copies contain the true account (as an alternative), derived no doubt from notes by Frodo or Samwise, both of whom learned the truth, though they seem to have been unwilling to delete anything actually written by the old hobbit himself.
|
||||
|
||||
Gandalf, however, disbelieved Bilbo's first story, as soon as he heard it, and he continued to be very curious about the ring. Eventually he got the true tale out of Bilbo after much questioning, which for a while strained their friendship; but the wizard seemed to think the truth important. Though he did not say so to Bilbo, he also thought it important, and disturbing, to find that the good hobbit had not told the truth from the first: quite contrary to his habit. The idea of a 'present' was not mere hobbitlike invention, all the same. It was suggested to Bilbo, as he confessed, by Gollum's talk that he overheard; for Gollum did, in fact, call the ring his 'birthday present', many times. That also Gandalf thought strange and suspicious; but he did not discover the truth in this point for many more years, as will be seen in this book.
|
||||
|
||||
Of Bilbo's later adventures little more need be said here. With the help of the ring he escaped from the orc-guards at the gate and rejoined his companions. He used the ring many times on his quest, chiefly for the help of his friends; but he kept it secret from them as long as he could. After his return to his home he never spoke of it again to anyone, save Gandalf and Frodo; and no one else in the Shire knew of its existence, or so he believed. Only to Frodo did he show the account of his Journey that he was writing.
|
||||
|
||||
His sword, Sting, Bilbo hung over his fireplace, and his coat of marvellous mail, the gift of the Dwarves from the Dragon-hoard, he lent to a museum, to the Michel Delving Mathom-house in fact. But he kept in a drawer at Bag End the old cloak and hood that he had worn on his travels; and the ring, secured by a fine chain, remained in his pocket.
|
||||
|
||||
He returned to his home at Bag End on June the 22nd in his fifty-second year (S.R. 1342), and nothing very notable occurred in the Shire until Mr. Baggins began the preparations for the celebration of his hundred-and-eleventh birthday (S.R. 1401). At this point this History begins.
|
||||
|
||||
At the end of the Third Age the part played by the Hobbits in the great events that led to the inclusion of the Shire in the Reunited Kingdom awakened among them a more widespread interest in their own history; and many of their traditions, up to that time still mainly oral, were collected and written down. The greater families were also concerned with events in the Kingdom at large, and many of their members studied its ancient histories and legends. By the end of the first century of the Fourth Age there were already to be found in the Shire several libraries that contained many historical books and records.
|
||||
|
||||
The largest of these collections were probably at Undertowers, at Great Smials, and at Brandy Hall. This account of the end of the Third Age is drawn mainly from the Red Book of Westmarch. That most important source for the history of the War of the Ring was so called because it was long preserved at Undertowers, the home of the Fairbairns, Wardens of the Westmarch. It was in origin Bilbo's private diary, which he took with him to Rivendell. Frodo brought it back to the Shire, together with many loose leaves of notes, and during S.R. 1420-1 he nearly filled its pages with his account of the War. But annexed to it and preserved with it, probably in a single red case, were the three large volumes, bound in red leather, that Bilbo gave to him as a parting gift. To these four volumes there was added in Westmarch a fifth containing commentaries, genealogies, and various other matter concerning the hobbit members of the Fellowship.
|
||||
|
||||
The original Red Book has not been preserved, but many copies were made, especially of the first volume, for the use of the descendants of the children of Master Samwise. The most important copy, however, has a different history. It was kept at Great Smials, but it was written in Gondor, probably at the request of the great-grandson of Peregrin, and completed in S.R. 1592 (F.A. 172). Its southern scribe appended this note: Findegil, King's Writer, finished this work in IV 172. It is an exact copy in all details of the Thain's Book in Minas Tirith. That book was a copy, made at the request of King Elessar, of the Red Book of the Periannath, and was brought to him by the Thain Peregrin when he retired to Gondor in IV 64.
|
||||
|
||||
The Thain's Book was thus the first copy made of the Red Book and contained much that was later omitted or lost. In Minas Tirith it received much annotation, and many corrections, especially of names, words, and quotations in the Elvish languages; and there was added to it an abbreviated version of those parts of The Tale of Aragorn and Arwen which lie outside the account of the War. The full tale is stated to have been written by Barahir, grandson of the Steward Faramir, some time after the passing of the King. But the chief importance of Findegil's copy is that it alone contains the whole of Bilbo's 'Translations from the Elvish'. These three volumes were found to be a work of great skill and learning in which, between 1403 and 1418, he had used all the sources available to him in Rivendell, both living and written. But since they were little used by Frodo, being almost entirely concerned with the Elder Days, no more is said of them here.
|
||||
|
||||
Since Meriadoc and Peregrin became the heads of their great families, and at the same time kept up their connexions with Rohan and Gondor, the libraries at Bucklebury and Tuckborough contained much that did not appear in the Red Book. In Brandy Hall there were many works dealing with Eriador and the history of Rohan. Some of these were composed or begun by Meriadoc himself, though in the Shire he was chiefly remembered for his Herblore of the Shire, and for his Reckoning of Years in which he discussed the relation of the calendars of the Shire and Bree to those of Rivendell, Gondor, and Rohan. He also wrote a short treatise on Old Words and Names in the Shire, having special interest in discovering the kinship with the language of the Rohirrim of such 'shire-words' as mathom and old elements in place names.
|
||||
|
||||
At Great Smials the books were of less interest to Shire-folk, though more important for larger history. None of them was written by Peregrin, but he and his successors collected many manuscripts written by scribes of Gondor: mainly copies or summaries of histories or legends relating to Elendil and his heirs. Only here in the Shire were to be found extensive materials for the history of Numenor and the arising of Sauron. It was probably at Great Smials that The Tale of Years was put together, with the assistance of material collected by Meriadoc. Though the dates given are often conjectural, especially for the Second Age, they deserve attention. It is probable that Meriadoc obtained assistance and information from Rivendell, which he visited more than once. There, though Elrond had departed, his sons long remained, together with some of the High-elven folk. It is said that Celeborn went to dwell there after the departure of Galadriel; but there is no record of the day when at last he sought the Grey Havens, and with him went the last living memory of the Elder Days in Middle-earth.
|
||||
21
basharin_sevastyan_lab_7/README.md
Normal file
@@ -0,0 +1,21 @@
|
||||
## Lab 7. Variant 5.
|
||||
### Task
|
||||
Choose a literary text (even variants: a Russian-language text, odd variants: an English-language one) and train a recurrent
|
||||
neural network on it to solve the text-generation task. Select the architecture and parameters so as to get as close as possible
|
||||
to a meaningful result.
|
||||
|
||||
Finally, choose a compromise architecture that copes reasonably well with both kinds of text.
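A minimal sketch of the kind of word-level model this task implies is given below; it assumes the Keras Sequential API used in `main.py`, and the layer sizes are illustrative rather than the exact values from the lab.

```python
# Hedged sketch: word-level LSTM language model; sizes are illustrative, not the lab's exact values.
from keras.models import Sequential
from keras.layers import Embedding, LSTM, Dropout, Dense

def build_model(total_words, max_sequence_length, embed_dim=64, lstm_units=128):
    model = Sequential()
    model.add(Embedding(total_words, embed_dim, input_length=max_sequence_length - 1))
    model.add(LSTM(lstm_units))
    model.add(Dropout(0.2))  # optional regularisation
    model.add(Dense(total_words, activation='softmax'))
    model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
    return model
```

A "compromise" architecture would be found by training this same model on both the Russian and the English corpus and keeping the hyperparameters that behave acceptably on both.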
|
||||
|
||||
### Work progress
|
||||
The prologue of The Lord of the Rings was taken for the English model. Although this model turned out better than the Russian one, the
|
||||
training time came to a little over an hour.
|
||||
|
||||
#### Result (rus)
|
||||
здесь был человек прежде всего всего обманывает самого себя ибо он думает что успешно соврал а люди поняли и из
|
||||
деликатности промолчали промолчали промолчали промолчали промолчали какие его неудачи могут его постигнуть не тому
|
||||
помочь много ли людей не нуждаются в помощи помощи было врать врать врать молчания молчания а внести то
|
||||
|
||||
#### Result (eng)
|
||||
the harfoots were browner of skin smaller and shorter and they were beardless and bootless their hands and feet were
|
||||
neat and nimble and they preferred highlands and hillsides the stoors were broader heavier in build their feet and
|
||||
hands were larger and they preferred flat lands and riversides
|
||||
70
basharin_sevastyan_lab_7/main.py
Normal file
@@ -0,0 +1,70 @@
|
||||
import numpy as np
|
||||
from keras.preprocessing.sequence import pad_sequences
|
||||
from keras.preprocessing.text import Tokenizer
|
||||
from keras.models import Sequential
|
||||
from keras.layers import Embedding, LSTM, Dense
|
||||
from keras.utils import to_categorical
|
||||
|
||||
with open('ru.txt', "r", encoding='utf-8') as file:
|
||||
text = file.read()
|
||||
|
||||
# Text preprocessing (depending on your task)
|
||||
|
||||
# Build the vocabulary mapping words to indices and back
|
||||
tokenizer = Tokenizer()
|
||||
tokenizer.fit_on_texts([text])
|
||||
total_words = len(tokenizer.word_index) + 1
|
||||
|
||||
# Prepare the training data (depending on your task)
|
||||
|
||||
input_sequences = []
|
||||
for line in text.split('\n'):
|
||||
token_list = tokenizer.texts_to_sequences([line])[0]
|
||||
for i in range(1, len(token_list)):
|
||||
n_gram_sequence = token_list[:i+1]
|
||||
input_sequences.append(n_gram_sequence)
|
||||
|
||||
max_sequence_length = max([len(x) for x in input_sequences])
|
||||
input_sequences = pad_sequences(input_sequences, maxlen=max_sequence_length, padding='pre')
|
||||
X, y = input_sequences[:, :-1], input_sequences[:, -1]
|
||||
y = to_categorical(y, num_classes=total_words)
|
||||
|
||||
# Define the model architecture
|
||||
|
||||
model = Sequential()
|
||||
model.add(Embedding(total_words, 50, input_length=max_sequence_length-1))
|
||||
model.add(LSTM(100))
|
||||
model.add(Dense(total_words, activation='softmax'))
|
||||
|
||||
# Compile the model
|
||||
|
||||
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
|
||||
|
||||
# Train the model
|
||||
|
||||
model.fit(X, y, epochs=100, verbose=2)
|
||||
|
||||
# Generate text with the trained model
|
||||
|
||||
def generate_text(seed_text, next_words, model_, max_sequence_length):
|
||||
for _ in range(next_words):
|
||||
token_list = tokenizer.texts_to_sequences([seed_text])[0]
|
||||
token_list = pad_sequences([token_list], maxlen=max_sequence_length - 1, padding='pre')
|
||||
predicted_probs = model_.predict(token_list, verbose=0)[0]  # use the model passed as a parameter, not the global
|
||||
predicted_index = np.argmax(predicted_probs)
|
||||
output_word = ""
|
||||
for word, index in tokenizer.word_index.items():
|
||||
if index == predicted_index:
|
||||
output_word = word
|
||||
break
|
||||
seed_text += " " + output_word
|
||||
|
||||
return seed_text
|
||||
|
||||
|
||||
# Example of text generation (replace seed_text and next_words with your own values)
|
||||
seed_text = "здесь был"
|
||||
next_words = 50
|
||||
generated_text = generate_text(seed_text, next_words, model, max_sequence_length)
|
||||
|
||||
print(generated_text)
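# --- Optional extension (an assumption, not part of the original lab code): temperature sampling. ---
# The greedy argmax in generate_text() tends to repeat the same words (see the repeated words in
# the Russian sample in README.md). A common alternative is to sample from the softmax output with
# a temperature; the helper below is a hedged sketch and its name is illustrative.
def sample_with_temperature(probs, temperature=0.8):
    probs = np.asarray(probs, dtype='float64')
    probs = np.log(probs + 1e-9) / temperature
    probs = np.exp(probs) / np.sum(np.exp(probs))
    return int(np.random.choice(len(probs), p=probs))
# Inside generate_text(), `predicted_index = sample_with_temperature(predicted_probs)` could replace
# `predicted_index = np.argmax(predicted_probs)`.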
|
||||
9
basharin_sevastyan_lab_7/ru.txt
Normal file
@@ -0,0 +1,9 @@
|
||||
Когда человек сознательно или интуитивно выбирает себе в жизни какую-то цель, жизненную задачу, он невольно дает себе оценку. По тому, ради чего человек живет, можно судить и о его самооценке - низкой или высокой.
|
||||
Если человек живет, чтобы приносить людям добро, облегчать их страдания, давать людям радость, то он оценивает себя на уровне этой своей человечности. Он ставит себе цель, достойную человека.
|
||||
Только такая цель позволяет человеку прожить свою жизнь с достоинством и получить настоящую радость. Да, радость! Подумайте: если человек ставит себе задачей увеличивать в жизни добро, приносить людям счастье, какие неудачи могут его постигнуть? Не тому помочь? Но много ли людей не нуждаются в помощи?
|
||||
Если жить только для себя, своими мелкими заботами о собственном благополучии, то от прожитого не останется и следа. Если же жить для других, то другие сберегут то, чему служил, чему отдавал силы.
|
||||
Можно по-разному определять цель своего существования, но цель должна быть. Надо иметь и принципы в жизни. Одно правило в жизни должно быть у каждого человека, в его цели жизни, в его принципах жизни, в его поведении: надо прожить жизнь с достоинством, чтобы не стыдно было вспоминать.
|
||||
Достоинство требует доброты, великодушия, умения не быть эгоистом, быть правдивым, хорошим другом, находить радость в помощи другим.
|
||||
Ради достоинства жизни надо уметь отказываться от мелких удовольствий и немалых тоже… Уметь извиняться, признавать перед другими ошибку - лучше, чем врать.
|
||||
Обманывая, человек прежде всего обманывает самого себя, ибо он думает, что успешно соврал, а люди поняли и из деликатности промолчали.
|
||||
Жизнь - прежде всего творчество, но это не значит, что каждый человек, чтобы жить, должен родиться художником, балериной или ученым. Можно творить просто добрую атмосферу вокруг себя. Человек может принести с собой атмосферу подозрительности, какого-то тягостного молчания, а может внести сразу радость, свет. Вот это и есть творчество.
|
||||
60
degtyarev_mikhail_lab_6/Readme.md
Normal file
@@ -0,0 +1,60 @@
|
||||
# Lab 6
|
||||
## Variant 9
|
||||
|
||||
## Task
|
||||
Use the MLPClassifier neural network on the data from Table 1 for your variant, formulating the task yourself. Interpret the results and assess how well the network suits the task you formulated.
|
||||
|
||||
Task:
|
||||
|
||||
Use MLPClassifier to predict salary based on work experience (experience_level), employment type (employment_type), company location (company_location), and company size (company_size). Evaluate how well the neural network suits this task.
|
||||
## Program description
|
||||
The program is an example of using MLPClassifier to predict salary from several features.
|
||||
### Libraries used
|
||||
- `pandas`: a library for data processing and analysis, used to load and preprocess the data.
|
||||
- `scikit-learn`:
|
||||
  - `train_test_split`: used to split the data into training and test sets.
|
||||
  - `StandardScaler`: used to normalise the numeric features.
|
||||
  - `OneHotEncoder`: used to encode the categorical features.
|
||||
  - `MLPClassifier`: a multilayer perceptron classifier (a neural network).
|
||||
  - `accuracy_score`: used to evaluate classification accuracy.
|
||||
|
||||
### Program steps
|
||||
|
||||
1. **Loading the data:**
|
||||
   - The data is loaded from `ds_salaries.csv` using the pandas library.
|
||||
|
||||
2. **Defining salary categories:**
|
||||
   - Salary categories are created from bins using `pd.cut`.
|
||||
|
||||
3. **Adding the category column:**
|
||||
   - The column with the categories is added to the data.
|
||||
|
||||
4. **Data preprocessing:**
|
||||
   - Categorical features ('experience_level', 'employment_type', 'job_title', 'employee_residence', 'company_location', 'company_size') are processed with OneHotEncoder.
|
||||
   - Numeric features ('work_year', 'remote_ratio') are normalised with StandardScaler.
|
||||
   - These steps are combined in a ColumnTransformer and used as the data preprocessor.
|
||||
|
||||
5. **Feature selection:**
|
||||
   - The features that will be used to train the model are defined.
|
||||
|
||||
6. **Splitting the data:**
|
||||
   - The data is split into training and test sets in an 80/20 ratio using `train_test_split`.
|
||||
|
||||
7. **Training the model:**
|
||||
   - An MLPClassifier is used, combined with the data preprocessor in a Pipeline.
|
||||
|
||||
8. **Evaluating the model:**
|
||||
   - The model's accuracy is computed and printed using the `accuracy_score` metric (a sketch of these steps is given below).
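The steps above correspond roughly to the sketch below; the bin edges, MLP hyperparameters, and random seeds here are illustrative assumptions, and the exact values live in `main.py`.

```python
# Hedged sketch of the pipeline described above (illustrative bins and hyperparameters).
import pandas as pd
from sklearn.compose import ColumnTransformer
from sklearn.preprocessing import OneHotEncoder, StandardScaler
from sklearn.pipeline import Pipeline
from sklearn.neural_network import MLPClassifier
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score

df = pd.read_csv('ds_salaries.csv')
# Salary categories via pd.cut; the bin edges are illustrative.
df['salary_category'] = pd.cut(df['salary_in_usd'],
                               bins=[0, 50_000, 100_000, 150_000, 10_000_000],
                               labels=['low', 'medium', 'high', 'very_high'])

categorical = ['experience_level', 'employment_type', 'job_title',
               'employee_residence', 'company_location', 'company_size']
numeric = ['work_year', 'remote_ratio']

preprocessor = ColumnTransformer([
    ('cat', OneHotEncoder(handle_unknown='ignore'), categorical),
    ('num', StandardScaler(), numeric),
])

clf = Pipeline([
    ('prep', preprocessor),
    ('mlp', MLPClassifier(hidden_layer_sizes=(100,), max_iter=500, random_state=42)),
])

X_train, X_test, y_train, y_test = train_test_split(
    df[categorical + numeric], df['salary_category'], test_size=0.2, random_state=42)

clf.fit(X_train, y_train)
print('accuracy:', accuracy_score(y_test, clf.predict(X_test)))
```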
|
||||
|
||||
### Running the program
|
||||
- Clone or download the `main.py` code.
|
||||
- Run the file in an environment that can execute Python: `python main.py`
|
||||
|
||||
### Results
|
||||
|
||||
- The model is evaluated with the accuracy metric, which can be printed to the console or used for visualisation.
|
||||
|
||||
In this case the accuracy came out to 0.5901639344262295.
|
||||
|
||||
The closer the result is to one, the better; this result of 59% can be considered average.
|
||||
|
||||
608
degtyarev_mikhail_lab_6/ds_salaries.csv
Normal file
@@ -0,0 +1,608 @@
|
||||
,work_year,experience_level,employment_type,job_title,salary,salary_currency,salary_in_usd,employee_residence,remote_ratio,company_location,company_size
|
||||
0,2020,MI,FT,Data Scientist,70000,EUR,79833,DE,0,DE,L
|
||||
1,2020,SE,FT,Machine Learning Scientist,260000,USD,260000,JP,0,JP,S
|
||||
2,2020,SE,FT,Big Data Engineer,85000,GBP,109024,GB,50,GB,M
|
||||
3,2020,MI,FT,Product Data Analyst,20000,USD,20000,HN,0,HN,S
|
||||
4,2020,SE,FT,Machine Learning Engineer,150000,USD,150000,US,50,US,L
|
||||
5,2020,EN,FT,Data Analyst,72000,USD,72000,US,100,US,L
|
||||
6,2020,SE,FT,Lead Data Scientist,190000,USD,190000,US,100,US,S
|
||||
7,2020,MI,FT,Data Scientist,11000000,HUF,35735,HU,50,HU,L
|
||||
8,2020,MI,FT,Business Data Analyst,135000,USD,135000,US,100,US,L
|
||||
9,2020,SE,FT,Lead Data Engineer,125000,USD,125000,NZ,50,NZ,S
|
||||
10,2020,EN,FT,Data Scientist,45000,EUR,51321,FR,0,FR,S
|
||||
11,2020,MI,FT,Data Scientist,3000000,INR,40481,IN,0,IN,L
|
||||
12,2020,EN,FT,Data Scientist,35000,EUR,39916,FR,0,FR,M
|
||||
13,2020,MI,FT,Lead Data Analyst,87000,USD,87000,US,100,US,L
|
||||
14,2020,MI,FT,Data Analyst,85000,USD,85000,US,100,US,L
|
||||
15,2020,MI,FT,Data Analyst,8000,USD,8000,PK,50,PK,L
|
||||
16,2020,EN,FT,Data Engineer,4450000,JPY,41689,JP,100,JP,S
|
||||
17,2020,SE,FT,Big Data Engineer,100000,EUR,114047,PL,100,GB,S
|
||||
18,2020,EN,FT,Data Science Consultant,423000,INR,5707,IN,50,IN,M
|
||||
19,2020,MI,FT,Lead Data Engineer,56000,USD,56000,PT,100,US,M
|
||||
20,2020,MI,FT,Machine Learning Engineer,299000,CNY,43331,CN,0,CN,M
|
||||
21,2020,MI,FT,Product Data Analyst,450000,INR,6072,IN,100,IN,L
|
||||
22,2020,SE,FT,Data Engineer,42000,EUR,47899,GR,50,GR,L
|
||||
23,2020,MI,FT,BI Data Analyst,98000,USD,98000,US,0,US,M
|
||||
24,2020,MI,FT,Lead Data Scientist,115000,USD,115000,AE,0,AE,L
|
||||
25,2020,EX,FT,Director of Data Science,325000,USD,325000,US,100,US,L
|
||||
26,2020,EN,FT,Research Scientist,42000,USD,42000,NL,50,NL,L
|
||||
27,2020,SE,FT,Data Engineer,720000,MXN,33511,MX,0,MX,S
|
||||
28,2020,EN,CT,Business Data Analyst,100000,USD,100000,US,100,US,L
|
||||
29,2020,SE,FT,Machine Learning Manager,157000,CAD,117104,CA,50,CA,L
|
||||
30,2020,MI,FT,Data Engineering Manager,51999,EUR,59303,DE,100,DE,S
|
||||
31,2020,EN,FT,Big Data Engineer,70000,USD,70000,US,100,US,L
|
||||
32,2020,SE,FT,Data Scientist,60000,EUR,68428,GR,100,US,L
|
||||
33,2020,MI,FT,Research Scientist,450000,USD,450000,US,0,US,M
|
||||
34,2020,MI,FT,Data Analyst,41000,EUR,46759,FR,50,FR,L
|
||||
35,2020,MI,FT,Data Engineer,65000,EUR,74130,AT,50,AT,L
|
||||
36,2020,MI,FT,Data Science Consultant,103000,USD,103000,US,100,US,L
|
||||
37,2020,EN,FT,Machine Learning Engineer,250000,USD,250000,US,50,US,L
|
||||
38,2020,EN,FT,Data Analyst,10000,USD,10000,NG,100,NG,S
|
||||
39,2020,EN,FT,Machine Learning Engineer,138000,USD,138000,US,100,US,S
|
||||
40,2020,MI,FT,Data Scientist,45760,USD,45760,PH,100,US,S
|
||||
41,2020,EX,FT,Data Engineering Manager,70000,EUR,79833,ES,50,ES,L
|
||||
42,2020,MI,FT,Machine Learning Infrastructure Engineer,44000,EUR,50180,PT,0,PT,M
|
||||
43,2020,MI,FT,Data Engineer,106000,USD,106000,US,100,US,L
|
||||
44,2020,MI,FT,Data Engineer,88000,GBP,112872,GB,50,GB,L
|
||||
45,2020,EN,PT,ML Engineer,14000,EUR,15966,DE,100,DE,S
|
||||
46,2020,MI,FT,Data Scientist,60000,GBP,76958,GB,100,GB,S
|
||||
47,2020,SE,FT,Data Engineer,188000,USD,188000,US,100,US,L
|
||||
48,2020,MI,FT,Data Scientist,105000,USD,105000,US,100,US,L
|
||||
49,2020,MI,FT,Data Engineer,61500,EUR,70139,FR,50,FR,L
|
||||
50,2020,EN,FT,Data Analyst,450000,INR,6072,IN,0,IN,S
|
||||
51,2020,EN,FT,Data Analyst,91000,USD,91000,US,100,US,L
|
||||
52,2020,EN,FT,AI Scientist,300000,DKK,45896,DK,50,DK,S
|
||||
53,2020,EN,FT,Data Engineer,48000,EUR,54742,PK,100,DE,L
|
||||
54,2020,SE,FL,Computer Vision Engineer,60000,USD,60000,RU,100,US,S
|
||||
55,2020,SE,FT,Principal Data Scientist,130000,EUR,148261,DE,100,DE,M
|
||||
56,2020,MI,FT,Data Scientist,34000,EUR,38776,ES,100,ES,M
|
||||
57,2020,MI,FT,Data Scientist,118000,USD,118000,US,100,US,M
|
||||
58,2020,SE,FT,Data Scientist,120000,USD,120000,US,50,US,L
|
||||
59,2020,MI,FT,Data Scientist,138350,USD,138350,US,100,US,M
|
||||
60,2020,MI,FT,Data Engineer,110000,USD,110000,US,100,US,L
|
||||
61,2020,MI,FT,Data Engineer,130800,USD,130800,ES,100,US,M
|
||||
62,2020,EN,PT,Data Scientist,19000,EUR,21669,IT,50,IT,S
|
||||
63,2020,SE,FT,Data Scientist,412000,USD,412000,US,100,US,L
|
||||
64,2020,SE,FT,Machine Learning Engineer,40000,EUR,45618,HR,100,HR,S
|
||||
65,2020,EN,FT,Data Scientist,55000,EUR,62726,DE,50,DE,S
|
||||
66,2020,EN,FT,Data Scientist,43200,EUR,49268,DE,0,DE,S
|
||||
67,2020,SE,FT,Data Science Manager,190200,USD,190200,US,100,US,M
|
||||
68,2020,EN,FT,Data Scientist,105000,USD,105000,US,100,US,S
|
||||
69,2020,SE,FT,Data Scientist,80000,EUR,91237,AT,0,AT,S
|
||||
70,2020,MI,FT,Data Scientist,55000,EUR,62726,FR,50,LU,S
|
||||
71,2020,MI,FT,Data Scientist,37000,EUR,42197,FR,50,FR,S
|
||||
72,2021,EN,FT,Research Scientist,60000,GBP,82528,GB,50,GB,L
|
||||
73,2021,EX,FT,BI Data Analyst,150000,USD,150000,IN,100,US,L
|
||||
74,2021,EX,FT,Head of Data,235000,USD,235000,US,100,US,L
|
||||
75,2021,SE,FT,Data Scientist,45000,EUR,53192,FR,50,FR,L
|
||||
76,2021,MI,FT,BI Data Analyst,100000,USD,100000,US,100,US,M
|
||||
77,2021,MI,PT,3D Computer Vision Researcher,400000,INR,5409,IN,50,IN,M
|
||||
78,2021,MI,CT,ML Engineer,270000,USD,270000,US,100,US,L
|
||||
79,2021,EN,FT,Data Analyst,80000,USD,80000,US,100,US,M
|
||||
80,2021,SE,FT,Data Analytics Engineer,67000,EUR,79197,DE,100,DE,L
|
||||
81,2021,MI,FT,Data Engineer,140000,USD,140000,US,100,US,L
|
||||
82,2021,MI,FT,Applied Data Scientist,68000,CAD,54238,GB,50,CA,L
|
||||
83,2021,MI,FT,Machine Learning Engineer,40000,EUR,47282,ES,100,ES,S
|
||||
84,2021,EX,FT,Director of Data Science,130000,EUR,153667,IT,100,PL,L
|
||||
85,2021,MI,FT,Data Engineer,110000,PLN,28476,PL,100,PL,L
|
||||
86,2021,EN,FT,Data Analyst,50000,EUR,59102,FR,50,FR,M
|
||||
87,2021,MI,FT,Data Analytics Engineer,110000,USD,110000,US,100,US,L
|
||||
88,2021,SE,FT,Lead Data Analyst,170000,USD,170000,US,100,US,L
|
||||
89,2021,SE,FT,Data Analyst,80000,USD,80000,BG,100,US,S
|
||||
90,2021,SE,FT,Marketing Data Analyst,75000,EUR,88654,GR,100,DK,L
|
||||
91,2021,EN,FT,Data Science Consultant,65000,EUR,76833,DE,100,DE,S
|
||||
92,2021,MI,FT,Lead Data Analyst,1450000,INR,19609,IN,100,IN,L
|
||||
93,2021,SE,FT,Lead Data Engineer,276000,USD,276000,US,0,US,L
|
||||
94,2021,EN,FT,Data Scientist,2200000,INR,29751,IN,50,IN,L
|
||||
95,2021,MI,FT,Cloud Data Engineer,120000,SGD,89294,SG,50,SG,L
|
||||
96,2021,EN,PT,AI Scientist,12000,USD,12000,BR,100,US,S
|
||||
97,2021,MI,FT,Financial Data Analyst,450000,USD,450000,US,100,US,L
|
||||
98,2021,EN,FT,Computer Vision Software Engineer,70000,USD,70000,US,100,US,M
|
||||
99,2021,MI,FT,Computer Vision Software Engineer,81000,EUR,95746,DE,100,US,S
|
||||
100,2021,MI,FT,Data Analyst,75000,USD,75000,US,0,US,L
|
||||
101,2021,SE,FT,Data Engineer,150000,USD,150000,US,100,US,L
|
||||
102,2021,MI,FT,BI Data Analyst,11000000,HUF,36259,HU,50,US,L
|
||||
103,2021,MI,FT,Data Analyst,62000,USD,62000,US,0,US,L
|
||||
104,2021,MI,FT,Data Scientist,73000,USD,73000,US,0,US,L
|
||||
105,2021,MI,FT,Data Analyst,37456,GBP,51519,GB,50,GB,L
|
||||
106,2021,MI,FT,Research Scientist,235000,CAD,187442,CA,100,CA,L
|
||||
107,2021,SE,FT,Data Engineer,115000,USD,115000,US,100,US,S
|
||||
108,2021,SE,FT,Data Engineer,150000,USD,150000,US,100,US,M
|
||||
109,2021,EN,FT,Data Engineer,2250000,INR,30428,IN,100,IN,L
|
||||
110,2021,SE,FT,Machine Learning Engineer,80000,EUR,94564,DE,50,DE,L
|
||||
111,2021,SE,FT,Director of Data Engineering,82500,GBP,113476,GB,100,GB,M
|
||||
112,2021,SE,FT,Lead Data Engineer,75000,GBP,103160,GB,100,GB,S
|
||||
113,2021,EN,PT,AI Scientist,12000,USD,12000,PK,100,US,M
|
||||
114,2021,MI,FT,Data Engineer,38400,EUR,45391,NL,100,NL,L
|
||||
115,2021,EN,FT,Machine Learning Scientist,225000,USD,225000,US,100,US,L
|
||||
116,2021,MI,FT,Data Scientist,50000,USD,50000,NG,100,NG,L
|
||||
117,2021,MI,FT,Data Science Engineer,34000,EUR,40189,GR,100,GR,M
|
||||
118,2021,EN,FT,Data Analyst,90000,USD,90000,US,100,US,S
|
||||
119,2021,MI,FT,Data Engineer,200000,USD,200000,US,100,US,L
|
||||
120,2021,MI,FT,Big Data Engineer,60000,USD,60000,ES,50,RO,M
|
||||
121,2021,SE,FT,Principal Data Engineer,200000,USD,200000,US,100,US,M
|
||||
122,2021,EN,FT,Data Analyst,50000,USD,50000,US,100,US,M
|
||||
123,2021,EN,FT,Applied Data Scientist,80000,GBP,110037,GB,0,GB,L
|
||||
124,2021,EN,PT,Data Analyst,8760,EUR,10354,ES,50,ES,M
|
||||
125,2021,MI,FT,Principal Data Scientist,151000,USD,151000,US,100,US,L
|
||||
126,2021,SE,FT,Machine Learning Scientist,120000,USD,120000,US,50,US,S
|
||||
127,2021,MI,FT,Data Scientist,700000,INR,9466,IN,0,IN,S
|
||||
128,2021,EN,FT,Machine Learning Engineer,20000,USD,20000,IN,100,IN,S
|
||||
129,2021,SE,FT,Lead Data Scientist,3000000,INR,40570,IN,50,IN,L
|
||||
130,2021,EN,FT,Machine Learning Developer,100000,USD,100000,IQ,50,IQ,S
|
||||
131,2021,EN,FT,Data Scientist,42000,EUR,49646,FR,50,FR,M
|
||||
132,2021,MI,FT,Applied Machine Learning Scientist,38400,USD,38400,VN,100,US,M
|
||||
133,2021,SE,FT,Computer Vision Engineer,24000,USD,24000,BR,100,BR,M
|
||||
134,2021,EN,FT,Data Scientist,100000,USD,100000,US,0,US,S
|
||||
135,2021,MI,FT,Data Analyst,90000,USD,90000,US,100,US,M
|
||||
136,2021,MI,FT,ML Engineer,7000000,JPY,63711,JP,50,JP,S
|
||||
137,2021,MI,FT,ML Engineer,8500000,JPY,77364,JP,50,JP,S
|
||||
138,2021,SE,FT,Principal Data Scientist,220000,USD,220000,US,0,US,L
|
||||
139,2021,EN,FT,Data Scientist,80000,USD,80000,US,100,US,M
|
||||
140,2021,MI,FT,Data Analyst,135000,USD,135000,US,100,US,L
|
||||
141,2021,SE,FT,Data Science Manager,240000,USD,240000,US,0,US,L
|
||||
142,2021,SE,FT,Data Engineering Manager,150000,USD,150000,US,0,US,L
|
||||
143,2021,MI,FT,Data Scientist,82500,USD,82500,US,100,US,S
|
||||
144,2021,MI,FT,Data Engineer,100000,USD,100000,US,100,US,L
|
||||
145,2021,SE,FT,Machine Learning Engineer,70000,EUR,82744,BE,50,BE,M
|
||||
146,2021,MI,FT,Research Scientist,53000,EUR,62649,FR,50,FR,M
|
||||
147,2021,MI,FT,Data Engineer,90000,USD,90000,US,100,US,L
|
||||
148,2021,SE,FT,Data Engineering Manager,153000,USD,153000,US,100,US,L
|
||||
149,2021,SE,FT,Cloud Data Engineer,160000,USD,160000,BR,100,US,S
|
||||
150,2021,SE,FT,Director of Data Science,168000,USD,168000,JP,0,JP,S
|
||||
151,2021,MI,FT,Data Scientist,150000,USD,150000,US,100,US,M
|
||||
152,2021,MI,FT,Data Scientist,95000,CAD,75774,CA,100,CA,L
|
||||
153,2021,EN,FT,Data Scientist,13400,USD,13400,UA,100,UA,L
|
||||
154,2021,SE,FT,Data Science Manager,144000,USD,144000,US,100,US,L
|
||||
155,2021,SE,FT,Data Science Engineer,159500,CAD,127221,CA,50,CA,L
|
||||
156,2021,MI,FT,Data Scientist,160000,SGD,119059,SG,100,IL,M
|
||||
157,2021,MI,FT,Applied Machine Learning Scientist,423000,USD,423000,US,50,US,L
|
||||
158,2021,SE,FT,Data Analytics Manager,120000,USD,120000,US,100,US,M
|
||||
159,2021,EN,FT,Machine Learning Engineer,125000,USD,125000,US,100,US,S
|
||||
160,2021,EX,FT,Head of Data,230000,USD,230000,RU,50,RU,L
|
||||
161,2021,EX,FT,Head of Data Science,85000,USD,85000,RU,0,RU,M
|
||||
162,2021,MI,FT,Data Engineer,24000,EUR,28369,MT,50,MT,L
|
||||
163,2021,EN,FT,Data Science Consultant,54000,EUR,63831,DE,50,DE,L
|
||||
164,2021,EX,FT,Director of Data Science,110000,EUR,130026,DE,50,DE,M
|
||||
165,2021,SE,FT,Data Specialist,165000,USD,165000,US,100,US,L
|
||||
166,2021,EN,FT,Data Engineer,80000,USD,80000,US,100,US,L
|
||||
167,2021,EX,FT,Director of Data Science,250000,USD,250000,US,0,US,L
|
||||
168,2021,EN,FT,BI Data Analyst,55000,USD,55000,US,50,US,S
|
||||
169,2021,MI,FT,Data Architect,150000,USD,150000,US,100,US,L
|
||||
170,2021,MI,FT,Data Architect,170000,USD,170000,US,100,US,L
|
||||
171,2021,MI,FT,Data Engineer,60000,GBP,82528,GB,100,GB,L
|
||||
172,2021,EN,FT,Data Analyst,60000,USD,60000,US,100,US,S
|
||||
173,2021,SE,FT,Principal Data Scientist,235000,USD,235000,US,100,US,L
|
||||
174,2021,SE,FT,Research Scientist,51400,EUR,60757,PT,50,PT,L
|
||||
175,2021,SE,FT,Data Engineering Manager,174000,USD,174000,US,100,US,L
|
||||
176,2021,MI,FT,Data Scientist,58000,MXN,2859,MX,0,MX,S
|
||||
177,2021,MI,FT,Data Scientist,30400000,CLP,40038,CL,100,CL,L
|
||||
178,2021,EN,FT,Machine Learning Engineer,81000,USD,81000,US,50,US,S
|
||||
179,2021,MI,FT,Data Scientist,420000,INR,5679,IN,100,US,S
|
||||
180,2021,MI,FT,Big Data Engineer,1672000,INR,22611,IN,0,IN,L
|
||||
181,2021,MI,FT,Data Scientist,76760,EUR,90734,DE,50,DE,L
|
||||
182,2021,MI,FT,Data Engineer,22000,EUR,26005,RO,0,US,L
|
||||
183,2021,SE,FT,Finance Data Analyst,45000,GBP,61896,GB,50,GB,L
|
||||
184,2021,MI,FL,Machine Learning Scientist,12000,USD,12000,PK,50,PK,M
|
||||
185,2021,MI,FT,Data Engineer,4000,USD,4000,IR,100,IR,M
|
||||
186,2021,SE,FT,Data Analytics Engineer,50000,USD,50000,VN,100,GB,M
|
||||
187,2021,EX,FT,Data Science Consultant,59000,EUR,69741,FR,100,ES,S
|
||||
188,2021,SE,FT,Data Engineer,65000,EUR,76833,RO,50,GB,S
|
||||
189,2021,MI,FT,Machine Learning Engineer,74000,USD,74000,JP,50,JP,S
|
||||
190,2021,SE,FT,Data Science Manager,152000,USD,152000,US,100,FR,L
|
||||
191,2021,EN,FT,Machine Learning Engineer,21844,USD,21844,CO,50,CO,M
|
||||
192,2021,MI,FT,Big Data Engineer,18000,USD,18000,MD,0,MD,S
|
||||
193,2021,SE,FT,Data Science Manager,174000,USD,174000,US,100,US,L
|
||||
194,2021,SE,FT,Research Scientist,120500,CAD,96113,CA,50,CA,L
|
||||
195,2021,MI,FT,Data Scientist,147000,USD,147000,US,50,US,L
|
||||
196,2021,EN,FT,BI Data Analyst,9272,USD,9272,KE,100,KE,S
|
||||
197,2021,SE,FT,Machine Learning Engineer,1799997,INR,24342,IN,100,IN,L
|
||||
198,2021,SE,FT,Data Science Manager,4000000,INR,54094,IN,50,US,L
|
||||
199,2021,EN,FT,Data Science Consultant,90000,USD,90000,US,100,US,S
|
||||
200,2021,MI,FT,Data Scientist,52000,EUR,61467,DE,50,AT,M
|
||||
201,2021,SE,FT,Machine Learning Infrastructure Engineer,195000,USD,195000,US,100,US,M
|
||||
202,2021,MI,FT,Data Scientist,32000,EUR,37825,ES,100,ES,L
|
||||
203,2021,SE,FT,Research Scientist,50000,USD,50000,FR,100,US,S
|
||||
204,2021,MI,FT,Data Scientist,160000,USD,160000,US,100,US,L
|
||||
205,2021,MI,FT,Data Scientist,69600,BRL,12901,BR,0,BR,S
|
||||
206,2021,SE,FT,Machine Learning Engineer,200000,USD,200000,US,100,US,L
|
||||
207,2021,SE,FT,Data Engineer,165000,USD,165000,US,0,US,M
|
||||
208,2021,MI,FL,Data Engineer,20000,USD,20000,IT,0,US,L
|
||||
209,2021,SE,FT,Data Analytics Manager,120000,USD,120000,US,0,US,L
|
||||
210,2021,MI,FT,Machine Learning Engineer,21000,EUR,24823,SI,50,SI,L
|
||||
211,2021,MI,FT,Research Scientist,48000,EUR,56738,FR,50,FR,S
|
||||
212,2021,MI,FT,Data Engineer,48000,GBP,66022,HK,50,GB,S
|
||||
213,2021,EN,FT,Big Data Engineer,435000,INR,5882,IN,0,CH,L
|
||||
214,2021,EN,FT,Machine Learning Engineer,21000,EUR,24823,DE,50,DE,M
|
||||
215,2021,SE,FT,Principal Data Engineer,185000,USD,185000,US,100,US,L
|
||||
216,2021,EN,PT,Computer Vision Engineer,180000,DKK,28609,DK,50,DK,S
|
||||
217,2021,MI,FT,Data Scientist,76760,EUR,90734,DE,50,DE,L
|
||||
218,2021,MI,FT,Machine Learning Engineer,75000,EUR,88654,BE,100,BE,M
|
||||
219,2021,SE,FT,Data Analytics Manager,140000,USD,140000,US,100,US,L
|
||||
220,2021,MI,FT,Machine Learning Engineer,180000,PLN,46597,PL,100,PL,L
|
||||
221,2021,MI,FT,Data Scientist,85000,GBP,116914,GB,50,GB,L
|
||||
222,2021,MI,FT,Data Scientist,2500000,INR,33808,IN,0,IN,M
|
||||
223,2021,MI,FT,Data Scientist,40900,GBP,56256,GB,50,GB,L
|
||||
224,2021,SE,FT,Machine Learning Scientist,225000,USD,225000,US,100,CA,L
|
||||
225,2021,EX,CT,Principal Data Scientist,416000,USD,416000,US,100,US,S
|
||||
226,2021,SE,FT,Data Scientist,110000,CAD,87738,CA,100,CA,S
|
||||
227,2021,MI,FT,Data Scientist,75000,EUR,88654,DE,50,DE,L
|
||||
228,2021,SE,FT,Data Scientist,135000,USD,135000,US,0,US,L
|
||||
229,2021,SE,FT,Data Analyst,90000,CAD,71786,CA,100,CA,M
|
||||
230,2021,EN,FT,Big Data Engineer,1200000,INR,16228,IN,100,IN,L
|
||||
231,2021,SE,FT,ML Engineer,256000,USD,256000,US,100,US,S
|
||||
232,2021,SE,FT,Director of Data Engineering,200000,USD,200000,US,100,US,L
|
||||
233,2021,SE,FT,Data Analyst,200000,USD,200000,US,100,US,L
|
||||
234,2021,MI,FT,Data Architect,180000,USD,180000,US,100,US,L
|
||||
235,2021,MI,FT,Head of Data Science,110000,USD,110000,US,0,US,S
|
||||
236,2021,MI,FT,Research Scientist,80000,CAD,63810,CA,100,CA,M
|
||||
237,2021,MI,FT,Data Scientist,39600,EUR,46809,ES,100,ES,M
|
||||
238,2021,EN,FT,Data Scientist,4000,USD,4000,VN,0,VN,M
|
||||
239,2021,EN,FT,Data Engineer,1600000,INR,21637,IN,50,IN,M
|
||||
240,2021,SE,FT,Data Scientist,130000,CAD,103691,CA,100,CA,L
|
||||
241,2021,MI,FT,Data Analyst,80000,USD,80000,US,100,US,L
|
||||
242,2021,MI,FT,Data Engineer,110000,USD,110000,US,100,US,L
|
||||
243,2021,SE,FT,Data Scientist,165000,USD,165000,US,100,US,L
|
||||
244,2021,EN,FT,AI Scientist,1335000,INR,18053,IN,100,AS,S
|
||||
245,2021,MI,FT,Data Engineer,52500,GBP,72212,GB,50,GB,L
|
||||
246,2021,EN,FT,Data Scientist,31000,EUR,36643,FR,50,FR,L
|
||||
247,2021,MI,FT,Data Engineer,108000,TRY,12103,TR,0,TR,M
|
||||
248,2021,SE,FT,Data Engineer,70000,GBP,96282,GB,50,GB,L
|
||||
249,2021,SE,FT,Principal Data Analyst,170000,USD,170000,US,100,US,M
|
||||
250,2021,MI,FT,Data Scientist,115000,USD,115000,US,50,US,L
|
||||
251,2021,EN,FT,Data Scientist,90000,USD,90000,US,100,US,S
|
||||
252,2021,EX,FT,Principal Data Engineer,600000,USD,600000,US,100,US,L
|
||||
253,2021,EN,FT,Data Scientist,2100000,INR,28399,IN,100,IN,M
|
||||
254,2021,MI,FT,Data Analyst,93000,USD,93000,US,100,US,L
|
||||
255,2021,SE,FT,Big Data Architect,125000,CAD,99703,CA,50,CA,M
|
||||
256,2021,MI,FT,Data Engineer,200000,USD,200000,US,100,US,L
|
||||
257,2021,SE,FT,Principal Data Scientist,147000,EUR,173762,DE,100,DE,M
|
||||
258,2021,SE,FT,Machine Learning Engineer,185000,USD,185000,US,50,US,L
|
||||
259,2021,EX,FT,Director of Data Science,120000,EUR,141846,DE,0,DE,L
|
||||
260,2021,MI,FT,Data Scientist,130000,USD,130000,US,50,US,L
|
||||
261,2021,SE,FT,Data Analyst,54000,EUR,63831,DE,50,DE,L
|
||||
262,2021,MI,FT,Data Scientist,1250000,INR,16904,IN,100,IN,S
|
||||
263,2021,SE,FT,Machine Learning Engineer,4900000,INR,66265,IN,0,IN,L
|
||||
264,2021,MI,FT,Data Scientist,21600,EUR,25532,RS,100,DE,S
|
||||
265,2021,SE,FT,Lead Data Engineer,160000,USD,160000,PR,50,US,S
|
||||
266,2021,MI,FT,Data Engineer,93150,USD,93150,US,0,US,M
|
||||
267,2021,MI,FT,Data Engineer,111775,USD,111775,US,0,US,M
|
||||
268,2021,MI,FT,Data Engineer,250000,TRY,28016,TR,100,TR,M
|
||||
269,2021,EN,FT,Data Engineer,55000,EUR,65013,DE,50,DE,M
|
||||
270,2021,EN,FT,Data Engineer,72500,USD,72500,US,100,US,L
|
||||
271,2021,SE,FT,Computer Vision Engineer,102000,BRL,18907,BR,0,BR,M
|
||||
272,2021,EN,FT,Data Science Consultant,65000,EUR,76833,DE,0,DE,L
|
||||
273,2021,EN,FT,Machine Learning Engineer,85000,USD,85000,NL,100,DE,S
|
||||
274,2021,SE,FT,Data Scientist,65720,EUR,77684,FR,50,FR,M
|
||||
275,2021,EN,FT,Data Scientist,100000,USD,100000,US,100,US,M
|
||||
276,2021,EN,FT,Data Scientist,58000,USD,58000,US,50,US,L
|
||||
277,2021,SE,FT,AI Scientist,55000,USD,55000,ES,100,ES,L
|
||||
278,2021,SE,FT,Data Scientist,180000,TRY,20171,TR,50,TR,L
|
||||
279,2021,EN,FT,Business Data Analyst,50000,EUR,59102,LU,100,LU,L
|
||||
280,2021,MI,FT,Data Engineer,112000,USD,112000,US,100,US,L
|
||||
281,2021,EN,FT,Research Scientist,100000,USD,100000,JE,0,CN,L
|
||||
282,2021,MI,PT,Data Engineer,59000,EUR,69741,NL,100,NL,L
|
||||
283,2021,SE,CT,Staff Data Scientist,105000,USD,105000,US,100,US,M
|
||||
284,2021,MI,FT,Research Scientist,69999,USD,69999,CZ,50,CZ,L
|
||||
285,2021,SE,FT,Data Science Manager,7000000,INR,94665,IN,50,IN,L
|
||||
286,2021,SE,FT,Head of Data,87000,EUR,102839,SI,100,SI,L
|
||||
287,2021,MI,FT,Data Scientist,109000,USD,109000,US,50,US,L
|
||||
288,2021,MI,FT,Machine Learning Engineer,43200,EUR,51064,IT,50,IT,L
|
||||
289,2022,SE,FT,Data Engineer,135000,USD,135000,US,100,US,M
|
||||
290,2022,SE,FT,Data Analyst,155000,USD,155000,US,100,US,M
|
||||
291,2022,SE,FT,Data Analyst,120600,USD,120600,US,100,US,M
|
||||
292,2022,MI,FT,Data Scientist,130000,USD,130000,US,0,US,M
|
||||
293,2022,MI,FT,Data Scientist,90000,USD,90000,US,0,US,M
|
||||
294,2022,MI,FT,Data Engineer,170000,USD,170000,US,100,US,M
|
||||
295,2022,MI,FT,Data Engineer,150000,USD,150000,US,100,US,M
|
||||
296,2022,SE,FT,Data Analyst,102100,USD,102100,US,100,US,M
|
||||
297,2022,SE,FT,Data Analyst,84900,USD,84900,US,100,US,M
|
||||
298,2022,SE,FT,Data Scientist,136620,USD,136620,US,100,US,M
|
||||
299,2022,SE,FT,Data Scientist,99360,USD,99360,US,100,US,M
|
||||
300,2022,SE,FT,Data Scientist,90000,GBP,117789,GB,0,GB,M
|
||||
301,2022,SE,FT,Data Scientist,80000,GBP,104702,GB,0,GB,M
|
||||
302,2022,SE,FT,Data Scientist,146000,USD,146000,US,100,US,M
|
||||
303,2022,SE,FT,Data Scientist,123000,USD,123000,US,100,US,M
|
||||
304,2022,EN,FT,Data Engineer,40000,GBP,52351,GB,100,GB,M
|
||||
305,2022,SE,FT,Data Analyst,99000,USD,99000,US,0,US,M
|
||||
306,2022,SE,FT,Data Analyst,116000,USD,116000,US,0,US,M
|
||||
307,2022,MI,FT,Data Analyst,106260,USD,106260,US,0,US,M
|
||||
308,2022,MI,FT,Data Analyst,126500,USD,126500,US,0,US,M
|
||||
309,2022,EX,FT,Data Engineer,242000,USD,242000,US,100,US,M
|
||||
310,2022,EX,FT,Data Engineer,200000,USD,200000,US,100,US,M
|
||||
311,2022,MI,FT,Data Scientist,50000,GBP,65438,GB,0,GB,M
|
||||
312,2022,MI,FT,Data Scientist,30000,GBP,39263,GB,0,GB,M
|
||||
313,2022,MI,FT,Data Engineer,60000,GBP,78526,GB,0,GB,M
|
||||
314,2022,MI,FT,Data Engineer,40000,GBP,52351,GB,0,GB,M
|
||||
315,2022,SE,FT,Data Scientist,165220,USD,165220,US,100,US,M
|
||||
316,2022,EN,FT,Data Engineer,35000,GBP,45807,GB,100,GB,M
|
||||
317,2022,SE,FT,Data Scientist,120160,USD,120160,US,100,US,M
|
||||
318,2022,SE,FT,Data Analyst,90320,USD,90320,US,100,US,M
|
||||
319,2022,SE,FT,Data Engineer,181940,USD,181940,US,0,US,M
|
||||
320,2022,SE,FT,Data Engineer,132320,USD,132320,US,0,US,M
|
||||
321,2022,SE,FT,Data Engineer,220110,USD,220110,US,0,US,M
|
||||
322,2022,SE,FT,Data Engineer,160080,USD,160080,US,0,US,M
|
||||
323,2022,SE,FT,Data Scientist,180000,USD,180000,US,0,US,L
|
||||
324,2022,SE,FT,Data Scientist,120000,USD,120000,US,0,US,L
|
||||
325,2022,SE,FT,Data Analyst,124190,USD,124190,US,100,US,M
|
||||
326,2022,EX,FT,Data Analyst,130000,USD,130000,US,100,US,M
|
||||
327,2022,EX,FT,Data Analyst,110000,USD,110000,US,100,US,M
|
||||
328,2022,SE,FT,Data Analyst,170000,USD,170000,US,100,US,M
|
||||
329,2022,MI,FT,Data Analyst,115500,USD,115500,US,100,US,M
|
||||
330,2022,SE,FT,Data Analyst,112900,USD,112900,US,100,US,M
|
||||
331,2022,SE,FT,Data Analyst,90320,USD,90320,US,100,US,M
|
||||
332,2022,SE,FT,Data Analyst,112900,USD,112900,US,100,US,M
|
||||
333,2022,SE,FT,Data Analyst,90320,USD,90320,US,100,US,M
|
||||
334,2022,SE,FT,Data Engineer,165400,USD,165400,US,100,US,M
|
||||
335,2022,SE,FT,Data Engineer,132320,USD,132320,US,100,US,M
|
||||
336,2022,MI,FT,Data Analyst,167000,USD,167000,US,100,US,M
|
||||
337,2022,SE,FT,Data Engineer,243900,USD,243900,US,100,US,M
|
||||
338,2022,SE,FT,Data Analyst,136600,USD,136600,US,100,US,M
|
||||
339,2022,SE,FT,Data Analyst,109280,USD,109280,US,100,US,M
|
||||
340,2022,SE,FT,Data Engineer,128875,USD,128875,US,100,US,M
|
||||
341,2022,SE,FT,Data Engineer,93700,USD,93700,US,100,US,M
|
||||
342,2022,EX,FT,Head of Data Science,224000,USD,224000,US,100,US,M
|
||||
343,2022,EX,FT,Head of Data Science,167875,USD,167875,US,100,US,M
|
||||
344,2022,EX,FT,Analytics Engineer,175000,USD,175000,US,100,US,M
|
||||
345,2022,SE,FT,Data Engineer,156600,USD,156600,US,100,US,M
|
||||
346,2022,SE,FT,Data Engineer,108800,USD,108800,US,0,US,M
|
||||
347,2022,SE,FT,Data Scientist,95550,USD,95550,US,0,US,M
|
||||
348,2022,SE,FT,Data Engineer,113000,USD,113000,US,0,US,L
|
||||
349,2022,SE,FT,Data Analyst,135000,USD,135000,US,100,US,M
|
||||
350,2022,SE,FT,Data Science Manager,161342,USD,161342,US,100,US,M
|
||||
351,2022,SE,FT,Data Science Manager,137141,USD,137141,US,100,US,M
|
||||
352,2022,SE,FT,Data Scientist,167000,USD,167000,US,100,US,M
|
||||
353,2022,SE,FT,Data Scientist,123000,USD,123000,US,100,US,M
|
||||
354,2022,SE,FT,Data Engineer,60000,GBP,78526,GB,0,GB,M
|
||||
355,2022,SE,FT,Data Engineer,50000,GBP,65438,GB,0,GB,M
|
||||
356,2022,SE,FT,Data Scientist,150000,USD,150000,US,0,US,M
|
||||
357,2022,SE,FT,Data Scientist,211500,USD,211500,US,100,US,M
|
||||
358,2022,SE,FT,Data Architect,192400,USD,192400,CA,100,CA,M
|
||||
359,2022,SE,FT,Data Architect,90700,USD,90700,CA,100,CA,M
|
||||
360,2022,SE,FT,Data Analyst,130000,USD,130000,CA,100,CA,M
|
||||
361,2022,SE,FT,Data Analyst,61300,USD,61300,CA,100,CA,M
|
||||
362,2022,SE,FT,Data Analyst,130000,USD,130000,CA,100,CA,M
|
||||
363,2022,SE,FT,Data Analyst,61300,USD,61300,CA,100,CA,M
|
||||
364,2022,SE,FT,Data Engineer,160000,USD,160000,US,0,US,L
|
||||
365,2022,SE,FT,Data Scientist,138600,USD,138600,US,100,US,M
|
||||
366,2022,SE,FT,Data Engineer,136000,USD,136000,US,0,US,M
|
||||
367,2022,MI,FT,Data Analyst,58000,USD,58000,US,0,US,S
|
||||
368,2022,EX,FT,Analytics Engineer,135000,USD,135000,US,100,US,M
|
||||
369,2022,SE,FT,Data Scientist,170000,USD,170000,US,100,US,M
|
||||
370,2022,SE,FT,Data Scientist,123000,USD,123000,US,100,US,M
|
||||
371,2022,SE,FT,Machine Learning Engineer,189650,USD,189650,US,0,US,M
|
||||
372,2022,SE,FT,Machine Learning Engineer,164996,USD,164996,US,0,US,M
|
||||
373,2022,MI,FT,ETL Developer,50000,EUR,54957,GR,0,GR,M
|
||||
374,2022,MI,FT,ETL Developer,50000,EUR,54957,GR,0,GR,M
|
||||
375,2022,EX,FT,Lead Data Engineer,150000,CAD,118187,CA,100,CA,S
|
||||
376,2022,SE,FT,Data Analyst,132000,USD,132000,US,0,US,M
|
||||
377,2022,SE,FT,Data Engineer,165400,USD,165400,US,100,US,M
|
||||
378,2022,SE,FT,Data Architect,208775,USD,208775,US,100,US,M
|
||||
379,2022,SE,FT,Data Architect,147800,USD,147800,US,100,US,M
|
||||
380,2022,SE,FT,Data Engineer,136994,USD,136994,US,100,US,M
|
||||
381,2022,SE,FT,Data Engineer,101570,USD,101570,US,100,US,M
|
||||
382,2022,SE,FT,Data Analyst,128875,USD,128875,US,100,US,M
|
||||
383,2022,SE,FT,Data Analyst,93700,USD,93700,US,100,US,M
|
||||
384,2022,EX,FT,Head of Machine Learning,6000000,INR,79039,IN,50,IN,L
|
||||
385,2022,SE,FT,Data Engineer,132320,USD,132320,US,100,US,M
|
||||
386,2022,EN,FT,Machine Learning Engineer,28500,GBP,37300,GB,100,GB,L
|
||||
387,2022,SE,FT,Data Analyst,164000,USD,164000,US,0,US,M
|
||||
388,2022,SE,FT,Data Engineer,155000,USD,155000,US,100,US,M
|
||||
389,2022,MI,FT,Machine Learning Engineer,95000,GBP,124333,GB,0,GB,M
|
||||
390,2022,MI,FT,Machine Learning Engineer,75000,GBP,98158,GB,0,GB,M
|
||||
391,2022,MI,FT,AI Scientist,120000,USD,120000,US,0,US,M
|
||||
392,2022,SE,FT,Data Analyst,112900,USD,112900,US,100,US,M
|
||||
393,2022,SE,FT,Data Analyst,90320,USD,90320,US,100,US,M
|
||||
394,2022,SE,FT,Data Analytics Manager,145000,USD,145000,US,100,US,M
|
||||
395,2022,SE,FT,Data Analytics Manager,105400,USD,105400,US,100,US,M
|
||||
396,2022,MI,FT,Machine Learning Engineer,80000,EUR,87932,FR,100,DE,M
|
||||
397,2022,MI,FT,Data Engineer,90000,GBP,117789,GB,0,GB,M
|
||||
398,2022,SE,FT,Data Scientist,215300,USD,215300,US,100,US,L
|
||||
399,2022,SE,FT,Data Scientist,158200,USD,158200,US,100,US,L
|
||||
400,2022,SE,FT,Data Engineer,209100,USD,209100,US,100,US,L
|
||||
401,2022,SE,FT,Data Engineer,154600,USD,154600,US,100,US,L
|
||||
402,2022,SE,FT,Data Analyst,115934,USD,115934,US,0,US,M
|
||||
403,2022,SE,FT,Data Analyst,81666,USD,81666,US,0,US,M
|
||||
404,2022,SE,FT,Data Engineer,175000,USD,175000,US,100,US,M
|
||||
405,2022,MI,FT,Data Engineer,75000,GBP,98158,GB,0,GB,M
|
||||
406,2022,MI,FT,Data Analyst,58000,USD,58000,US,0,US,S
|
||||
407,2022,SE,FT,Data Engineer,183600,USD,183600,US,100,US,L
|
||||
408,2022,MI,FT,Data Analyst,40000,GBP,52351,GB,100,GB,M
|
||||
409,2022,SE,FT,Data Scientist,180000,USD,180000,US,100,US,M
|
||||
410,2022,MI,FT,Data Scientist,55000,GBP,71982,GB,0,GB,M
|
||||
411,2022,MI,FT,Data Scientist,35000,GBP,45807,GB,0,GB,M
|
||||
412,2022,MI,FT,Data Engineer,60000,EUR,65949,GR,100,GR,M
|
||||
413,2022,MI,FT,Data Engineer,45000,EUR,49461,GR,100,GR,M
|
||||
414,2022,MI,FT,Data Engineer,60000,GBP,78526,GB,100,GB,M
|
||||
415,2022,MI,FT,Data Engineer,45000,GBP,58894,GB,100,GB,M
|
||||
416,2022,SE,FT,Data Scientist,260000,USD,260000,US,100,US,M
|
||||
417,2022,SE,FT,Data Science Engineer,60000,USD,60000,AR,100,MX,L
|
||||
418,2022,MI,FT,Data Engineer,63900,USD,63900,US,0,US,M
|
||||
419,2022,MI,FT,Machine Learning Scientist,160000,USD,160000,US,100,US,L
|
||||
420,2022,MI,FT,Machine Learning Scientist,112300,USD,112300,US,100,US,L
|
||||
421,2022,MI,FT,Data Science Manager,241000,USD,241000,US,100,US,M
|
||||
422,2022,MI,FT,Data Science Manager,159000,USD,159000,US,100,US,M
|
||||
423,2022,SE,FT,Data Scientist,180000,USD,180000,US,0,US,M
|
||||
424,2022,SE,FT,Data Scientist,80000,USD,80000,US,0,US,M
|
||||
425,2022,MI,FT,Data Engineer,82900,USD,82900,US,0,US,M
|
||||
426,2022,SE,FT,Data Engineer,100800,USD,100800,US,100,US,L
|
||||
427,2022,MI,FT,Data Engineer,45000,EUR,49461,ES,100,ES,M
|
||||
428,2022,SE,FT,Data Scientist,140400,USD,140400,US,0,US,L
|
||||
429,2022,MI,FT,Data Analyst,30000,GBP,39263,GB,100,GB,M
|
||||
430,2022,MI,FT,Data Analyst,40000,EUR,43966,ES,100,ES,M
|
||||
431,2022,MI,FT,Data Analyst,30000,EUR,32974,ES,100,ES,M
|
||||
432,2022,MI,FT,Data Engineer,80000,EUR,87932,ES,100,ES,M
|
||||
433,2022,MI,FT,Data Engineer,70000,EUR,76940,ES,100,ES,M
|
||||
434,2022,MI,FT,Data Engineer,80000,GBP,104702,GB,100,GB,M
|
||||
435,2022,MI,FT,Data Engineer,70000,GBP,91614,GB,100,GB,M
|
||||
436,2022,MI,FT,Data Engineer,60000,EUR,65949,ES,100,ES,M
|
||||
437,2022,MI,FT,Data Engineer,80000,EUR,87932,GR,100,GR,M
|
||||
438,2022,SE,FT,Machine Learning Engineer,189650,USD,189650,US,0,US,M
|
||||
439,2022,SE,FT,Machine Learning Engineer,164996,USD,164996,US,0,US,M
|
||||
440,2022,MI,FT,Data Analyst,40000,EUR,43966,GR,100,GR,M
|
||||
441,2022,MI,FT,Data Analyst,30000,EUR,32974,GR,100,GR,M
|
||||
442,2022,MI,FT,Data Engineer,75000,GBP,98158,GB,100,GB,M
|
||||
443,2022,MI,FT,Data Engineer,60000,GBP,78526,GB,100,GB,M
|
||||
444,2022,SE,FT,Data Scientist,215300,USD,215300,US,0,US,L
|
||||
445,2022,MI,FT,Data Engineer,70000,EUR,76940,GR,100,GR,M
|
||||
446,2022,SE,FT,Data Engineer,209100,USD,209100,US,100,US,L
|
||||
447,2022,SE,FT,Data Engineer,154600,USD,154600,US,100,US,L
|
||||
448,2022,SE,FT,Data Engineer,180000,USD,180000,US,100,US,M
|
||||
449,2022,EN,FT,ML Engineer,20000,EUR,21983,PT,100,PT,L
|
||||
450,2022,SE,FT,Data Engineer,80000,USD,80000,US,100,US,M
|
||||
451,2022,MI,FT,Machine Learning Developer,100000,CAD,78791,CA,100,CA,M
|
||||
452,2022,EX,FT,Director of Data Science,250000,CAD,196979,CA,50,CA,L
|
||||
453,2022,MI,FT,Machine Learning Engineer,120000,USD,120000,US,100,US,S
|
||||
454,2022,EN,FT,Computer Vision Engineer,125000,USD,125000,US,0,US,M
|
||||
455,2022,MI,FT,NLP Engineer,240000,CNY,37236,US,50,US,L
|
||||
456,2022,SE,FT,Data Engineer,105000,USD,105000,US,100,US,M
|
||||
457,2022,SE,FT,Lead Machine Learning Engineer,80000,EUR,87932,DE,0,DE,M
|
||||
458,2022,MI,FT,Business Data Analyst,1400000,INR,18442,IN,100,IN,M
|
||||
459,2022,MI,FT,Data Scientist,2400000,INR,31615,IN,100,IN,L
|
||||
460,2022,MI,FT,Machine Learning Infrastructure Engineer,53000,EUR,58255,PT,50,PT,L
|
||||
461,2022,EN,FT,Financial Data Analyst,100000,USD,100000,US,50,US,L
|
||||
462,2022,MI,PT,Data Engineer,50000,EUR,54957,DE,50,DE,L
|
||||
463,2022,EN,FT,Data Scientist,1400000,INR,18442,IN,100,IN,M
|
||||
464,2022,SE,FT,Principal Data Scientist,148000,EUR,162674,DE,100,DE,M
|
||||
465,2022,EN,FT,Data Engineer,120000,USD,120000,US,100,US,M
|
||||
466,2022,SE,FT,Research Scientist,144000,USD,144000,US,50,US,L
|
||||
467,2022,SE,FT,Data Scientist,104890,USD,104890,US,100,US,M
|
||||
468,2022,SE,FT,Data Engineer,100000,USD,100000,US,100,US,M
|
||||
469,2022,SE,FT,Data Scientist,140000,USD,140000,US,100,US,M
|
||||
470,2022,MI,FT,Data Analyst,135000,USD,135000,US,100,US,M
|
||||
471,2022,MI,FT,Data Analyst,50000,USD,50000,US,100,US,M
|
||||
472,2022,SE,FT,Data Scientist,220000,USD,220000,US,100,US,M
|
||||
473,2022,SE,FT,Data Scientist,140000,USD,140000,US,100,US,M
|
||||
474,2022,MI,FT,Data Scientist,140000,GBP,183228,GB,0,GB,M
475,2022,MI,FT,Data Scientist,70000,GBP,91614,GB,0,GB,M
476,2022,SE,FT,Data Scientist,185100,USD,185100,US,100,US,M
477,2022,SE,FT,Machine Learning Engineer,220000,USD,220000,US,100,US,M
478,2022,MI,FT,Data Scientist,200000,USD,200000,US,100,US,M
479,2022,MI,FT,Data Scientist,120000,USD,120000,US,100,US,M
480,2022,SE,FT,Machine Learning Engineer,120000,USD,120000,AE,100,AE,S
481,2022,SE,FT,Machine Learning Engineer,65000,USD,65000,AE,100,AE,S
482,2022,EX,FT,Data Engineer,324000,USD,324000,US,100,US,M
483,2022,EX,FT,Data Engineer,216000,USD,216000,US,100,US,M
484,2022,SE,FT,Data Engineer,210000,USD,210000,US,100,US,M
485,2022,SE,FT,Machine Learning Engineer,120000,USD,120000,US,100,US,M
486,2022,SE,FT,Data Scientist,230000,USD,230000,US,100,US,M
487,2022,EN,PT,Data Scientist,100000,USD,100000,DZ,50,DZ,M
488,2022,MI,FL,Data Scientist,100000,USD,100000,CA,100,US,M
489,2022,EN,CT,Applied Machine Learning Scientist,29000,EUR,31875,TN,100,CZ,M
490,2022,SE,FT,Head of Data,200000,USD,200000,MY,100,US,M
491,2022,MI,FT,Principal Data Analyst,75000,USD,75000,CA,100,CA,S
492,2022,MI,FT,Data Scientist,150000,PLN,35590,PL,100,PL,L
493,2022,SE,FT,Machine Learning Developer,100000,CAD,78791,CA,100,CA,M
494,2022,SE,FT,Data Scientist,100000,USD,100000,BR,100,US,M
495,2022,MI,FT,Machine Learning Scientist,153000,USD,153000,US,50,US,M
496,2022,EN,FT,Data Engineer,52800,EUR,58035,PK,100,DE,M
497,2022,SE,FT,Data Scientist,165000,USD,165000,US,100,US,M
498,2022,SE,FT,Research Scientist,85000,EUR,93427,FR,50,FR,L
499,2022,EN,FT,Data Scientist,66500,CAD,52396,CA,100,CA,L
500,2022,SE,FT,Machine Learning Engineer,57000,EUR,62651,NL,100,NL,L
501,2022,MI,FT,Head of Data,30000,EUR,32974,EE,100,EE,S
502,2022,EN,FT,Data Scientist,40000,USD,40000,JP,100,MY,L
503,2022,MI,FT,Machine Learning Engineer,121000,AUD,87425,AU,100,AU,L
504,2022,SE,FT,Data Engineer,115000,USD,115000,US,100,US,M
505,2022,EN,FT,Data Scientist,120000,AUD,86703,AU,50,AU,M
506,2022,MI,FT,Applied Machine Learning Scientist,75000,USD,75000,BO,100,US,L
507,2022,MI,FT,Research Scientist,59000,EUR,64849,AT,0,AT,L
508,2022,EN,FT,Research Scientist,120000,USD,120000,US,100,US,L
509,2022,MI,FT,Applied Data Scientist,157000,USD,157000,US,100,US,L
510,2022,EN,FT,Computer Vision Software Engineer,150000,USD,150000,AU,100,AU,S
511,2022,MI,FT,Business Data Analyst,90000,CAD,70912,CA,50,CA,L
512,2022,EN,FT,Data Engineer,65000,USD,65000,US,100,US,S
513,2022,SE,FT,Machine Learning Engineer,65000,EUR,71444,IE,100,IE,S
514,2022,EN,FT,Data Analytics Engineer,20000,USD,20000,PK,0,PK,M
515,2022,MI,FT,Data Scientist,48000,USD,48000,RU,100,US,S
516,2022,SE,FT,Data Science Manager,152500,USD,152500,US,100,US,M
517,2022,MI,FT,Data Engineer,62000,EUR,68147,FR,100,FR,M
518,2022,MI,FT,Data Scientist,115000,CHF,122346,CH,0,CH,L
519,2022,SE,FT,Applied Data Scientist,380000,USD,380000,US,100,US,L
520,2022,MI,FT,Data Scientist,88000,CAD,69336,CA,100,CA,M
521,2022,EN,FT,Computer Vision Engineer,10000,USD,10000,PT,100,LU,M
522,2022,MI,FT,Data Analyst,20000,USD,20000,GR,100,GR,S
523,2022,SE,FT,Data Analytics Lead,405000,USD,405000,US,100,US,L
524,2022,MI,FT,Data Scientist,135000,USD,135000,US,100,US,L
525,2022,SE,FT,Applied Data Scientist,177000,USD,177000,US,100,US,L
526,2022,MI,FT,Data Scientist,78000,USD,78000,US,100,US,M
527,2022,SE,FT,Data Analyst,135000,USD,135000,US,100,US,M
528,2022,SE,FT,Data Analyst,100000,USD,100000,US,100,US,M
529,2022,SE,FT,Data Analyst,90320,USD,90320,US,100,US,M
530,2022,MI,FT,Data Analyst,85000,USD,85000,CA,0,CA,M
531,2022,MI,FT,Data Analyst,75000,USD,75000,CA,0,CA,M
532,2022,SE,FT,Machine Learning Engineer,214000,USD,214000,US,100,US,M
533,2022,SE,FT,Machine Learning Engineer,192600,USD,192600,US,100,US,M
534,2022,SE,FT,Data Architect,266400,USD,266400,US,100,US,M
535,2022,SE,FT,Data Architect,213120,USD,213120,US,100,US,M
536,2022,SE,FT,Data Analyst,112900,USD,112900,US,100,US,M
537,2022,SE,FT,Data Engineer,155000,USD,155000,US,100,US,M
538,2022,MI,FT,Data Scientist,141300,USD,141300,US,0,US,M
539,2022,MI,FT,Data Scientist,102100,USD,102100,US,0,US,M
540,2022,SE,FT,Data Analyst,115934,USD,115934,US,100,US,M
541,2022,SE,FT,Data Analyst,81666,USD,81666,US,100,US,M
542,2022,MI,FT,Data Engineer,206699,USD,206699,US,0,US,M
543,2022,MI,FT,Data Engineer,99100,USD,99100,US,0,US,M
544,2022,SE,FT,Data Engineer,130000,USD,130000,US,100,US,M
545,2022,SE,FT,Data Engineer,115000,USD,115000,US,100,US,M
546,2022,SE,FT,Data Engineer,110500,USD,110500,US,100,US,M
547,2022,SE,FT,Data Engineer,130000,USD,130000,US,100,US,M
548,2022,SE,FT,Data Analyst,99050,USD,99050,US,100,US,M
549,2022,SE,FT,Data Engineer,160000,USD,160000,US,100,US,M
550,2022,SE,FT,Data Scientist,205300,USD,205300,US,0,US,L
551,2022,SE,FT,Data Scientist,140400,USD,140400,US,0,US,L
552,2022,SE,FT,Data Scientist,176000,USD,176000,US,100,US,M
553,2022,SE,FT,Data Scientist,144000,USD,144000,US,100,US,M
554,2022,SE,FT,Data Engineer,200100,USD,200100,US,100,US,M
555,2022,SE,FT,Data Engineer,160000,USD,160000,US,100,US,M
556,2022,SE,FT,Data Engineer,145000,USD,145000,US,100,US,M
557,2022,SE,FT,Data Engineer,70500,USD,70500,US,0,US,M
558,2022,SE,FT,Data Scientist,205300,USD,205300,US,0,US,M
559,2022,SE,FT,Data Scientist,140400,USD,140400,US,0,US,M
560,2022,SE,FT,Analytics Engineer,205300,USD,205300,US,0,US,M
561,2022,SE,FT,Analytics Engineer,184700,USD,184700,US,0,US,M
562,2022,SE,FT,Data Engineer,175100,USD,175100,US,100,US,M
563,2022,SE,FT,Data Engineer,140250,USD,140250,US,100,US,M
564,2022,SE,FT,Data Analyst,116150,USD,116150,US,100,US,M
565,2022,SE,FT,Data Engineer,54000,USD,54000,US,0,US,M
566,2022,SE,FT,Data Analyst,170000,USD,170000,US,100,US,M
567,2022,MI,FT,Data Analyst,50000,GBP,65438,GB,0,GB,M
568,2022,SE,FT,Data Analyst,80000,USD,80000,US,100,US,M
569,2022,SE,FT,Data Scientist,140000,USD,140000,US,100,US,M
570,2022,SE,FT,Data Scientist,210000,USD,210000,US,100,US,M
571,2022,SE,FT,Data Scientist,140000,USD,140000,US,100,US,M
572,2022,SE,FT,Data Analyst,100000,USD,100000,US,100,US,M
573,2022,SE,FT,Data Analyst,69000,USD,69000,US,100,US,M
574,2022,SE,FT,Data Scientist,210000,USD,210000,US,100,US,M
575,2022,SE,FT,Data Scientist,140000,USD,140000,US,100,US,M
576,2022,SE,FT,Data Scientist,210000,USD,210000,US,100,US,M
577,2022,SE,FT,Data Analyst,150075,USD,150075,US,100,US,M
578,2022,SE,FT,Data Engineer,100000,USD,100000,US,100,US,M
579,2022,SE,FT,Data Engineer,25000,USD,25000,US,100,US,M
580,2022,SE,FT,Data Analyst,126500,USD,126500,US,100,US,M
581,2022,SE,FT,Data Analyst,106260,USD,106260,US,100,US,M
582,2022,SE,FT,Data Engineer,220110,USD,220110,US,100,US,M
583,2022,SE,FT,Data Engineer,160080,USD,160080,US,100,US,M
584,2022,SE,FT,Data Analyst,105000,USD,105000,US,100,US,M
585,2022,SE,FT,Data Analyst,110925,USD,110925,US,100,US,M
586,2022,MI,FT,Data Analyst,35000,GBP,45807,GB,0,GB,M
587,2022,SE,FT,Data Scientist,140000,USD,140000,US,100,US,M
588,2022,SE,FT,Data Analyst,99000,USD,99000,US,0,US,M
589,2022,SE,FT,Data Analyst,60000,USD,60000,US,100,US,M
590,2022,SE,FT,Data Architect,192564,USD,192564,US,100,US,M
591,2022,SE,FT,Data Architect,144854,USD,144854,US,100,US,M
592,2022,SE,FT,Data Scientist,230000,USD,230000,US,100,US,M
593,2022,SE,FT,Data Scientist,150000,USD,150000,US,100,US,M
594,2022,SE,FT,Data Analytics Manager,150260,USD,150260,US,100,US,M
595,2022,SE,FT,Data Analytics Manager,109280,USD,109280,US,100,US,M
596,2022,SE,FT,Data Scientist,210000,USD,210000,US,100,US,M
597,2022,SE,FT,Data Analyst,170000,USD,170000,US,100,US,M
598,2022,MI,FT,Data Scientist,160000,USD,160000,US,100,US,M
599,2022,MI,FT,Data Scientist,130000,USD,130000,US,100,US,M
600,2022,EN,FT,Data Analyst,67000,USD,67000,CA,0,CA,M
601,2022,EN,FT,Data Analyst,52000,USD,52000,CA,0,CA,M
602,2022,SE,FT,Data Engineer,154000,USD,154000,US,100,US,M
603,2022,SE,FT,Data Engineer,126000,USD,126000,US,100,US,M
604,2022,SE,FT,Data Analyst,129000,USD,129000,US,0,US,M
605,2022,SE,FT,Data Analyst,150000,USD,150000,US,100,US,M
606,2022,MI,FT,AI Scientist,200000,USD,200000,IN,100,US,L
60
degtyarev_mikhail_lab_6/main.py
Normal file
@@ -0,0 +1,60 @@
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import Lasso
from sklearn.metrics import mean_squared_error
from sklearn.preprocessing import StandardScaler, OneHotEncoder
from sklearn.compose import ColumnTransformer
from sklearn.pipeline import Pipeline
import matplotlib.pyplot as plt

# Load the data
file_path = 'ds_salaries.csv'
data = pd.read_csv(file_path)

# Preprocessing: scale the numeric feature, one-hot encode the categorical ones
categorical_features = ['experience_level', 'employment_type', 'company_location', 'company_size']
numeric_features = ['work_year']

preprocessor = ColumnTransformer(
    transformers=[
        ('num', StandardScaler(), numeric_features),
        ('cat', OneHotEncoder(handle_unknown='ignore'), categorical_features)
    ])

# Feature selection
features = ['work_year', 'experience_level', 'employment_type', 'company_location', 'company_size']
X = data[features]
y = data['salary_in_usd']

# Split the data into training and test sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Build and train the model, with the preprocessor as the first pipeline step
alpha = 0.01
lasso_model = Pipeline([
    ('preprocessor', preprocessor),
    ('lasso', Lasso(alpha=alpha))
])

lasso_model.fit(X_train, y_train)

# Get the predictions
y_pred = lasso_model.predict(X_test)

# Evaluate the model (score() of a regressor returns R^2)
accuracy = lasso_model.score(X_test, y_test)
mse = mean_squared_error(y_test, y_pred)

print(f"R^2 Score: {accuracy:.2f}")
print(f"Mean Squared Error: {mse:.2f}")

# Show predicted vs. actual values
predictions_df = pd.DataFrame({'Actual': y_test, 'Predicted': y_pred})
print(predictions_df)

# Visualize the model weights (coefficients).
# Note: OneHotEncoder.get_feature_names() was removed in scikit-learn 1.2;
# get_feature_names_out() is the current equivalent.
encoder = lasso_model.named_steps['preprocessor'].transformers_[1][1]
coefficients = pd.Series(
    lasso_model.named_steps['lasso'].coef_,
    index=numeric_features + list(encoder.get_feature_names_out(categorical_features))
)
plt.figure(figsize=(10, 6))
coefficients.sort_values().plot(kind='barh')
plt.title('Lasso Regression Coefficients')
plt.show()
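The coefficient plot above stitches the feature names together by hand (numeric names plus the encoder's output names). If scikit-learn ≥ 1.0 is available (an assumption; the lab does not pin a version), the fitted `ColumnTransformer` can produce the full, correctly ordered name list on its own. A minimal sketch reusing the fitted `lasso_model` from the script:

```python
# Minimal sketch (assumes scikit-learn >= 1.0 and the fitted `lasso_model` above).
# The fitted ColumnTransformer knows the names of all output columns, numeric and
# one-hot encoded alike, in the same order the Lasso coefficients use.
feature_names = lasso_model.named_steps['preprocessor'].get_feature_names_out()
coefficients = pd.Series(lasso_model.named_steps['lasso'].coef_, index=feature_names)
print(coefficients.sort_values(ascending=False).head(10))  # ten largest weights
```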
55
degtyarev_mikhail_lab_7/Readme.md
Normal file
@@ -0,0 +1,55 @@
# Lab 7
## Variant 9

## Task
Pick a literary text (even variants use a Russian-language text, odd variants an English-language one) and train a recurrent neural network on it for a text-generation task. Tune the architecture and parameters to get as close as possible to a meaningful result. Then pair up (one even and one odd variant), exchange the networks you built, and check how your partner's architecture handles your text.

## Program description
The program is an example of using a recurrent neural network (LSTM) to generate text based on a literary work.
### Libraries used
- `numpy`: a library for multidimensional arrays and mathematical functions.
- `keras`:
  - `Sequential`: a neural network model built as a linear stack of layers.
  - `Embedding`: a layer that maps integers (word indices) to dense vectors of fixed size.
  - `LSTM`: a long short-term memory recurrent layer.
  - `Dense`: a fully connected layer with softmax activation that produces a probability distribution over words.
  - `Tokenizer`, `pad_sequences`: tools for tokenizing and preparing text sequences.

### Program steps

1. **Loading the data:**
   - The text is loaded from the file `text.txt` (an English-language text) using standard Python facilities.

2. **Preparing the training data:**
   - The text is split into token sequences for training the recurrent neural network (a toy example of this step is sketched at the end of this section).
   - A `Tokenizer` builds the vocabulary and converts the text to a numeric representation.
   - The sequences are padded to the maximum length with `pad_sequences`.

3. **Building and compiling the model:**
   - A sequential model is built with an embedding layer, an LSTM recurrent layer, and a fully connected output layer.
   - The model is compiled with categorical cross-entropy as the loss function and the Adam optimizer.

4. **Training the model:**
   - The model is trained on the prepared data for 100 epochs.

5. **Evaluating the model:**
   - The final loss on the training data is printed.

6. **Generating text:**
   - The seed text "Amidst the golden hues of autumn leaves" is created.
   - The model is used to predict the next word in the sequence.
   - The generated text is printed to the screen.

### Running the program
- Replace `'text.txt'` with the actual path or name of your English-language text file.
- Clone or download the code from `main.py`.
- Run the file in an environment that can execute Python: `python main.py`

### Results

The loss on the training data was fairly small: 0.029374321853453274327

The generated English-language text:

In the quietude of the woods, mystical creatures stirred, their silhouettes dancing in the dappling sunlight. A mysterious energy enveloped the surroundings, as if the very essence of nature had come alive. The rustling leaves seemed to carry ancient tales, whispered secrets of times long past. Each step through the foliage unveiled a new chapter in the enchanted story of the woodland realm.
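To make step 2 concrete, here is a minimal sketch of how the growing n-gram training pairs described above are built, using a two-line toy corpus; the corpus and variable names are illustrative only, not part of the lab code:

```python
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences

toy_corpus = ["the leaves fall slowly", "the wind whispers softly"]  # illustrative corpus
tokenizer = Tokenizer()
tokenizer.fit_on_texts(toy_corpus)

sequences = []
for line in toy_corpus:
    tokens = tokenizer.texts_to_sequences([line])[0]   # e.g. [1, 2, 3, 4]
    for i in range(1, len(tokens)):
        sequences.append(tokens[:i + 1])               # growing n-grams: [1,2], [1,2,3], ...

max_len = max(len(s) for s in sequences)
padded = pad_sequences(sequences, maxlen=max_len, padding='pre')
X, y = padded[:, :-1], padded[:, -1]                   # inputs are prefixes, target is the next word
print(X.shape, y.shape)
```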
60
degtyarev_mikhail_lab_7/main.py
Normal file
@@ -0,0 +1,60 @@
import numpy as np
from keras.models import Sequential
from keras.layers import Embedding, LSTM, Dense
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences

# Load the text
with open('text.txt', 'r', encoding='utf-8') as file:
    text = file.read()

tokenizer = Tokenizer()
tokenizer.fit_on_texts([text])
total_words = len(tokenizer.word_index) + 1

# Create the sequences of training data (growing n-grams for every line)
input_sequences = []
for line in text.split('\n'):
    token_list = tokenizer.texts_to_sequences([line])[0]
    for i in range(1, len(token_list)):
        n_gram_sequence = token_list[:i + 1]
        input_sequences.append(n_gram_sequence)

# Pad the sequences to a common length
max_sequence_length = max(len(seq) for seq in input_sequences)
input_sequences = pad_sequences(input_sequences, maxlen=max_sequence_length, padding='pre')

# Create input and output data (inputs are prefixes, the target is the next word, one-hot encoded)
X, y = input_sequences[:, :-1], input_sequences[:, -1]
y = np.eye(total_words)[y]

# Create the model
model = Sequential()
model.add(Embedding(total_words, 50, input_length=max_sequence_length - 1))
model.add(LSTM(100))
model.add(Dense(total_words, activation='softmax'))

# Compile the model
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

# Train the model
history = model.fit(X, y, epochs=100, verbose=2)

print(f"Final Loss on Training Data: {history.history['loss'][-1]}")

# Generate text
seed_text = "Amidst the golden hues of autumn leaves"
next_words = 100

for _ in range(next_words):
    token_list = tokenizer.texts_to_sequences([seed_text])[0]
    token_list = pad_sequences([token_list], maxlen=max_sequence_length - 1, padding='pre')
    # Sequential.predict_classes() was removed in TensorFlow 2.6; take the argmax of predict() instead.
    predicted = int(np.argmax(model.predict(token_list, verbose=0), axis=-1)[0])
    output_word = ""
    for word, index in tokenizer.word_index.items():
        if index == predicted:
            output_word = word
            break
    seed_text += " " + output_word

print(seed_text)
1
degtyarev_mikhail_lab_7/text.txt
Normal file
@@ -0,0 +1 @@
Amidst the golden hues of autumn leaves, a gentle breeze whispered through the trees. The air was filled with the sweet fragrance of blooming flowers, and the sun cast a warm glow on the peaceful landscape. Birds chirped melodiously, creating a symphony of nature's harmonious melodies. As the day unfolded, the sky painted itself in vibrant shades of orange and pink, showcasing the breathtaking beauty of the changing seasons.
BIN
gordeeva_anna_lab_6/1aIk7s_b66s.jpg
Normal file
|
After Width: | Height: | Size: 28 KiB |
23
gordeeva_anna_lab_6/README.md
Normal file
@@ -0,0 +1,23 @@
## Data
I use the following data:
* Link to an image of the painting
* Size of the painting in cm
* Average review rating
* Number of orders
* Price

To make the analysis more specific, the following data were added manually:
* Genre (for example: landscape, animals, portrait, etc.)
* Subgenre (for example: cityscape, cats, dogs, etc.)

## Task and classification solution (neural network)
The goal is to recommend/predict a subgenre for the user based on the chosen genre and price category. There is no need to split the target into groups, since the parameter is already categorical. To perform the classification, all categorical parameters are converted to numbers. The model's accuracy does not exceed 0.30, so the model cannot be called successful. This is affected by the fact that the data contains quite a lot of classes, which makes the task harder. The prediction results are shown in figures 5 and 6.

## Result
![](1aIk7s_b66s.jpg)
![](cAofDwrO6o4.jpg)
BIN
gordeeva_anna_lab_6/cAofDwrO6o4.jpg
Normal file
|
After Width: | Height: | Size: 26 KiB |
74
gordeeva_anna_lab_6/laba6.py
Normal file
@@ -0,0 +1,74 @@
import pandas as pd
import streamlit as st
import statsmodels.api as sm
from sklearn.neural_network import MLPClassifier
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import accuracy_score
import numpy as np

data = pd.read_csv('222.csv')

genre_mapping = {genre: code for code, genre in enumerate(data['Жанр'].unique())}
subgenre_mapping = {subgenre: code for code, subgenre in enumerate(data['Поджанр'].unique())}
price_mapping = {price: code for code, price in enumerate(data['Категория стоимости'].unique())}

# Convert the categorical values to numbers
data['Жанр'] = data['Жанр'].map(genre_mapping)
data['Поджанр'] = data['Поджанр'].map(subgenre_mapping)
data['Категория стоимости'] = data['Категория стоимости'].map(price_mapping)

columns_to_check = ['Размер', 'Жанр', 'Поджанр', 'Категория стоимости']
data = data.dropna(subset=columns_to_check)

# Split the data into features (X) and the target variable (y)
X = data[['Жанр', 'Категория стоимости']]
y = data['Поджанр']

# Split into training and test sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Normalize the data
scaler = StandardScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)

# Initialize the MLPClassifier
clf = MLPClassifier(hidden_layer_sizes=(100, 50), max_iter=500, alpha=0.1, solver='adam', random_state=42)

# Train the model
clf.fit(X_train, y_train)

# Predict on the test set
predictions = clf.predict(X_test)

# Evaluate the model accuracy
accuracy = accuracy_score(y_test, predictions)
st.write(f"Точность модели: {accuracy}")

on_pred = st.toggle('')
if on_pred:
    selected_genre = st.selectbox('Выберите жанр:', genre_mapping)
    selected_price = st.selectbox('Выберите категорию стоимости:', price_mapping)

    new_data = pd.DataFrame({'Жанр': [selected_genre], 'Категория стоимости': [selected_price]}, index=[0])

    new_data['Жанр'] = new_data['Жанр'].map(genre_mapping)
    new_data['Категория стоимости'] = new_data['Категория стоимости'].map(price_mapping)

    new_data_normalized = scaler.transform(new_data.values)
    new_predictions = clf.predict(new_data_normalized)

    # Reverse dictionary to map numeric subgenre codes back to their text labels
    reverse_subgenre_mapping = {code: subgenre for subgenre, code in subgenre_mapping.items()}

    # Convert the numeric predictions back to text subgenre labels
    predicted_subgenres = [reverse_subgenre_mapping[code] for code in new_predictions]

    # Output the predicted subgenres for the new data
    st.write("Предсказанный поджанр:")
    for subgenre in predicted_subgenres:
        if isinstance(subgenre, float) and np.isnan(subgenre):
            st.write("Не удалось предсказать, мало данных по данному жанру")
        else:
            st.write(subgenre)
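Because Streamlit re-executes the whole script on every widget interaction, the MLP above is retrained each time the toggle or a selectbox changes. A minimal sketch of how the trained model and scaler could be cached between reruns (assuming Streamlit ≥ 1.18, where `st.cache_resource` exists; the helper name and the simplified preprocessing are illustrative, and `222.csv` is the author's private data file):

```python
import pandas as pd
import streamlit as st
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler

@st.cache_resource  # run once per process; later reruns reuse the returned objects
def train_model(csv_path: str):
    data = pd.read_csv(csv_path)
    genre_mapping = {g: c for c, g in enumerate(data['Жанр'].unique())}
    price_mapping = {p: c for c, p in enumerate(data['Категория стоимости'].unique())}
    subgenre_mapping = {s: c for c, s in enumerate(data['Поджанр'].unique())}
    data = data.dropna(subset=['Жанр', 'Поджанр', 'Категория стоимости'])
    X = pd.DataFrame({
        'Жанр': data['Жанр'].map(genre_mapping),
        'Категория стоимости': data['Категория стоимости'].map(price_mapping),
    })
    y = data['Поджанр'].map(subgenre_mapping)
    scaler = StandardScaler()
    clf = MLPClassifier(hidden_layer_sizes=(100, 50), max_iter=500, alpha=0.1, random_state=42)
    clf.fit(scaler.fit_transform(X), y)
    return clf, scaler, genre_mapping, price_mapping, subgenre_mapping

clf, scaler, genre_mapping, price_mapping, subgenre_mapping = train_model('222.csv')
```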
43
gordeeva_anna_lab_7/README.md
Normal file
@@ -0,0 +1,43 @@
## Task
Pick a literary text and train a recurrent neural network on it for a text-generation task.

## Dependencies
The following Python libraries are required for this application:
* NumPy
* TensorFlow
* Streamlit
* python-docx (the `docx` import used for reading the Word files)

## Running
```bash
streamlit run laba7.py
```

## Code description
1. Importing the libraries:

The required libraries are imported: docx for reading text from Word files, streamlit for building the web application, and numpy, tensorflow and keras for training the neural networks.

2. Extracting the text from the Word files:

The function extract_text_from_docx extracts the text from two Word files, one in Russian (textru) and one in English (texten), using the docx library.

3. Preparing the data for training the models:

The text from the files is split into sequences for training the recurrent neural networks (LSTM). The text is cut into sequences of a fixed length (maxlen) that are used to train the models on the Russian and English texts.

4. Building and training the models:

Two separate model instances (model_russian and model_english) are created and trained on the corresponding Russian and English text data.

5. Generating text from the trained models:

The generate_text function is used to generate text from the trained models. The generated text is displayed in the web application with streamlit.

## Result
Generated Russian text:

Ты к моему несчастью верь как в святыню верит монах как в чудо чудо верит дева как верят в вечернюю печальные странники в пути

Generated English text:

In the to my distress as the monk believes in a shrine as the maiden believes in a miracle as weary travelers believe in the evening star on their journey
99
gordeeva_anna_lab_7/laba7.py
Normal file
@@ -0,0 +1,99 @@
import docx
import streamlit as st
import numpy as np
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import LSTM, Dense, Embedding


def extract_text_from_docx(file_path):
    doc = docx.Document(file_path)
    full_text = []

    for para in doc.paragraphs:
        full_text.append(para.text)

    return '\n'.join(full_text)


# Local paths used by the author; the repository itself ships textru.txt / texten.txt
file_path1 = 'C:/Users/79084/Desktop/textru.doc'
file_path2 = 'C:/Users/79084/Desktop/texten.doc'

# Extract the text from the files
textru = extract_text_from_docx(file_path1)
texten = extract_text_from_docx(file_path2)

# Text preprocessing (character-level tokenizers)
tokenizer_russian = tf.keras.preprocessing.text.Tokenizer(char_level=True)
tokenizer_russian.fit_on_texts(textru)
tokenized_text_russian = tokenizer_russian.texts_to_sequences([textru])[0]

tokenizer_english = tf.keras.preprocessing.text.Tokenizer(char_level=True)
tokenizer_english.fit_on_texts(texten)
tokenized_text_english = tokenizer_english.texts_to_sequences([texten])[0]

# Build sliding-window sequences for training
maxlen = 40
step = 3
sentences_russian = []
next_chars_russian = []
sentences_english = []
next_chars_english = []

for i in range(0, len(tokenized_text_russian) - maxlen, step):
    sentences_russian.append(tokenized_text_russian[i: i + maxlen])
    next_chars_russian.append(tokenized_text_russian[i + maxlen])

for i in range(0, len(tokenized_text_english) - maxlen, step):
    sentences_english.append(tokenized_text_english[i: i + maxlen])
    next_chars_english.append(tokenized_text_english[i + maxlen])

# Convert the data to numpy arrays
x_russian = np.array(sentences_russian)
y_russian = np.array(next_chars_russian)
x_english = np.array(sentences_english)
y_english = np.array(next_chars_english)

# Model for the Russian text
model_russian = Sequential()
model_russian.add(Embedding(len(tokenizer_russian.word_index) + 1, 128))
model_russian.add(LSTM(128))
model_russian.add(Dense(len(tokenizer_russian.word_index) + 1, activation='softmax'))

model_russian.compile(loss='sparse_categorical_crossentropy', optimizer='adam')

# Train the model on the Russian text
model_russian.fit(x_russian, y_russian, batch_size=128, epochs=50)

# Model for the English text
model_english = Sequential()
model_english.add(Embedding(len(tokenizer_english.word_index) + 1, 128))
model_english.add(LSTM(128))
model_english.add(Dense(len(tokenizer_english.word_index) + 1, activation='softmax'))

model_english.compile(loss='sparse_categorical_crossentropy', optimizer='adam')

# Train the model on the English text
model_english.fit(x_english, y_english, batch_size=128, epochs=50)


# Generate text from a trained model
def generate_text(model, tokenizer, seed_text, maxlen, temperature=1.0, num_chars=400):
    generated_text = seed_text
    for _ in range(num_chars):
        encoded = tokenizer.texts_to_sequences([seed_text])[0]
        # The model expects a batch of sequences: keep at most the last maxlen
        # characters and add a batch dimension of 1.
        encoded = np.array(encoded[-maxlen:]).reshape(1, -1)
        predicted_probs = model.predict(encoded, verbose=0)[0]
        # Use temperature for more diverse predictions
        predicted_probs = np.log(predicted_probs) / temperature
        exp_preds = np.exp(predicted_probs)
        predicted_probs = exp_preds / np.sum(exp_preds)
        predicted = np.random.choice(len(predicted_probs), p=predicted_probs)
        next_char = tokenizer.index_word.get(predicted, '')
        generated_text += next_char
        seed_text += next_char
        seed_text = seed_text[1:]
    return generated_text


generated_russian_text = generate_text(model_russian, tokenizer_russian, 'Ты к моему', maxlen, temperature=0.5, num_chars=400)
st.write(generated_russian_text)

generated_english_text = generate_text(model_english, tokenizer_english, 'In the', maxlen, temperature=0.5, num_chars=400)
st.write(generated_english_text)
5
gordeeva_anna_lab_7/texten.txt
Normal file
@@ -0,0 +1,5 @@
Believe in me, to my distress,
As the monk believes in a shrine,
As the maiden believes in a miracle,
As weary travelers believe
In the evening star on their journey.
5
gordeeva_anna_lab_7/textru.txt
Normal file
@@ -0,0 +1,5 @@
Ты, к моему несчастью, верь,
Как в святыню, верит монах,
Как в чудо, верит дева,
Как верят в вечернюю звезду
Печальные странники в пути.
BIN
istyukov_timofey_lab1/1_linear_regression.png
Normal file
|
After Width: | Height: | Size: 62 KiB |
BIN
istyukov_timofey_lab1/2_perceptron.png
Normal file
|
After Width: | Height: | Size: 60 KiB |
BIN
istyukov_timofey_lab1/3_poly_ridge.png
Normal file
|
After Width: | Height: | Size: 65 KiB |
61
istyukov_timofey_lab1/README.md
Normal file
@@ -0,0 +1,61 @@
# Lab 1. Working with standard datasets and different models
## Variant 12
___

### Task:
Using the code from the section "Regularization and a feed-forward network", generate the specified type of data and compare 3 models on it (per your variant). Plot the graphs, report the quality of the models, and explain the results.

### Data for this variant:
- make_classification (n_samples=500, n_features=2, n_redundant=0, n_informative=2, random_state=rs, n_clusters_per_class=1)

### Models for this variant:
- Linear regression
- Perceptron
- Ridge polynomial regression (degree 4, alpha = 1.0)

___

### Running
- Run the file lab1.py

### Technologies used
- The **Python** programming language
- The **PyCharm** IDE
- Libraries:
  * numpy
  * sklearn
  * matplotlib

### Program description
The program generates a dataset with the make_classification() function using the parameters given in the variant. It then prints the quality of the models from the variant to the console and plots a graph for each model.

Quality is estimated with the models' built-in **.score()** method, which computes a quality measure of the model on a dataset. Note that for the regression models this is the R² coefficient of determination, while for the perceptron it is classification accuracy; a short sketch of the difference follows at the end of this README.

___
### Example run

![](1_linear_regression.png)
```text
===> Линейная регрессия <===
Оценка точности:  0.4513003751817972
```
___

![](2_perceptron.png)
```text
===> Персептрон <===
Оценка точности:  0.7591836734693878
```
___

![](3_poly_ridge.png)
```text
===> Гребневая полиномиальная регрессия <===
Оценка точности:  0.5312017992195672
```

### Conclusion
According to the quality scores printed to the console, the **perceptron** model showed the best result.
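A note on the scores above: scikit-learn's `score()` is not the same metric for all three models, so the numbers are not directly comparable. For `LinearRegression` and the `Ridge` pipeline it returns R², while for `Perceptron` it returns classification accuracy. A minimal sketch of how a regression model could also be scored as a classifier by thresholding its continuous output at 0.5 (an editorial illustration, not part of the lab code):

```python
from sklearn.datasets import make_classification
from sklearn.linear_model import LinearRegression
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split

X, y = make_classification(n_samples=500, n_features=2, n_redundant=0,
                           n_informative=2, random_state=0, n_clusters_per_class=1)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=10, random_state=40)

lin_reg = LinearRegression().fit(X_train, y_train)
print("R^2:", lin_reg.score(X_test, y_test))                 # what .score() reports for a regressor
pred_labels = (lin_reg.predict(X_test) >= 0.5).astype(int)   # threshold the regression output
print("Accuracy:", accuracy_score(y_test, pred_labels))      # comparable to the perceptron's score
```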
101
istyukov_timofey_lab1/lab1.py
Normal file
@@ -0,0 +1,101 @@
# Variant 12
# Data: make_classification (n_samples=500, n_features=2, n_redundant=0,
#                            n_informative=2, random_state=rs, n_clusters_per_class=1)
# Models:
# -- Linear regression
# -- Perceptron
# -- Ridge polynomial regression (degree 4, alpha = 1.0)

import numpy as np
from sklearn.datasets import make_classification
from sklearn.linear_model import LinearRegression, Perceptron, Ridge
from sklearn.model_selection import train_test_split
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import PolynomialFeatures
from matplotlib import pyplot as plt
from matplotlib.colors import ListedColormap


cm_bright_1 = ListedColormap(['#7FFFD4', '#00FFFF'])
cm_bright_2 = ListedColormap(['#FF69B4', '#FF1493'])


def main():
    X, y = make_classification(
        n_samples=500,
        n_features=2,
        n_redundant=0,
        n_informative=2,
        random_state=0,
        n_clusters_per_class=1)
    rng = np.random.RandomState(2)
    X += 2 * rng.uniform(size=X.shape)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=10, random_state=40)

    # models built on the generated data
    my_linear_regression(X_train, X_test, y_train, y_test)
    my_perceptron(X_train, X_test, y_train, y_test)
    my_poly_ridge(X_train, X_test, y_train, y_test)


# Linear regression
def my_linear_regression(X_train, X_test, y_train, y_test):
    lin_reg_model = LinearRegression()      # create the regression model
    lin_reg_model.fit(X_train, y_train)     # train it
    y_pred = lin_reg_model.predict(X_test)  # predict on the test data

    # console output (the score is computed on the training set)
    print()
    print('===> Линейная регрессия <===')
    print('Оценка точности: ', lin_reg_model.score(X_train, y_train))

    # plot
    plt.title('Линейная регрессия')
    plt.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm_bright_1)
    plt.scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm_bright_2, alpha=0.8)
    plt.plot(X_test, y_pred, color='red', linewidth=1)
    plt.savefig('1_linear_regression.png')
    plt.show()


# Perceptron
def my_perceptron(X_train, X_test, y_train, y_test):
    perceptron_model = Perceptron()
    perceptron_model.fit(X_train, y_train)
    y_pred = perceptron_model.predict(X_test)

    # console output (the score is computed on the training set)
    print()
    print('===> Персептрон <===')
    print('Оценка точности: ', perceptron_model.score(X_train, y_train))

    # plot
    plt.title('Персептрон')
    plt.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm_bright_1)
    plt.scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm_bright_2, alpha=0.8)
    plt.plot(X_test, y_pred, color='red', linewidth=1)
    plt.savefig('2_perceptron.png')
    plt.show()


# Ridge polynomial regression (degree=4, alpha=1.0)
def my_poly_ridge(X_train, X_test, y_train, y_test):
    poly_rige_model = make_pipeline(PolynomialFeatures(degree=4), Ridge(alpha=1.0))
    poly_rige_model.fit(X_train, y_train)
    y_pred = poly_rige_model.predict(X_test)

    # console output (the score is computed on the training set)
    print()
    print('===> Гребневая полиномиальная регрессия <===')
    print('Оценка точности: ', poly_rige_model.score(X_train, y_train))

    # plot
    plt.title('Гребневая полиномиальная регрессия')
    plt.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm_bright_1)
    plt.scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm_bright_2, alpha=0.8)
    plt.plot(X_test, y_pred, color='red', linewidth=1)
    plt.savefig('3_poly_ridge.png')
    plt.show()


main()
71
istyukov_timofey_lab_2/README.md
Normal file
@@ -0,0 +1,71 @@
# Lab 2. Feature ranking
## Variant 12
___

### Task:
Using the code from the section "Solving the feature-ranking problem", rank the features with the models specified in your variant. Display the score that each method/model assigns to each feature, as well as the average score. Analyse the results. Which four features turned out to be the most important by average score? (The names/indices of those features are the answer to the task.)

### Models for this variant:
- Lasso
- Recursive Feature Elimination (RFE)
- Linear correlation (f_regression)

___

### Running
- Run the file lab2.py

### Technologies used
- The **Python** programming language
- The **PyCharm** IDE
- Libraries:
  * numpy
  * sklearn

### Program description
The Friedman regression problem is used as the example. The models are given 15 input factors. The output is computed by a formula that uses only five of them, but factors 1-5 and 10-15 are interdependent.

The sequence of steps:
1. Generate the Friedman data
2. Create and train the models from the variant
3. Rank the features with these models, assigning a name to each feature
4. Print each model's features in descending order of score
5. Print the average score of each feature in descending order

The program shows how the different kinds of regression rate the importance of the factors and which factor ends up with the highest average importance across all three models of the variant.

---
### Example run
![](result.jpg)
```text
---> Lasso <---
[('x15', 1.0), ('x2', 0.88), ('x1', 0.82), ('x4', 0.38), ('x5', 0.38), ('x11', 0.01), ('x3', 0.0), ('x6', 0.0), ('x7', 0.0), ('x8', 0.0), ('x9', 0.0), ('x10', 0.0), ('x12', 0.0), ('x13', 0.0), ('x14', 0.0)]

---> RFE <---
[('x9', 1.0), ('x12', 0.88), ('x10', 0.75), ('x6', 0.62), ('x7', 0.5), ('x11', 0.38), ('x8', 0.25), ('x4', 0.12), ('x1', 0.0), ('x2', 0.0), ('x3', 0.0), ('x5', 0.0), ('x13', 0.0), ('x14', 0.0), ('x15', 0.0)]

---> F_reg <---
[('x4', 1.0), ('x15', 1.0), ('x2', 0.34), ('x13', 0.34), ('x1', 0.3), ('x12', 0.29), ('x5', 0.07), ('x6', 0.01), ('x3', 0.0), ('x7', 0.0), ('x8', 0.0), ('x9', 0.0), ('x10', 0.0), ('x11', 0.0), ('x14', 0.0)]

Средния значения по каждому признаку:
[('x15', 0.67), ('x4', 0.5), ('x2', 0.41), ('x12', 0.39), ('x1', 0.37), ('x9', 0.33), ('x10', 0.25), ('x6', 0.21), ('x7', 0.17), ('x5', 0.15), ('x11', 0.13), ('x13', 0.11), ('x8', 0.08), ('x3', 0.0), ('x14', 0.0)]
```

---

### Conclusion
According to the ranked feature scores printed to the console, the top four features by average score are:

1. **x15**
2. **x4**
3. **x2**
4. **x12**
114
istyukov_timofey_lab_2/lab2.py
Normal file
@@ -0,0 +1,114 @@
"""
Using the code from the section "Solving the feature-ranking problem",
rank the features with the models specified in your variant.
Display the score that each method/model assigns to each feature, as well as the average score.
Analyse the results. Which four features turned out to be the most important by average score?
(The names/indices of those features are the answer to the task.)
"""

# Variant 12
# Lasso
# Recursive Feature Elimination (RFE)
# Linear correlation (f_regression)


import numpy as np
from sklearn.linear_model import Lasso, LinearRegression
from sklearn.feature_selection import RFE
from sklearn.feature_selection import f_regression
from sklearn.preprocessing import MinMaxScaler


def main():
    X, Y = friedman_regression_problem(800)

    ''' Create and train the models '''

    # Lasso
    lasso_model = Lasso(alpha=.05)
    lasso_model.fit(X, Y)

    # Recursive feature elimination
    lr = LinearRegression()
    lr.fit(X, Y)
    rfe_model = RFE(estimator=lr)
    rfe_model.fit(X, Y)

    # Linear correlation
    f, p_val = f_regression(X, Y)

    # list of feature names
    names = ["x%s" % i for i in range(1, 16)]

    # dictionary of per-model rankings
    ranks = {}
    ranks["Lasso"] = rank_to_dict(lasso_model.coef_, names)
    ranks["RFE"] = rank_to_dict(rfe_model.ranking_, names)
    ranks["F_reg"] = rank_to_dict(f, names)

    # print the features and scores of each model
    print_sorted_model(ranks)

    # empty accumulator
    mean = {}

    # accumulate the total score of each feature
    for key, value in ranks.items():
        for item in value.items():
            if item[0] not in mean:     # if there is no entry for this key yet
                mean[item[0]] = 0       # add one
            mean[item[0]] += item[1]    # sum the values for each feature name

    # compute the average for each feature
    for key, value in mean.items():
        res = value / len(ranks)
        mean[key] = round(res, 2)

    # sort and print the list
    mean = sorted(mean.items(), key=lambda item: item[1], reverse=True)
    print("\033[92mСредния значения по каждому признаку:\033[00m")
    print(mean)


# Generate a dataset for the Friedman regression problem
def friedman_regression_problem(size):
    # generate the raw data: 800 observation rows and 15 feature columns
    np.random.seed(0)
    X = np.random.uniform(0, 1, (size, 15))
    # define the output function (the Friedman regression problem)
    Y = (10 * np.sin(np.pi * X[:, 0] * X[:, 1]) + 20 * (X[:, 2] - .5)**2 + 10 * X[:, 3] + 5 * X[:, 4]**5) + np.random.normal(0, 1)
    # make some features interdependent (x12..x15 become noisy copies of x1..x4)
    X[:, 11:] = X[:, :4] + np.random.normal(0, .025, (size, 4))
    return X, Y


# Build a dictionary of "feature_name: feature score" pairs
def rank_to_dict(ranks, names):
    ranks = np.abs(ranks)                       # take the absolute values of the scores
    r_array = np.array(ranks)                   # turn the list of scores into an array
    r_array = r_array.reshape(15, 1)            # reshape into a single column
    minmax = MinMaxScaler()                     # scaler for normalizing the data
    ranks = minmax.fit_transform(r_array)       # fit and transform the data
    ranks = ranks.ravel()                       # flatten the 2-D array back to 1-D
    ranks = map(lambda x: round(x, 2), ranks)   # round every element to two decimals
    return dict(zip(names, ranks))


# Print each model's features in descending order of score
def print_sorted_model(ranks):
    ranks_copy = dict(ranks)
    for key, value in ranks_copy.items():
        ranks_copy[key] = sorted(value.items(), key=lambda item: item[1], reverse=True)

    for key, value in ranks_copy.items():
        print("\033[92m---> {} <---\033[00m" .format(key))
        print(value)
        print()


if __name__ == "__main__":
    main()
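One caveat about `rank_to_dict(rfe_model.ranking_, names)`: RFE's `ranking_` assigns 1 to the features it keeps and larger numbers to the features it eliminates first, so min-max scaling it directly gives the highest normalized score to the least important features. A minimal sketch of one way to align the direction before normalizing (an editorial suggestion, not part of the submitted lab, and applying it would change the reported numbers):

```python
# Illustrative only: invert RFE's "lower is better" ranking so that larger means more
# important, then reuse the lab's own rank_to_dict() to normalize it to [0, 1].
inverted_ranking = rfe_model.ranking_.max() + 1 - rfe_model.ranking_
ranks["RFE"] = rank_to_dict(inverted_ranking, names)
```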
BIN
istyukov_timofey_lab_2/result.jpg
Normal file
|
After Width: | Height: | Size: 60 KiB |
BIN
istyukov_timofey_lab_3/1_dataset.jpg
Normal file
|
After Width: | Height: | Size: 45 KiB |
BIN
istyukov_timofey_lab_3/2_accuracy_score.jpg
Normal file
|
After Width: | Height: | Size: 41 KiB |
BIN
istyukov_timofey_lab_3/3_feature_importances.jpg
Normal file
|
After Width: | Height: | Size: 16 KiB |
73
istyukov_timofey_lab_3/README.md
Normal file
@@ -0,0 +1,73 @@
# Lab 3. Decision trees
## Variant 12
___

### Task:
Using a library implementation of a decision tree, solve the problem from the lab "Decision Tree web service" of the course "Artificial Intelligence Methods" on 99% of your data. Check how the model works on the remaining percent and draw a conclusion.

### Dataset variant (from the course project):
- Prediction of music genres

___

### Running
- Run the file lab3.py

### Technologies used
- The **Python** programming language
- The **PyCharm** IDE
- Libraries:
  * pandas
  * sklearn

### Program description
**Dataset (Kaggle):** the full list of genres included in the CSV: Electronic, Anime, Jazz, Alternative, Country, Rap, Blues, Rock, Classical, Hip-Hop.

**Problem solved by the decision tree:** classify music tracks based on their characteristics, such as tempo, instrumentalness, acousticness, speechiness, danceability, energy and liveness. The decision tree can predict the genre of a track from these characteristics.

**Evaluation goals:** assess the quality of the decision tree model and identify the most significant features of the dataset.

---
### Example run

*The dataset, built from random rows of the csv file.*
![](1_dataset.jpg)

---
*Comparison of the predicted and actual genres on the remaining, unused 0.5% of the dataset rows.*

![](2_accuracy_score.jpg)

---
*The computed feature-importance coefficients for the genre prediction*

![](3_feature_importances.jpg)

---

### Conclusion
After preprocessing the dataset, the decision tree trained without problems and correctly predicted some of the genres (in particular Electronic, Classical and Rap). The model also reported the estimated influence of each feature on the prediction. The most influential feature turned out to be the **acousticness** of a track. Speechiness (how much the track is dominated by voice) and instrumentalness (how much it is dominated by live instruments) were less significant, which sounds quite reasonable.

In practice, a decision tree is inferior to some other methods in classification quality. In addition, small changes in the data can substantially change the resulting tree. On my dataset the decision tree did not cope very well. This can be explained by the fact that it does not contain enough data to predict the genre. It is also worth noting that genre is one of the most ambiguous, most composite musical concepts.
69
istyukov_timofey_lab_3/lab3.py
Normal file
@@ -0,0 +1,69 @@
"""
Using a library implementation of a decision tree, solve the problem from the lab
"Decision Tree web service" of the course "Artificial Intelligence Methods" on 99% of your data.
Check how the model works on the remaining percent and draw a conclusion.
"""

"""
Problem solved by the decision tree: classification of music tracks based on their characteristics,
such as acousticness, danceability, instrumentalness, tempo and so on.
The decision tree can predict the genre of a track from these characteristics.
"""

# Variant 12
# Course-project dataset: "Prediction of music genre"

import pandas as pd
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score


DATASET_FILE = 'music_genre.csv'


def main():
    df = open_dataset(DATASET_FILE)
    df = df.sample(frac=.1)  # take 10% of random rows from the dataset, since it is large
    print("\033[92m[-----> Набор данных <-----]\033[00m")
    print(df)

    X = df.drop(columns=['music_genre'])  # the numeric features
    y = df['music_genre']                 # the corresponding genres

    # Split the dataset into training (99.5%) and test (0.5%) data
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.005)

    # Create and train the decision tree
    model = DecisionTreeClassifier()
    model.fit(X_train.values, y_train)

    # Predict the genre on the test data
    y_pred = model.predict(X_test.values)

    print("\033[92m\n\n\n[-----> Сравнение жанров <-----]\033[00m")
    df_result = pd.DataFrame({'Прогноз': y_pred, 'Реальность': y_test})
    print(df_result)

    score = accuracy_score(y_test, y_pred)
    print("\033[92m\n> Оценка точности модели: {}\033[00m" .format(round(score, 2)))

    print("\033[92m\n\n\n[-----> Оценки важности признаков <-----]\033[00m")
    df_feature = pd.DataFrame({'Признак': X.columns, "Важность": model.feature_importances_})
    print(df_feature)


# Read and clean the csv file
def open_dataset(csv_file):
    # open the file, specifying the separator character
    df_genres = pd.read_csv(csv_file, delimiter=',')
    # select the required features
    df_genres = df_genres[['tempo', 'instrumentalness', 'acousticness', 'speechiness', 'danceability', 'energy', 'liveness', 'music_genre']]
    # clean the dataset of empty and invalid values
    df_genres = df_genres[df_genres['tempo'] != '?']
    df_genres = df_genres.dropna()
    # tempo is read as a string because of the '?' placeholders; make it numeric
    df_genres['tempo'] = df_genres['tempo'].astype(float)
    return df_genres


if __name__ == "__main__":
    main()
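The accuracy printed above is a single number over only about 25 test rows. A hedged sketch of how a per-genre breakdown could be obtained with scikit-learn (an editorial illustration that reuses `y_test` and `y_pred` from `main()`; it is not part of the submitted lab):

```python
from sklearn.metrics import classification_report, confusion_matrix

# Per-class precision/recall makes it visible which genres (e.g. Electronic, Classical, Rap)
# the tree actually gets right, instead of one aggregate accuracy value.
print(classification_report(y_test, y_pred, zero_division=0))
print(confusion_matrix(y_test, y_pred))
```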
50006
istyukov_timofey_lab_3/music_genre.csv
Normal file
BIN
istyukov_timofey_lab_4/1_dendrogram.png
Normal file
|
After Width: | Height: | Size: 37 KiB |
BIN
istyukov_timofey_lab_4/2_dataset.jpg
Normal file
|
After Width: | Height: | Size: 29 KiB |
BIN
istyukov_timofey_lab_4/3_clusters.jpg
Normal file
|
After Width: | Height: | Size: 36 KiB |
78
istyukov_timofey_lab_4/README.md
Normal file
@@ -0,0 +1,78 @@
# Lab 4. Clustering
## Variant 12
___

### Task:
Apply the clustering method from your variant to your own data, formulating the problem yourself. Interpret the results and assess how well the method suits the problem you formulated.

### Variant:
- Clustering algorithm: **linkage**

### Dataset variant (from the course project):
- Prediction of music genres ("Prediction of music genre")

___

### Running
- Run the file lab4.py

### Technologies used
- The **Python** programming language
- The **PyCharm** IDE
- Libraries:
  * pandas
  * scipy
  * matplotlib

### Program description
**Dataset (Kaggle):** the full list of genres included in the CSV: Electronic, Anime, Jazz, Alternative, Country, Rap, Blues, Rock, Classical, Hip-Hop.

**Problem solved by the clustering algorithm:**
Group music tracks by their characteristics in order to build clusters of tracks with similar properties. The clustering algorithm can help build playlists and recommendations based on how similar tracks are in certain characteristics.

**Evaluation goal:**
Analyse the resulting hierarchical structure with the help of a dendrogram.

---
### Example run

*The dataset, built from random rows of the csv file.*

![](2_dataset.jpg)

---
*Visualization of the tree representing the hierarchical merging of clusters, as a dendrogram. This can be useful for understanding the structure of the data.*

![](1_dendrogram.png)

---
*The first 10 music tracks of the dataset with the cluster they were assigned to*

![](3_clusters.jpg)

### Conclusion
The algorithm copes with my data quite well. In the result above, two tracks can be compared: "Gake No Ue No Ponyo" and "He Would Have Laughed". The program assigned both of them to cluster 10, even though the first track is labelled with the "Anime" genre and the second with "Alternative". Nevertheless, the two songs are similar in how much the instrumental dominates (the percussion in particular) and in their Irish motifs.

In the course of the work, 8 pairs of music tracks from different clusters were checked. In the end, more than half of the pairs really did have a lot in common in their sound or arrangement concept, despite differing in some characteristics (including genre).

An advantage of hierarchical clustering is that no specific number of clusters has to be fixed in advance, which is clearly a plus for finding similar music. A disadvantage is that it is too slow on large datasets (which is why only 50% of the whole dataset was used).
85
istyukov_timofey_lab_4/lab4.py
Normal file
@@ -0,0 +1,85 @@
"""
Apply the clustering method from your variant to your own data, formulating the problem yourself.
Interpret the results and assess how well the method suits the problem you formulated.
"""

"""
Problem solved by the clustering algorithm:
Group music tracks by their characteristics in order to build clusters of tracks with similar
properties. The clustering algorithm can help build playlists and recommendations based on how
similar the tracks are in certain characteristics.
"""

# Variant 12
# Course-project dataset: "Prediction of music genre"
# Clustering algorithm: linkage


import pandas as pd
from scipy.cluster.hierarchy import linkage, dendrogram, fcluster
import matplotlib.pyplot as plt


DATASET_FILE = 'music_genre.csv'


def main():
    df = open_dataset(DATASET_FILE)
    df = df.sample(frac=.5)  # take 50% of random rows from the dataset, since it is large
    print("\033[92m[-----> Набор данных <-----]\033[00m")
    print(df)

    # Convert genres and modes (minor/major) into numeric features
    df_genres = pd.get_dummies(df['music_genre'])
    df_modes = pd.get_dummies(df['mode'])
    # Join the main table with the numeric features
    df_music = pd.concat([df, df_genres, df_modes], axis=1).reindex(df.index)
    # Drop the string columns that were replaced by numeric features
    df_music = df_music.drop(columns=['music_genre', 'mode'])
    # Dataset used for clustering (without the artist and track names)
    X = df_music.drop(columns=['artist_name', 'track_name'])

    # Hierarchical clustering with Ward linkage
    # (minimizes the sum of squared differences within all clusters)
    linkage_matrix = linkage(X, method='ward', metric='euclidean')

    # Form flat clusters from the linkage matrix
    cluster_label = fcluster(Z=linkage_matrix, t=300, criterion='distance')
    # Assign each track its cluster
    df['cluster'] = cluster_label
    # Show 3 columns when printing
    pd.set_option('display.max_columns', 3)
    # Print the clustering result
    print("\033[92m\n[-----> Результат иерархической кластеризации <-----]\033[00m")
    print(df[['artist_name', 'track_name', 'cluster']].head(10))
    print("\033[92mКоличество кластеров: {}\033[00m" .format(cluster_label.max()))

    # Dendrogram
    plt.figure(figsize=(12, 6))
    dendrogram(linkage_matrix, truncate_mode='lastp', p=20, leaf_rotation=90., leaf_font_size=8., show_contracted=True)
    plt.title('Дендрограмма иерархической кластеризации музыкальных треков')
    plt.xlabel('Количество треков в узле')
    plt.ylabel('Евклидово расстояние между треками')
    plt.savefig('1_dendrogram')
    plt.show()


# Read and clean the csv file
def open_dataset(csv_file):
    # open the file, specifying the separator character
    df = pd.read_csv(csv_file, delimiter=',')
    # select the required features
    df = df[['artist_name', 'track_name', 'mode', 'tempo', 'instrumentalness', 'acousticness',
             'speechiness', 'danceability', 'energy', 'liveness', 'valence', 'music_genre']]
    # clean the dataset of empty and invalid values
    df = df[df['tempo'] != '?']
    df = df.dropna()
    # tempo is read as a string because of the '?' placeholders; make it numeric
    df['tempo'] = df['tempo'].astype(float)
    return df


if __name__ == "__main__":
    main()
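The distance threshold `t=300` that turns the linkage matrix into flat clusters was apparently read off the dendrogram by eye. A minimal sketch of an alternative, requesting a fixed number of clusters instead of a distance cut (illustrative only; the value 10 is an assumption, not taken from the lab):

```python
from scipy.cluster.hierarchy import fcluster

# Ask for exactly 10 flat clusters instead of cutting the tree at distance 300.
labels_10 = fcluster(Z=linkage_matrix, t=10, criterion='maxclust')
print('Number of clusters:', labels_10.max())
```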
50006
istyukov_timofey_lab_4/music_genre.csv
Normal file
91
kochkareva_elizaveta_lab_7/README.md
Normal file
@@ -0,0 +1,91 @@
|
||||
|
||||
# Лабораторная работа 7. Вариант 15
|
||||
|
||||
### Задание
|
||||
Выбрать художественный текст (нечетные варианты – англоязычный) и обучить на нем рекуррентную нейронную сеть
|
||||
для решения задачи генерации. Подобрать архитектуру и параметры так,
|
||||
чтобы приблизиться к максимально осмысленному результату.
|
||||
|
||||
|
||||
### Как запустить лабораторную работу
|
||||
Для запуска программы необходимо с помощью командной строки в корневой директории файлов прокета прописать:
|
||||
```
|
||||
python main.py
|
||||
```
|
||||
### Какие технологии использовали
|
||||
- Библиотека *numpy* для работы с массивами.
|
||||
- Библиотека *tensorflow* - для машинного обучения. Она предоставляет инструменты для создания и обучения различных моделей машинного обучения, включая нейронные сети.
|
||||
|
||||
### Описание лабораторной работы
|
||||
Для данной лабораторной работы был взят текст на 1596 строк текста.
|
||||
|
||||
```python
|
||||
with open('V3001TH2.txt', 'r', encoding='utf-8') as f:
|
||||
text = f.read()
|
||||
```
|
||||
|
||||
Далее создали список уникальных символов `chars`, а также словари `char_to_index` и `index_to_char`, которые используются для преобразования символов в индексы и наоборот.
|
||||
|
||||
```python
|
||||
chars = sorted(list(set(text)))
|
||||
char_to_index = {char: index for index, char in enumerate(chars)}
|
||||
index_to_char = {index: char for index, char in enumerate(chars)}
|
||||
```
|
||||
|
||||
После чего можем генерировать ренировочные данные `train_x` и `train_y`. `train_x` содержит последовательности символов длиной `seq_length` из текста, а `train_y` содержит следующий символ после каждой входной последовательности. Каждый символ преобразуется в соответствующий индекс, используя словарь `char_to_index`.
|
||||
|
||||
```python
|
||||
# Генерация тренировочных данных
|
||||
seq_length = 100 # Длина входной последовательности
|
||||
train_x = []
|
||||
train_y = []
|
||||
for i in range(0, text_length - seq_length, 1):
|
||||
input_seq = text[i:i + seq_length]
|
||||
output_seq = text[i + seq_length]
|
||||
train_x.append([char_to_index[char] for char in input_seq])
|
||||
train_y.append(char_to_index[output_seq])
|
||||
```
|
||||
|
||||

Next we reshape `train_x` into a three-dimensional array of shape (number of samples, `seq_length`, 1).
We normalize `train_x` by dividing by `num_chars` and convert `train_y` to a `one-hot` representation with `tf.keras.utils.to_categorical`.

```python
train_x = np.reshape(train_x, (len(train_x), seq_length, 1))
train_x = train_x / float(num_chars)
train_y = tf.keras.utils.to_categorical(train_y)
```

Now we build the recurrent neural network: a model with an `LSTM` layer that takes input of shape `(train_x.shape[1], train_x.shape[2])` and a dense layer with softmax activation.
The model is compiled with the `categorical_crossentropy` loss function and the `adam` optimizer.

```python
model = tf.keras.Sequential([
    tf.keras.layers.LSTM(128, input_shape=(train_x.shape[1], train_x.shape[2])),
    tf.keras.layers.Dense(num_chars, activation='softmax')
])

model.compile(loss='categorical_crossentropy', optimizer='adam')
```

We train the model on the training data for 80 epochs with a batch size of 128.

```python
model.fit(train_x, train_y, epochs=80, batch_size=128)
```

Text is then generated starting from a random index `start_index` into `train_x`. On each iteration of the loop the model predicts the next character, appends it to the generated text, and updates `start_seq` for the next iteration, as sketched below.
The generated text is written to the file *'сгенерированный_текст.txt'*.
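
For reference, the generation loop (the same logic as in `main.py` below, lightly condensed; note that `train_x` is already scaled by `num_chars`, so the seed sequence is kept on that normalized scale rather than divided a second time):

```python
start_index = np.random.randint(0, len(train_x) - 1)
start_seq = train_x[start_index]          # seed: a normalized sequence of shape (seq_length, 1)

generated_text = ''
for _ in range(500):
    x = np.reshape(start_seq, (1, len(start_seq), 1))
    prediction = model.predict(x, verbose=0)   # probability distribution over num_chars
    index = np.argmax(prediction)              # most likely next character
    generated_text += index_to_char[index]
    # Slide the window: append the new index on the normalized scale and drop the oldest value
    start_seq = np.append(start_seq, index / float(num_chars))[1:]

with open('сгенерированный_текст.txt', 'w', encoding='utf-8') as f:
    f.write(generated_text)
```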

Result of a run:

```
Ih ses shven they to tore a fit oo th toie th sook a buck and tore tote a siee fot oo the searen.
Jnd buonds sore toee th the shele and thans to the siee and soans tie his and tooning tie hit cnd toens the his and croninng his bioter.


— Iod you ducking tooeeds so toieg a buck and to bor aeeut tore a sigee oo toire a ducn fo toine to see sooeee oo the saelen. Tnd blond toees the sirt and that the sooel and thai to the soeee of the shale.


"Iotk toe ffcrtes," Vincent says suth a suine and a
```

### Conclusion

The generated text contains words and phrases that look malformed or meaningless. This is likely due to an insufficient amount of training data or too few training epochs.
1597
kochkareva_elizaveta_lab_7/V3001TH2.txt
Normal file
64
kochkareva_elizaveta_lab_7/main.py
Normal file
@@ -0,0 +1,64 @@
import numpy as np
import tensorflow as tf


def recurrent_neural_network():
    # Load the text file and preprocess the data
    with open('V3001TH2.txt', 'r', encoding='utf-8') as f:
        text = f.read()

    chars = sorted(list(set(text)))
    char_to_index = {char: index for index, char in enumerate(chars)}
    index_to_char = {index: char for index, char in enumerate(chars)}

    num_chars = len(chars)
    text_length = len(text)

    # Generate training data
    seq_length = 100  # length of the input sequence
    train_x = []
    train_y = []
    for i in range(0, text_length - seq_length, 1):
        input_seq = text[i:i + seq_length]
        output_seq = text[i + seq_length]
        train_x.append([char_to_index[char] for char in input_seq])
        train_y.append(char_to_index[output_seq])

    train_x = np.reshape(train_x, (len(train_x), seq_length, 1))
    train_x = train_x / float(num_chars)
    train_y = tf.keras.utils.to_categorical(train_y)

    model = tf.keras.Sequential([
        tf.keras.layers.LSTM(128, input_shape=(train_x.shape[1], train_x.shape[2])),
        tf.keras.layers.Dense(num_chars, activation='softmax')
    ])

    model.compile(loss='categorical_crossentropy', optimizer='adam')

    # Train the model
    model.fit(train_x, train_y, epochs=80, batch_size=128)

    # Text generation: start from a random seed sequence taken from the
    # already normalized training data
    start_index = np.random.randint(0, len(train_x) - 1)
    start_seq = train_x[start_index]

    generated_text = ''
    for _ in range(500):
        # train_x was already divided by num_chars, so the seed is not rescaled again
        x = np.reshape(start_seq, (1, len(start_seq), 1))

        prediction = model.predict(x, verbose=0)
        index = np.argmax(prediction)
        result = index_to_char[index]

        generated_text += result
        # Append the new character index on the same normalized scale and drop the oldest value
        start_seq = np.append(start_seq, index / float(num_chars))
        start_seq = start_seq[1:]

    with open('сгенерированный_текст.txt', 'w', encoding='utf-8') as f:
        f.write(generated_text)


if __name__ == '__main__':
    recurrent_neural_network()
8
kochkareva_elizaveta_lab_7/сгенерированный_текст.txt
Normal file
@@ -0,0 +1,8 @@
Ih ses shven they to tore a fit oo th toie th sook a buck and tore tote a siee fot oo the searen.
Jnd buonds sore toee th the shele and thans to the siee and soans tie his and tooning tie hit cnd toens the his and croninng his bioter.


— Iod you ducking tooeeds so toieg a buck and to bor aeeut tore a sigee oo toire a ducn fo toine to see sooeee oo the saelen. Tnd blond toees the sirt and that the sooel and thai to the soeee of the shale.


"Iotk toe ffcrtes," Vincent says suth a suine and a
116
kozlov_alexey_lab_5/README.md
Normal file
@@ -0,0 +1,116 @@
# Laboratory work No. 5. Regression
## Variant 14
___

### Task:
Use the ridge regression method on a problem you formulate yourself. Interpret the results and assess how well the method suits the problem you formulated.
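
For context, ridge regression is ordinary least-squares linear regression with an additional L2 penalty on the coefficient vector; in scikit-learn the penalty strength is controlled by the `alpha` parameter of `Ridge`. A minimal, self-contained sketch on synthetic data (an illustration only, not the lab script, which is shown below):

```python
import numpy as np
from sklearn.linear_model import Ridge

# Toy illustration: y depends linearly on two features plus a little noise
rng = np.random.default_rng(0)
X = rng.normal(size=(200, 2))
y = 3.0 * X[:, 0] - 2.0 * X[:, 1] + rng.normal(scale=0.1, size=200)

# alpha controls the L2 penalty: larger alpha shrinks the coefficients harder
for alpha in (0.01, 1.0, 100.0):
    model = Ridge(alpha=alpha).fit(X, y)
    print(alpha, model.coef_)
```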

### Description of the dataset used:
The object of study is a dataset hosted on Kaggle (https://www.kaggle.com/datasets/nelgiriyewithana/top-spotify-songs-2023/data). It is a comprehensive list of the most famous songs of 2023 as listed on Spotify. The dataset is provided as the file spotify.csv.

The columns are:
1. track_name – Title of the track
2. artist(s)_name – Name(s) of the artist(s) who performed the song
3. artist_count – Number of artists involved in creating the song
4. released_year – Year the song was released
5. released_month – Month the song was released
6. released_day – Day of the month the song was released
7. in_spotify_playlists – Number of Spotify playlists the song is included in
8. in_spotify_charts – Presence and rank of the song in the Spotify charts
9. streams – Total number of streams on Spotify
10. in_apple_playlists – Number of Apple Music playlists the song is included in
11. in_apple_charts – Presence and rank of the song in the Apple Music charts
12. in_deezer_playlists – Number of Deezer playlists the song is included in
13. in_deezer_charts – Presence and rank of the song in the Deezer charts
14. in_shazam_charts – Presence and rank of the song in the Shazam charts
15. bpm – Beats per minute, a measure of the song's tempo
16. key – Key of the song
17. mode – Mode of the song (major or minor)
18. danceability_% – Percentage indicating how suitable the song is for dancing
19. valence_% – Positivity of the song's musical content
20. energy_% – Perceived energy level of the song
21. acousticness_% – Amount of acoustic sound in the song
22. instrumentalness_% – Amount of instrumental content in the song
23. liveness_% – Presence of live-performance elements
24. speechiness_% – Amount of spoken words in the song

The regression task on this dataset is to predict the value of the "in_spotify_playlists" column from the "streams", "in_apple_playlists", "in_deezer_playlists" and "bpm" columns.
___

### How to run
- Run the file lab5.py

### Technologies used
- The **Python** programming language
- The **PyCharm** IDE
- Libraries:
  * sklearn
  * matplotlib
  * numpy
  * pandas
### Description of the program
The program code performs the following steps:

1. Imports the required libraries: pandas, numpy, Ridge from sklearn.linear_model, mean_absolute_error and mean_squared_error from sklearn.metrics, train_test_split from sklearn.model_selection, StandardScaler from sklearn.preprocessing.

2. Loads the data from the file "spotify.csv" into a DataFrame.

3. Drops rows with missing values from the DataFrame.

4. Drops the 'artist(s)_name' column from the DataFrame.

5. Removes commas from the values in the 'in_deezer_playlists' column.

6. Casts the 'in_deezer_playlists' column to a numeric type (int64).

7. Removes commas from the values in the 'in_shazam_charts' column.

8. Casts the 'in_shazam_charts' column to a numeric type (int64).

9. Builds a dictionary mapping track names to numeric codes.

10. Replaces the values in the 'track_name' column with numeric codes using that dictionary.

11. Builds a dictionary mapping key names to numeric codes.

12. Replaces the values in the 'key' column with numeric codes using that dictionary.

13. Builds a dictionary mapping song modes to numeric codes.

14. Replaces the values in the 'mode' column with numeric codes using that dictionary.

15. Creates the list regrData containing the names of the columns used to train the model.

16. Splits the data into training and test sets with the train_test_split function.

17. Creates a StandardScaler object and scales the training and test data.

18. Creates a Ridge regressor with the parameter alpha=1.0.

19. Fits the regressor on the scaled training data.

20. Predicts values for the test set.

21. Computes the mean absolute error (MAE) and the mean squared error (MSE) between the actual and predicted values.

22. Computes the model's coefficient of determination (Score).

23. Prints the MAE, MSE and Score values.

___
### Example run

![]()
```text
Values of the error metrics (MAE and MSE) and the coefficient of determination (Score)
```

### Conclusion
The MAE value is 1491.2695835796214, which means that on average the model is off by roughly 1491 units when predicting the target variable. The smaller the MAE, the more accurate the model's predictions.

The MSE (mean squared error) value is 7440679.027329878. It is computed as the mean of the squared differences between the predicted and observed values. The smaller the MSE, the closer the predictions are to the observed values.

The Score is 0.853940909276013, which means the model explains about 85.39% of the variance in the data. The closer the score is to 1, the more accurate the model.
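
For reference, these metrics are computed as follows; a small self-contained sketch with toy numbers (illustration only, not the lab's actual predictions):

```python
import numpy as np

y_true = np.array([100.0, 200.0, 300.0])   # observed values (toy example)
y_pred = np.array([110.0, 190.0, 330.0])   # predicted values (toy example)

mae = np.mean(np.abs(y_true - y_pred))     # mean absolute error
mse = np.mean((y_true - y_pred) ** 2)      # mean squared error
# Coefficient of determination, as returned by Ridge.score
r2 = 1 - np.sum((y_true - y_pred) ** 2) / np.sum((y_true - np.mean(y_true)) ** 2)

print(mae, mse, r2)
```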

Overall, the large MAE and MSE values show that individual predictions are still fairly rough, even though the Score is reasonably high. It may be worth trying other algorithms or tuning the parameters of the current model to obtain more accurate forecasts.
55
kozlov_alexey_lab_5/lab5.py
Normal file
@@ -0,0 +1,55 @@
import pandas as pd
import numpy as np
from sklearn.linear_model import Ridge
from sklearn.metrics import mean_absolute_error, mean_squared_error
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler

# Load the data
data = pd.read_csv("spotify.csv")
# Drop all rows with missing values
data = data.dropna()
data.drop('artist(s)_name', axis=1, inplace=True)
# Remove commas from the values in the column
data['in_deezer_playlists'] = data['in_deezer_playlists'].str.replace(',', '')
# Cast the column to a numeric type
data['in_deezer_playlists'] = data['in_deezer_playlists'].astype(np.int64)
# Remove commas from the values in the column
data['in_shazam_charts'] = data['in_shazam_charts'].str.replace(',', '')
# Cast the column to a numeric type
data['in_shazam_charts'] = data['in_shazam_charts'].astype(np.int64)
# Build a mapping from track names to numeric codes
track_name_dict = {name: index for index, name in enumerate(data['track_name'].unique())}
# Replace the column values with numeric codes
data['track_name'] = data['track_name'].map(track_name_dict)
# Build a mapping from key names to numeric codes
key_dict = {'C': 0, 'C#': 1, 'D': 2, 'D#': 3, 'E': 4, 'F': 5, 'F#': 6,
            'G': 7, 'G#': 8, 'A': 9, 'A#': 10, 'B': 11}
# Replace the column values with numeric codes
data['key'] = data['key'].map(key_dict)
# Build a mapping from song modes to numeric codes
mode_dict = {'Major': 0, 'Minor': 1}
# Replace the column values with numeric codes
data['mode'] = data['mode'].map(mode_dict)

regrData = ['in_apple_playlists', 'in_deezer_playlists', 'streams', 'bpm']
y = data['in_spotify_playlists']
x_train, x_test, y_train, y_test = train_test_split(data[regrData], y, test_size=0.2, random_state=42)

scaler = StandardScaler()
X_train_scaled = scaler.fit_transform(x_train)
X_test_scaled = scaler.transform(x_test)

ridge = Ridge(alpha=1.0)
ridge.fit(X_train_scaled, y_train)

y_pred = ridge.predict(X_test_scaled)

mae = mean_absolute_error(y_test, y_pred)
mse = mean_squared_error(y_test, y_pred)

score = ridge.score(X_test_scaled, y_test)

print("MAE:", mae)
print("MSE:", mse)
print("Score:", score)
BIN
kozlov_alexey_lab_5/output.png
Normal file
|
After Width: | Height: | Size: 4.7 KiB |
954
kozlov_alexey_lab_5/spotify.csv
Normal file
@@ -0,0 +1,954 @@
|
||||
track_name,artist(s)_name,artist_count,released_year,released_month,released_day,in_spotify_playlists,in_spotify_charts,streams,in_apple_playlists,in_apple_charts,in_deezer_playlists,in_deezer_charts,in_shazam_charts,bpm,key,mode,danceability_%,valence_%,energy_%,acousticness_%,instrumentalness_%,liveness_%,speechiness_%
|
||||
Seven (feat. Latto) (Explicit Ver.),"Latto, Jung Kook",2,2023,7,14,553,147,141381703,43,263,45,10,826,125,B,Major,80,89,83,31,0,8,4
|
||||
LALA,Myke Towers,1,2023,3,23,1474,48,133716286,48,126,58,14,382,92,C#,Major,71,61,74,7,0,10,4
|
||||
vampire,Olivia Rodrigo,1,2023,6,30,1397,113,140003974,94,207,91,14,949,138,F,Major,51,32,53,17,0,31,6
|
||||
Cruel Summer,Taylor Swift,1,2019,8,23,7858,100,800840817,116,207,125,12,548,170,A,Major,55,58,72,11,0,11,15
|
||||
WHERE SHE GOES,Bad Bunny,1,2023,5,18,3133,50,303236322,84,133,87,15,425,144,A,Minor,65,23,80,14,63,11,6
|
||||
Sprinter,"Dave, Central Cee",2,2023,6,1,2186,91,183706234,67,213,88,17,946,141,C#,Major,92,66,58,19,0,8,24
|
||||
Ella Baila Sola,"Eslabon Armado, Peso Pluma",2,2023,3,16,3090,50,725980112,34,222,43,13,418,148,F,Minor,67,83,76,48,0,8,3
|
||||
Columbia,Quevedo,1,2023,7,7,714,43,58149378,25,89,30,13,194,100,F,Major,67,26,71,37,0,11,4
|
||||
fukumean,Gunna,1,2023,5,15,1096,83,95217315,60,210,48,11,953,130,C#,Minor,85,22,62,12,0,28,9
|
||||
La Bebe - Remix,"Peso Pluma, Yng Lvcas",2,2023,3,17,2953,44,553634067,49,110,66,13,339,170,D,Minor,81,56,48,21,0,8,33
|
||||
un x100to,"Bad Bunny, Grupo Frontera",2,2023,4,17,2876,40,505671438,41,205,54,12,251,83,F#,Minor,57,56,72,23,0,27,5
|
||||
Super Shy,NewJeans,1,2023,7,7,422,55,58255150,37,202,21,5,168,150,F,Minor,78,52,82,18,0,15,7
|
||||
Flowers,Miley Cyrus,1,2023,1,12,12211,115,1316855716,300,215,745,58,"1,021",118,,Major,71,65,68,6,0,3,7
|
||||
Daylight,David Kushner,1,2023,4,14,3528,98,387570742,80,156,182,24,"1,281",130,D,Minor,51,32,43,83,0,9,3
|
||||
As It Was,Harry Styles,1,2022,3,31,23575,130,2513188493,403,198,863,46,,174,F#,Minor,52,66,73,34,0,31,6
|
||||
Kill Bill,SZA,1,2022,12,8,8109,77,1163093654,183,162,161,12,187,89,G#,Major,64,43,73,5,17,16,4
|
||||
Cupid - Twin Ver.,Fifty Fifty,1,2023,2,24,2942,77,496795686,91,212,78,6,0,120,B,Minor,78,76,59,43,0,34,3
|
||||
"What Was I Made For? [From The Motion Picture ""Barbie""]",Billie Eilish,1,2023,7,13,873,104,30546883,80,227,95,24,"1,173",78,,Major,44,14,9,96,0,10,3
|
||||
Classy 101,"Feid, Young Miko",2,2023,3,31,2610,40,335222234,43,100,54,14,187,100,B,Major,86,67,66,14,0,12,16
|
||||
Like Crazy,Jimin,1,2023,3,24,596,68,363369738,8,104,23,2,29,120,G,Major,63,36,73,0,0,36,4
|
||||
LADY GAGA,"Gabito Ballesteros, Junior H, Peso Pluma",3,2023,6,22,332,26,86444842,11,163,10,4,0,140,F,Minor,65,87,74,22,0,42,4
|
||||
I Can See You (TaylorпїЅпїЅпїЅs Version) (From The ,Taylor Swift,1,2023,7,7,516,38,52135248,73,119,42,1,150,123,F#,Major,69,82,76,6,0,6,3
|
||||
I Wanna Be Yours,Arctic Monkeys,1,2013,1,1,12859,110,1297026226,24,98,582,2,73,135,,Minor,48,44,42,12,2,11,3
|
||||
"Peso Pluma: Bzrp Music Sessions, Vol. 55","Bizarrap, Peso Pluma",2,2023,5,31,1313,40,200647221,17,152,32,11,139,133,F,Minor,85,81,67,26,0,12,5
|
||||
Popular (with Playboi Carti & Madonna) - The Idol Vol. 1 (Music from the HBO Original Series),"The Weeknd, Madonna, Playboi Carti",3,2023,6,2,1945,87,115364561,74,182,87,14,"1,093",99,C#,Major,85,83,68,7,0,36,20
|
||||
SABOR FRESA,Fuerza Regida,1,2023,6,22,250,26,78300654,16,149,10,5,168,130,G,Minor,79,96,86,9,0,9,9
|
||||
Calm Down (with Selena Gomez),"RпїЅпїЅma, Selena G",2,2022,3,25,7112,77,899183384,202,119,318,38,96,107,B,Major,80,82,80,43,0,14,4
|
||||
MOJABI GHOST,"Tainy, Bad Bunny",2,2023,6,29,859,40,61245289,35,109,41,14,211,122,F#,Minor,81,74,71,14,0,56,4
|
||||
Last Night,Morgan Wallen,1,2023,1,31,2420,19,429829812,52,107,15,1,325,204,F#,Major,52,52,68,46,0,15,4
|
||||
Dance The Night (From Barbie The Album),Dua Lipa,1,2023,5,25,2988,101,127408954,0,0,143,38,0,110,B,Minor,67,78,85,2,0,33,5
|
||||
Rush,Troye Sivan,1,2023,7,13,864,78,22581161,71,135,50,1,294,126,F,Minor,74,35,84,0,0,11,6
|
||||
TULUM,"Peso Pluma, Grupo Frontera",2,2023,6,28,266,34,52294266,20,185,13,8,197,168,F#,Major,56,63,87,39,0,11,5
|
||||
Creepin',"The Weeknd, 21 Savage, Metro Boomin",3,2022,12,2,6036,88,843957510,113,149,245,23,27,98,C#,Minor,71,17,61,36,0,8,5
|
||||
Anti-Hero,Taylor Swift,1,2022,10,21,9082,56,999748277,242,142,165,9,310,97,E,Major,64,51,63,12,0,19,5
|
||||
TQG,"Karol G, Shakira",2,2023,2,23,4284,49,618990393,115,123,184,18,354,180,E,Minor,72,61,63,67,0,9,28
|
||||
Los del Espacio,"Big One, Duki, Lit Killah, Maria Becerra, FMK, Rusherking, Emilia, Tiago pzk",8,2023,6,1,1150,31,123122413,22,33,34,7,184,120,,Major,81,63,68,11,0,11,4
|
||||
FrпїЅпїЅgil (feat. Grupo Front,"Yahritza Y Su Esencia, Grupo Frontera",2,2023,4,7,672,34,188933502,19,108,24,9,212,150,F#,Major,61,39,73,37,0,11,3
|
||||
Blank Space,Taylor Swift,1,2014,1,1,11434,53,1355959075,154,123,410,2,81,96,F,Major,75,57,68,9,0,13,6
|
||||
Style,Taylor Swift,1,2014,1,1,7830,42,786181836,94,111,151,4,82,95,D,Major,60,48,79,0,0,12,4
|
||||
TQM,Fuerza Regida,1,2023,5,19,584,28,176553476,16,159,15,6,100,125,F,Minor,79,96,85,27,0,11,6
|
||||
El Azul,"Junior H, Peso Pluma",2,2023,2,10,692,25,354495408,10,107,6,3,62,144,A,Minor,56,84,65,23,0,10,6
|
||||
Sunflower - Spider-Man: Into the Spider-Verse,"Post Malone, Swae Lee",2,2018,10,9,24094,78,2808096550,372,117,843,4,69,90,D,Major,76,91,50,54,0,7,5
|
||||
I'm Good (Blue),"Bebe Rexha, David Guetta",2,2022,8,26,12482,80,1109433169,291,184,537,45,727,128,G,Minor,56,38,97,4,0,35,4
|
||||
See You Again,"Tyler, The Creator, Kali Uchis",3,2017,7,21,13387,64,1047101291,77,58,247,1,311,79,F#,Major,56,58,56,37,0,11,10
|
||||
Barbie World (with Aqua) [From Barbie The Album],"Nicki Minaj, Aqua, Ice Spice",3,2023,6,23,1117,80,65156199,82,145,65,16,"1,133",144,,Major,77,75,58,52,0,23,25
|
||||
Angels Like You,Miley Cyrus,1,2020,11,27,3372,19,570515054,65,48,138,1,102,122,F,Major,67,49,64,10,0,10,3
|
||||
I Ain't Worried,OneRepublic,1,2022,5,13,8431,76,1085685420,241,127,458,37,332,140,,Major,71,82,81,11,0,6,5
|
||||
Die For You,The Weeknd,1,2016,11,24,2483,59,1647990401,68,21,24,0,259,134,C#,Minor,59,51,52,9,0,15,7
|
||||
Starboy,"The Weeknd, Daft Punk",2,2016,9,21,29536,79,2565529693,281,137,"2,445",1,140,186,G,Major,68,49,59,16,0,13,28
|
||||
Die For You - Remix,"Ariana Grande, The Weeknd",2,2023,2,24,3408,47,518745108,87,86,74,1,16,67,C#,Minor,53,50,53,23,0,44,7
|
||||
El Cielo,"Feid, Myke Towers, Sky Rompiendo",3,2023,6,2,1298,38,107753850,44,64,57,10,110,106,A#,Minor,72,17,64,7,0,10,5
|
||||
Baby Don't Hurt Me,"David Guetta, Anne-Marie, Coi Leray",3,2023,4,6,4277,66,177740666,145,111,213,11,810,128,G,Major,60,23,91,0,0,12,3
|
||||
AMARGURA,Karol G,1,2023,2,24,1133,39,153372011,14,71,23,10,176,107,F#,Minor,92,55,70,18,0,15,7
|
||||
(It Goes Like) Nanana - Edit,Peggy Gou,1,2023,6,15,2259,59,57876440,0,0,109,17,0,130,G,Minor,67,96,88,12,19,8,4
|
||||
Another Love,Tom Odell,1,2012,10,15,18371,83,1813673666,250,122,"3,394",19,,123,E,Minor,45,13,54,70,0,9,4
|
||||
Blinding Lights,The Weeknd,1,2019,11,29,43899,69,3703895074,672,199,"3,421",20,,171,C#,Major,50,38,80,0,0,9,7
|
||||
Moonlight,Kali Uchis,1,2023,2,24,2649,42,256483385,67,79,57,1,615,137,G,Minor,64,88,72,51,0,17,5
|
||||
La Bachata,Manuel Turizo,1,2022,5,26,6804,45,1214083358,139,111,161,15,210,125,G,Minor,84,85,68,58,0,22,4
|
||||
S91,Karol G,1,2023,7,14,525,41,16011326,34,115,39,6,216,128,,Minor,86,42,72,59,0,9,19
|
||||
cardigan,Taylor Swift,1,2020,7,24,7923,29,812019557,106,112,142,4,215,130,,Minor,61,53,58,55,0,27,4
|
||||
TпїЅпї,"dennis, MC Kevin o Chris",2,2023,5,4,731,15,111947664,27,17,73,4,167,130,B,Major,86,59,96,50,1,9,5
|
||||
Boy's a liar Pt. 2,"PinkPantheress, Ice Spice",2,2023,2,3,5184,41,156338624,154,84,102,14,37,133,F,Major,70,86,81,25,0,25,5
|
||||
Left and Right (Feat. Jung Kook of BTS),"Charlie Puth, BTS, Jung Kook",3,2022,6,24,3107,39,720434240,38,0,4,0,0,101,D,Major,88,72,59,62,0,9,3
|
||||
BESO,"Rauw Alejandro, ROSALпїЅ",2,2023,3,24,4053,50,357925728,82,121,182,12,171,95,F,Minor,77,53,64,74,0,17,14
|
||||
Hey Mor,"Ozuna, Feid",2,2022,10,6,4637,38,674072710,63,79,89,11,16,98,C#,Minor,90,40,59,0,0,10,29
|
||||
Yellow,Chris Molitor,1,1999,1,1,31358,43,1755214421,196,2,"4,053",5,0,173,B,Major,43,28,66,0,0,23,3
|
||||
Karma,Taylor Swift,1,2022,10,21,3818,23,404562836,37,55,32,0,272,90,G#,Major,64,10,62,7,0,48,7
|
||||
People,Libianca,1,2022,12,2,3506,56,373199958,105,64,169,8,529,198,A#,Minor,59,71,42,55,0,10,7
|
||||
Overdrive,Post Malone,1,2023,7,14,410,36,14780425,36,32,31,1,26,140,C#,Major,56,48,73,0,0,35,4
|
||||
Enchanted (Taylor's Version),Taylor Swift,1,2023,7,7,148,24,39578178,32,93,8,2,5,82,G#,Major,51,22,53,1,0,15,3
|
||||
BABY HELLO,"Rauw Alejandro, Bizarrap",2,2023,6,23,1004,35,54266102,42,80,58,3,169,130,C#,Minor,77,84,89,17,0,43,5
|
||||
Heat Waves,Glass Animals,1,2020,6,28,22543,63,2557975762,386,144,707,28,,81,B,Major,76,53,53,44,0,9,9
|
||||
golden hour,JVKE,1,2022,7,15,4511,36,751134527,70,58,109,18,230,94,C#,Minor,51,14,59,65,18,25,3
|
||||
Sweater Weather,The Neighbourhood,1,2012,5,14,16413,61,2282771485,166,87,"1,056",1,,124,A#,Major,61,41,81,5,2,10,3
|
||||
"Quevedo: Bzrp Music Sessions, Vol. 52","Bizarrap, Quevedo",2,2022,7,6,8506,45,1356565093,94,65,164,14,176,128,D,Major,62,55,78,1,3,23,4
|
||||
Viva La Vida,Coldplay,1,2008,1,1,33898,62,1592909789,233,0,"4,095",9,0,138,F,Minor,49,42,62,9,0,11,3
|
||||
Here With Me,d4vd,1,2022,7,17,3246,23,635412045,94,85,68,1,84,132,E,Major,58,27,48,50,0,12,3
|
||||
Unholy (feat. Kim Petras),"Sam Smith, Kim Petras",2,2022,9,22,8576,42,1230675890,216,108,331,26,154,131,D,Major,71,24,47,1,0,27,9
|
||||
Yandel 150,"Yandel, Feid",2,2022,12,20,3618,38,585695368,47,74,80,14,194,168,F#,Minor,78,58,73,5,0,10,7
|
||||
CORAZпїЅпїЅN VA,Maria Becerra,1,2023,6,22,370,20,43857627,12,16,18,4,93,98,C#,Major,68,40,79,33,0,30,6
|
||||
Riptide,Vance Joy,1,1975,1,1,31123,55,2009094673,300,65,"1,003",1,0,102,C#,Major,48,50,73,43,0,15,4
|
||||
Until I Found You (with Em Beihold) - Em Beihold Version,"Em Beihold, Stephen Sanchez",2,2022,4,22,2790,30,600976848,60,96,71,0,115,101,A#,Major,34,32,57,78,0,20,3
|
||||
Novidade na пїЅпї,"Mc Livinho, DJ Matt D",2,2023,6,23,267,9,39709092,9,6,25,2,72,130,F,Major,63,36,34,76,0,35,9
|
||||
Back To December (Taylor's Version),Taylor Swift,1,2023,7,7,139,17,39228929,16,72,5,0,8,142,D,Major,50,20,64,1,0,12,3
|
||||
STAY (with Justin Bieber),"Justin Bieber, The Kid Laroi",2,2021,7,9,17050,36,2665343922,492,99,798,31,0,170,C#,Major,59,48,76,4,0,10,5
|
||||
El Merengue,"Marshmello, Manuel Turizo",2,2023,3,3,2114,44,223633238,80,75,110,11,323,124,G#,Minor,78,70,68,3,1,11,4
|
||||
Someone You Loved,Lewis Capaldi,1,2018,11,8,17836,53,2887241814,440,125,"1,800",0,,110,C#,Major,50,45,41,75,0,11,3
|
||||
Me Porto Bonito,"Chencho Corleone, Bad Bunny",2,2022,5,6,8870,43,1440757818,104,120,141,26,49,92,C#,Minor,91,43,71,9,0,9,8
|
||||
Makeba,Jain,1,2015,6,22,6060,53,165484133,150,148,"2,703",22,"1,451",116,D,Major,82,40,66,39,51,25,7
|
||||
MONTAGEM - FR PUNK,"Ayparia, unxbected",2,2012,6,20,641,50,58054811,1,52,8,0,"1,170",129,A,Major,63,84,82,70,8,9,7
|
||||
Fast Car,Luke Combs,1,2023,3,24,1446,12,157058870,57,97,35,0,429,98,G#,Major,71,67,60,19,0,12,3
|
||||
What It Is (Solo Version),Doechii,1,2023,3,17,804,25,95131998,29,76,24,0,162,172,C#,Minor,74,76,76,6,0,10,9
|
||||
Coco Chanel,"Bad Bunny, Eladio Carrion",2,2023,3,17,1962,38,250305248,28,89,29,5,82,150,D,Major,68,14,76,4,0,10,4
|
||||
DonпїЅпїЅпїЅt Bl,Taylor Swift,1,2017,11,8,4875,23,685032533,19,45,0,0,10,136,A,Minor,62,19,53,11,0,6,4
|
||||
Still With You,Jung Kook,1,2020,6,5,31,39,38411956,2,107,8,0,0,88,C#,Minor,53,34,47,9,0,83,4
|
||||
All My Life (feat. J. Cole),"J. Cole, Lil Durk",2,2023,5,12,2175,23,144565150,69,145,69,2,478,143,D#,Major,83,69,44,15,0,10,33
|
||||
Say Yes To Heaven,Lana Del Rey,1,2023,3,17,2000,46,127567540,49,105,63,1,0,100,F#,Minor,49,17,35,71,9,11,3
|
||||
Snooze,SZA,1,2022,12,9,2839,25,399686758,58,156,42,1,236,143,F,Major,56,39,55,14,0,11,13
|
||||
Summertime Sadness,Lana Del Rey,1,2011,1,1,20333,52,983637508,89,143,"1,632",3,200,112,C#,Minor,56,24,66,7,0,12,3
|
||||
Take Two,BTS,1,2023,6,9,674,47,118482347,20,106,25,4,78,93,G,Major,62,57,59,3,0,38,3
|
||||
Lover,Taylor Swift,1,2012,1,1,8448,23,882831184,160,110,163,0,5,206,G,Major,43,50,55,50,0,15,10
|
||||
Too Many Nights (feat. Don Toliver & with Future),"Future, Metro Boomin, Don Toliver",3,2022,12,2,2110,58,286400165,17,119,19,2,266,88,G,Minor,68,17,71,15,0,11,5
|
||||
Chemical,Post Malone,1,2023,4,14,2528,39,172825906,56,91,59,3,486,170,D,Major,50,37,90,0,0,12,5
|
||||
Mockingbird,Eminem,1,2004,1,1,12985,61,1241559043,49,98,"2,394",5,204,84,E,Minor,62,24,67,21,0,13,28
|
||||
New Jeans,NewJeans,1,2023,7,7,77,35,29562220,8,166,4,4,34,134,E,Minor,81,53,72,51,0,12,5
|
||||
Primera Cita,Carin Leon,1,2022,4,20,266,27,77309611,6,40,6,6,202,158,A#,Major,54,50,40,61,0,10,6
|
||||
Cold Heart - PNAU Remix,"Dua Lipa, Elton John, Pnau",3,2017,11,10,21097,52,1605224506,384,135,"1,034",37,312,116,C#,Major,80,92,80,4,0,10,3
|
||||
Dandelions,Ruth B.,1,2017,4,28,3423,21,1116995633,41,100,59,1,32,117,C#,Major,61,45,69,2,0,9,3
|
||||
Bones,Imagine Dragons,1,2021,3,11,4198,44,838079900,98,108,327,17,153,114,F,Minor,77,65,72,2,0,7,5
|
||||
Set Fire to the Rain,Adele,1,2011,1,1,14739,43,1163620694,88,112,"2,163",5,519,108,D,Minor,61,47,68,0,0,13,3
|
||||
Money Trees,"Kendrick Lamar, Jay Rock",2,2012,1,1,26792,32,1093605526,69,113,695,0,458,144,E,Minor,74,37,53,7,0,21,10
|
||||
Tak Segampang Itu,Anggi Marito,1,2022,12,2,213,6,179659294,7,6,0,0,48,130,F,Major,51,18,44,76,0,11,3
|
||||
LAGUNAS,"Jasiel NuпїЅпїЅez, Peso P",2,2023,6,22,58,18,39058561,2,106,4,2,184,116,B,Major,77,79,62,33,1,15,3
|
||||
Mine (Taylor's Version),Taylor Swift,1,2023,7,7,99,15,36912123,21,52,6,1,0,121,G,Major,65,49,78,0,0,17,4
|
||||
Everybody Wants To Rule The World,Tears For Fears,1,1985,2,17,41751,25,1205951614,101,32,"2,655",0,666,112,G,Major,64,54,81,36,0,11,6
|
||||
No Role Modelz,J. Cole,1,2014,12,9,21164,36,1791000570,80,65,476,0,14,100,A#,Minor,70,47,52,30,0,6,33
|
||||
Tattoo,Loreen,1,2023,2,25,2988,59,201660859,74,102,145,18,925,150,D#,Minor,55,30,78,24,0,12,8
|
||||
Rara Vez,"Taiu, Milo j",2,2023,2,8,893,38,248088961,19,23,24,3,88,120,F,Minor,84,96,71,18,0,34,17
|
||||
VAGABUNDO,"Sebastian Yatra, Manuel Turizo, BeпїЅп",3,2023,5,12,1094,34,90839753,40,58,47,8,203,127,B,Minor,82,89,85,4,0,23,6
|
||||
august,Taylor Swift,1,2020,7,24,7324,22,607123776,25,81,61,1,44,90,F,Major,51,42,61,53,0,9,3
|
||||
LUNA,"Junior H, Peso Pluma",2,2023,6,22,201,11,55842345,19,117,8,1,74,128,A,Minor,75,79,63,33,0,15,4
|
||||
Miracle (with Ellie Goulding),"Calvin Harris, Ellie Goulding",2,2023,3,10,5120,48,211050784,161,115,246,9,638,143,A,Major,64,31,87,4,4,8,4
|
||||
Nonsense,Sabrina Carpenter,1,2022,7,15,2346,27,342897938,69,12,38,8,64,139,G#,Major,74,68,68,3,0,26,4
|
||||
Que Vuelvas,"Carin Leon, Grupo Frontera",2,2022,12,9,763,26,2762,21,110,21,9,71,162,A#,Major,49,78,64,19,0,11,4
|
||||
Por las Noches,Peso Pluma,1,2021,6,11,457,24,330346424,8,116,4,3,2,92,,Major,81,39,60,31,0,7,3
|
||||
Feliz CumpleaпїЅпїЅos Fe,Feid,1,2022,8,19,3430,38,601863821,45,69,52,4,3,95,F,Major,87,57,55,10,0,29,7
|
||||
Can't Hold Us (feat. Ray Dalton),"Ray Dalton, Ryan Lewis, Macklemore",3,2011,8,16,6074,52,1953533826,201,44,"6,551",2,0,146,D,Major,63,88,93,3,0,10,8
|
||||
Watermelon Sugar,Harry Styles,1,2019,11,17,21915,34,2322580122,437,115,"1,212",12,,95,,Major,55,56,82,12,0,34,5
|
||||
lovely - Bonus Track,"Billie Eilish, Khalid",2,2017,8,11,15032,30,2355719893,221,96,"1,078",2,136,115,E,Minor,35,12,30,93,0,10,3
|
||||
"Rauw Alejandro: Bzrp Music Sessions, Vol. 56","Rauw Alejandro, Bizarrap",2,2023,6,21,871,32,66902503,25,59,32,5,88,128,B,Major,78,59,65,10,0,26,5
|
||||
Queencard,(G)I-DLE,1,2023,5,15,451,33,96273746,10,126,7,0,148,130,E,Minor,82,69,83,3,0,27,5
|
||||
OMG,NewJeans,1,2023,1,2,1783,27,430977451,26,124,15,1,22,127,A,Minor,80,74,77,36,0,11,4
|
||||
Radio,Lana Del Rey,1,2011,1,1,9389,46,284819874,24,122,282,3,368,150,D,Major,42,20,86,21,0,9,9
|
||||
"Shakira: Bzrp Music Sessions, Vol. 53","Shakira, Bizarrap",2,2023,1,11,5724,44,721975598,119,108,254,29,22,122,D,Minor,78,50,63,27,0,9,5
|
||||
505,Arctic Monkeys,1,2007,4,20,13985,25,1217120710,30,80,588,1,1,140,,Major,52,20,85,0,0,7,5
|
||||
"Calling (Spider-Man: Across the Spider-Verse) (Metro Boomin & Swae Lee, NAV, feat. A Boogie Wit da Hoodie)","Swae Lee, A Boogie Wit da Hoodie, Metro Boomin, NAV",4,2023,6,2,1051,16,109276132,31,37,31,0,189,140,,Major,63,22,54,46,0,12,8
|
||||
Trance (with Travis Scott & Young Thug),"Travis Scott, Young Thug, Metro Boomin",3,2022,12,2,1682,46,276259178,24,90,30,1,176,119,C#,Minor,75,48,53,18,0,18,34
|
||||
"Tere Vaaste (From ""Zara Hatke Zara Bachke"")","Sachin-Jigar, Shadab Faridi, Altamash Faridi, Amitabh Bhattacharya, Varun Jain",5,2023,5,22,182,8,54225632,3,88,1,0,52,110,G,Minor,76,96,72,32,0,9,4
|
||||
Perfect,Ed Sheeran,1,2017,1,1,16596,13,2559529074,7,0,"2,094",0,0,95,G#,Major,60,17,45,16,0,11,2
|
||||
Romantic Homicide,d4vd,1,2022,7,20,2335,23,681583126,82,55,50,0,9,132,F#,Major,56,20,55,45,1,32,3
|
||||
Believer,Imagine Dragons,1,2017,1,31,18986,23,2594040133,250,121,"2,969",10,31,125,A#,Minor,77,74,78,4,0,23,11
|
||||
Novo BalanпїЅ,"Veigh, Bvga Beatz, Supernova Ent, Prod Malax",4,2023,5,19,283,7,81102253,6,9,26,1,66,124,D#,Minor,84,65,50,67,0,13,6
|
||||
"Gol Bolinha, Gol Quadrado 2","Mc Pedrinho, DJ 900",2,2023,6,1,293,8,11956641,5,2,30,2,66,133,B,Minor,93,68,65,42,0,12,25
|
||||
Without Me,Eminem,1,2002,1,1,21081,43,1687664027,98,76,"3,889",5,0,112,G,Major,92,67,66,0,0,36,9
|
||||
QUEMA,"Sog, Ryan Castro, Peso Pluma",3,2023,7,13,437,31,11599388,17,29,26,3,208,97,,Major,79,92,89,5,0,6,5
|
||||
Stargirl Interlude,"The Weeknd, Lana Del Rey",2,2016,11,24,1275,32,611700552,13,8,5,0,1,90,F,Minor,59,52,48,38,5,10,11
|
||||
Ojitos Lindos,"Bomba EstпїЅпїЅreo, Bad B",2,2022,5,6,6135,38,1133865788,71,113,99,13,28,80,D#,Minor,65,27,69,8,0,53,4
|
||||
Somewhere Only We Know,Keane,1,2004,1,1,20015,16,1089402494,107,69,"5,239",0,558,172,A,Major,45,33,59,6,0,8,3
|
||||
Those Eyes,New West,1,2019,5,10,1507,14,411747614,24,71,44,1,195,120,E,Major,60,24,35,73,0,31,3
|
||||
El Gordo Trae El Mando,Chino Pacas,1,2023,1,27,539,21,255932395,7,71,4,2,13,140,G,Minor,74,96,80,18,0,5,5
|
||||
Mi Bello Angel,Natanael Cano,1,2023,6,30,86,8,31873544,7,76,3,1,93,128,A,Minor,81,90,77,1,0,9,5
|
||||
Bye,Peso Pluma,1,2023,5,26,324,14,95053634,13,110,8,2,60,122,,Major,78,70,81,57,0,10,5
|
||||
Danza Kuduro,"Don Omar, Lucenzo",2,2010,1,1,17138,37,1279434863,119,81,974,1,503,130,,Major,47,86,92,8,0,5,24
|
||||
Nosso Quadro,"Ana Castela, AgroPlay",2,2023,2,2,894,9,233801632,14,88,66,3,72,160,A,Major,69,61,71,33,0,31,20
|
||||
Locked Out Of Heaven,Bruno Mars,1,2012,12,5,1622,9,1481349984,0,0,356,0,0,144,F,Major,73,87,70,6,0,28,5
|
||||
Un Finde | CROSSOVER #2,"Big One, FMK, Ke personajes",3,2023,4,4,561,14,142095275,4,14,12,5,56,192,B,Major,50,85,52,11,0,28,6
|
||||
Jimmy Cooks (feat. 21 Savage),"Drake, 21 Savage",2,2022,6,17,5871,27,618885532,81,121,58,1,34,163,,Major,54,40,67,0,0,9,17
|
||||
Counting Stars,OneRepublic,1,2013,1,1,29215,43,2011464183,179,97,"3,394",11,153,122,C#,Minor,66,48,71,6,0,12,4
|
||||
Ghost,Justin Bieber,1,2021,3,19,5866,24,1167330737,107,38,95,0,,154,D,Major,61,41,74,21,0,40,6
|
||||
Under The Influence,Chris Brown,1,2019,10,4,3859,26,929964809,133,181,3,0,,117,A,Minor,73,31,69,6,0,11,4
|
||||
PRC,"Natanael Cano, Peso Pluma",2,2023,1,23,961,26,436027885,19,143,10,6,15,138,G,Minor,78,89,83,10,0,12,5
|
||||
Gasolina,Daddy Yankee,1,2004,7,13,6457,18,657723613,98,95,453,0,454,96,,Major,86,74,80,33,0,8,6
|
||||
One Dance,"Drake, WizKid, Kyla",3,2016,4,4,43257,24,2713922350,433,107,"3,631",0,26,104,C#,Major,77,36,63,1,0,36,5
|
||||
Enchanted,Taylor Swift,1,2010,1,1,4564,16,621660989,24,101,113,0,40,164,G#,Major,45,24,62,8,0,16,3
|
||||
Save Your Tears,The Weeknd,1,2020,3,20,12688,13,1591223784,197,115,112,0,200,118,,Major,68,61,82,2,0,50,3
|
||||
Sure Thing,Miguel,1,2010,5,25,13801,19,950906471,137,125,435,6,285,81,B,Minor,68,51,60,3,0,19,10
|
||||
Every Breath You Take - Remastered 2003,The Police,1,1983,1,6,22439,19,1593270737,211,74,929,0,129,117,C#,Major,82,73,45,54,0,7,3
|
||||
The Night We Met,Lord Huron,1,2015,2,2,18515,35,1410088830,70,82,939,1,162,174,D,Major,45,10,37,97,25,64,4
|
||||
We Found Love,"Rihanna, Calvin Harris",2,2011,1,1,36843,21,1235005533,321,91,"4,607",1,58,128,C#,Major,73,60,77,3,0,11,4
|
||||
When I Was Your Man,Bruno Mars,1,2012,12,5,2420,11,1661187319,0,0,806,0,0,145,,Major,60,43,27,94,0,14,4
|
||||
Let Me Down Slowly,Alec Benjamin,1,2018,5,25,5897,19,1374581173,0,0,885,0,0,150,C#,Minor,65,51,55,73,0,14,3
|
||||
"Am I Dreaming (Metro Boomin & A$AP Rocky, Roisee)","A$AP Rocky, Metro Boomin, Roisee",3,2023,6,2,727,16,94186466,17,60,28,1,44,90,A,Minor,60,13,53,4,0,21,4
|
||||
Do I Wanna Know?,Arctic Monkeys,1,2013,1,1,33783,26,1788326445,133,92,"2,733",1,26,85,F,Major,55,42,53,17,0,22,3
|
||||
Demons,Imagine Dragons,1,2012,1,1,26694,13,1840364617,65,82,"3,425",4,13,180,D#,Major,33,38,71,20,0,28,5
|
||||
ээээээээээээ,YOASOBI,1,2023,4,12,356,16,143573775,35,102,8,1,117,166,C#,Major,57,84,94,11,0,37,9
|
||||
Reminder,The Weeknd,1,2016,11,25,6518,17,684675814,45,85,238,1,47,160,G#,Major,71,40,50,16,0,16,22
|
||||
Shake It Off,Taylor Swift,1,2014,1,1,21335,13,1113838873,328,70,"1,378",9,20,160,G,Major,65,95,80,5,0,41,16
|
||||
Why'd You Only Call Me When You're High?,Arctic Monkeys,1,2013,1,1,23389,29,1267333350,54,70,"1,089",2,1,92,D,Major,70,81,63,4,0,8,4
|
||||
SNAP,Rosa Linn,1,2022,3,19,3202,18,726307468,148,80,226,24,0,170,,Major,56,53,64,11,0,45,6
|
||||
Shape of You,Ed Sheeran,1,2017,1,6,32181,10,3562543890,33,0,"6,808",7,0,96,C#,Minor,83,93,65,58,0,9,8
|
||||
Night Changes,One Direction,1,2014,11,17,7124,18,1131090940,60,20,2,0,,120,G#,Major,67,40,52,86,0,12,4
|
||||
Fin de Semana,"Oscar Maydon, Junior H",2,2023,1,13,592,14,307370144,11,84,6,1,30,98,,Major,70,37,54,6,0,9,8
|
||||
Creep,Radiohead,1,1992,9,21,36724,7,1271293243,146,72,"6,807",5,80,92,G,Major,53,12,34,1,0,12,4
|
||||
Car's Outside,James Arthur,1,2019,10,18,794,10,265882712,38,25,61,0,263,150,A,Major,34,24,56,4,0,11,3
|
||||
Apocalypse,Cigarettes After Sex,1,2017,3,21,13091,17,841749534,61,96,790,2,116,94,F,Major,37,17,47,2,46,11,3
|
||||
Cheques,Shubh,1,2023,5,19,67,8,47956378,7,10,0,0,57,90,E,Minor,74,36,63,26,0,27,5
|
||||
Pink + White,Frank Ocean,1,2016,8,20,21574,30,806397070,112,68,266,1,39,160,A,Major,54,54,55,67,0,42,11
|
||||
Circles,Post Malone,1,2019,8,30,19664,16,2132335812,391,73,633,3,37,120,,Major,70,59,75,24,0,9,4
|
||||
Just The Way You Are,Bruno Mars,1,2010,1,1,21106,13,1641426668,82,0,"2,946",0,0,109,F,Major,63,46,85,1,0,9,5
|
||||
Take Me To Church,Hozier,1,2013,9,13,23804,31,2135158446,187,99,"4,623",1,0,129,E,Minor,57,41,66,63,0,12,5
|
||||
Bebe Dame,"Fuerza Regida, Grupo Frontera",2,2022,12,16,849,22,367316268,27,129,21,7,111,157,G,Major,54,75,60,30,0,7,5
|
||||
You Belong With Me (TaylorпїЅпїЅпїЅs Ve,Taylor Swift,1,2021,4,9,2619,12,350381515,47,90,1,0,7,130,F#,Major,63,49,73,5,0,9,3
|
||||
Titi Me Preguntпї,Bad Bunny,1,2022,5,6,9037,42,1264310836,124,133,139,14,166,107,F,Minor,65,19,72,10,0,13,25
|
||||
Better Than Revenge (Taylor's Version),Taylor Swift,1,2023,7,7,86,11,30343206,3,33,3,0,1,146,B,Minor,50,67,89,0,0,19,8
|
||||
Shut up My Moms Calling,Hotel Ugly,1,2020,2,10,1788,14,405136812,1,50,19,0,19,139,A,Minor,48,37,41,32,0,10,10
|
||||
Have You Ever Seen The Rain?,Creedence Clearwater Revival,1,1968,7,1,15890,14,1145727611,71,37,653,0,167,116,,Major,74,76,70,7,0,13,3
|
||||
Es un Secreto,Plan B,1,2010,7,20,492,36,540654286,4,3,19,0,0,95,F#,Minor,84,52,77,12,0,7,4
|
||||
POLARIS - Remix,"Feid, Mora, Saiko, Quevedo",4,2023,6,8,773,33,57312735,20,46,21,8,99,170,G#,Minor,62,55,80,15,0,37,7
|
||||
Ditto,NewJeans,1,2022,12,19,1154,22,397582059,28,125,11,1,51,134,F#,Minor,81,18,64,3,0,10,11
|
||||
Take On Me,a-ha,1,1984,10,19,44927,17,1479115056,34,0,"5,108",6,0,84,F#,Minor,57,86,90,2,0,9,5
|
||||
"Annihilate (Spider-Man: Across the Spider-Verse) (Metro Boomin & Swae Lee, Lil Wayne, Offset)","Swae Lee, Lil Wayne, Offset, Metro Boomin",4,2023,6,2,551,4,86773632,13,46,20,1,10,146,B,Minor,61,20,48,21,0,12,6
|
||||
"Angel Pt 1 (feat. Jimin of BTS, JVKE & Muni Long)","Kodak Black, NLE Choppa, Muni Long, JVKE, Jimin",5,2023,5,1,577,14,133753727,22,18,15,1,0,74,A#,Minor,53,24,67,11,0,10,28
|
||||
AcrпїЅпїЅs,Shakira,1,2023,5,11,955,29,123124076,37,50,79,11,31,144,B,Major,75,35,48,84,0,10,12
|
||||
AMG,"Natanael Cano, Gabito Ballesteros, Peso Pluma",3,2022,11,24,995,19,463564958,12,117,9,5,3,136,B,Minor,77,79,73,15,0,27,10
|
||||
"Phir Aur Kya Chahiye (From ""Zara Hatke Zara Bachke"")","Arijit Singh, Sachin-Jigar, Amitabha Bhattacharya",3,2023,5,15,178,6,64533040,6,71,1,0,31,100,E,Major,56,53,55,53,0,12,4
|
||||
S-Class,Stray Kids,1,2023,6,2,290,19,65496046,9,101,5,0,73,105,F,Minor,89,67,78,9,0,7,33
|
||||
Hits Different,Taylor Swift,1,2023,5,26,547,0,68616963,15,15,6,0,0,106,F,Major,67,24,78,15,0,30,4
|
||||
Chanel,"Becky G, Peso Pluma",2,2023,3,30,681,10,161460990,15,92,21,2,26,132,D,Major,85,53,68,40,0,9,4
|
||||
Self Love (Spider-Man: Across the Spider-Verse) (Metro Boomin & Coi Leray),"Metro Boomin, Coi Leray",2,2023,6,2,332,5,70106975,18,41,5,0,19,120,A,Major,78,5,30,21,0,13,5
|
||||
Area Codes,"Kaliii, Kaliii",2,2023,3,17,1197,13,113509496,44,34,25,1,171,155,C#,Major,82,51,39,2,0,9,49
|
||||
Abcdario,"Junior H, Eden MuпїЅп",2,2023,5,13,262,5,89933133,8,60,4,1,109,129,G#,Major,70,42,43,78,0,11,3
|
||||
Obsessed,"Abhijay Sharma, Riar Saab",2,2022,9,29,161,6,71007139,10,79,2,0,42,135,F,Minor,80,85,74,62,0,8,9
|
||||
PiпїЅпїЅman Deпї,"Semicenk, DoпїЅпїЅu ",2,2023,6,2,185,3,43522589,5,6,4,1,33,98,A#,Minor,73,45,62,28,0,13,13
|
||||
FLOWER,JISOO,1,2023,3,31,839,18,232896922,20,110,20,0,69,124,A,Minor,84,64,39,3,0,11,4
|
||||
"All The Way Live (Spider-Man: Across the Spider-Verse) (Metro Boomin & Future, Lil Uzi Vert)","Future, Lil Uzi Vert, Metro Boomin",3,2023,6,2,259,0,37126685,5,17,5,0,0,135,A,Minor,77,28,55,18,0,22,15
|
||||
Eyes Closed,Ed Sheeran,1,2023,3,23,2915,30,195576623,116,69,107,3,675,107,D,Major,78,39,53,30,0,11,6
|
||||
Escapism.,"RAYE, 070 Shake",2,2022,10,12,5129,25,532336353,116,84,114,18,348,96,D,Major,54,25,74,14,0,9,11
|
||||
La Jumpa,"Arcangel, Bad Bunny",2,2022,11,30,3794,34,538115192,47,77,53,10,8,123,G#,Major,71,58,70,30,0,32,19
|
||||
Karma (feat. Ice Spice),"Taylor Swift, Ice Spice",2,2023,5,26,588,0,46142772,23,21,31,0,0,90,G#,Major,62,7,62,6,0,58,6
|
||||
Superhero (Heroes & Villains) [with Future & Chris Brown],"Future, Chris Brown, Metro Boomin",3,2022,12,2,2959,16,401036314,41,69,38,0,36,117,F,Minor,72,45,59,14,0,20,21
|
||||
Las Morras,"BLESSD, Peso Pluma",2,2023,4,4,291,8,127026613,8,78,4,1,1,133,A,Minor,78,90,84,31,0,7,4
|
||||
CHORRITO PA LAS ANIMAS,Feid,1,2022,12,2,2321,36,345031710,29,65,34,5,3,96,G#,Minor,74,61,83,11,0,35,6
|
||||
Ch y la Pizza,"Fuerza Regida, Natanael Cano",2,2022,12,1,536,10,288101651,10,72,8,3,14,149,G#,Major,66,85,60,40,0,14,13
|
||||
Snow On The Beach (feat. More Lana Del Rey),"Lana Del Rey, Taylor Swift",2,2023,5,26,359,2,60350538,1,0,9,0,0,110,F#,Minor,66,32,40,81,0,11,3
|
||||
Players,Coi Leray,1,2022,11,30,4096,6,335074782,118,48,143,0,240,105,F#,Major,95,62,52,3,0,5,16
|
||||
Bite Me,ENHYPEN,1,2023,5,22,349,69,76767396,8,96,5,0,56,105,C#,Major,80,69,78,28,0,11,14
|
||||
Stand By Me (feat. Morgan Wallen),"Lil Durk, Morgan Wallen",2,2023,5,26,381,5,46065667,23,82,6,0,113,134,B,Major,76,61,58,6,0,16,3
|
||||
Normal,Feid,1,2022,7,8,2461,36,459276435,47,66,45,5,6,170,E,Minor,71,59,56,4,0,27,12
|
||||
Hummingbird (Metro Boomin & James Blake),"James Blake, Metro Boomin",2,2023,6,2,277,1,39666245,1,20,5,0,1,81,F#,Major,59,26,60,46,1,25,13
|
||||
Seu Brilho Sumiu - Ao Vivo,"Israel & Rodolffo, Mari Fernandez",2,2023,3,1,967,5,138517666,7,29,51,1,29,154,F#,Major,63,75,92,31,0,91,5
|
||||
Bad Habit,Steve Lacy,1,2022,6,29,8186,12,822633917,155,72,131,16,29,169,C#,Major,69,69,51,63,0,38,4
|
||||
CUFF IT,Beyoncпї,1,2022,7,29,7842,10,595900742,215,88,330,26,23,115,G,Major,78,64,69,4,0,7,14
|
||||
Lilith (feat. SUGA of BTS) (Diablo IV Anthem),"Halsey, Suga",2,2023,6,5,215,6,51985779,6,14,8,2,4,84,A,Minor,43,14,74,1,0,19,8
|
||||
69,"Nicky Jam, Feid",2,2023,5,18,1134,22,57945987,39,14,48,2,3,93,G#,Major,79,58,62,11,0,11,23
|
||||
NiпїЅпїЅa Bo,"Sean Paul, Feid",2,2023,4,21,1305,34,115010040,29,26,43,5,44,91,G,Major,82,47,62,10,0,10,15
|
||||
Search & Rescue,Drake,1,2023,4,7,2066,6,175097833,58,70,43,0,182,142,A#,Minor,82,54,44,6,0,33,7
|
||||
AMERICA HAS A PROBLEM (feat. Kendrick Lamar),"Kendrick Lamar, Beyoncпї",2,2023,5,19,896,0,57089066,34,2,33,0,1,126,C#,Major,78,20,70,1,0,16,4
|
||||
Lavender Haze,Taylor Swift,1,2022,10,21,3763,8,488386797,51,43,38,10,1,97,A#,Major,73,10,44,26,0,16,8
|
||||
"Link Up (Metro Boomin & Don Toliver, Wizkid feat. BEAM & Toian) - Spider-Verse Remix (Spider-Man: Across the Spider-Verse )","WizKid, Toian, Metro Boomin, Don Toliver, Beam",5,2023,6,2,197,0,32761689,3,10,3,0,0,101,F,Major,92,59,51,41,51,26,8
|
||||
Efecto,Bad Bunny,1,2022,5,6,4004,33,1047480053,34,65,43,6,2,98,G,Minor,80,23,48,14,0,6,5
|
||||
Erro Gostoso - Ao Vivo,Simone Mendes,1,2023,1,27,984,5,153454328,8,57,76,2,49,154,F#,Major,59,63,89,18,0,80,9
|
||||
Cupido,Tini,1,2023,2,14,1240,24,217672943,51,29,63,4,54,120,A,Major,91,63,58,52,0,31,22
|
||||
Just Wanna Rock,Lil Uzi Vert,1,2022,10,17,3995,13,457184829,72,27,47,0,0,150,B,Major,49,4,55,7,0,6,3
|
||||
Unstoppable,Sia,1,2016,1,21,7681,13,939844851,119,66,"1,145",2,,174,A,Major,47,27,78,11,0,10,8
|
||||
Until I Found You,Stephen Sanchez,1,2021,9,1,4427,4,726434358,69,100,154,20,438,202,A#,Major,34,25,51,69,0,18,4
|
||||
Rich Flex,"Drake, 21 Savage",2,2022,11,4,4657,18,573633020,84,84,42,0,23,153,B,Minor,56,42,52,5,0,36,24
|
||||
Easy On Me,Adele,1,2021,10,14,10195,20,1406111294,258,87,657,22,9,142,F,Major,60,13,37,58,0,13,3
|
||||
CartпїЅпїЅo B,"MC Caverinha, KayBlack",2,2023,5,11,269,4,71573339,7,2,30,1,11,108,A,Minor,84,55,47,26,0,20,64
|
||||
Danger (Spider) (Offset & JID),"Offset, JID",2,2023,6,2,214,0,24975653,3,3,6,0,0,143,B,Major,83,25,69,4,0,23,12
|
||||
Oi Balde - Ao Vivo,ZпїЅпїЅ Neto & Crist,1,2023,2,14,845,2,145458418,12,57,47,1,33,108,D,Major,67,55,67,60,0,80,5
|
||||
The Real Slim Shady,Eminem,1,2000,1,1,20763,27,1424589568,81,53,"3,271",1,17,104,F,Minor,95,78,66,3,0,4,6
|
||||
MERCHO,"Migrantes, LiL CaKe, Nico Valdi",3,2022,12,16,1267,20,231332117,41,22,56,4,84,93,F#,Minor,84,96,79,43,0,18,11
|
||||
The Color Violet,Tory Lanez,1,2021,12,10,2585,32,415932686,3,79,21,1,54,105,F#,Minor,65,46,53,16,0,9,5
|
||||
Glimpse of Us,Joji,1,2022,6,10,6330,6,988515741,109,42,158,3,31,170,G#,Major,44,27,32,89,0,14,5
|
||||
Mejor Que Yo,"Mambo Kingz, DJ Luian, Anuel Aa",3,2023,5,4,675,1,50847624,9,13,11,0,1,178,C#,Minor,62,56,66,18,0,12,5
|
||||
Curtains,Ed Sheeran,1,2023,5,5,715,0,39893489,37,3,27,0,50,176,F#,Minor,50,44,76,10,0,32,5
|
||||
UNFORGIVEN (feat. Nile Rodgers),"Nile Rodgers, LE SSERAFIM",2,2023,5,1,327,13,92035115,14,110,9,0,49,104,E,Minor,80,38,88,11,0,11,5
|
||||
Haegeum,Agust D,1,2023,4,21,244,12,118810253,6,84,10,2,9,85,G,Major,70,83,84,31,0,47,30
|
||||
ConexпїЅпїЅes de MпїЅпїЅfia (feat. Rich ,"Rich The Kid, Matuпї",2,2023,4,30,385,4,77233241,17,7,41,1,29,117,F#,Minor,77,69,58,39,0,26,5
|
||||
MIENTRAS ME CURO DEL CORA,Karol G,1,2023,2,24,1020,35,206399629,15,26,30,6,0,80,,Major,52,57,48,86,0,15,39
|
||||
Never Felt So Alone,Labrinth,1,2023,4,7,1730,3,117747907,46,5,51,0,20,98,F,Major,44,36,41,50,0,38,5
|
||||
X SI VOLVEMOS,"Karol G, Romeo Santos",2,2023,2,2,2127,33,266624541,45,80,53,8,4,178,C#,Minor,79,58,78,34,0,11,25
|
||||
ceilings,Lizzy McAlpine,1,2022,4,8,3242,9,293186992,67,55,48,0,6,148,A,Major,51,27,33,48,0,22,3
|
||||
Cupid,Fifty Fifty,1,2023,2,24,526,10,139681964,15,93,30,0,320,120,D,Major,77,94,66,65,0,38,3
|
||||
I AM,IVE,1,2023,4,10,366,15,123132751,16,102,7,0,55,122,E,Minor,68,38,88,1,0,8,5
|
||||
Cupid пїЅпїЅпїЅ Twin Ver. (FIFTY FIFTY) пїЅпїЅпїЅ Spe,sped up 8282,1,1997,1,1,472,2,103762518,0,0,6,0,0,144,F,Major,74,75,73,42,0,9,4
|
||||
Shorty Party,"Cartel De Santa, La Kelly",2,2023,3,4,432,12,162887075,8,14,12,2,33,96,D,Major,93,47,47,33,0,10,36
|
||||
Super,SEVENTEEN,1,2023,4,24,271,12,91221625,16,103,9,0,55,137,G#,Major,77,35,88,16,0,17,9
|
||||
Slut Me Out,NLE Choppa,1,2022,4,22,816,4,190490915,21,4,13,0,4,121,F#,Minor,94,71,61,12,0,53,42
|
||||
Double Fantasy (with Future),"The Weeknd, Future",2,2023,4,21,1169,0,96180277,36,65,28,0,0,119,A,Minor,60,10,57,1,0,50,3
|
||||
All Of The Girls You Loved Before,Taylor Swift,1,2019,8,23,1282,6,185240616,26,6,19,0,5,96,D,Major,72,40,47,71,0,13,4
|
||||
PROVENZA,Karol G,1,2022,4,21,6587,34,885093467,114,104,147,11,20,111,C#,Major,87,53,52,66,1,11,5
|
||||
Princess Diana (with Nicki Minaj),"Nicki Minaj, Ice Spice",2,2023,4,14,1444,4,104992946,0,0,0,0,0,148,A,Major,90,74,68,14,0,10,19
|
||||
Di Que Si,"Grupo Marca Registrada, Grupo Frontera",2,2023,2,3,356,10,147290338,4,64,4,2,37,182,A,Major,57,80,59,8,0,6,5
|
||||
Shivers,Ed Sheeran,1,2021,9,9,10147,30,1302184087,234,71,543,18,,141,D,Major,79,82,86,28,0,4,9
|
||||
Igualito a Mi Apпї,"Fuerza Regida, Peso Pluma",2,2022,12,30,265,6,158950978,8,84,5,1,4,145,E,Minor,76,80,81,19,0,6,9
|
||||
Shoong! (feat. LISA of BLACKPINK),"TAEYANG, Lisa",2,2023,4,25,351,9,76910644,16,90,10,0,64,110,B,Minor,76,26,70,1,0,41,6
|
||||
Komang,Raim Laode,1,2022,8,16,158,4,137123880,5,6,1,1,18,134,G,Major,70,35,41,41,0,10,3
|
||||
DESPECHпї,ROSALпїЅ,1,2022,7,28,7613,33,782369383,180,90,422,15,55,130,G,Major,92,78,62,18,0,6,10
|
||||
Made You Look,Meghan Trainor,1,2022,10,21,3956,6,502574952,142,23,127,3,16,145,A#,Major,84,88,53,35,0,8,7
|
||||
Watch This - ARIZONATEARS Pluggnb Remix,"sped up nightcore, ARIZONATEARS, Lil Uzi Vert",3,2023,2,5,1638,10,207033255,0,0,21,0,0,130,B,Minor,69,36,90,1,10,15,4
|
||||
No Se Va,Grupo Frontera,1,2022,4,28,924,18,404887295,17,80,22,9,38,173,,Major,59,69,53,12,0,23,3
|
||||
Punto G,Quevedo,1,2022,11,4,1985,35,381161027,34,26,37,5,1,92,B,Minor,75,55,76,25,0,10,15
|
||||
Lovers Rock,TV Girl,1,2014,6,5,6339,13,466231982,3,1,36,1,37,105,F,Minor,56,57,87,0,1,10,4
|
||||
METAMORPHOSIS,INTERWORLD,1,2021,11,25,1561,24,357580552,18,78,24,0,30,175,G,Minor,59,15,64,43,90,12,10
|
||||
Mami Chula,"Quevedo, Jhayco",2,2023,4,27,875,4,61105704,17,13,27,0,43,120,G,Minor,80,33,70,22,0,9,4
|
||||
En Paris,"El Chachito, Junior H",2,2022,12,24,406,5,198275403,3,31,2,1,0,139,D#,Minor,70,77,48,37,0,12,5
|
||||
Set Me Free Pt.2,Jimin,1,2023,3,17,340,13,168448603,4,71,16,1,9,132,,Minor,59,56,82,12,0,12,6
|
||||
I Was Never There,"The Weeknd, Gesaffelstein",2,2018,3,29,4188,15,705469769,30,70,142,0,27,114,A#,Major,32,17,74,14,0,17,3
|
||||
Don't ever say love me (feat. RM of BTS),"RM, Colde",2,2023,5,4,105,0,34502215,5,9,5,0,0,145,B,Minor,54,19,48,36,0,37,5
|
||||
Shut Down,BLACKPINK,1,2022,9,16,1524,17,482175240,53,120,62,0,2,110,,Major,82,67,69,0,0,18,4
|
||||
Gato de Noche,"Nengo Flow, Bad Bunny",2,2022,12,22,2651,30,304118600,21,55,32,3,0,94,G#,Major,89,61,66,17,0,36,16
|
||||
Call Out My Name,The Weeknd,1,2018,3,29,11087,6,1449799467,151,107,801,1,105,134,C#,Major,45,17,60,21,0,33,4
|
||||
Like Crazy (English Version),Jimin,1,2023,3,24,373,19,173627354,4,72,5,0,5,120,G,Major,62,32,76,0,0,39,4
|
||||
Rosa Pastel,"Jasiel NuпїЅпїЅez, Peso P",2,2023,2,2,200,4,90025258,8,77,2,1,1,123,G,Minor,70,86,68,24,0,11,4
|
||||
Sunroof,"Nicky Youre, Dazy",2,2021,12,3,3741,17,652704649,156,35,110,19,0,131,A#,Major,77,84,71,35,0,15,4
|
||||
Lose Yourself - Soundtrack Version,Eminem,1,2002,1,1,32502,21,1829992958,247,54,"5,567",1,51,171,D,Major,70,6,73,1,0,36,26
|
||||
Superman,"Eminem, Dina Rae",2,2002,5,26,7615,14,655466831,18,51,"1,005",0,0,130,E,Minor,80,64,76,2,0,20,6
|
||||
Mas Rica Que Ayer,"Mambo Kingz, DJ Luian, Anuel Aa",3,2023,3,2,1208,34,146409671,10,41,20,0,1,94,B,Major,82,53,67,34,0,9,8
|
||||
People Pt.2 (feat. IU),"IU, Agust D",2,2023,4,7,209,4,95816024,4,45,11,2,24,89,G,Minor,73,44,57,39,0,32,6
|
||||
REMIX EXCLUSIVO,Feid,1,2023,3,17,1235,9,117206995,20,8,15,0,6,87,F,Minor,65,71,56,4,0,15,20
|
||||
"ArcпїЅпїЅngel: Bzrp Music Sessions, Vol","Arcangel, Bizarrap",2,2023,3,22,654,3,100409613,11,3,18,1,1,124,B,Minor,72,79,78,55,0,15,30
|
||||
DOGTOOTH,"Tyler, The Creator",2,2023,3,27,1479,0,80758350,23,0,18,0,33,78,G#,Major,71,80,65,51,0,22,32
|
||||
10:35,"TiпїЅпїЅsto, Tate M",2,2022,11,1,4942,26,325592432,190,104,147,18,63,120,G#,Major,70,70,79,7,0,18,10
|
||||
SORRY NOT SORRY,"Tyler, The Creator",2,2023,3,31,709,0,58473276,8,1,13,0,0,96,F#,Minor,48,50,80,40,0,37,20
|
||||
HAPPY,NF,1,2023,3,25,660,0,52722996,22,7,11,0,78,106,G,Major,73,22,86,31,0,12,4
|
||||
La Bebe,Yng Lvcas,1,2021,12,24,489,17,191945597,4,11,5,1,2,170,D,Minor,78,75,46,62,0,12,35
|
||||
I Know - PR1SVX Edit,"Kanii, PR1ISVX",2,2023,3,24,407,0,77377503,16,15,5,0,1,134,B,Minor,67,11,76,8,47,30,7
|
||||
Late Night Talking,Harry Styles,1,2022,5,20,7461,8,743693613,166,42,199,16,58,115,A#,Major,71,90,73,30,0,11,5
|
||||
LeпїЅ,MarпїЅпїЅlia Mendo,1,2022,12,9,993,4,267789608,30,84,88,1,28,130,F#,Major,74,79,87,45,0,30,3
|
||||
Save Your Tears (with Ariana Grande) (Remix),"Ariana Grande, The Weeknd",2,2020,3,20,9161,5,1221813483,240,98,468,3,10,118,,Major,65,63,79,3,0,10,3
|
||||
Something in the Orange,Zach Bryan,1,2022,4,22,3282,12,449701773,67,84,46,16,117,110,G,Major,59,22,38,42,0,12,3
|
||||
VOID,Melanie Martinez,1,2023,3,29,596,0,67070410,29,9,12,0,52,100,A,Major,72,42,66,18,4,19,4
|
||||
Dijeron Que No La Iba Lograr,"Fuerza Regida, Chino Pacas",2,2023,3,14,320,6,116334601,5,48,2,1,9,142,G,Minor,70,76,79,26,0,11,7
|
||||
Midnight Rain,Taylor Swift,1,2022,10,21,2612,4,433356509,19,29,21,0,0,140,,Major,64,18,37,72,0,12,7
|
||||
If We Ever Broke Up,Mae Stephens,1,2023,2,10,2040,4,165584767,81,27,66,9,444,116,G,Major,90,96,73,62,0,9,4
|
||||
You Proof,Morgan Wallen,1,2022,5,13,2128,9,367814306,37,88,9,0,14,120,A,Major,73,64,85,25,0,61,3
|
||||
LA INOCENTE,"Feid, Mora",2,2022,4,1,2598,37,477033549,28,57,43,8,85,92,F,Minor,76,46,79,31,0,7,6
|
||||
Malas Decisiones,Kenia OS,1,2022,10,26,542,2,156214700,23,2,21,0,0,110,G#,Minor,81,64,79,5,0,31,3
|
||||
Murder In My Mind,Kordhell,1,2022,1,21,2459,20,448843705,20,68,50,0,22,120,A#,Major,71,57,97,1,0,13,11
|
||||
Gangsta's Paradise,"Coolio, L.V.",2,1995,7,11,10624,17,1357608774,21,0,386,0,,80,G#,Major,63,40,61,9,0,56,6
|
||||
CAIRO,"Karol G, Ovy On The Drums",2,2022,11,13,2418,26,294352144,52,66,55,1,16,115,F,Minor,95,43,69,47,0,9,31
|
||||
I Love You So,The Walters,1,2014,11,28,7536,7,972164968,44,19,135,0,6,76,A#,Major,58,46,67,65,0,13,4
|
||||
Dark Red,Steve Lacy,1,2017,2,20,10431,7,920045682,71,53,181,0,10,172,F#,Major,60,77,78,45,0,12,6
|
||||
Say You Won't Let Go,James Arthur,1,2016,9,9,15722,16,2420461338,231,37,"1,509",0,13,99,A#,Major,40,45,56,69,0,9,5
|
||||
The Hills,The Weeknd,1,2015,5,27,25744,4,1947371785,122,94,"1,992",0,18,136,,Minor,36,12,57,9,0,14,8
|
||||
Heart To Heart,Mac DeMarco,1,2019,5,10,1640,0,244658767,27,27,29,1,1,150,G#,Minor,90,64,14,67,35,11,10
|
||||
Peaches (from The Super Mario Bros. Movie),Jack Black,1,2023,4,7,34,0,68216992,0,0,0,0,0,92,A#,Minor,71,41,31,79,0,10,5
|
||||
Marisola - Remix,"Duki, NICKI NICOLE, Cris Mj, Standly, Stars Music Chile",5,2022,12,15,1845,16,223582566,20,8,30,2,0,95,C#,Major,77,72,87,17,0,7,5
|
||||
LOKERA,"Brray, Rauw Alejandro, Lyanno",3,2022,7,25,3301,30,471819764,72,64,64,6,3,102,B,Minor,83,58,83,21,0,10,5
|
||||
Low,SZA,1,2022,12,9,1911,0,272377463,23,71,14,0,9,145,,Minor,70,34,55,43,0,16,6
|
||||
Numb,Linkin Park,1,2003,3,24,20111,5,1361425037,39,0,"7,341",0,0,110,A,Major,50,24,86,0,0,64,4
|
||||
Tormenta (feat. Bad Bunny),"Gorillaz, Bad Bunny",2,2023,2,24,1529,0,149778242,32,18,39,2,0,95,,Major,64,30,77,46,0,38,6
|
||||
on the street (with J. Cole),"j-hope, J. Cole",2,2023,3,3,615,2,116599790,22,82,8,0,9,94,B,Minor,68,81,82,53,0,9,13
|
||||
One Thing At A Time,Morgan Wallen,1,2022,12,2,811,4,148469433,11,58,5,0,21,142,G,Major,61,92,91,0,0,26,3
|
||||
Miss You,"Robin Schulz, Oliver Tree",2,2022,8,5,5730,10,497225336,108,16,197,3,165,145,F#,Minor,59,20,74,1,0,15,5
|
||||
AinпїЅпїЅпїЅt Tha,Morgan Wallen,1,2023,3,3,356,4,88791109,4,20,0,0,0,121,F#,Minor,64,67,80,0,0,36,3
|
||||
ThinkinпїЅпїЅпїЅ B,Morgan Wallen,1,2023,3,3,604,6,125917280,22,101,0,0,66,140,D#,Minor,66,43,76,49,0,12,3
|
||||
Private Landing (feat. Justin Bieber & Future),"Don Toliver, Future, Justin Bieber",3,2023,2,23,1190,0,105062254,29,3,18,0,19,137,C#,Minor,84,44,67,8,0,11,6
|
||||
Everything I Love,Morgan Wallen,1,2023,1,31,579,0,95623148,11,54,0,0,103,104,G#,Major,56,72,85,0,0,15,3
|
||||
Heaven,Niall Horan,1,2023,2,17,1553,2,144584800,61,6,48,0,150,92,F,Major,57,68,76,7,0,33,3
|
||||
LET GO,Central Cee,1,2022,12,15,2301,20,298063749,49,23,110,0,8,146,D,Minor,74,51,45,86,0,21,38
|
||||
Sial,Mahalini,1,2023,1,23,134,4,166570053,4,6,0,0,23,120,D,Major,56,20,43,89,0,12,4
|
||||
I Wrote The Book,Morgan Wallen,1,2023,1,31,430,0,83021468,15,17,0,0,0,144,D,Major,68,83,81,9,0,8,4
|
||||
"Apna Bana Le (From ""Bhediya"")","Arijit Singh, Sachin-Jigar",2,2022,11,5,86,0,139836056,11,101,0,0,48,94,A,Major,59,44,56,80,0,6,3
|
||||
SPIT IN MY FACE!,ThxSoMch,1,2022,10,31,629,14,303216294,32,3,9,0,0,94,G#,Major,73,65,79,5,2,11,6
|
||||
PLAYA DEL INGLпїЅ,"Myke Towers, Quevedo",2,2022,12,15,1701,15,221409663,30,15,34,2,47,113,G,Minor,79,66,74,8,0,11,5
|
||||
Man Made A Bar (feat. Eric Church),"Morgan Wallen, Eric Church",2,2023,3,3,329,0,58890931,14,35,1,0,0,148,E,Major,50,49,76,12,0,12,3
|
||||
Red Ruby Da Sleeze,Nicki Minaj,1,2023,3,3,1168,0,81419389,45,11,20,0,21,98,C#,Major,70,29,73,12,0,11,26
|
||||
Kahani Suno 2.0,Kaifi Khalil,1,2022,5,31,162,6,156777415,1,10,1,0,1,140,B,Major,58,26,38,91,0,10,4
|
||||
Nobody Gets Me,SZA,1,2022,12,9,2536,6,284908316,59,100,58,13,2,100,G,Major,36,28,28,81,0,18,3
|
||||
PERO Tпї,"Karol G, Quevedo",2,2023,2,23,387,11,93438910,11,15,14,3,1,140,F#,Major,86,68,79,39,0,11,29
|
||||
Hype Boy,NewJeans,1,2022,8,1,892,17,363472647,20,119,12,2,7,100,E,Minor,59,78,94,27,0,29,23
|
||||
Bloody Mary,Lady Gaga,1,2011,1,1,3909,0,372476382,66,26,277,3,734,100,A,Minor,59,49,65,2,0,13,3
|
||||
MonotonпїЅ,"Ozuna, Shakira",2,2022,10,19,3645,15,380726517,118,34,150,4,19,132,,Minor,87,82,70,42,0,21,5
|
||||
эээ98 Braves,Morgan Wallen,1,2023,3,3,282,0,56533272,6,15,0,0,0,142,D,Major,49,48,67,10,0,26,3
|
||||
WANDA,Quevedo,1,2023,1,20,888,22,175399345,11,24,7,1,7,176,E,Minor,72,96,63,25,0,21,7
|
||||
Thought You Should Know,Morgan Wallen,1,2022,5,6,968,4,203221468,16,53,1,0,61,140,F#,Major,53,51,70,49,0,14,3
|
||||
In The End,Linkin Park,1,2000,10,24,25065,6,1624165576,63,0,"6,808",2,0,105,D#,Minor,55,40,90,1,0,32,6
|
||||
Zona De Perigo,Leo Santana,1,2022,12,8,531,4,134294498,20,1,71,2,0,135,F,Major,81,97,77,75,0,35,3
|
||||
Lovezinho,Treyce,1,2022,7,28,242,0,70069745,12,2,13,0,4,128,E,Minor,82,61,59,30,0,12,4
|
||||
I Like You (A Happier Song) (with Doja Cat),"Post Malone, Doja Cat",2,2022,6,3,5281,14,609293408,94,21,80,15,38,101,F,Major,74,43,69,12,0,12,7
|
||||
Neverita,Bad Bunny,1,2022,5,6,2590,30,671365962,20,64,35,6,0,122,A#,Major,88,43,50,7,0,14,5
|
||||
Vista Al Mar,Quevedo,1,2022,9,8,1769,34,362361576,16,19,21,3,4,105,,Minor,76,49,56,80,12,10,13
|
||||
Sem AlianпїЅпїЅa no ,"MC Xenon, Os Gemeos da Putaria",2,2022,12,23,454,4,93587665,6,1,21,0,1,83,C#,Major,53,40,36,73,0,11,33
|
||||
Enemy (with JID) - from the series Arcane League of Legends,"Imagine Dragons, League of Legends, JID, Arcane",4,2021,9,3,6180,7,1223481149,122,88,580,21,10,77,B,Minor,72,59,76,24,0,42,28
|
||||
Revenge,XXXTENTACION,1,2017,8,25,3600,11,1022258230,7,0,203,0,2,140,B,Minor,75,18,25,78,0,11,26
|
||||
Bombonzinho - Ao Vivo,"Israel & Rodolffo, Ana Castela",2,2022,11,3,1254,6,263453310,26,69,73,2,6,158,C#,Major,65,72,95,31,0,92,5
|
||||
LA CANCIпїЅ,"J Balvin, Bad Bunny",2,2019,6,28,6398,31,1435127549,177,109,305,3,5,176,G,Major,75,43,65,15,0,11,32
|
||||
QuпїЅпїЅ Ago,"Yuridia, Angela Aguilar",2,2022,10,20,660,15,236857112,19,59,18,5,52,98,B,Major,73,88,57,56,0,5,2
|
||||
Love Again,The Kid Laroi,1,2023,1,27,1283,0,147538971,57,4,48,0,0,107,B,Minor,66,47,40,72,0,11,3
|
||||
After Hours,The Weeknd,1,2020,2,19,8084,6,698086140,45,115,218,1,221,109,F,Minor,66,16,57,10,1,12,3
|
||||
About Damn Time,Lizzo,1,2022,7,15,2332,2,723894473,0,0,25,0,0,109,A#,Minor,84,72,74,10,0,34,7
|
||||
Born With A Beer In My Hand,Morgan Wallen,1,2023,3,3,203,0,34450974,5,9,0,0,0,148,,Major,53,61,81,5,0,36,4
|
||||
эээээээээээээээээээээ,Fujii Kaze,1,2020,5,20,685,14,403097450,24,94,9,0,23,158,F#,Minor,60,52,76,17,0,19,5
|
||||
Besos Moja2,"Wisin & Yandel, ROSALпїЅ",2,2022,9,29,2460,13,309483971,53,7,56,3,1,94,F,Minor,74,64,73,6,0,10,6
|
||||
Maan Meri Jaan,King,1,2022,10,12,288,6,319566866,11,80,1,0,8,96,F#,Minor,70,40,51,35,0,10,4
|
||||
Moscow Mule,Bad Bunny,1,2022,5,6,4572,33,909001996,74,113,85,9,2,100,F,Minor,80,29,67,29,0,12,3
|
||||
My Universe,"Coldplay, BTS",2,2021,9,24,6127,13,1061966512,0,0,0,0,0,105,A,Major,58,42,68,1,0,14,4
|
||||
Devil DonпїЅпїЅпїЅ,Morgan Wallen,1,2023,3,3,166,0,32526947,2,10,0,0,0,125,,Major,53,32,66,38,0,9,3
|
||||
LLYLM,ROSALпїЅ,1,2023,1,27,1838,0,124988687,105,41,114,1,59,170,F#,Minor,56,56,63,13,0,19,27
|
||||
I'm Not Here To Make Friends,"Sam Smith, Calvin Harris, Jessie Reyez",3,2023,1,27,1890,0,103787664,86,1,49,0,9,115,,Major,70,84,90,17,0,41,6
|
||||
TRUSTFALL,P!nk,1,2023,1,27,2098,16,134255790,88,24,101,7,451,122,G#,Major,64,25,89,0,0,15,9
|
||||
ANTIFRAGILE,LE SSERAFIM,1,2022,10,17,761,12,301051721,23,95,11,0,3,105,A#,Minor,88,82,80,8,0,11,8
|
||||
Boy's a liar,PinkPantheress,1,2022,11,30,1225,0,156338624,27,0,28,13,0,133,F,Major,66,74,84,25,0,21,4
|
||||
VIBE (feat. Jimin of BTS),"TAEYANG, Jimin",2,2023,1,13,415,2,152850295,15,40,21,1,15,100,,Major,79,60,68,7,0,26,4
|
||||
Shirt,SZA,1,2022,10,28,3469,0,309653982,71,95,31,0,2,120,D#,Minor,82,55,45,15,3,9,10
|
||||
Lift Me Up - From Black Panther: Wakanda Forever - Music From and Inspired By,Rihanna,1,2022,10,28,3311,0,297328960,129,31,212,1,41,177,A,Major,25,17,30,90,0,13,3
|
||||
STAR WALKIN' (League of Legends Worlds Anthem),Lil Nas X,1,2022,9,22,2616,0,332506354,113,17,208,0,192,142,D,Minor,64,31,72,15,0,9,5
|
||||
"Sex, Drugs, Etc.",Beach Weather,1,2016,11,4,3006,16,480507035,47,60,87,12,124,144,E,Minor,57,47,84,1,1,52,4
|
||||
Boy With Luv (feat. Halsey),"Halsey, BTS",2,2019,4,12,4260,0,1065580332,113,92,259,0,1,120,B,Minor,65,80,86,9,0,19,10
|
||||
"Hey, Mickey!",Baby Tate,1,2016,9,27,482,0,122763672,9,1,12,4,3,135,D,Major,92,73,51,55,0,15,6
|
||||
Calm Down,RпїЅп,1,2022,2,9,4013,10,445763624,107,44,750,22,,107,B,Major,81,82,78,38,0,12,4
|
||||
Jhoome Jo Pathaan,"Arijit Singh, Vishal Dadlani, Sukriti Kakar, Vishal-Shekhar, Shekhar Ravjiani, Kumaar",6,2022,12,22,138,4,1365184,13,78,2,0,0,105,G,Major,82,62,74,10,0,33,7
|
||||
Escapism. - Sped Up,"RAYE, 070 Shake",2,2022,11,25,1368,0,184308753,12,1,11,0,8,108,F#,Minor,44,38,77,9,0,9,20
|
||||
Space Song,Beach House,1,2015,1,1,17852,4,789753877,69,76,335,0,,147,,Minor,51,62,79,22,13,14,3
|
||||
Dreamers [Music from the FIFA World Cup Qatar 2022 Official Soundtrack],"BTS, Jung Kook, FIFA Sound",3,2022,11,20,889,20,323358833,11,78,35,0,21,115,,Major,71,34,88,16,0,44,4
|
||||
Te Felicito,"Shakira, Rauw Alejandro",2,2022,4,21,4796,9,606361689,124,66,212,13,1,174,F,Major,70,57,64,23,0,8,32
|
||||
MuпїЅпїЅ,"Steve Aoki, Tini, La Joaqui",3,2023,1,12,658,6,120972253,33,7,53,2,0,90,A#,Minor,68,83,71,3,0,31,8
|
||||
TV,Billie Eilish,1,2022,7,21,3009,2,338564981,68,89,65,0,25,141,E,Minor,41,9,25,84,1,14,4
|
||||
I'm Not The Only One,Sam Smith,1,2014,1,1,17492,3,1606986953,136,24,"1,959",0,30,82,F,Major,68,50,49,56,0,8,4
|
||||
Heather,Conan Gray,1,2020,3,20,6170,7,1301799902,82,1,231,0,2,92,F,Major,47,25,43,62,0,32,3
|
||||
"Besharam Rang (From ""Pathaan"")","Vishal-Shekhar, Shilpa Rao, Caralisa Monteiro, Kumaar, Vishal Dadlani, Shekhar Ravjiani",6,2022,12,12,130,4,140187018,21,79,2,0,0,116,G#,Minor,77,65,80,6,0,15,4
|
||||
One Kiss (with Dua Lipa),"Calvin Harris, Dua Lipa",2,2017,6,2,27705,10,1897517891,537,122,"2,726",6,,124,A,Minor,79,59,86,4,0,8,11
|
||||
Sugar Rush Ride,TOMORROW X TOGETHER,1,2023,1,27,359,0,107642809,12,56,13,0,13,125,A#,Minor,71,83,89,1,0,17,9
|
||||
Pink Venom,BLACKPINK,1,2022,8,19,1963,16,551305895,57,119,77,1,13,90,,Major,80,71,69,2,0,27,10
|
||||
WAIT FOR U (feat. Drake & Tems),"Drake, Future, Tems",3,2022,4,27,5491,14,556585270,128,84,75,0,35,83,C#,Major,46,34,64,31,0,7,34
|
||||
Don't Start Now,Dua Lipa,1,2019,10,31,27119,0,2303033973,532,77,"1,535",3,8,124,B,Minor,79,68,79,1,0,10,8
|
||||
After Dark,Mr.Kitty,1,2014,8,8,1776,14,646886885,1,9,3,0,0,140,G#,Major,58,4,60,7,41,8,3
|
||||
Eu Gosto Assim - Ao Vivo,"Gustavo Mioto, Mari Fernandez",2,2022,9,16,1473,4,222612678,27,64,66,1,4,154,,Major,64,61,91,15,0,72,4
|
||||
INDUSTRY BABY (feat. Jack Harlow),"Jack Harlow, Lil Nas X",2,2021,7,23,13315,0,1814349763,300,47,690,0,,150,D#,Minor,74,89,70,2,0,5,6
|
||||
MIDDLE OF THE NIGHT,Elley Duhпї,1,2020,1,10,4057,8,872137015,78,21,240,1,52,186,E,Minor,41,9,61,2,0,12,5
|
||||
Atlantis,Seafret,1,2015,4,22,3045,6,571386359,43,53,134,1,32,166,G#,Minor,40,23,48,5,0,12,3
|
||||
PUNTO 40,"Baby Rasta, Rauw Alejandro",2,2022,9,22,3006,12,304079786,54,32,66,2,0,107,,Major,87,20,83,0,4,31,9
|
||||
Evoque Prata,"DJ Escobar, MC MENOR SG, MC MENOR HR",3,2022,9,9,852,0,174006928,14,1,50,0,0,87,G,Major,87,52,52,31,0,28,24
|
||||
How Do I Say Goodbye,Dean Lewis,1,2022,8,5,2163,15,284785823,72,97,58,0,154,82,G#,Major,40,39,64,21,0,7,7
|
||||
Blind,SZA,1,2022,12,9,1484,0,163284000,22,51,12,0,0,114,A,Minor,46,60,28,91,0,21,4
|
||||
Die For You,Joji,1,2022,11,4,1703,0,246390068,38,45,36,16,0,148,G#,Major,47,15,52,38,13,29,5
|
||||
Doja,Central Cee,1,2022,7,20,4169,44,482257456,57,44,183,1,11,140,F#,Major,91,97,57,38,0,40,29
|
||||
Gatita,Bellakath,1,2022,10,3,1054,0,168684524,9,0,15,0,1,101,G,Major,90,76,81,15,24,33,6
|
||||
Rumble,"Skrillex, Flowdan, Fred again..",3,2022,1,17,2849,0,78489819,39,45,27,0,1,140,C#,Minor,81,6,84,5,23,6,6
|
||||
Niagara Falls (Foot or 2) [with Travis Scott & 21 Savage],"Travis Scott, 21 Savage, Metro Boomin",3,2022,12,2,1602,0,195516622,12,22,12,0,2,88,G,Minor,49,24,66,19,0,18,29
|
||||
Yonaguni,Bad Bunny,1,2021,6,4,9644,28,1260594497,120,86,164,4,0,180,C#,Major,64,44,65,28,0,14,12
|
||||
Super Freaky Girl,Nicki Minaj,1,2022,8,12,4827,0,428685680,104,17,76,9,2,133,D,Major,95,91,89,6,0,31,24
|
||||
Running Up That Hill (A Deal With God),Kate Bush,1,1985,9,16,21811,0,1024858327,117,1,676,3,0,108,A#,Minor,63,20,55,72,0,6,6
|
||||
Dream On,Aerosmith,1,1973,1,5,168,0,838586769,0,0,5,0,0,80,F,Minor,39,24,43,39,0,23,3
|
||||
Limbo,Freddie Dredd,1,2022,8,11,688,0,199386237,14,1,17,0,2,75,B,Minor,80,46,62,3,6,11,46
|
||||
Where Are You Now,"Lost Frequencies, Calum Scott",2,2021,7,30,10565,44,972509632,238,122,557,17,58,121,F#,Minor,67,26,64,52,0,17,10
|
||||
WORTH NOTHING,"Twisted, Oliver Tree",2,2022,9,28,1612,0,213438580,34,54,34,0,2,140,D,Minor,58,17,62,18,0,20,8
|
||||
Bad Habits,Ed Sheeran,1,2020,9,3,12755,8,1555511105,344,97,945,15,,126,B,Minor,81,59,90,5,0,36,3
|
||||
KICK BACK,Kenshi Yonezu,1,2022,10,12,574,4,210038833,38,101,26,0,0,102,C#,Major,58,29,94,0,0,9,11
|
||||
Evergreen (You DidnпїЅпїЅпїЅt Deserve Me A,Omar Apollo,1,2022,4,8,2499,0,227918678,70,0,49,0,0,82,A,Major,70,31,34,60,1,11,4
|
||||
Good Days,SZA,1,2020,12,24,10426,2,826623384,133,109,182,1,10,121,C#,Minor,46,53,78,23,0,72,6
|
||||
Levitating (feat. DaBaby),"Dua Lipa, DaBaby",2,2020,3,27,15894,8,1802514301,198,13,544,0,60,103,F#,Minor,70,92,83,1,0,7,6
|
||||
Woman,Doja Cat,1,2021,6,25,9424,0,1329090101,202,50,463,4,,108,F,Minor,82,88,76,9,0,12,9
|
||||
Shut up My Moms Calling - (Sped Up),Hotel Ugly,1,2022,9,14,713,7,181831132,2,4,6,0,0,85,F,Minor,65,36,47,31,0,12,10
|
||||
Ferrari,"James Hype, Miggy Dela Rosa",2,2022,3,14,7758,28,462791599,173,79,175,0,168,125,C#,Minor,84,70,69,1,0,5,5
|
||||
"You're On Your Own, Kid",Taylor Swift,1,2022,10,21,2537,2,348647203,8,18,20,0,1,120,D,Major,69,40,39,41,0,13,6
|
||||
"Kesariya (From ""Brahmastra"")","Pritam, Arijit Singh, Amitabh Bhattacharya",3,2022,7,17,292,6,366599607,26,98,4,0,0,94,,Major,58,44,57,57,0,10,3
|
||||
Agudo MпїЅпїЅgi,"Styrx, utku INC, Thezth",3,1930,1,1,323,0,90598517,4,0,14,0,0,130,F#,Minor,65,49,80,22,4,7,5
|
||||
Payphone,"Maroon 5, Wiz Khalifa",2,2012,1,1,14143,4,1479264469,56,38,"1,891",1,,110,E,Major,74,51,74,2,0,29,4
|
||||
All I Want for Christmas Is You,Mariah Carey,1,1994,10,28,25653,0,1449779435,387,132,"2,094",0,,150,G,Major,34,33,63,16,0,7,4
|
||||
Last Christmas,Wham!,1,1984,1,1,22153,0,1159176109,274,111,"1,302",0,,107,B,Minor,74,88,65,28,0,46,3
|
||||
Rockin' Around The Christmas Tree,Brenda Lee,1,1958,1,1,14994,0,769213520,191,168,206,0,,140,G#,Major,70,85,41,71,0,45,5
|
||||
Jingle Bell Rock,Bobby Helms,1,1957,1,1,10326,0,741301563,165,99,104,0,,119,D,Major,74,78,37,84,0,6,3
|
||||
It's Beginning To Look A Lot Like Christmas,Michael Bublпї,1,2011,10,14,12353,0,807561936,35,0,549,0,0,93,E,Major,35,38,23,91,0,29,3
|
||||
Santa Tell Me,Ariana Grande,1,2013,12,13,9408,0,834129063,231,106,439,0,,133,G,Major,46,53,63,5,0,30,18
|
||||
It's the Most Wonderful Time of the Year,Andy Williams,1,1963,10,14,8879,0,663832097,182,107,160,0,1,202,G,Major,24,76,60,77,0,12,4
|
||||
Let It Snow! Let It Snow! Let It Snow!,Dean Martin,1,1959,11,16,6512,0,446390129,88,1,277,0,0,134,C#,Major,45,72,24,91,0,18,4
|
||||
Snowman,Sia,1,2017,1,1,5140,0,690104769,85,110,500,0,,105,C#,Major,72,33,51,48,0,9,3
|
||||
Underneath the Tree,Kelly Clarkson,1,2013,10,25,6596,0,485285717,144,99,251,0,0,160,G#,Major,51,69,81,0,0,21,5
|
||||
Feliz Navidad,JosпїЅпїЅ Felic,1,1970,11,1,3788,0,520034544,21,3,10,0,0,148,D,Major,50,96,82,47,0,34,4
|
||||
Holly Jolly Christmas,Michael Bublпї,1,2011,10,14,7655,0,476244795,5,0,291,0,0,151,G,Major,65,70,47,87,0,9,4
|
||||
Mistletoe,Justin Bieber,1,2011,1,1,9577,0,629173063,195,111,310,0,0,162,F#,Minor,67,88,54,51,0,9,4
|
||||
Sleigh Ride,The Ronettes,1,1963,11,22,10114,0,404664135,114,74,262,0,0,92,D,Major,53,84,77,40,0,32,3
|
||||
Seek & Destroy,SZA,1,2022,12,9,1007,0,98709329,5,31,1,0,0,152,C#,Major,65,35,65,44,18,21,7
|
||||
Love Language,SZA,1,2022,12,9,1127,0,110849052,16,63,8,0,0,65,A,Minor,71,55,26,85,0,13,8
|
||||
Happy Xmas (War Is Over),"John Lennon, The Harlem Community Choir, The Plastic Ono Band, Yoko Ono",4,1971,12,1,10829,0,460492795,130,1,390,0,0,147,D,Major,33,39,61,32,0,77,3
|
||||
Used (feat. Don Toliver),"SZA, Don Toliver",2,2022,12,8,1042,0,94005786,7,29,3,0,0,150,A#,Minor,73,71,69,53,0,32,9
|
||||
A Holly Jolly Christmas - Single Version,Burl Ives,1,1952,1,1,7930,0,395591396,108,120,73,0,0,140,,Major,67,81,36,64,0,15,3
|
||||
The Christmas Song (Merry Christmas To You) - Remastered 1999,Nat King Cole,1,1946,11,1,11500,0,389771964,140,72,251,0,0,139,C#,Major,36,22,15,84,0,11,4
|
||||
Wonderful Christmastime - Edited Version / Remastered 2011,Paul McCartney,1,1979,11,16,1685,0,403939487,1,0,29,0,0,95,B,Major,75,74,58,36,0,9,3
|
||||
Do They Know It's Christmas? - 1984 Version,Band Aid,1,1984,11,25,14169,0,481697415,209,30,449,0,0,115,,Major,60,23,57,0,2,27,3
|
||||
Ghost in the Machine (feat. Phoebe Bridgers),"SZA, Phoebe Bridgers",2,2022,12,9,1634,0,110073250,16,20,4,0,0,125,F#,Major,62,59,43,84,3,11,5
|
||||
Special,SZA,1,2022,12,9,906,0,88092256,6,21,3,0,0,76,,Major,60,19,20,78,0,11,5
|
||||
Merry Christmas Everyone,Shakin' Stevens,1,1984,1,1,1087,0,351636786,90,35,5,0,0,101,C#,Minor,72,91,87,14,0,13,3
|
||||
Let It Snow! Let It Snow! Let It Snow!,"Frank Sinatra, B. Swanson Quartet",2,1950,1,1,10585,0,473248298,126,108,406,0,0,143,D,Major,60,86,32,88,0,34,6
|
||||
SOS,SZA,1,2022,12,9,827,0,73981293,6,18,1,0,0,119,G,Minor,51,51,66,67,0,9,23
|
||||
Open Arms (feat. Travis Scott),"SZA, Travis Scott",2,2022,12,8,1420,4,155653938,13,87,17,0,46,78,A,Major,67,22,59,76,1,15,16
|
||||
White Christmas,"Bing Crosby, John Scott Trotter & His Orchestra, Ken Darby Singers",3,1942,1,1,11940,0,395591396,73,79,123,0,0,96,A,Major,23,19,25,91,0,40,3
|
||||
Driving Home for Christmas - 2019 Remaster,Chris Rea,1,1986,1,1,888,0,429504768,50,0,6,0,0,180,A,Major,51,87,58,36,0,18,4
|
||||
Christmas (Baby Please Come Home),Darlene Love,1,1963,11,22,9122,0,242767149,121,58,212,0,0,126,D#,Major,34,35,76,39,0,8,5
|
||||
Gone Girl,SZA,1,2022,12,9,892,0,65362788,3,17,2,0,0,150,F,Major,43,42,44,57,0,15,6
|
||||
F2F,SZA,1,2022,12,9,989,0,67540165,5,9,1,0,0,90,D,Major,53,47,74,9,0,34,4
|
||||
Notice Me,SZA,1,2022,12,9,819,0,62019074,14,22,0,0,0,160,F,Major,72,78,68,28,0,11,12
|
||||
Merry Christmas,"Ed Sheeran, Elton John",2,2017,11,10,2209,0,135723538,72,90,141,0,0,114,,Major,59,60,94,24,0,10,4
|
||||
It's Beginning to Look a Lot Like Christmas (with Mitchell Ayres & His Orchestra),"Perry Como, The Fontane Sisters, Mitchell Ayres & His Orchestra",3,1958,1,1,6290,0,295998468,89,39,158,0,0,113,G,Major,73,72,32,77,0,15,5
|
||||
My Only Wish (This Year),Britney Spears,1,2000,11,7,6952,0,261116938,115,53,286,0,0,147,,Major,67,69,72,17,0,19,3
|
||||
Antidepresan,"Mabel Matiz, Mert Demir",2,2022,11,4,313,2,136689549,10,6,7,1,9,100,B,Minor,70,92,59,3,0,10,3
|
||||
Wild Flower (with youjeen),RM,1,2022,12,2,353,2,135611421,2,74,14,0,2,155,G#,Major,49,42,77,3,0,12,9
|
||||
I Hate U,SZA,1,2021,12,3,4094,0,356709897,66,96,43,0,0,107,G,Minor,54,41,39,51,0,11,16
|
||||
Raindrops (Insane) [with Travis Scott],"Travis Scott, Metro Boomin",2,2022,12,2,880,0,110649992,3,7,10,0,0,112,G#,Major,80,15,54,9,0,38,5
|
||||
SPIT IN MY FACE!,ThxSoMch,1,2022,10,31,573,0,301869854,1,0,18,0,24,166,C#,Major,70,57,57,9,20,11,7
|
||||
Deck The Hall - Remastered 1999,Nat King Cole,1,1959,1,1,3299,0,127027715,65,39,41,0,0,107,F#,Minor,69,96,36,81,0,8,4
|
||||
Smoking on my Ex Pack,SZA,1,2022,12,9,811,0,57144458,6,11,3,0,0,81,G#,Major,47,33,68,24,0,22,38
|
||||
Conceited,SZA,1,2022,12,9,899,0,56870689,2,14,2,0,0,150,C#,Major,79,77,46,5,0,11,7
|
||||
Snow On The Beach (feat. Lana Del Rey),"Taylor Swift, Lana Del Rey",2,2022,10,21,2415,0,323437194,33,57,30,0,11,110,A,Major,66,19,32,69,0,12,4
|
||||
Maroon,Taylor Swift,1,2022,10,21,2304,0,317726339,12,16,14,0,0,108,G,Major,64,4,40,6,0,10,6
|
||||
TubarпїЅпїЅo Te,"Dj LK da EscпїЅпїЅcia, Tchakabum, mc jhenny, M",4,2022,9,28,1003,0,116144341,21,0,44,0,0,130,E,Minor,89,48,74,30,0,7,36
|
||||
Bejeweled,Taylor Swift,1,2022,10,21,2699,0,328207708,39,35,22,2,0,164,G,Major,70,39,56,6,0,9,7
|
||||
Tarot,"Bad Bunny, Jhay Cortez",2,2022,5,6,2482,20,608228647,27,77,22,2,0,114,B,Minor,80,42,68,2,0,66,4
|
||||
You Make It Feel Like Christmas (feat. Blake Shelton),"Gwen Stefani, Blake Shelton",2,2005,9,20,2577,0,180577478,108,56,30,0,0,93,F,Major,63,93,88,1,0,12,4
|
||||
Desesperados,"Chencho Corleone, Rauw Alejandro",2,2021,6,25,6821,34,809306935,83,58,128,7,0,90,C#,Minor,87,51,69,36,0,9,8
|
||||
Too Late,SZA,1,2022,12,8,714,0,49262961,0,9,2,0,0,128,B,Major,45,35,68,78,0,39,8
|
||||
Party,"Bad Bunny, Rauw Alejandro",2,2022,5,6,3185,4,614555082,38,64,37,3,36,97,A,Major,83,47,80,2,0,24,9
|
||||
Run Rudolph Run - Single Version,Chuck Berry,1,1958,1,1,8612,0,245350949,120,30,52,0,1,152,G,Minor,69,94,71,79,0,7,8
|
||||
Jingle Bells - Remastered 1999,Frank Sinatra,1,1957,1,1,4326,0,178660459,32,3,65,0,0,175,G#,Major,51,94,34,73,0,10,5
|
||||
Far,SZA,1,2022,12,9,680,0,51641685,2,15,1,0,0,116,D,Major,61,48,55,67,0,16,8
|
||||
On Time (with John Legend),"John Legend, Metro Boomin",2,2022,12,2,398,0,78139948,2,2,6,0,2,80,F,Minor,33,51,59,76,0,44,6
|
||||
GATпїЅпїЅ,"Maldy, Karol G",2,2022,8,25,3328,13,322336177,39,50,57,3,1,93,B,Minor,63,34,86,26,0,21,39
|
||||
эээabcdefu,Gayle,1,2021,8,13,7215,0,1007612429,170,12,575,18,,122,E,Major,70,42,54,30,0,37,5
|
||||
Sacrifice,The Weeknd,1,2022,1,7,4440,0,326792833,81,77,133,0,,122,G,Major,70,91,79,3,0,7,10
|
||||
Is There Someone Else?,The Weeknd,1,2022,1,7,2881,6,391251368,13,89,34,0,3,135,A,Minor,70,60,58,4,0,16,3
|
||||
Fingers Crossed,"Lauren Spencer Smith, Lauren Spencer Smith, Lauren Spencer Smith",3,2022,1,5,2235,0,349585590,65,7,70,16,6,109,F,Major,60,45,47,62,0,31,5
|
||||
Out of Time,The Weeknd,1,2022,1,7,3711,0,339659802,49,88,62,0,,93,,Minor,65,82,74,27,0,32,5
|
||||
Do It To It,"Cherish, ACRAZE",2,2021,8,20,12403,0,674772936,183,63,465,0,11,125,B,Minor,85,64,81,2,5,7,9
|
||||
We Don't Talk About Bruno,"Adassa, Mauro Castillo, Stephanie Beatriz, Encanto - Cast, Rhenzy Feliz, Diane Guerrero, Carolina Gaitan",7,2021,11,19,2785,0,432719968,95,89,44,0,,206,,Minor,58,83,45,36,0,11,8
|
||||
Pepas,Farruko,1,2021,6,24,14114,17,1309887447,252,109,965,20,,130,G,Major,76,44,77,1,0,13,3
|
||||
How Do I Make You Love Me?,The Weeknd,1,2022,1,7,1915,0,119238316,7,47,15,0,0,121,G,Minor,80,62,51,2,0,9,8
|
||||
Gasoline,The Weeknd,1,2022,1,7,2297,0,116903579,11,29,14,0,0,123,F#,Minor,74,35,73,0,0,21,5
|
||||
Infinity,Jaymes Young,1,2017,6,23,4375,0,888046992,24,0,396,0,0,122,B,Minor,67,50,67,15,0,30,4
|
||||
Less Than Zero,The Weeknd,1,2022,1,7,2800,0,200660871,18,77,61,0,1,143,,Major,53,50,79,0,0,8,3
|
||||
Take My Breath,The Weeknd,1,2021,8,6,2597,0,130655803,17,80,38,0,0,121,A#,Minor,70,35,77,1,0,26,4
|
||||
good 4 u,Olivia Rodrigo,1,2021,5,14,15563,6,1887039593,259,55,461,1,,166,F#,Minor,56,68,66,28,0,11,18
|
||||
"Here We GoпїЅпїЅпїЅ Again (feat. Tyler, the Cr","The Weeknd, Tyler, The Creator",3,2022,1,7,1420,0,88103848,7,18,7,0,0,135,C#,Major,41,27,64,36,0,60,3
|
||||
Best Friends,The Weeknd,1,2022,1,7,1292,0,101114984,3,18,14,0,0,87,E,Minor,49,49,59,44,0,35,21
|
||||
Kiss Me More (feat. SZA),"SZA, Doja Cat",2,2021,4,9,15867,0,1575467011,382,65,497,0,12,111,G#,Major,77,74,66,30,0,13,3
|
||||
I Heard You're Married (feat. Lil Wayne),"The Weeknd, Lil Wayne",2,2022,1,7,1178,0,91656026,9,10,9,0,0,110,D,Major,75,85,84,10,0,31,19
|
||||
Need To Know,Doja Cat,1,2021,6,11,6672,0,1042568408,125,36,150,0,,130,C#,Major,66,19,61,30,0,9,7
|
||||
MONTERO (Call Me By Your Name),Lil Nas X,1,2020,9,18,12329,0,1735441776,275,19,738,0,,179,G#,Minor,61,76,51,30,0,38,15
|
||||
love nwantiti (ah ah ah),Ckay,1,2019,7,26,5669,2,726837877,74,0,262,14,,93,F,Minor,74,53,73,61,0,13,4
|
||||
Dawn FM,The Weeknd,1,2022,1,7,811,0,53933526,1,8,6,0,0,78,A,Minor,27,10,49,62,0,49,5
|
||||
Surface Pressure,Jessica Darrow,1,2021,11,19,1756,0,267758538,38,48,20,0,0,180,D,Major,65,37,58,2,0,4,31
|
||||
Starry Eyes,The Weeknd,1,2022,1,7,1014,0,74601456,1,17,11,0,0,86,A,Minor,28,13,41,50,0,19,3
|
||||
THATS WHAT I WANT,Lil Nas X,1,2021,9,17,7963,0,920797189,173,7,298,0,4,88,C#,Minor,74,55,85,1,0,5,22
|
||||
One Right Now (with The Weeknd),"The Weeknd, Post Malone",2,2021,11,5,4771,0,539595276,68,18,75,0,1,97,C#,Major,68,72,78,4,0,7,5
|
||||
Beggin,MпїЅпїЅne,1,2017,12,8,8559,0,1367810478,183,64,964,9,75,134,B,Minor,71,59,80,13,0,36,5
|
||||
Mon Amour - Remix,"Aitana, zzoilo",2,2020,9,21,3221,2,578207856,67,13,139,0,40,116,D,Major,75,36,76,12,0,10,10
|
||||
Lo Siento BB:/ (with Bad Bunny & Julieta Venegas),"Julieta Venegas, Bad Bunny, Tainy",3,2021,10,5,3423,26,775542072,61,65,53,6,0,170,E,Minor,64,14,70,9,0,9,8
|
||||
MONEY,Lisa,1,2021,9,10,2566,13,863625566,44,109,131,0,,140,C#,Minor,83,40,55,16,0,14,23
|
||||
The Motto,"TiпїЅпїЅsto, Ava",2,2021,11,4,9151,6,656013912,240,107,268,0,5,118,G,Minor,75,46,76,3,0,9,4
|
||||
Happier Than Ever,Billie Eilish,1,2021,7,30,8476,0,1056760045,138,133,283,0,,65,,Major,31,31,24,76,0,14,4
|
||||
Moth To A Flame (with The Weeknd),"The Weeknd, Swedish House Mafia",2,2021,10,22,7495,17,611994237,114,172,284,2,,120,,Minor,56,16,67,0,0,11,4
|
||||
traitor,"Juan Cruz Toledo, Huilen Toledo",2,2021,5,21,5257,6,1230855859,85,43,134,0,,176,D#,Major,29,12,33,68,0,12,5
|
||||
Toxic,BoyWithUke,1,2021,9,10,1795,0,582981380,34,23,118,0,,180,G#,Minor,59,69,61,84,0,46,36
|
||||
drivers license,Olivia Rodrigo,1,2021,1,8,12685,3,1858144199,185,61,485,0,,144,A#,Major,59,21,43,76,0,10,7
|
||||
MalvadпїЅпї,"XamпїЅпїЅ, Gustah, Neo B",3,2021,11,30,648,1,240684449,14,3,81,0,0,133,F#,Minor,82,52,61,38,0,14,27
|
||||
All Too Well (10 Minute Version) (Taylor's Version) (From The Vault),Taylor Swift,1,2021,11,12,4635,5,583687007,50,49,30,1,2,93,,Major,63,21,52,28,0,9,3
|
||||
DonпїЅпїЅпїЅt Break My,The Weeknd,1,2022,1,7,1184,0,63803529,1,5,6,0,0,122,A#,Major,77,25,62,34,0,23,3
|
||||
Oh My God,Adele,1,2021,11,19,4431,0,466214729,105,7,199,0,0,88,C#,Major,53,55,73,9,0,3,5
|
||||
Entre Nosotros (Remix) [con Nicki Nicole],"Lit Killah, Maria Becerra, Tiago pzk, NICKI NICOLE",4,2022,1,5,759,3,236940480,32,0,12,0,0,170,F,Minor,70,61,44,40,0,37,4
|
||||
A Tale By Quincy,The Weeknd,1,2022,1,7,733,0,41924466,0,2,2,0,0,94,F,Minor,46,55,50,71,0,10,11
|
||||
I AM WOMAN,Emmy Meli,1,2021,11,18,2795,0,225259194,45,0,107,0,0,170,A#,Major,65,46,47,12,0,13,16
|
||||
Medallo,"Justin Quiles, Lenny TavпїЅпїЅrez, BL",3,2021,10,27,2780,2,393230256,54,21,57,1,0,90,E,Minor,79,79,70,57,0,66,8
|
||||
It'll Be Okay,Shawn Mendes,1,2021,12,1,1950,0,287201015,56,8,104,0,10,78,G,Major,40,7,29,62,0,9,3
|
||||
Softcore,The Neighbourhood,1,2018,3,9,3947,12,599770206,33,102,93,0,30,94,D,Major,57,36,58,5,0,15,3
|
||||
Super Gremlin,Kodak Black,1,2021,10,30,2551,0,342779426,52,15,35,0,1,73,D,Major,83,11,41,0,0,36,14
|
||||
Volvпї,"Aventura, Bad Bunny",2,2021,8,3,5375,10,673801126,138,24,133,0,1,176,C#,Major,73,79,86,42,0,7,18
|
||||
Todo De Ti,Rauw Alejandro,1,2020,11,2,11975,8,1168642797,188,75,268,6,16,128,D#,Minor,81,57,63,40,1,10,4
|
||||
Love Nwantiti - Remix,"Ckay, AX'EL, Dj Yo!",3,2019,8,30,2696,0,540539717,42,2,57,0,,120,G#,Major,58,44,60,44,9,5,6
|
||||
Smokin Out The Window,"Bruno Mars, Anderson .Paak, Silk Sonic",3,2021,11,5,4963,0,383550148,63,40,76,0,0,82,D,Major,63,85,62,6,0,35,4
|
||||
Meet Me At Our Spot,"THE ANXIETY, Willow, Tyler Cole",3,2020,3,13,6734,0,530511203,47,14,90,0,1,95,D,Major,77,40,47,2,0,9,3
|
||||
Every Angel is Terrifying,The Weeknd,1,2022,1,7,715,0,37307967,0,1,2,0,0,118,,Major,44,52,94,11,0,4,29
|
||||
Tacones Rojos,Sebastian Yatra,1,2021,10,22,3047,9,510876816,77,31,85,5,28,123,B,Minor,75,93,86,8,0,14,3
|
||||
Peaches (feat. Daniel Caesar & Giveon),"Justin Bieber, Daniel Caesar, Giveon",3,2021,3,19,14140,0,1445941661,231,52,612,6,,90,,Major,63,49,68,38,0,42,18
|
||||
Dakiti,"Bad Bunny, Jhay Cortez",2,2020,10,30,11215,21,1763363713,189,166,525,9,25,110,E,Minor,73,14,57,40,0,11,5
|
||||
"Tiago PZK: Bzrp Music Sessions, Vol. 48","Bizarrap, Tiago pzk",2,2021,12,29,1678,12,374191487,20,4,15,1,3,96,C#,Minor,66,76,82,47,0,10,32
|
||||
LпїЅпїЅпї,Stromae,1,2022,1,9,2035,0,108809090,41,122,394,0,2,88,D,Minor,56,58,55,35,0,23,11
|
||||
NostпїЅпїЅl,"Chris Brown, Rvssian, Rauw Alejandro",3,2021,9,8,2780,2,436695353,86,76,59,0,16,98,A,Minor,73,60,85,17,0,24,5
|
||||
Better Days (NEIKED x Mae Muller x Polo G),"NEIKED, Mae Muller, Polo G",3,2021,9,24,4091,0,421040617,105,2,73,13,1,110,,Minor,72,67,68,0,0,14,4
|
||||
Life Goes On,Oliver Tree,1,2020,7,17,2868,0,501541661,43,15,116,0,,80,,Major,70,57,49,19,0,12,8
|
||||
Sad Girlz Luv Money Remix (feat. Kali Uchis),"Kali Uchis, Amaarae, Moliy",3,2021,9,16,3643,0,354065229,69,3,70,0,3,110,C#,Minor,86,31,53,43,0,11,6
|
||||
Butter,BTS,1,2021,5,21,4779,6,1143647827,180,135,223,0,5,110,G#,Major,79,70,36,0,0,6,11
|
||||
pushin P (feat. Young Thug),"Young Thug, Future, Gunna",3,2022,1,7,3517,0,311395144,54,28,43,0,0,78,C#,Minor,77,49,42,1,1,13,19
|
||||
You Right,"Doja Cat, The Weeknd",2,2021,6,24,5073,0,672656250,83,9,100,0,4,129,G#,Major,83,44,62,2,0,8,6
|
||||
deja vu,Olivia Rodrigo,1,2021,4,1,7545,4,1256880657,117,39,141,0,46,181,A,Major,44,22,60,61,0,42,9
|
||||
Dynamite,BTS,1,2020,8,21,8528,5,1692897992,239,163,583,0,,114,F#,Minor,75,74,77,1,0,9,10
|
||||
TO THE MOON,Jnr Choi,1,2021,11,5,2979,0,245095641,44,0,159,0,0,144,D,Major,74,39,65,5,1,11,35
|
||||
Lost in the Fire,"The Weeknd, Gesaffelstein",2,2019,1,11,7731,2,686734357,110,145,447,0,46,101,D,Major,66,18,68,9,0,12,4
|
||||
Salimo de Noche,"Trueno, Tiago pzk",2,2021,10,21,1057,0,261414174,17,5,21,0,0,84,D,Major,67,78,63,29,0,12,5
|
||||
Volando - Remix,"Sech, Bad Bunny, Mora",3,2021,7,8,3272,19,610045621,101,34,70,1,2,154,F#,Major,66,63,69,21,0,11,7
|
||||
Leave The Door Open,"Bruno Mars, Anderson .Paak, Silk Sonic",3,2021,3,5,14417,0,1115880852,237,123,569,0,10,148,F,Major,59,72,62,18,0,9,3
|
||||
Knife Talk (with 21 Savage ft. Project Pat),"Drake, Project Pat, 21 Savage",3,2021,9,3,5199,0,594482982,45,43,45,0,3,146,F,Minor,85,22,37,13,0,7,30
|
||||
Don't Be Shy,"TiпїЅпїЅsto, Kar",2,2021,8,1,6026,0,566954746,171,51,187,1,21,120,B,Minor,77,51,79,20,0,26,6
|
||||
Love Grows (Where My Rosemary Goes),Edison Lighthouse,1,1970,1,1,2877,0,12,16,0,54,0,0,110,A,Major,53,75,69,7,0,17,3
|
||||
LA FAMA (with The Weeknd),"The Weeknd, ROSALпїЅ",2,2021,11,11,4640,3,374706940,81,93,507,6,4,136,,Minor,77,82,30,95,0,13,5
|
||||
The Family Madrigal,"Olga Merediz, Stephanie Beatriz, Encanto - Cast",3,2021,11,19,1150,0,184937148,20,38,12,0,0,141,C#,Major,59,56,63,12,0,6,36
|
||||
Phantom Regret by Jim,The Weeknd,1,2022,1,7,768,0,31959571,1,1,3,0,0,108,A,Minor,46,23,48,75,30,14,4
|
||||
Dos Oruguitas,Sebastian Yatra,1,2021,11,19,925,0,167076418,24,47,74,0,0,94,,Major,42,47,36,76,0,9,5
|
||||
Freaks,Surf Curse,1,2015,5,10,3006,3,824420218,23,21,121,0,13,180,A,Major,35,41,94,0,63,5,5
|
||||
Acapulco,Jason Derulo,1,2021,9,1,3098,0,363467642,111,5,182,1,0,122,A#,Major,77,51,79,5,0,16,5
|
||||
Daddy Issues,The Neighbourhood,1,2015,10,30,9771,4,1127468248,42,70,384,0,3,85,A#,Major,59,33,52,7,15,12,3
|
||||
thought i was playing,"21 Savage, Gunna",2,2022,1,7,807,0,60680939,3,0,5,0,0,148,C#,Major,68,29,73,0,0,7,7
|
||||
ELEVEN,IVE,1,2021,12,1,521,1,247737946,17,89,11,0,0,120,A,Major,83,59,73,6,0,5,11
|
||||
Mood (feat. Iann Dior),"24kgoldn, Iann Dior",2,2020,7,24,12854,0,1699402402,237,27,636,0,,91,G,Minor,70,76,72,22,0,27,4
|
||||
What Else Can I Do?,"Stephanie Beatriz, Diane Guerrero",2,2021,11,19,802,0,154797871,13,27,8,0,0,120,E,Major,72,54,71,26,0,10,4
|
||||
DANCE CRIP,Trueno,1,2021,11,17,731,0,198883004,14,14,24,0,4,106,,Major,86,86,79,11,0,8,9
|
||||
Miserable Man,David Kushner,1,2022,1,7,788,0,124407432,13,0,32,1,0,110,A#,Major,63,31,35,93,0,29,3
|
||||
happier,Olivia Rodrigo,1,2021,5,21,3069,4,850608354,25,46,105,0,45,169,F#,Major,39,36,45,81,0,8,13
|
||||
Praise God,Kanye West,1,2021,8,29,4651,0,376333030,24,6,105,0,0,118,C#,Major,80,21,55,1,0,26,17
|
||||
Get Into It (Yuh),Doja Cat,1,2021,6,25,4999,0,516784627,43,19,73,12,0,92,G#,Minor,91,79,66,32,0,9,16
|
||||
Before You Go,Lewis Capaldi,1,2019,11,1,8327,24,1608045237,205,130,625,0,25,112,D#,Major,45,19,60,63,0,9,6
|
||||
Sky,Playboi Carti,1,2020,12,25,3297,3,506778838,25,3,52,0,1,140,D,Major,79,56,91,26,0,13,21
|
||||
Rolling in the Deep,Adele,1,2010,11,29,35684,6,1472799873,195,125,"6,280",2,78,105,G#,Major,73,52,76,13,0,5,3
|
||||
Sobrio,Maluma,1,2021,7,8,3506,10,513643924,103,76,100,1,1,178,F,Major,76,63,77,14,0,15,22
|
||||
Peru,"Ed Sheeran, Fireboy DML",2,2021,12,23,2999,0,261286503,60,17,154,0,22,108,G,Minor,96,71,42,57,0,8,9
|
||||
favorite crime,Olivia Rodrigo,1,2021,5,21,3681,0,783706581,20,21,99,0,7,173,A,Major,40,19,29,86,0,34,4
|
||||
Thunder,"Prezioso, Gabry Ponte, LUM!X",3,2021,5,7,4846,10,422691058,54,16,259,0,13,101,C#,Major,67,40,90,3,0,34,6
|
||||
The Business,TiпїЅпї,1,2020,1,1,14311,0,1062345656,255,32,582,0,14,120,G#,Minor,80,24,62,41,2,11,23
|
||||
positions,Ariana Grande,1,2020,10,23,8207,0,1252563873,175,55,95,0,2,144,,Major,73,66,80,44,0,9,12
|
||||
I WANNA BE YOUR SLAVE,MпїЅпїЅne,1,2021,3,19,4873,0,851070493,65,88,434,3,13,133,C#,Major,75,96,61,0,0,18,4
|
||||
Vai LпїЅпїЅ Em Casa ,"MarпїЅпїЅlia MendonпїЅпїЅa, George Henrique &",2,2021,10,22,772,0,263894529,7,3,89,0,0,158,G#,Minor,46,62,83,53,0,97,28
|
||||
The Feels,TWICE,1,2021,10,1,1150,0,345903614,20,99,44,0,2,120,A,Major,81,92,90,9,0,8,7
|
||||
HEARTBREAK ANNIVERSARY,Giveon,1,2020,2,21,5398,4,951637566,111,127,210,0,37,129,,Major,61,59,46,56,0,13,5
|
||||
No Lie,"Sean Paul, Dua Lipa",2,2016,11,18,7370,0,956865266,92,127,"1,219",0,62,102,G,Major,74,45,89,5,0,26,13
|
||||
OUT OUT (feat. Charli XCX & Saweetie),"Charli XCX, Jax Jones, Joel Corry, Saweetie",4,2021,8,13,6890,0,427486004,122,11,201,0,1,124,G#,Minor,79,79,84,2,0,5,5
|
||||
Pope Is a Rockstar,SALES,1,2016,4,20,1966,0,156658366,4,2,50,0,0,90,E,Minor,73,31,45,85,24,11,3
|
||||
2055,Sleepy hallow,1,2021,4,14,2226,0,624515457,29,0,44,0,0,161,F#,Minor,78,65,52,46,0,12,31
|
||||
Bored,Billie Eilish,1,2017,3,30,4204,0,777765388,39,45,250,0,0,120,G,Major,60,11,33,90,0,8,5
|
||||
Happier Than Ever - Edit,Billie Eilish,1,2021,7,28,1959,0,412795151,19,0,38,0,0,81,,Major,45,12,57,7,0,23,3
|
||||
Astronaut In The Ocean,Masked Wolf,1,2019,1,1,7191,0,1138474110,146,18,478,0,7,150,E,Minor,78,47,70,18,0,15,9
|
||||
Ley Seca,"Anuel Aa, Jhay Cortez",2,2021,9,2,2012,0,355219175,101,12,38,0,0,105,G#,Major,76,58,84,10,0,13,10
|
||||
Every Summertime,NIKI,1,2021,8,10,1211,2,290228626,30,2,5,0,6,79,F#,Major,63,76,67,38,0,6,4
|
||||
Talking To The Moon,Bruno Mars,1,2010,10,4,7109,2,1062956628,5,0,862,0,0,146,C#,Minor,52,7,61,51,0,11,3
|
||||
you broke me first,Tate McRae,1,2020,4,17,6638,0,1180094974,167,19,318,0,1,124,E,Major,64,8,37,79,0,9,5
|
||||
The Nights,Avicii,1,2014,1,1,17354,8,1456081449,92,122,"1,282",0,55,126,F#,Major,53,66,85,2,0,24,4
|
||||
Take My Breath,The Weeknd,1,2021,8,6,6392,0,432702334,174,73,344,0,0,121,G#,Major,75,53,74,2,0,11,5
|
||||
Way 2 Sexy (with Future & Young Thug),"Drake, Future, Young Thug",3,2021,9,3,5481,0,489945871,144,30,86,0,0,136,B,Minor,80,33,60,0,0,32,14
|
||||
CпїЅпїЅ,Rauw Alejandro,1,2021,6,25,2636,3,465959382,36,82,39,0,5,102,A,Minor,77,26,63,58,0,11,5
|
||||
Enemy - from the series Arcane League of Legends,"Imagine Dragons, League of Legends, Arcane",3,2021,10,27,824,0,1223481149,8,8,8,2,0,77,G,Major,73,54,74,23,0,41,23
|
||||
Dance Monkey,Tones and I,1,2019,5,10,24529,0,2864791672,533,167,"3,595",6,,98,F#,Minor,82,54,59,69,0,18,10
|
||||
Lucid Dreams,Juice WRLD,1,2017,6,15,14749,0,2288695111,188,34,710,0,5,84,B,Minor,44,22,48,38,0,33,24
|
||||
QuпїЅпїЅ MпїЅпї,"J Balvin, Maria Becerra",2,2021,1,28,8087,0,720825549,92,34,131,0,0,102,G#,Major,89,77,82,3,0,17,11
|
||||
All of Me,John Legend,1,2013,8,1,27221,0,2086124197,308,118,"4,534",2,77,120,G#,Major,42,33,26,92,0,13,3
|
||||
Smells Like Teen Spirit - Remastered 2021,Nirvana,1,1991,9,10,49991,9,1690192927,265,121,"12,367",4,160,117,C#,Major,52,73,91,0,0,11,7
|
||||
Arcade,Duncan Laurence,1,2019,3,7,6646,0,991336132,107,47,584,1,,72,A,Minor,45,27,33,82,0,14,4
|
||||
Fair Trade (with Travis Scott),"Drake, Travis Scott",2,2021,9,3,5403,6,593917618,118,116,96,0,3,168,C#,Major,67,29,47,5,0,22,26
|
||||
Bar,"Tini, L-Gante",2,2021,11,11,755,4,200972675,22,3,0,0,0,94,C#,Major,52,68,69,13,0,33,8
|
||||
The Rumbling (TV Size),SiM,1,2022,1,10,254,0,71014967,0,4,23,0,0,145,G#,Major,41,65,88,0,0,26,5
|
||||
family ties (with Kendrick Lamar),"Kendrick Lamar, Baby Keem",2,2021,8,27,6308,5,560222750,114,15,63,0,2,134,C#,Major,71,14,61,1,0,23,33
|
||||
Mr. Brightside,The Killers,1,2003,9,23,51979,15,1806617704,306,99,"5,063",2,120,148,C#,Major,35,24,93,0,0,10,8
|
||||
Blessed-Cursed,ENHYPEN,1,2022,1,10,246,0,77337771,2,12,10,0,0,127,E,Minor,60,40,89,9,0,60,6
|
||||
AM Remix,"J Balvin, Nio Garcia, Bad Bunny",3,2021,6,24,6556,0,528544703,93,5,67,1,0,172,F#,Minor,74,79,66,11,0,8,16
|
||||
Streets,Doja Cat,1,2019,11,7,5728,0,865640097,85,87,179,0,12,90,B,Major,75,19,46,21,4,34,8
|
||||
Shallow,"Lady Gaga, Bradley Cooper",2,2018,9,27,16636,12,2159346687,368,155,"2,854",6,121,96,G,Major,57,30,40,38,0,26,3
|
||||
Polaroid Love,ENHYPEN,1,2022,1,10,461,0,211372494,10,56,17,0,0,138,G,Minor,74,65,67,46,0,36,4
|
||||
Leave Before You Love Me (with Jonas Brothers),"Marshmello, Jonas Brothers",2,2021,4,28,4893,4,651732901,143,24,110,0,70,120,G,Major,72,67,72,0,0,13,4
|
||||
Permission to Dance,BTS,1,2021,7,9,1801,2,608334048,77,134,74,0,2,125,A,Major,70,65,74,1,0,34,4
|
||||
Friday (feat. Mufasa & Hypeman) - Dopamine Re-Edit,"Riton, Nightcrawlers, Mufasa & Hypeman, Dopamine",4,2021,1,15,12043,0,863756573,209,54,710,0,18,123,D,Major,82,80,86,1,0,30,13
|
||||
RAPSTAR,Polo G,1,2021,4,9,4731,0,797402345,141,12,78,0,0,81,F#,Major,79,44,54,41,0,13,24
|
||||
'Till I Collapse,"Eminem, Nate Dogg",2,2002,5,26,22923,0,1695712020,78,46,"2,515",1,0,171,C#,Major,55,10,85,7,0,8,20
|
||||
Memories,Maroon 5,1,2019,9,20,9974,2,1759567999,272,67,"1,066",1,19,91,B,Major,77,60,32,84,0,8,5
|
||||
Se Le Ve,"Arcangel, De La Ghetto, Justin Quiles, Lenny TavпїЅпїЅrez, Sech, Dalex, Dimelo Flow, Rich Music",8,2021,8,12,1560,0,223319934,72,0,0,0,0,84,G,Minor,56,61,76,10,0,14,11
|
||||
25k jacket (feat. Lil Baby),"Gunna, Lil Baby",2,2022,1,7,620,0,54937991,17,3,3,0,0,115,F,Minor,90,74,54,16,0,13,28
|
||||
When IпїЅпїЅпїЅm Gone (with Katy ,"Katy Perry, Alesso",2,2021,12,29,3270,0,226897599,89,21,65,0,0,125,,Major,69,70,89,4,0,49,3
|
||||
EsqueпїЅпїЅa-Me Se For C,"MarпїЅпїЅlia MendonпїЅпїЅa, Maiara &",2,2021,10,14,580,0,258316038,24,92,93,0,0,122,F#,Minor,80,62,69,28,0,13,7
|
||||
MiпїЅпїЅn,"Tini, Maria Becerra",2,2021,4,29,3406,16,596152090,61,23,70,1,0,92,D,Major,85,92,54,16,0,29,7
|
||||
SпїЅпїЅ,"Anuel Aa, Myke Towers, Jhay Cortez",3,2021,11,18,903,0,177129919,30,26,15,0,0,90,F#,Minor,63,75,75,17,0,11,8
|
||||
MAMIII,"Karol G, Becky G",2,2022,2,10,6809,28,716591492,151,102,175,5,29,94,E,Minor,84,90,70,9,0,14,8
|
||||
Still D.R.E.,"Dr. Dre, Snoop Dogg",2,1999,1,1,33966,0,1210599487,141,78,"6,591",1,0,93,B,Major,81,53,78,18,0,6,24
|
||||
Stay Alive (Prod. SUGA of BTS),Jung Kook,1,2022,2,11,590,5,246376690,4,113,20,0,1,130,D,Minor,51,50,76,30,0,10,7
|
||||
Boyfriend,Dove Cameron,1,2022,2,11,3766,0,496311364,76,0,102,0,2,180,G,Minor,35,23,61,23,0,19,6
|
||||
The Joker And The Queen (feat. Taylor Swift),"Ed Sheeran, Taylor Swift",2,2022,2,11,1430,0,146789379,46,1,34,0,0,134,,Major,53,31,31,92,0,28,3
|
||||
The Next Episode,"Dr. Dre, Snoop Dogg",2,1999,1,1,31762,0,843309044,142,40,"5,451",1,953,95,D#,Minor,92,31,89,3,0,8,25
|
||||
Light Switch,Charlie Puth,1,2022,1,19,4049,0,500340342,119,6,85,1,1,184,F#,Major,69,91,63,11,0,9,31
|
||||
City of Gods,"Kanye West, Alicia Keys, Fivio Foreign",3,2022,2,11,2096,0,107255472,34,3,30,0,0,147,G#,Minor,47,50,80,10,0,32,38
|
||||
Brividi,"Mahmood, Blanco",2,2022,2,2,1175,0,135079152,34,1,31,0,0,123,G,Major,52,44,60,40,0,26,3
|
||||
Lost,Frank Ocean,1,2012,1,1,29499,11,822239726,124,27,587,0,4,123,A#,Minor,91,49,61,3,0,17,22
|
||||
In Da Club,50 Cent,1,2002,1,1,30427,7,1202722675,235,106,"5,221",1,35,90,F#,Minor,90,79,71,26,0,7,37
|
||||
she's all i wanna be,Tate McRae,1,2022,2,4,2711,0,343197054,105,12,51,0,12,160,D,Minor,61,65,64,1,0,12,4
|
||||
Ginseng Strip 2002,Yung Lean,1,2013,8,16,4310,0,240769997,13,0,113,1,3,115,C#,Major,60,37,71,8,0,48,4
|
||||
All For Us - from the HBO Original Series Euphoria,"Labrinth, Zendaya",2,2019,8,4,5342,0,426204830,33,123,7,0,4,141,D#,Major,37,17,67,2,0,34,9
|
||||
Notion,The Rare Occasions,1,2016,8,5,2393,0,421135627,22,13,65,0,6,160,A,Major,31,30,88,7,0,8,8
|
||||
Formula,Labrinth,1,2019,10,4,3444,7,554875730,24,85,102,0,1,145,B,Major,57,23,66,4,6,15,4
|
||||
Mount Everest,Labrinth,1,2019,6,21,5443,0,467727006,45,1,80,0,12,89,,Minor,46,43,43,23,8,8,22
|
||||
Excuses,"Intense, AP Dhillon, Gurinder Gill",3,2020,7,24,272,4,327498031,7,21,2,0,0,95,F,Minor,84,49,72,8,0,15,8
|
||||
Cigarettes,Juice WRLD,1,2021,12,10,975,0,185408548,11,0,10,0,0,160,D,Major,60,47,62,3,0,38,4
|
||||
"CayпїЅпїЅ La Noche (feat. Cruz CafunпїЅпїЅ, Abhir Hathi, Bejo, EL IMA)","Quevedo, La Pantera, Juseph, Cruz CafunпїЅпїЅ, BпїЅпїЅjo, Abhir Hathi",7,2022,1,14,1034,1,245400167,19,5,12,0,0,174,F,Minor,67,74,75,44,0,7,30
|
||||
California Love - Original Version (Explicit),"Dr. Dre, 2Pac, Roger",3,1995,12,28,18773,0,579395142,128,17,"1,663",0,0,92,G,Major,77,76,84,3,0,38,4
|
||||
Forgot About Dre,"Eminem, Dr. Dre",2,1999,1,1,19067,0,675039469,78,15,"1,674",0,3,134,G#,Major,93,61,74,9,0,17,8
|
||||
Down Under (feat. Colin Hay),"Luude, Colin Hay",2,2021,11,19,3541,2,252871192,57,13,50,1,7,172,B,Minor,31,4,86,1,0,28,17
|
||||
Mujeriego,Ryan Castro,1,2021,12,17,1492,0,231996128,44,63,91,0,24,80,A,Major,76,92,62,6,0,8,13
|
||||
HUMBLE.,Kendrick Lamar,1,2017,3,30,33206,1,1929770265,284,114,"1,481",0,5,150,C#,Minor,91,42,60,0,0,9,12
|
||||
Stan,"Eminem, Dido",2,1999,11,21,17115,0,918915401,83,63,"4,180",0,0,80,F#,Minor,78,53,74,4,0,45,21
|
||||
Contection,"GODZZ__-, Zakaria",2,2020,1,16,6955,0,1180896317,65,45,398,0,1,166,A#,Minor,81,83,75,14,0,29,34
|
||||
Swim,Chase Atlantic,1,2017,10,4,2742,0,498960285,5,1,62,0,0,120,G#,Major,67,8,54,27,0,8,4
|
||||
A Tu Merced,Bad Bunny,1,2020,2,29,4214,11,685071800,21,20,40,0,0,92,,Major,86,89,79,17,0,11,6
|
||||
Numb Little Bug,Em Beihold,1,2022,1,28,2026,0,258714692,47,3,42,12,6,85,G#,Minor,74,61,52,30,0,26,9
|
||||
212,"Mainstreet, Chefin",2,2022,1,15,352,0,143139338,10,0,39,0,0,154,D,Minor,79,86,52,66,0,9,7
|
||||
ProblemпїЅ,"Alvaro Diaz, Rauw Alejandro",2,2021,10,22,1517,0,209768491,42,7,15,0,4,92,D,Minor,73,37,74,28,0,14,6
|
||||
Bussin,"Nicki Minaj, Lil Baby",2,2022,2,11,847,0,64714573,25,0,14,0,0,140,B,Major,89,75,70,6,0,13,5
|
||||
Worst Day,Future,1,2022,2,11,910,0,50746620,20,3,24,0,0,89,C#,Major,65,60,60,11,0,10,40
|
||||
Malvada,ZпїЅпїЅ Fe,1,2022,1,28,601,0,154119539,28,73,64,0,0,135,F,Minor,89,89,86,16,0,8,5
|
||||
Hrs and Hrs,Muni Long,1,2021,11,19,1800,0,181328253,43,36,46,13,4,140,G#,Minor,51,66,53,60,0,11,18
|
||||
Alien Blues,Vundabar,1,2015,7,24,1930,0,370068639,3,0,28,0,1,82,D#,Major,47,44,76,8,91,9,3
|
||||
Thinking Out Loud,Ed Sheeran,1,2014,1,1,33032,0,2280566092,363,129,"3,895",0,28,79,D,Major,78,58,45,47,0,18,3
|
||||
Still Don't Know My Name,Labrinth,1,2019,10,4,6332,0,563902868,47,116,266,0,0,88,F,Major,31,31,63,47,27,21,12
|
||||
Christmas Tree,V,1,2021,12,24,509,9,317622165,8,106,6,0,8,139,F,Major,44,18,38,70,0,12,4
|
||||
Mal Feito - Ao Vivo,"MarпїЅпїЅlia MendonпїЅпїЅa, Hugo & G",2,2022,1,14,971,2,291709698,35,104,93,1,0,124,,Minor,73,68,83,55,0,90,7
|
||||
When I R.I.P.,Labrinth,1,2019,10,4,2578,0,203680270,8,67,66,0,0,80,G#,Minor,39,45,55,73,0,9,21
|
||||
Do We Have A Problem?,"Nicki Minaj, Lil Baby",2,2022,2,4,1064,0,81350745,42,1,26,0,0,120,C#,Minor,84,54,51,47,0,12,40
|
||||
Forever,Labrinth,1,2019,10,4,3618,0,282883169,21,86,138,0,2,80,E,Minor,56,19,46,92,72,11,3
|
||||
Gospel (with Eminem),"Eminem, Dr. Dre",2,2022,2,4,1040,0,64787943,8,0,29,0,0,117,C#,Major,92,62,86,11,0,24,24
|
||||
SeпїЅпїЅo,"Shawn Mendes, Camila Cabello",2,2019,6,19,15010,2,2484812918,453,50,"1,785",1,8,117,A,Minor,76,77,52,4,0,8,3
|
||||
NEW MAGIC WAND,"Tyler, The Creator",2,2019,5,16,4708,0,461437791,13,7,55,0,1,140,F,Minor,62,46,73,10,0,67,11
|
||||
Adore You,Harry Styles,1,2019,12,6,13454,1,1439191367,246,71,519,2,5,99,G#,Major,68,57,77,2,0,10,5
|
||||
La Santa,"Daddy Yankee, Bad Bunny",2,2020,2,29,4890,20,759208783,52,42,100,0,0,93,C#,Major,74,59,87,3,0,8,5
|
||||
Something In The Way - Remastered 2021,Nirvana,1,1991,9,24,9514,0,368646862,45,27,"1,197",0,43,106,G#,Major,44,8,20,74,42,11,3
|
||||
Sweetest Pie,"Dua Lipa, Megan Thee Stallion",2,2022,3,11,3501,0,299634472,69,2,51,11,0,124,G,Major,81,68,63,17,0,10,22
|
||||
Bam Bam (feat. Ed Sheeran),"Camila Cabello, Ed Sheeran",2,2022,3,4,6111,4,756907987,185,40,492,9,35,95,G#,Major,76,96,70,18,0,33,4
|
||||
Una Noche en MedellпїЅ,Cris Mj,1,2022,1,21,5415,32,682475162,46,16,53,1,2,96,A#,Minor,87,82,53,10,0,5,8
|
||||
Envolver,Anitta,1,2021,11,11,4673,2,546191065,123,113,180,1,4,92,E,Minor,81,40,73,15,0,9,8
|
||||
Starlight,Dave,1,2022,3,3,1856,3,229473310,29,40,31,1,1,124,G,Major,95,36,37,35,0,10,28
|
||||
Hati-Hati di Jalan,Tulus,1,2022,3,3,200,2,202677468,12,4,0,0,0,72,F#,Major,64,76,44,70,9,12,4
|
||||
"I'm Tired - From ""Euphoria"" An Original HBO Series",Labrinth,1,2022,2,4,1888,0,121913181,26,1,58,0,0,71,,Minor,28,26,20,19,0,30,3
|
||||
DANпїЅпїЅA,"Mc Pedrinho, Pedro Sampaio",2,2022,2,1,911,2,208166039,45,0,99,1,1,135,A,Minor,78,55,57,4,0,10,8
|
||||
Yo Voy (feat. Daddy Yankee),Zion & Lennox,1,2004,5,4,2954,2,527033089,18,82,0,0,5,95,A#,Major,81,56,70,4,0,5,24
|
||||
"Residente: Bzrp Music Sessions, Vol. 49","Residente, Bizarrap",2,2022,3,3,461,0,94616487,7,11,13,0,0,71,C#,Minor,59,70,74,56,0,11,40
|
||||
Jordan,Ryan Castro,1,2021,4,30,2844,2,394030335,29,2,35,1,0,180,A#,Minor,80,71,68,6,0,10,37
|
||||
Nail Tech,Jack Harlow,1,2022,2,18,2939,0,193443895,42,0,24,0,0,150,E,Minor,65,11,58,0,0,12,9
|
||||
Chale,Eden MuпїЅп,1,2022,2,18,695,11,299648208,16,41,13,1,1,189,G#,Major,55,86,44,40,0,7,4
|
||||
DARARI,Treasure,1,2022,2,15,328,0,182978249,10,21,7,0,9,85,,Major,72,55,64,49,0,7,4
|
||||
Ya No Somos Ni Seremos,Christian Nodal,1,2022,2,18,866,16,319757142,27,84,32,7,11,140,G,Major,59,73,45,44,0,34,3
|
||||
Thinking with My Dick,"Kevin Gates, Juicy J",2,2013,7,16,1550,0,109091573,0,0,14,0,0,81,,Major,77,68,70,6,0,17,20
|
||||
Freaky Deaky,"Tyga, Doja Cat",2,2022,2,25,1729,0,153240879,26,1,19,0,0,104,D,Minor,80,24,65,2,0,9,4
|
||||
this is what falling in love feels like,JVKE,1,2021,9,3,2005,0,346127840,16,5,43,0,7,129,B,Major,42,33,44,62,0,8,6
|
||||
La Zona,Bad Bunny,1,2020,2,29,1188,0,312622938,13,1,15,0,1,94,C#,Minor,76,81,80,20,0,25,4
|
||||
Bohemian Rhapsody - Remastered 2011,Queen,1,1975,10,31,40112,3,2197010679,321,162,"5,691",8,17,71,,Minor,41,23,40,27,0,30,5
|
||||
Hope,XXXTENTACION,1,2018,3,16,3659,0,1200808494,11,10,267,0,7,146,A,Minor,59,23,46,66,0,15,6
|
||||
Levitating,Dua Lipa,1,2020,3,27,9833,0,797196073,233,82,531,1,1,103,F#,Minor,69,90,88,5,0,29,8
|
||||
Wake Me Up - Radio Edit,Avicii,1,2013,1,1,50887,34,1970673297,315,160,"6,284",1,46,124,D,Major,53,66,78,0,0,16,5
|
||||
"jealousy, jealousy",Olivia Rodrigo,1,2021,5,21,3257,0,665765558,10,0,70,0,0,164,A#,Minor,70,71,58,24,0,7,13
|
||||
MonпїЅпїЅy so,YEAT,1,2021,9,10,1473,0,263779030,2,0,12,0,10,138,D,Major,69,35,57,12,0,10,9
|
||||
Demasiadas Mujeres,C. Tangana,1,2020,10,8,2226,0,339473453,36,2,11,0,14,126,A,Minor,67,37,46,13,0,10,39
|
||||
Something Just Like This,"The Chainsmokers, Coldplay",2,2017,2,22,23375,21,2204080728,336,188,"2,692",3,30,103,B,Minor,61,47,65,3,0,17,4
|
||||
Closer,"The Chainsmokers, Halsey",2,2016,5,31,28032,0,2591224264,315,159,"2,179",0,44,95,G#,Major,75,64,52,41,0,11,3
|
||||
O.O,NMIXX,1,2022,2,22,290,0,135444283,9,66,10,0,0,200,B,Minor,39,28,77,4,0,6,29
|
||||
Somebody That I Used To Know,"Gotye, Kimbra",2,2011,1,1,42798,0,1457139296,217,136,"6,508",1,,129,,Major,86,75,52,54,0,10,4
|
||||
Tom's Diner,"AnnenMayKantereit, Giant Rooks",2,2019,6,28,2605,0,236872197,15,48,50,0,3,98,F#,Minor,68,33,43,38,0,36,14
|
||||
First Class,Jack Harlow,1,2022,4,8,8737,0,694525298,163,32,137,15,12,107,G#,Major,91,32,56,3,0,11,10
|
||||
Plan A,Paulo Londra,1,2022,3,23,1105,0,240661097,32,0,19,0,0,174,,Major,58,56,83,5,0,7,4
|
||||
Fuera del mercado,Danny Ocean,1,2022,2,17,2499,21,421365166,68,24,43,1,0,92,G#,Major,45,27,67,32,0,13,14
|
||||
X пїЅпїЅLTIMA,"Daddy Yankee, Bad Bunny",2,2022,3,25,2697,1,349746291,55,3,32,0,0,90,G,Major,81,59,83,9,0,11,5
|
||||
When You're Gone,Shawn Mendes,1,2022,3,31,2092,0,255120451,75,11,44,0,14,147,F,Major,60,58,69,2,0,58,4
|
||||
In My Head,Lil Tjay,1,2022,4,1,1185,0,190981339,21,0,31,0,0,143,C#,Major,68,41,55,16,0,10,12
|
||||
Wait a Minute!,Willow,1,2015,1,11,11985,0,924193303,79,80,250,3,10,101,D#,Minor,76,63,71,3,0,10,3
|
||||
LOVE DIVE,IVE,1,2022,4,5,753,8,305771063,28,124,13,0,1,118,C#,Minor,70,54,71,0,0,33,4
|
||||
Pantysito,"Feid, Alejo, Robi",3,2022,3,18,2995,0,273005485,49,17,30,0,0,98,G,Minor,83,63,74,14,0,16,6
|
||||
Chance,Paulo Londra,1,2022,4,6,225,0,89566512,11,0,7,0,0,138,C#,Minor,72,22,46,24,0,9,6
|
||||
Cool for the Summer,Demi Lovato,1,2015,1,1,9243,0,677389855,155,5,577,0,6,114,F,Minor,59,30,62,1,0,8,4
|
||||
psychofreak (feat. WILLOW),"Camila Cabello, Willow",2,2022,4,7,918,0,75476209,24,0,52,0,0,180,G,Minor,63,45,64,34,0,9,8
|
||||
Angel Baby,Troye Sivan,1,2021,9,9,1959,9,408843328,52,25,32,0,10,145,B,Major,56,41,57,1,0,13,3
|
||||
Vampiro,"MatuпїЅпїЅ, Wiu, ",3,2022,1,30,540,4,187772591,26,3,39,0,0,115,G#,Minor,78,63,64,1,0,7,4
|
||||
Si Quieren Frontear,"De La Ghetto, Duki, Quevedo",3,2022,3,31,859,0,178512385,14,0,11,0,0,82,A#,Major,79,48,84,13,0,21,23
|
||||
Right On,Lil Baby,1,2022,4,8,1116,0,101780047,31,9,15,0,1,166,D,Major,70,22,61,2,0,10,34
|
||||
Me Arrepentпї,"Ak4:20, Cris Mj, Pailita",3,2022,3,30,273,2,118381354,12,2,4,0,0,92,,Major,86,91,79,29,0,60,16
|
||||
That's Hilarious,Charlie Puth,1,2022,4,8,686,2,146363130,11,6,12,0,15,108,F#,Major,71,55,44,74,0,11,6
|
||||
Soy El Unico,Yahritza Y Su Esencia,1,2022,3,25,226,0,126443991,5,0,4,0,1,84,E,Minor,71,63,45,45,0,11,3
|
||||
RUMBATпїЅ,Daddy Yankee,1,2022,3,25,1264,0,157990698,20,4,52,0,1,95,D#,Minor,69,90,91,6,0,35,4
|
||||
sentaDONA (Remix) s2,"LuпїЅпїЅsa Sonza, MC Frog, Dj Gabriel do Borel, Davi K",4,2022,3,17,870,0,176290831,32,0,49,0,0,135,D#,Minor,93,77,45,25,0,13,27
|
||||
Falling,Harry Styles,1,2019,12,13,7556,0,1023187129,124,24,254,0,8,110,E,Major,57,6,27,84,0,9,3
|
||||
Sigue,"Ed Sheeran, J Balvin",2,1996,11,24,1370,0,106933107,46,8,60,0,0,88,G,Major,89,59,64,5,0,19,7
|
||||
Fim de Semana no Rio,teto,1,2022,3,30,315,2,139193812,27,2,31,0,0,119,C#,Minor,87,28,50,12,0,10,10
|
||||
MANIAC,Stray Kids,1,2022,3,18,651,0,212234990,6,188,20,0,0,120,C#,Major,58,71,80,15,0,7,41
|
||||
There's Nothing Holdin' Me Back,Shawn Mendes,1,2016,9,23,12382,0,1714490998,229,57,"1,370",2,71,122,D,Major,86,97,80,36,0,9,6
|
||||
IDGAF (with blackbear),"Blackbear, BoyWithUke",2,2022,3,18,625,0,197643795,10,2,13,0,1,98,F,Major,78,83,75,41,0,27,8
|
||||
Golden,Harry Styles,1,2019,12,13,8429,1,807015863,85,24,200,0,2,140,E,Minor,45,25,84,21,0,13,6
|
||||
Get Lucky - Radio Edit,"Pharrell Williams, Nile Rodgers, Daft Punk",3,2013,1,1,52898,0,933815613,203,1,"8,215",0,0,116,F#,Minor,79,87,81,4,0,10,4
|
||||
Ain't Shit,Doja Cat,1,2021,6,25,3436,0,499710590,32,6,46,0,1,124,D#,Major,86,62,49,51,0,35,21
|
||||
"Nobody Like U - From ""Turning Red""","Jordan Fisher, Josh Levi, Finneas O'Connell, 4*TOWN (From Disney and PixarпїЅпїЅпїЅs Turning Red), Topher Ngo, Grayson Vill",6,2022,2,25,918,0,120847157,34,39,30,0,0,105,A,Minor,91,73,72,13,0,9,15
|
||||
Still Life,BIGBANG,1,2022,4,5,181,0,53909146,16,14,7,0,0,118,G#,Major,68,24,58,44,0,6,3
|
||||
Photograph,Ed Sheeran,1,2014,6,20,18778,3,2236667932,228,105,"2,453",0,84,108,E,Major,61,20,38,61,0,10,5
|
||||
Love Yourself,Justin Bieber,1,2015,11,9,22730,5,2123309722,289,87,"2,430",0,36,100,E,Major,61,53,38,84,0,28,44
|
||||
N95,Kendrick Lamar,1,2022,5,13,5542,0,301242089,52,16,65,0,206,140,D#,Minor,81,39,66,38,0,12,14
|
||||
About Damn Time,Lizzo,1,2022,4,14,9021,0,723894473,242,49,272,21,24,109,A#,Minor,84,72,74,10,0,34,7
|
||||
Die Hard,"Kendrick Lamar, Blxst, Amanda Reifer",3,2022,5,13,4627,0,237351106,38,13,32,0,0,101,C#,Minor,78,40,74,36,0,17,27
|
||||
DespuпїЅпїЅs de la P,Bad Bunny,1,2022,5,6,2229,0,461558540,27,44,24,0,5,78,F,Major,56,61,90,36,0,18,31
|
||||
Un Ratito,Bad Bunny,1,2022,5,6,1112,6,417230415,7,30,13,1,1,93,,Minor,79,22,55,31,0,12,5
|
||||
United In Grief,Kendrick Lamar,1,2022,5,13,2575,0,156898322,4,5,12,0,51,87,G#,Major,52,32,83,24,0,17,43
|
||||
Father Time (feat. Sampha),"Kendrick Lamar, Sampha",2,2022,5,13,3107,0,127309180,4,0,22,0,0,153,A#,Minor,55,50,78,19,0,11,35
|
||||
Yo No Soy Celoso,Bad Bunny,1,2022,5,6,1179,0,313113297,7,21,11,0,0,142,,Major,87,93,59,28,0,17,5
|
||||
Rich Spirit,Kendrick Lamar,1,2022,5,13,3486,0,173702135,20,10,33,0,0,96,A#,Minor,85,41,43,39,0,12,21
|
||||
Cooped Up (with Roddy Ricch),"Post Malone, Roddy Ricch",2,2022,5,12,2942,0,271666301,42,28,43,0,0,125,,Major,50,39,78,4,0,11,33
|
||||
Me Fui de Vacaciones,Bad Bunny,1,2022,5,6,1443,0,305650299,9,11,22,1,0,85,A#,Major,71,43,65,23,0,9,5
|
||||
Silent Hill,"Kendrick Lamar, Kodak Black",2,2022,5,13,3028,0,123216717,22,0,23,0,0,140,C#,Minor,92,78,57,46,0,14,9
|
||||
La Corriente,"Tony Dize, Bad Bunny",2,2022,5,6,1796,8,479655659,8,25,18,1,0,196,B,Minor,66,58,79,23,0,22,20
|
||||
Count Me Out,Kendrick Lamar,1,2022,5,13,2729,0,126191104,3,7,13,0,1,134,G,Major,78,51,43,69,0,14,9
|
||||
Andrea,"Buscabulla, Bad Bunny",2,2022,5,6,1195,0,344055883,8,30,13,1,1,103,C#,Minor,80,45,62,76,0,10,38
|
||||
Dos Mil 16,Bad Bunny,1,2022,5,6,892,3,338422004,10,24,11,0,0,130,C#,Major,82,50,67,12,0,13,5
|
||||
We Cry Together,"Kendrick Lamar, Taylour Paige",2,2022,5,13,1635,0,68895644,4,1,6,0,0,108,B,Major,65,52,69,31,0,8,36
|
||||
Savior,"Kendrick Lamar, Sam Dew, Baby Keem",3,2022,5,13,2291,0,86176890,9,0,8,0,0,123,G#,Major,61,66,71,53,0,32,46
|
||||
Un Coco,Bad Bunny,1,2022,5,6,1029,28,403231558,5,28,9,0,0,152,B,Major,84,74,69,21,0,18,6
|
||||
Otro Atardecer,"Bad Bunny, The MarпїЅп",2,2022,5,6,1681,7,319546754,10,30,13,0,33,108,D,Major,78,55,60,59,0,7,4
|
||||
Worldwide Steppers,Kendrick Lamar,1,2022,5,13,1480,0,61739839,1,0,5,0,0,72,A#,Minor,56,56,47,76,0,8,36
|
||||
Aguacero,Bad Bunny,1,2022,5,6,829,0,283359161,4,15,10,0,0,121,F#,Minor,86,67,65,42,0,35,7
|
||||
Purple Hearts,"Kendrick Lamar, Ghostface Killah, Summer Walker",3,2022,5,13,2308,0,76831876,7,0,7,0,0,138,D#,Minor,57,71,82,19,0,15,29
|
||||
Un Verano Sin Ti,Bad Bunny,1,2022,5,6,1004,1,283332261,8,12,9,0,0,188,F,Minor,50,41,50,69,0,12,6
|
||||
ULTRA SOLO,"Polima WestCoast, Pailita",2,2022,2,14,1367,0,307752576,48,4,34,1,0,110,C#,Major,80,26,85,23,0,11,21
|
||||
EnsпїЅпїЅпїЅпїЅame ,Bad Bunny,1,2022,5,6,1112,3,279737940,7,25,12,0,0,105,G#,Major,81,77,79,19,0,47,8
|
||||
El ApagпїЅ,Bad Bunny,1,2022,5,6,1209,0,212351890,9,7,14,0,1,118,G#,Major,63,60,70,5,0,9,31
|
||||
Callaita,"Bad Bunny, Tainy",2,2019,5,31,9539,15,1304313953,162,116,355,7,0,176,D,Major,61,24,62,60,0,24,31
|
||||
Dua Lipa,Jack Harlow,1,2022,5,6,1992,0,150500965,35,0,3,0,0,158,B,Major,83,41,65,0,10,11,8
|
||||
Agosto,Bad Bunny,1,2022,5,6,897,0,246127838,6,20,8,0,0,115,C#,Minor,85,72,58,9,0,49,12
|
||||
House Of Memories,Panic! At The Disco,1,2016,1,15,2948,0,582863434,10,2,150,0,0,110,B,Minor,51,48,82,0,0,5,3
|
||||
Mr. Morale,"Kendrick Lamar, Tanna Leone",2,2022,5,13,1860,0,58687425,1,0,3,0,0,174,A,Major,73,26,54,30,0,34,32
|
||||
That That (prod. & feat. SUGA of BTS),"PSY, Suga",2,2022,4,29,802,0,212109195,16,81,23,0,0,130,E,Major,91,91,96,3,0,3,9
|
||||
In The Stars,Benson Boone,1,2022,4,29,2224,8,382199619,48,40,87,1,210,78,A#,Major,36,30,54,34,0,14,5
|
||||
Rich - Interlude,Kendrick Lamar,1,2022,5,13,1103,0,41210087,0,0,0,0,0,104,G,Major,44,74,42,88,0,9,9
|
||||
SUPERMODEL,MпїЅпїЅne,1,2022,5,13,2265,0,231657891,93,12,173,11,3,121,G,Major,64,80,88,0,0,12,6
|
||||
Stefania (Kalush Orchestra),KALUSH,1,2022,3,10,555,0,53729194,10,4,4,0,0,105,D,Major,83,32,82,14,0,12,4
|
||||
Thousand Miles,The Kid Laroi,1,2022,4,22,2050,0,244741137,52,9,46,0,1,81,G,Major,38,20,66,9,0,9,8
|
||||
Crown,Kendrick Lamar,1,2022,5,13,1493,0,42485571,2,0,10,0,0,170,C#,Minor,37,14,24,80,0,11,4
|
||||
Auntie Diaries,Kendrick Lamar,1,2022,5,13,1545,0,37778188,1,0,4,0,0,78,G,Major,43,60,38,76,1,48,38
|
||||
PUFFIN ON ZOOTIEZ,Future,1,2022,4,29,2350,0,254218729,28,42,23,0,0,125,G#,Major,88,28,66,6,0,13,31
|
||||
Mirror,Kendrick Lamar,1,2022,5,13,1929,0,53603447,2,0,4,0,0,92,E,Minor,66,29,65,23,0,8,7
|
||||
Beautiful Girl,Luciano,1,2022,4,22,710,4,160035717,16,11,18,0,0,140,B,Minor,84,61,42,31,0,9,9
|
||||
"Paulo Londra: Bzrp Music Sessions, Vol. 23","Bizarrap, Paulo Londra",2,2022,4,25,928,0,164163229,19,0,7,0,0,96,,Major,61,32,90,25,0,10,9
|
||||
Savior - Interlude,Kendrick Lamar,1,2022,5,13,1194,0,37091576,0,0,3,0,0,118,F,Minor,66,83,43,84,0,19,19
|
||||
Pasoori,"Shae Gill, Ali Sethi",2,2022,2,6,349,6,284249832,10,22,3,0,0,92,B,Minor,71,67,60,7,0,6,4
|
||||
Mother I Sober (feat. Beth Gibbons of Portishead),"Kendrick Lamar, Beth Gibbons",2,2022,5,13,1890,0,33381454,3,0,2,0,0,140,G#,Minor,49,50,37,87,0,11,35
|
||||
TUS LпїЅпїЅGR,"Sech, Mora",2,2022,4,1,1308,2,184622518,23,16,9,0,5,174,C#,Major,68,45,71,18,0,11,25
|
||||
Where Did You Go?,"MNEK, Jax Jones",2,2022,1,26,4531,0,300983101,135,74,119,1,18,127,A#,Major,77,53,78,19,0,29,4
|
||||
I Tried to Tell Y'all,"Ugly Dray, Tesla Jnr",2,2022,5,6,1657,0,121077868,15,3,3,0,0,96,B,Major,71,37,52,62,0,11,36
|
||||
Honest (feat. Don Toliver),"Justin Bieber, Don Toliver",2,2022,4,29,1351,0,106919680,26,1,17,0,0,150,G#,Minor,82,82,66,13,0,11,6
|
||||
ZOOM,Jessi,1,2022,4,13,608,0,136996305,5,29,14,0,0,100,F#,Major,87,46,60,1,0,13,16
|
||||
SloMo,Chanel,1,2021,12,24,1211,0,65719930,31,0,19,0,2,105,C#,Minor,73,59,81,13,0,9,6
|
||||
FEARLESS,LE SSERAFIM,1,2022,5,2,629,0,229497852,18,75,9,0,0,104,G,Major,86,43,62,5,0,13,14
|
||||
10 Things I Hate About You,Leah Kate,1,2022,3,23,1301,0,185550869,23,1,15,0,0,154,G#,Major,54,45,79,1,0,17,5
|
||||
SPACE MAN,Sam Ryder,1,2022,2,22,1329,0,54682594,42,51,32,0,0,80,B,Major,47,33,83,18,0,10,5
|
||||
With you,"HA SUNG WOON, Jimin",2,2022,4,24,343,2,240580042,4,35,11,1,1,150,D#,Major,53,14,43,64,0,13,3
|
||||
Iris,The Goo Goo Dolls,1,1998,3,31,13101,9,1284942608,137,5,582,0,0,156,B,Minor,32,49,72,0,0,9,4
|
||||
The Heart Part 5,Kendrick Lamar,1,2022,5,8,2939,0,71423324,29,0,30,0,0,98,G#,Minor,76,79,81,18,0,6,34
|
||||
San Lucas,Kevin Kaarl,1,2019,11,7,407,1,244891912,5,0,5,0,0,92,G,Major,58,27,36,86,0,9,3
|
||||
This Love (TaylorпїЅпїЅпїЅs Ve,Taylor Swift,1,2022,5,6,1492,0,132171975,26,2,15,0,2,144,E,Major,47,7,50,32,0,7,4
|
||||
Good Looking,Suki Waterhouse,1,2017,10,20,2264,0,184706613,11,6,25,0,1,150,E,Major,37,27,56,8,0,13,3
|
||||
Flowers,Lauren Spencer Smith,1,2021,8,9,801,0,184826429,42,9,24,1,1,138,D#,Major,70,31,44,84,0,9,39
|
||||
Yet To Come,BTS,1,2022,6,10,829,0,302006641,32,110,26,0,0,172,C#,Major,56,68,89,4,0,33,19
|
||||
Run BTS,BTS,1,2022,6,10,736,0,330881149,5,132,17,0,3,77,G#,Major,72,70,82,2,0,4,17
|
||||
Music For a Sushi Restaurant,Harry Styles,1,2022,5,20,4449,1,334733572,80,11,66,0,1,107,B,Major,72,36,72,26,6,11,4
|
||||
Matilda,Harry Styles,1,2022,5,20,3218,3,366214458,29,10,79,0,0,114,D,Major,51,39,29,90,0,10,4
|
||||
For Youth,BTS,1,2022,6,10,327,0,114546317,5,40,12,0,0,117,D,Major,63,29,51,43,0,25,4
|
||||
Daylight,Harry Styles,1,2022,5,20,2775,0,290833204,21,11,40,0,0,146,,Major,69,63,45,48,0,18,4
|
||||
Vegas (From the Original Motion Picture Soundtrack ELVIS),Doja Cat,1,2022,5,6,4576,0,448500832,79,13,93,0,17,160,G#,Minor,80,74,60,8,0,14,26
|
||||
Cash In Cash Out,"Pharrell Williams, Tyler, The Creator, 21 Savage",4,2022,6,10,2313,0,136676504,34,0,29,0,0,120,G,Major,88,15,56,1,0,8,6
|
||||
Potion (with Dua Lipa & Young Thug),"Calvin Harris, Dua Lipa, Young Thug",3,2022,5,27,3983,0,190625045,73,45,119,0,0,100,B,Minor,82,76,70,16,0,8,5
|
||||
Born Singer,BTS,1,2022,6,10,279,0,79095270,0,18,6,0,0,158,G#,Minor,60,68,84,4,0,24,11
|
||||
Little Freak,Harry Styles,1,2022,5,20,2302,0,273194684,20,3,39,0,0,142,D#,Major,56,40,54,72,0,10,4
|
||||
La Llevo Al Cielo (Ft. пїЅпїЅengo F,"Nengo Flow, Anuel Aa, Chris Jedi, Chencho Corleone",4,2022,5,20,3559,3,333146475,36,1,31,0,1,170,A,Minor,80,77,85,11,0,17,14
|
||||
True Love,"Kanye West, XXXTENTACION",2,2022,5,27,2129,0,194902696,23,1,44,1,0,84,F,Minor,71,39,86,2,0,51,25
|
||||
Satellite,Harry Styles,1,2022,5,20,3291,5,311482393,43,28,79,0,208,139,,Major,58,30,46,14,0,9,3
|
||||
Pass The Dutchie,Musical Youth,1,1982,1,1,5328,0,195918494,54,76,900,0,0,151,G,Major,73,88,67,20,0,32,5
|
||||
"Villano Antillano: Bzrp Music Sessions, Vol. 51","Bizarrap, Villano Antillano",2,2022,6,8,1401,0,248511839,26,16,17,0,1,128,C#,Minor,82,42,75,6,0,63,6
|
||||
Love Of My Life,Harry Styles,1,2022,5,20,1933,0,233671263,13,2,31,0,0,118,G,Major,56,20,54,67,0,6,5
|
||||
Grapejuice,Harry Styles,1,2022,5,20,1986,0,199587884,7,1,15,0,0,183,F,Major,65,88,72,36,14,20,3
|
||||
So Good,Halsey,1,2022,6,9,1057,0,91781263,51,14,19,0,0,83,D#,Minor,63,29,62,4,0,18,4
|
||||
Belly Dancer,"BYOR, Imanbek",2,2022,2,18,5115,13,383835984,109,38,301,1,33,122,C#,Major,85,42,80,6,0,17,14
|
||||
Keep Driving,Harry Styles,1,2022,5,20,2094,1,236060709,8,1,18,0,0,165,A,Major,72,90,48,32,0,18,23
|
||||
Cinema,Harry Styles,1,2022,5,20,2171,0,189236868,18,1,28,0,0,106,A,Minor,83,90,64,35,5,9,4
|
||||
Die Young (feat. 347aidan),"Sleepy hallow, 347aidan",2,2022,5,25,896,0,160845341,19,0,15,0,0,77,B,Major,81,68,58,83,0,11,34
|
||||
Only Love Can Hurt Like This,Paloma Faith,1,2014,1,1,5148,0,588955257,90,22,365,0,114,91,G#,Major,57,30,89,10,0,33,8
|
||||
Hold My Hand,Lady Gaga,1,2022,5,3,2528,0,238350348,63,8,270,2,105,148,G,Major,51,21,63,5,0,41,3
|
||||
Daydreaming,Harry Styles,1,2022,5,20,1900,1,187703102,15,1,23,0,0,114,E,Minor,71,90,81,31,2,13,3
|
||||
Numb,"Marshmello, Khalid",2,2022,6,10,3879,2,295307001,107,76,86,1,9,120,,Minor,91,63,77,12,1,10,5
|
||||
Nos Comemos (feat. Ozuna),"Ozuna, Tiago pzk",2,2022,6,2,896,0,138334433,0,0,2,0,0,92,F#,Major,83,56,82,10,0,9,5
|
||||
Me and Your Mama,Childish Gambino,1,2016,11,10,8775,0,445590495,33,60,107,1,0,118,F,Major,56,25,45,1,3,7,3
|
||||
Crazy What Love Can Do,"David Guetta, Ella Henderson, Becky Hill",3,2022,3,18,5290,0,286739476,139,73,142,0,5,123,E,Minor,60,46,71,3,0,16,4
|
||||
SLOW DANCING IN THE DARK,Joji,1,2018,9,12,10211,0,1122364376,38,79,65,0,1,89,D#,Major,52,28,48,54,1,19,3
|
||||
Antes de Perderte,Duki,1,2022,6,2,584,8,157136970,12,1,8,0,1,110,F,Minor,81,61,93,49,0,12,11
|
||||
Boyfriends,Harry Styles,1,2022,5,20,1517,0,137070925,26,2,30,0,0,118,,Major,42,32,20,94,0,11,4
|
||||
295,Sidhu Moose Wala,1,2021,5,15,246,4,183273246,4,106,0,0,7,90,B,Minor,68,54,76,21,0,11,20
|
||||
Tak Ingin Usai,Keisya Levronka,1,2022,5,13,220,4,184807630,16,5,0,0,6,130,,Major,49,14,40,82,0,11,3
|
||||
En El Radio Un Cochinero,Victor Cibrian,1,2022,5,13,514,0,164856284,5,36,1,0,0,154,F#,Minor,70,97,62,47,0,10,4
|
||||
Master of Puppets (Remastered),Metallica,1,1986,3,3,6080,0,704171068,112,198,406,1,0,105,E,Minor,54,59,83,0,44,20,4
|
||||
BREAK MY SOUL,Beyoncé,1,2022,6,21,9724,0,354614964,222,61,259,14,2,115,C#,Minor,70,87,88,4,0,26,8
|
||||
ULTRA SOLO REMIX,"De La Ghetto, Feid, Polima WestCoast, Paloma Mami, Pailita",5,2022,6,16,2341,0,279717388,37,2,38,2,0,110,C#,Major,91,59,82,8,0,6,8
|
||||
Massive,Drake,1,2022,6,17,5263,0,195628667,66,89,61,0,11,125,E,Minor,51,5,68,12,2,15,6
|
||||
Betty (Get Money),Yung Gravy,1,2022,6,10,2402,0,221752937,45,2,26,0,1,102,C#,Major,73,62,75,0,0,34,8
|
||||
Ojos Marrones,Lasso,1,2022,2,6,795,11,263280370,26,18,15,1,4,120,G#,Minor,81,72,65,4,0,14,4
|
||||
POP!,Nayeon,1,2022,6,24,571,0,213505179,19,21,14,0,2,97,D,Major,80,36,86,4,0,3,5
|
||||
Layla,"SchпїЅпїЅrze, DJ R",2,2022,3,24,832,3,130419412,18,124,24,1,0,140,F,Minor,44,41,92,0,0,44,7
|
||||
MORE,j-hope,1,2022,7,1,565,0,155795783,6,52,11,0,0,97,D,Major,78,31,72,1,0,11,4
|
||||
Sweet Child O' Mine,Guns N' Roses,1,1987,1,1,41231,1,1553497987,228,151,"6,720",3,99,125,F#,Major,45,67,90,9,11,10,5
|
||||
Last Last,Burna Boy,1,2022,5,12,4526,12,293466523,156,275,150,3,128,88,D#,Minor,80,55,56,13,0,8,9
|
||||
SNAP,Rosa Linn,1,2022,3,19,1818,0,711366595,3,0,63,0,353,170,,Major,56,52,64,11,0,45,7
|
||||
Sticky,Drake,1,2022,6,17,2814,0,191448892,38,105,25,0,2,137,A#,Minor,88,8,49,9,0,9,14
|
||||
Hot Shit (feat. Ye & Lil Durk),"Kanye West, Lil Durk, Cardi B",3,2022,7,1,1601,0,85924992,11,0,2,0,0,157,A,Major,88,52,69,0,0,8,23
|
||||
Ai Preto,"L7nnon, DJ Biel do Furduncinho, Bianca",3,2022,6,3,894,4,176103902,28,0,54,0,0,130,F#,Major,95,83,57,54,0,10,25
|
||||
La Loto,"Anitta, Tini, Becky G",3,2022,7,6,945,0,121189256,49,3,57,0,0,90,C#,Major,85,80,83,10,0,8,11
|
||||
die first,Nessa Barrett,1,2022,6,24,767,0,131746175,25,0,22,0,0,183,G#,Minor,44,44,67,7,0,12,13
|
||||
Afraid To Feel,LF System,1,2022,5,2,5898,5,244790012,129,55,128,0,101,128,C#,Major,58,68,91,2,0,27,11
|
||||
Baile no Morro,"Mc Vitin Da Igrejinha, MC Tairon, DJ Win",3,2022,6,10,685,2,129314708,17,0,24,0,30,130,D#,Minor,71,78,40,46,0,7,45
|
||||
cómo dormi,Rels B,1,2022,8,4,1452,35,331511413,16,15,20,0,0,126,C#,Major,63,56,43,24,0,12,23
|
||||
Bad Decisions (with BTS & Snoop Dogg),"Snoop Dogg, BTS, Benny Blanco",3,2022,8,5,1456,0,219196651,53,2,33,0,0,120,,Major,77,94,87,2,0,23,12
|
||||
STAYING ALIVE (feat. Drake & Lil Baby),"Drake, DJ Khaled, Lil Baby",3,2022,8,5,2107,0,170732845,51,1,50,0,0,130,E,Minor,72,18,46,7,0,28,8
|
||||
Caile,Luar La L,1,2020,12,18,1494,2,273914335,17,12,15,0,0,122,,Major,70,46,76,30,0,9,45
|
||||
Si Te La Encuentras Por Ahí,Feid,1,2022,8,5,1379,4,179061440,23,10,18,0,0,172,G#,Minor,74,46,58,15,0,13,8
|
||||
GIVENCHY,Duki,1,2022,7,20,625,4,185236961,13,18,12,0,0,103,G#,Major,61,38,62,14,0,23,40
|
||||
ALIEN SUPERSTAR,Beyoncé,1,2022,7,29,2688,0,171788484,39,47,36,0,0,122,A#,Minor,55,46,64,0,0,17,10
|
||||
Mary On A Cross,Ghost,1,2019,9,13,2668,2,387080183,38,266,78,0,141,130,B,Major,47,56,90,0,0,10,4
|
||||
Attention,NewJeans,1,2022,8,1,799,12,264717480,14,141,9,0,1,105,A#,Minor,81,70,65,24,0,8,4
|
||||
THE SHADE,Rex Orange County,1,2022,3,11,1189,6,244928911,17,10,16,0,4,120,F,Major,90,73,51,39,0,9,6
|
||||
"Come Back Home - From ""Purple Hearts""",Sofia Carson,1,2022,7,12,367,0,97610446,28,67,195,0,0,145,G,Major,56,43,53,24,0,12,4
|
||||
El Rescate,"Grupo Marca Registrada, Junior H",2,2022,7,22,527,4,287278853,10,43,3,1,0,99,G,Minor,79,64,59,28,0,11,14
|
||||
Heartless,Kanye West,1,2008,1,1,17504,34,887906111,63,39,"1,315",0,2,88,A#,Minor,79,66,65,5,0,25,14
|
||||
"Stay With Me (with Justin Timberlake, Halsey, & Pharrell)","Calvin Harris, Halsey, Pharrell Williams, Justin Timberlake",4,2022,7,15,3113,0,123473120,54,6,124,1,0,126,A,Major,81,90,73,28,0,29,4
|
||||
Siempre Pendientes,"Peso Pluma, Luis R Conriquez",2,2022,8,15,685,5,295152154,15,79,4,2,0,136,,Major,77,71,75,33,1,13,4
|
||||
JGL,"Luis R Conriquez, La Adictiva",2,2022,2,18,782,9,323455692,15,33,6,1,0,113,G#,Major,70,97,59,55,0,27,12
|
||||
Don't You Worry,"David Guetta, Shakira, Black Eyed Peas",3,2022,6,16,2442,0,240918092,81,4,248,1,6,132,B,Major,82,49,88,20,0,21,3
|
||||
Pipoco,"Melody, Ana Castela, Dj Chris No Beat",3,2022,5,20,1112,4,191873381,22,2,65,1,3,135,G#,Major,77,74,74,47,0,34,8
|
||||
Hold Me Closer,"Elton John, Britney Spears",2,2017,11,10,4967,0,284216603,165,10,177,4,73,126,,Major,67,49,77,11,0,19,11
|
||||
Forget Me,Lewis Capaldi,1,2022,9,9,2520,4,239411309,93,95,84,9,202,102,C#,Minor,67,72,74,30,0,36,4
|
||||
After LIKE,IVE,1,2022,8,22,767,12,265548837,20,129,11,0,12,125,,Major,68,80,92,10,0,9,12
|
||||
Bound 2,Kanye West,1,2013,1,1,19806,7,703301727,33,11,274,0,0,149,C#,Major,37,28,66,14,0,9,5
|
||||
B.O.T.A. (Baddest Of Them All) - Edit,"Interplanetary Criminal, Eliza Rose",2,2022,6,15,5153,6,244585109,102,53,113,12,0,137,,Major,74,71,89,24,61,15,5
|
||||
Talk that Talk,TWICE,1,2022,8,26,615,0,189476119,14,77,15,1,2,120,D#,Minor,77,78,91,14,0,33,12
|
||||
BILLIE EILISH.,Armani White,1,2022,1,20,2537,0,277132266,49,1,67,11,1,100,C#,Major,90,75,50,11,0,9,26
|
||||
Ferxxo 100,Feid,1,2022,6,3,1647,30,278920007,20,49,23,2,2,164,G#,Minor,70,58,57,25,0,15,7
|
||||
KU LO SA - A COLORS SHOW,Oxlade,1,2022,6,10,2019,8,222410722,117,72,107,1,7,93,A#,Minor,65,79,66,31,0,22,7
|
||||
Prohibidox,Feid,1,2022,9,13,1473,12,185392587,25,36,25,1,0,180,C#,Minor,65,52,80,5,0,6,25
|
||||
Static,Steve Lacy,1,2022,7,15,1613,0,202452860,21,15,13,0,0,79,C#,Major,34,22,31,43,63,10,7
|
||||
The Scientist,Coldplay,1,2002,8,5,30992,6,1608164312,124,25,"7,827",1,0,146,F,Major,56,21,44,73,0,11,2
|
||||
Sparks,Coldplay,1,2000,7,10,10826,4,624101957,24,0,805,0,0,103,C#,Major,37,17,27,75,5,10,3
|
||||
Talk,YEAT,1,2022,9,2,920,0,148461629,10,1,8,0,0,140,E,Minor,70,26,76,8,0,54,23
|
||||
XQ Te Pones Así,"Yandel, Feid",2,2022,9,13,308,0,47093942,6,1,6,0,0,92,A#,Major,81,48,70,13,0,15,7
|
||||
Selfish,PnB Rock,1,2016,6,23,2468,0,380319238,15,0,0,0,0,102,C#,Minor,64,4,60,11,0,19,4
|
||||
Sin Señ,"Ovy On The Drums, Quevedo",2,2022,7,22,1097,2,209106362,18,10,13,1,1,118,B,Minor,82,75,85,33,1,11,4
|
||||
Lady Mi Amor,Feid,1,2022,9,13,330,0,53987404,3,0,2,0,0,93,D,Major,78,75,62,6,0,15,6
|
||||
Poland,Lil Yachty,1,2022,6,23,1584,0,115331792,38,0,24,0,0,150,F,Minor,70,26,56,14,83,11,5
|
||||
THE LONELIEST,MпїЅпїЅne,1,2022,10,7,1585,5,225093344,78,65,328,1,198,130,D,Major,52,24,60,0,0,8,3
|
||||
Bye Bye,"Marshmello, Juice WRLD",2,2022,10,14,766,0,84697729,16,0,9,0,0,83,D#,Minor,65,24,53,6,0,51,4
|
||||
BABY OTAKU,"Fran C, Polima WestCoast, Nickoog Clk, Pablito Pesadilla",4,2022,8,18,836,0,159240673,14,1,13,0,0,102,A,Major,84,43,75,5,0,6,8
|
||||
Nxde,(G)I-DLE,1,2022,10,17,430,6,170709584,14,116,9,0,11,136,E,Minor,73,65,91,4,0,48,18
|
||||
Miss You,Southstar,1,1982,5,16,2020,0,154356956,77,10,119,0,40,145,A,Major,66,24,58,17,0,19,5
|
||||
we fell in love in october,girl in red,1,2018,11,21,6858,0,723043854,31,21,15,0,4,130,G,Major,57,24,37,11,18,16,3
|
||||
2 Be Loved (Am I Ready),Lizzo,1,2022,7,14,3682,6,247689123,41,0,158,2,68,156,G,Major,72,92,77,9,0,8,11
|
||||
Celestial,Ed Sheeran,1,2022,9,29,1639,0,176474912,86,14,80,0,37,123,D,Major,57,50,85,5,0,16,4
|
||||
Typa Girl,BLACKPINK,1,2022,9,16,452,10,235549288,2,129,13,0,1,132,G,Major,92,53,62,7,0,63,10
|
||||
I Really Want to Stay at Your House,"Rosa Walton, Hallie Coggins",2,2020,12,18,668,1,140430339,0,0,31,0,,125,D#,Minor,49,13,74,0,0,9,4
|
||||
California Breeze,Lil Baby,1,2022,10,14,991,0,85559365,36,38,13,0,3,162,F,Minor,74,22,67,0,0,11,46
|
||||
Bamba (feat. Aitch & BIA),"Luciano, Aitch, BпїЅ",3,2022,9,22,869,7,146223492,14,12,12,2,28,138,A#,Major,80,82,81,14,0,13,36
|
||||
Casei Com a Putaria,"MC Ryan SP, Love Funk, Mc Paiva ZS",3,2022,7,1,648,4,187701588,0,0,30,0,0,161,A#,Minor,59,62,60,12,0,5,44
|
||||
Major Distribution,"Drake, 21 Savage",2,2022,11,4,1545,0,154863153,22,7,15,0,0,131,G#,Minor,91,23,55,1,0,7,32
|
||||
Pussy & Millions (feat. Travis Scott),"Drake, Travis Scott, 21 Savage",3,2022,11,4,1930,0,191333656,24,8,17,0,1,122,E,Minor,75,45,63,6,0,35,12
|
||||
Vigilante Shit,Taylor Swift,1,2022,10,21,1948,0,253650850,12,9,16,0,0,80,E,Minor,80,16,28,17,0,12,39
|
||||
Question...?,Taylor Swift,1,2022,10,21,1608,0,223064273,10,3,12,0,0,109,G,Major,75,11,50,20,0,30,17
|
||||
On BS,"Drake, 21 Savage",2,2022,11,4,1338,0,170413877,9,20,7,0,0,158,A,Major,84,33,36,2,0,39,59
|
||||
Mastermind,Taylor Swift,1,2022,10,21,1936,0,218320587,7,5,13,0,0,126,E,Major,66,12,35,55,0,9,14
|
||||
Circo Loco,"Drake, 21 Savage",2,2022,11,4,1794,0,141720999,26,9,17,0,3,104,C#,Major,73,25,61,1,0,32,7
|
||||
Labyrinth,Taylor Swift,1,2022,10,21,1597,0,187339835,6,3,15,0,0,110,,Major,48,15,31,80,22,12,4
|
||||
Spin Bout U,"Drake, 21 Savage",2,2022,11,4,1652,2,198365537,26,52,10,0,95,130,G,Major,77,20,70,1,0,16,5
|
||||
Sweet Nothing,Taylor Swift,1,2022,10,21,1747,0,186104310,9,6,13,0,2,177,,Major,34,39,16,97,0,12,5
|
||||
"Would've, Could've, Should've",Taylor Swift,1,2022,10,21,1715,0,177503916,4,5,8,0,0,158,G,Major,48,55,84,43,0,15,12
|
||||
Con La Brisa,"Ludwig Goransson, Foudeqush",2,2022,11,4,486,0,71095708,8,1,7,0,0,114,D,Minor,62,25,44,51,33,14,3
|
||||
Privileged Rappers,"Drake, 21 Savage",2,2022,11,4,1007,0,112436403,6,5,3,0,0,144,F,Major,93,62,61,0,0,12,20
|
||||
The Astronaut,Jin,1,2022,10,28,481,9,203436468,10,100,15,1,27,125,F,Major,54,22,76,0,0,14,3
|
||||
BackOutsideBoyz,Drake,1,2022,11,4,1045,0,93367537,8,5,2,0,0,142,F,Minor,85,40,43,4,0,39,32
|
||||
Broke Boys,"Drake, 21 Savage",2,2022,11,4,1060,0,106249219,3,8,5,0,0,120,D,Major,64,11,53,1,0,25,27
|
||||
The Great War,Taylor Swift,1,2022,10,21,1274,0,181382590,1,6,11,0,0,96,F,Major,57,55,74,22,0,8,4
|
||||
My Mind & Me,Selena Gomez,1,2022,11,3,953,0,91473363,61,13,37,1,0,144,A,Major,60,24,39,57,0,8,3
|
||||
Bigger Than The Whole Sky,Taylor Swift,1,2022,10,21,1180,0,121871870,4,0,8,0,0,166,F#,Major,42,7,24,83,1,12,6
|
||||
A Veces (feat. Feid),"Feid, Paulo Londra",2,2022,11,3,573,0,73513683,2,0,7,0,0,92,C#,Major,80,81,67,4,0,8,6
|
||||
En La De Ella,"Feid, Sech, Jhayco",3,2022,10,20,1320,0,133895612,29,26,17,0,0,97,C#,Major,82,67,77,8,0,12,5
|
||||
Alone,Burna Boy,1,2022,11,4,782,2,96007391,27,18,32,1,0,90,E,Minor,61,32,67,15,0,11,5
|
||||
|
BIN
romanova_adelina_lab_3/1.png
Normal file
|
After Width: | Height: | Size: 76 KiB |
BIN
romanova_adelina_lab_3/2.png
Normal file
|
After Width: | Height: | Size: 184 KiB |
BIN
romanova_adelina_lab_3/3.png
Normal file
|
After Width: | Height: | Size: 25 KiB |
BIN
romanova_adelina_lab_3/4.png
Normal file
|
After Width: | Height: | Size: 43 KiB |
77
romanova_adelina_lab_3/README.md
Normal file
@@ -0,0 +1,77 @@
# Laboratory work No. 3. Variant 21

## Topic:
Decision trees

## Model:

Decision Tree Classifier

## How to run the program:
Install *python, numpy, matplotlib, sklearn*
```
python main.py
```

## Technologies used:
The Python programming language with the numpy, matplotlib and sklearn libraries

Development environment: VSCode

# What the lab does:
It uses the "UCI Heart Disease Data" dataset and trains a ```Decision Tree Classifier``` model.

The UCI Heart Disease dataset contains various clinical features such as age, sex, blood pressure, cholesterol, electrocardiographic findings and others, together with a target variable that indicates the presence or absence of heart disease.

First the data have to be preprocessed so that the model can accept them as input. Initially the data look like this:



Since machine learning models can only work with numeric values, all data must be converted to that format, and only complete rows whose feature values are not missing are used. This is done by the function shown below:



Next, the target value is reduced to a binary form, since the original field takes 4 values. After that, a "feature engineering" step is applied to obtain additional features that may help the model, following the usual machine and deep learning intuition that more data tends to give a better result. The new features are produced by the function below, after which the updated dataset is converted to numeric format again.

```
def fe_creation(df):
    # Feature engineering (FE)
    df['age2'] = df['age']//10
    df['trestbps2'] = df['trestbps']//10
    df['chol2'] = df['chol']//60
    df['thalch2'] = df['thalch']//40
    df['oldpeak2'] = df['oldpeak']//0.4
    for i in ['sex', 'age2', 'fbs', 'restecg', 'exang']:
        for j in ['cp','trestbps2', 'chol2', 'thalch2', 'oldpeak2', 'slope']:
            df[i + "_" + j] = df[i].astype('str') + "_" + df[j].astype('str')
    return df
```
After applying this function the number of features grows from 12 to 47. All features are then standardized with the formula z = (x - mean)/std, where x is the current feature value, mean is the mean of the column containing this feature, std is its standard deviation, and z is the new value of x. After these steps the data are ready to be used for training the trees.
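
For illustration, here is a minimal standalone sketch of this standardization step (it is not part of the lab's code; the toy column names and values are made up), showing that the z = (x - mean)/std formula matches what ```StandardScaler``` computes:

```
import numpy as np
import pandas as pd
from sklearn.preprocessing import StandardScaler

# toy frame with two hypothetical numeric features
df = pd.DataFrame({"age": [41, 52, 60, 37], "chol": [204, 250, 318, 190]})

# manual z-score; StandardScaler uses the population standard deviation (ddof=0)
manual = (df - df.mean()) / df.std(ddof=0)

# the same transformation via sklearn
scaled = pd.DataFrame(StandardScaler().fit_transform(df), columns=df.columns)

print(np.allclose(manual.values, scaled.values))  # True
```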

```Decision Tree Classifier``` is a machine learning algorithm that uses a tree structure to make decisions. Each node of the tree represents a test on some feature, and each branch represents a possible outcome of that test. The goal is to split the data into subgroups so that a single class dominates in each subgroup.
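
Before the lab's own training block (shown right after this sketch), here is a minimal self-contained decision tree example for reference. It uses a built-in sklearn dataset as a stand-in, not the preprocessed heart-disease data:

```
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier

# a built-in binary medical dataset, used here only as a stand-in
X, y = load_breast_cancer(return_X_y=True)
X_train, X_valid, y_train, y_valid = train_test_split(X, y, test_size=0.25, random_state=42)

# min_samples_leaf limits how small a leaf may become and so controls overfitting;
# it is the same hyperparameter the lab tunes with GridSearchCV below
tree = DecisionTreeClassifier(min_samples_leaf=5, random_state=42)
tree.fit(X_train, y_train)
print(tree.score(X_train, y_train), tree.score(X_valid, y_valid))
```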

In the lab itself the classifier is trained and evaluated as follows:

```
decision_tree = DecisionTreeClassifier()
param_grid = {'min_samples_leaf': [i for i in range(2,12)]}
decision_tree_CV = GridSearchCV(decision_tree, param_grid=param_grid, cv=cv_train, verbose=False)
decision_tree_CV.fit(train, train_target)
print(decision_tree_CV.best_params_)

acc_all = acc_metrics_calc(0, acc_all, decision_tree_CV, train, valid, train_target, valid_target, title="Decision Tree Classifier")
plot_learning_curve(decision_tree_CV, "Decision Tree", train, train_target, cv=cv_train)

feature_importances_dt = decision_tree_CV.best_estimator_.feature_importances_
plot_feature_importance(feature_importances_dt, data.columns, "Decision Tree")
```

The Decision Tree Classifier was trained first, with GridSearch used to find the best hyperparameters for the task. The plots below show the quality and the learning process of this classifier.



The next plot shows which features the model considered the most important:



## Conclusion

On the training data the predictions are mostly correct, while on the validation data the model has trouble detecting the second class, which corresponds to the presence of disease.
302
romanova_adelina_lab_3/main.py
Normal file
@@ -0,0 +1,302 @@
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
import matplotlib.pyplot as plt
|
||||
import seaborn as sns
|
||||
|
||||
import sklearn
|
||||
from sklearn.preprocessing import (LabelEncoder,
|
||||
StandardScaler,
|
||||
MinMaxScaler,
|
||||
RobustScaler)
|
||||
from sklearn.model_selection import train_test_split, GridSearchCV, StratifiedKFold, learning_curve, ShuffleSplit
|
||||
from sklearn.model_selection import cross_val_predict as cvp
|
||||
from sklearn import metrics
|
||||
from sklearn.metrics import mean_absolute_error, mean_squared_error, accuracy_score, confusion_matrix, explained_variance_score
|
||||
|
||||
from sklearn.tree import DecisionTreeClassifier, plot_tree
|
||||
from sklearn.ensemble import RandomForestClassifier
|
||||
|
||||
|
||||
def str_features_to_numeric(data):
|
||||
# Преобразовывает все строковые признаки в числовые.
|
||||
|
||||
# Определение категориальных признаков
|
||||
categorical_columns = []
|
||||
numerics = ['int8', 'int16', 'int32', 'int64', 'float16', 'float32', 'float64']
|
||||
features = data.columns.values.tolist()
|
||||
for col in features:
|
||||
if data[col].dtype in numerics: continue
|
||||
categorical_columns.append(col)
|
||||
|
||||
# Кодирование категориальных признаков
|
||||
for col in categorical_columns:
|
||||
if col in data.columns:
|
||||
le = LabelEncoder()
|
||||
le.fit(list(data[col].astype(str).values))
|
||||
data[col] = le.transform(list(data[col].astype(str).values))
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def fe_creation(df):
|
||||
# Feature engineering (FE)
|
||||
df['age2'] = df['age']//10
|
||||
df['trestbps2'] = df['trestbps']//10
|
||||
df['chol2'] = df['chol']//60
|
||||
df['thalch2'] = df['thalch']//40
|
||||
df['oldpeak2'] = df['oldpeak']//0.4
|
||||
for i in ['sex', 'age2', 'fbs', 'restecg', 'exang']:
|
||||
for j in ['cp','trestbps2', 'chol2', 'thalch2', 'oldpeak2', 'slope']:
|
||||
df[i + "_" + j] = df[i].astype('str') + "_" + df[j].astype('str')
|
||||
return df
|
||||
|
||||
|
||||
def acc_d(y_meas, y_pred):
|
||||
# Относительная погрешность между прогнозируемыми значениями y_pred и измеренными значениями y_meas
|
||||
return mean_absolute_error(y_meas, y_pred)*len(y_meas)/sum(abs(y_meas))
|
||||
|
||||
|
||||
def acc_rmse(y_meas, y_pred):
|
||||
# Среднеквадратичная ошибка между прогнозируемыми значениями y_pred и измеренными значениями y_meas
|
||||
return (mean_squared_error(y_meas, y_pred))**0.5
|
||||
|
||||
|
||||
def plot_cm(train_target, train_target_pred, valid_target, valid_target_pred, title):
|
||||
# Построение матриц ошибок
|
||||
|
||||
def cm_calc(y_true, y_pred):
|
||||
cm = confusion_matrix(y_true, y_pred, labels=np.unique(y_true))
|
||||
cm_sum = np.sum(cm, axis=1, keepdims=True)
|
||||
cm_perc = cm / cm_sum.astype(float) * 100
|
||||
annot = np.empty_like(cm).astype(str)
|
||||
nrows, ncols = cm.shape
|
||||
for i in range(nrows):
|
||||
for j in range(ncols):
|
||||
c = cm[i, j]
|
||||
p = cm_perc[i, j]
|
||||
if i == j:
|
||||
s = cm_sum[i]
|
||||
annot[i, j] = '%.1f%%\n%d/%d' % (p, c, s)
|
||||
elif c == 0:
|
||||
annot[i, j] = ''
|
||||
else:
|
||||
annot[i, j] = '%.1f%%\n%d' % (p, c)
|
||||
cm = pd.DataFrame(cm, index=np.unique(y_true), columns=np.unique(y_true))
|
||||
cm.index.name = 'Actual'
|
||||
cm.columns.name = 'Predicted'
|
||||
return cm, annot
|
||||
|
||||
|
||||
# Построение матриц ошибок
|
||||
fig, axes = plt.subplots(nrows=1, ncols=2, figsize=(16, 6), sharex=True)
|
||||
|
||||
# Обучающие данные
|
||||
ax = axes[0]
|
||||
ax.set_title("for training data")
|
||||
cm0, annot0 = cm_calc(train_target, train_target_pred)
|
||||
sns.heatmap(cm0, cmap= "YlGnBu", annot=annot0, fmt='', ax=ax)
|
||||
|
||||
# Тестовые данные
|
||||
ax = axes[1]
|
||||
ax.set_title("for test (validation) data")
|
||||
cm1, annot1 = cm_calc(valid_target, valid_target_pred)
|
||||
sns.heatmap(cm1, cmap= "YlGnBu", annot=annot1, fmt='', ax=ax)
|
||||
|
||||
fig.suptitle(f'CONFUSION MATRICES for {title}')
|
||||
plt.savefig(f'CONFUSION MATRICES for {title}.png')
|
||||
plt.show()
|
||||
|
||||
|
||||
def acc_metrics_calc(num, acc_all, model, train, valid, train_target, valid_target, title):
|
||||
# Этап выбора моделей
|
||||
# Расчет точности модели по различным показателям
|
||||
|
||||
ytrain = model.predict(train).astype(int)
|
||||
yvalid = model.predict(valid).astype(int)
|
||||
print('train_target = ', train_target[:5].values)
|
||||
print('ytrain = ', ytrain[:5])
|
||||
print('valid_target =', valid_target[:5].values)
|
||||
print('yvalid =', yvalid[:5])
|
||||
|
||||
num_acc = 0
|
||||
for x in metrics_now:
|
||||
if x == 1:
|
||||
#критерий точности score
|
||||
acc_train = round(metrics.accuracy_score(train_target, ytrain), 2)
|
||||
acc_valid = round(metrics.accuracy_score(valid_target, yvalid), 2)
|
||||
elif x == 2:
|
||||
# rmse критерий
|
||||
acc_train = round(acc_rmse(train_target, ytrain), 2)
|
||||
acc_valid = round(acc_rmse(valid_target, yvalid), 2)
|
||||
elif x == 3:
|
||||
# критерий относительной погрешности
|
||||
acc_train = round(acc_d(train_target, ytrain) * 100, 2)
|
||||
acc_valid = round(acc_d(valid_target, yvalid) * 100, 2)
|
||||
|
||||
print('acc of', metrics_all[x], 'for train =', acc_train)
|
||||
print('acc of', metrics_all[x], 'for valid =', acc_valid)
|
||||
acc_all[num_acc].append(acc_train) #train
|
||||
acc_all[num_acc+1].append(acc_valid) #valid
|
||||
num_acc += 2
|
||||
|
||||
# Построение матриц
|
||||
plot_cm(train_target, ytrain, valid_target, yvalid, title)
|
||||
|
||||
return acc_all
|
||||
|
||||
|
||||
def plot_feature_importance(feature_importances, feature_names, model_name):
|
||||
import matplotlib.pyplot as plt
|
||||
import seaborn as sns
|
||||
|
||||
# Создание цветовой палитры
|
||||
colors = sns.color_palette('viridis', len(feature_importances))
|
||||
|
||||
# Сортировка индексов важностей признаков
|
||||
indices = feature_importances.argsort()[::-1]
|
||||
|
||||
# Создание стильного барплота
|
||||
plt.figure(figsize=(12, 8))
|
||||
ax = sns.barplot(x=feature_importances[indices], y=feature_names[indices], palette=colors)
|
||||
|
||||
# Добавление декораций
|
||||
plt.xlabel('Важность признака', fontsize=14)
|
||||
plt.ylabel('Признаки', fontsize=14)
|
||||
plt.title(f'Важность признаков в модели {model_name}', fontsize=16)
|
||||
plt.xticks(fontsize=12)
|
||||
plt.yticks(fontsize=12)
|
||||
|
||||
# Добавление цветовой шкалы и ее описания
|
||||
cbar = plt.colorbar(plt.cm.ScalarMappable(cmap='viridis'), ax=ax)
|
||||
cbar.set_label('Уровень важности', rotation=270, labelpad=15, fontsize=12)
|
||||
|
||||
# Добавление сетки для лучшей читаемости
|
||||
plt.grid(axis='x', linestyle='--', alpha=0.6)
|
||||
|
||||
# Сохранение графика в файл
|
||||
plt.savefig('feature_importance_plot.png', bbox_inches='tight')
|
||||
|
||||
# Отображение графика
|
||||
plt.savefig(f'feature_importances_{model_name}.png')
|
||||
plt.show()
|
||||
|
||||
|
||||
def plot_learning_curve(estimator, title, X, y, cv=None, axes=None, ylim=None,
|
||||
n_jobs=None, train_sizes=np.linspace(.1, 1.0, 5), random_state=0):
|
||||
fig, axes = plt.subplots(2, 1, figsize=(20, 10))
|
||||
|
||||
if axes is None:
|
||||
_, axes = plt.subplots(1, 2, figsize=(20, 5))
|
||||
|
||||
axes[0].set_title(title)
|
||||
if ylim is not None:
|
||||
axes[0].set_ylim(*ylim)
|
||||
axes[0].set_xlabel("Training examples")
|
||||
axes[0].set_ylabel("Score")
|
||||
|
||||
cv_train = ShuffleSplit(n_splits=cv_n_split, test_size=test_train_split_part, random_state=random_state)
|
||||
|
||||
train_sizes, train_scores, test_scores, fit_times, _ = \
|
||||
learning_curve(estimator=estimator, X=X, y=y, cv=cv,
|
||||
train_sizes=train_sizes,
|
||||
return_times=True)
|
||||
|
||||
train_scores_mean = np.mean(train_scores, axis=1)
|
||||
train_scores_std = np.std(train_scores, axis=1)
|
||||
test_scores_mean = np.mean(test_scores, axis=1)
|
||||
test_scores_std = np.std(test_scores, axis=1)
|
||||
fit_times_mean = np.mean(fit_times, axis=1)
|
||||
fit_times_std = np.std(fit_times, axis=1)
|
||||
|
||||
# Plot learning curve
|
||||
axes[0].grid()
|
||||
axes[0].fill_between(train_sizes, train_scores_mean - train_scores_std,
|
||||
train_scores_mean + train_scores_std, alpha=0.1,
|
||||
color="r")
|
||||
axes[0].fill_between(train_sizes, test_scores_mean - test_scores_std,
|
||||
test_scores_mean + test_scores_std, alpha=0.1,
|
||||
color="g")
|
||||
axes[0].plot(train_sizes, train_scores_mean, 'o-', color="r",
|
||||
label="Training score")
|
||||
axes[0].plot(train_sizes, test_scores_mean, 'o-', color="g",
|
||||
label="Cross-validation score")
|
||||
axes[0].legend(loc="best")
|
||||
|
||||
# Plot n_samples vs fit_times
|
||||
axes[1].grid()
|
||||
axes[1].plot(train_sizes, fit_times_mean, 'o-')
|
||||
axes[1].fill_between(train_sizes, fit_times_mean - fit_times_std,
|
||||
fit_times_mean + fit_times_std, alpha=0.1)
|
||||
axes[1].set_xlabel("Training examples")
|
||||
axes[1].set_ylabel("fit_times")
|
||||
axes[1].set_title("Scalability of the model")
|
||||
|
||||
plt.savefig(f'{title}.png')
|
||||
|
||||
plt.show()
|
||||
return
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Загрузка данных
|
||||
# Преобразование данных и предобработка
|
||||
# Обучение моделей Decision Tree Classifier и Random Forest Classifier
|
||||
# Расчет метрик и построение графиков
|
||||
cv_n_split = 5
|
||||
random_state = 42
|
||||
test_train_split_part = 0.25
|
||||
|
||||
metrics_all = {1: 'acc', 2 : 'rmse', 3 : 're'}
|
||||
metrics_now = [1, 2, 3]
|
||||
|
||||
data = pd.read_csv("..//heart_disease_uci.csv")
|
||||
data['target'] = data['num']
|
||||
data = data.drop(columns=['id', 'dataset', 'ca', 'thal', 'num'])
|
||||
|
||||
data = data[(data['chol'] <= 420) & (data['oldpeak'] >=0) & (data['oldpeak'] <=4)].reset_index(drop=True)
|
||||
data = data.dropna().reset_index(drop=True)
|
||||
print(data.info())
|
||||
|
||||
data = str_features_to_numeric(data)
|
||||
data = data[data['target'].isin([0, 1])] # приводим столбец с целевыми значениями к бинарному виду
|
||||
|
||||
data = fe_creation(data)
|
||||
data = str_features_to_numeric(data)
|
||||
|
||||
dataset = data.copy() # original data
|
||||
target_name = 'target'
|
||||
target = data.pop(target_name)
|
||||
|
||||
# Model standartization
|
||||
# The standard score of a sample x is calculated as:
|
||||
# z = (x - мат.ож.) / (стандартное отклонение)
|
||||
scaler = StandardScaler()
|
||||
data = pd.DataFrame(scaler.fit_transform(data), columns = data.columns)
|
||||
|
||||
train, valid, train_target, valid_target = train_test_split(data, target, test_size=test_train_split_part, random_state=random_state)
|
||||
|
||||
# list of accuracy of all model - amount of metrics_now * 2 (train & valid datasets)
|
||||
num_models = 6
|
||||
acc_train = []
|
||||
acc_valid = []
|
||||
acc_all = np.empty((len(metrics_now)*2, 0)).tolist()
|
||||
acc_all
|
||||
|
||||
acc_all_pred = np.empty((len(metrics_now), 0)).tolist()
|
||||
acc_all_pred
|
||||
|
||||
cv_train = ShuffleSplit(n_splits=cv_n_split, test_size=test_train_split_part, random_state=random_state)
|
||||
|
||||
decision_tree = DecisionTreeClassifier()
|
||||
param_grid = {'min_samples_leaf': [i for i in range(2,12)]}
|
||||
decision_tree_CV = GridSearchCV(decision_tree, param_grid=param_grid, cv=cv_train, verbose=False)
|
||||
decision_tree_CV.fit(train, train_target)
|
||||
print(decision_tree_CV.best_params_)
|
||||
|
||||
acc_all = acc_metrics_calc(0, acc_all, decision_tree_CV, train, valid, train_target, valid_target, title="Decision Tree Classifier")
|
||||
plot_learning_curve(decision_tree_CV, "Decision Tree", train, train_target, cv=cv_train)
|
||||
|
||||
feature_importances_dt = decision_tree_CV.best_estimator_.feature_importances_
|
||||
plot_feature_importance(feature_importances_dt, data.columns, "Decision Tree")
|
||||
|
||||
|
||||
BIN
romanova_adelina_lab_4/1.png
Normal file
|
After Width: | Height: | Size: 20 KiB |
BIN
romanova_adelina_lab_4/2.png
Normal file
|
After Width: | Height: | Size: 21 KiB |
BIN
romanova_adelina_lab_4/3.png
Normal file
|
After Width: | Height: | Size: 60 KiB |
76
romanova_adelina_lab_4/README.md
Normal file
@@ -0,0 +1,76 @@
# Laboratory work No. 4. Variant 21

## Topic:
Clustering

## Model:

KMeans

## How to run the program:
Install *python, numpy, matplotlib, sklearn*
```
python main.py
```

## Technologies used:
The Python programming language with the numpy, matplotlib and sklearn libraries

Development environment: VSCode

# What the lab does:

The clustering task consists in splitting a dataset into groups, called clusters, so that the objects inside one cluster are more similar to each other than to objects from other clusters. This reveals hidden structure in the data and simplifies further analysis and decision making.

This work examines the ```KMeans``` model.

### Description:
```KMeans``` splits the data into K clusters, where K is set in advance. It minimises the sum of squared distances between the data points and the centres of their assigned clusters. The algorithm is simple to implement and works well for spherical clusters.
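
As a reference, here is a minimal self-contained ```KMeans``` sketch on made-up 2-D points (this is not the lab's code, which clusters the heart-disease dataset through ```clustering_df``` below):

```
import numpy as np
from sklearn.cluster import KMeans

# toy 2-D data: two well-separated blobs (made-up values)
X = np.array([[1.0, 1.1], [0.9, 1.0], [1.2, 0.8],
              [8.0, 8.2], [7.9, 8.1], [8.3, 7.8]])

# K is fixed in advance; KMeans minimises the within-cluster sum of squared distances
kmeans = KMeans(n_clusters=2, n_init=10, random_state=0).fit(X)

print(kmeans.labels_)           # cluster index of every point
print(kmeans.cluster_centers_)  # coordinates of the two centroids
print(kmeans.inertia_)          # the minimised sum of squared distances
```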

Data clustering is a powerful tool for finding patterns in large amounts of information, and the choice of a particular algorithm depends on the nature of the data and the task at hand. In this work the algorithm is examined in more detail, its strengths and weaknesses are discussed, and its use is illustrated in practice.

The clusters are obtained with the following algorithm:

1. Load the source data

2. Convert all of it to numeric format

3. Train on the prepared data

```
def clustering_df(X, n, m, output_hist, title='clusters_by'):

    X_columns = X.columns
    scaler = StandardScaler()
    scaler.fit(X)
    X = pd.DataFrame(scaler.transform(X), columns = X_columns)
    cl = generate_clustering_algorithms(X, n, m)
    cl.fit(X)
    if hasattr(cl, 'labels_'):
        labels = cl.labels_.astype(np.uint8)
    else:
        labels = cl.predict(X)
    clusters=pd.concat([X, pd.DataFrame({'cluster':labels})], axis=1)
```
All columns were used for clustering; part of the code is shown below:

```
print(data.select_dtypes(include='object').columns.tolist())
for column in data.select_dtypes(include='object').columns.tolist():
    data[column] = pd.factorize(data[column])[0]
```
The program generates histograms of every feature for each cluster. The most interesting ones to me were the age feature and the presence of disease.





Looking at the plots above, we can conclude that almost everyone in cluster No. 3 is ill, most of them at stages 2, 3 and 4, and their age ranges from 45 to 70.

Below is the result of training the clustering algorithm:



## Conclusion

I think the ```KMeans``` algorithm did a fairly good job, since every cluster came out well separated, that is, clearly distinct from the other clusters. This suggests that the algorithm was able to capture the key features of each cluster.
166
romanova_adelina_lab_4/main.py
Normal file
@@ -0,0 +1,166 @@
|
||||
import os
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
|
||||
import matplotlib.pyplot as plt
|
||||
import seaborn as sns
|
||||
|
||||
from sklearn import cluster, mixture
|
||||
from sklearn.decomposition import PCA
|
||||
from sklearn.cluster import KMeans, DBSCAN, OPTICS
|
||||
from sklearn.preprocessing import StandardScaler
|
||||
from sklearn.metrics.pairwise import cosine_similarity
|
||||
from sklearn.neighbors import kneighbors_graph
|
||||
from itertools import cycle, islice
|
||||
|
||||
import warnings
|
||||
warnings.simplefilter('ignore')
|
||||
|
||||
|
||||
def generate_clustering_algorithms(Z, n_clusters, m):
|
||||
# Generate clustering algorithms:
|
||||
# m = 'MeanShift', 'KMeans', 'MiniBatchKMeans'
|
||||
|
||||
# The minimal percentage of similarity of the clustered feature with "Survived" for inclusion in the final dataset
|
||||
limit_opt = 0.7
|
||||
|
||||
params = {'quantile': .2,
|
||||
'eps': .3,
|
||||
'damping': .9,
|
||||
'preference': -200,
|
||||
'n_neighbors': 10,
|
||||
'n_clusters': n_clusters,
|
||||
'min_samples': 3,
|
||||
'xi': 0.05,
|
||||
'min_cluster_size': 0.05}
|
||||
|
||||
# estimate bandwidth for mean shift
|
||||
bandwidth = cluster.estimate_bandwidth(Z, quantile=params['quantile'])
|
||||
|
||||
# connectivity matrix for structured Ward
|
||||
connectivity = kneighbors_graph(
|
||||
Z, n_neighbors=params['n_neighbors'], include_self=False)
|
||||
|
||||
# make connectivity symmetric
|
||||
connectivity = 0.5 * (connectivity + connectivity.T)
|
||||
|
||||
# ============
|
||||
# Create cluster objects
|
||||
# ============
|
||||
if m == 'MeanShift':
|
||||
cl = cluster.MeanShift(bandwidth=bandwidth, bin_seeding=True)
|
||||
elif m == 'KMeans':
|
||||
cl = cluster.KMeans(n_clusters=n_clusters, random_state = 1000)
|
||||
elif m == 'MiniBatchKMeans':
|
||||
cl = cluster.MiniBatchKMeans(n_clusters=n_clusters)
|
||||
|
||||
return cl
|
||||
|
||||
|
||||
def clustering_df(X, n, m, output_hist, title='clusters_by'):
|
||||
|
||||
# Standardization
|
||||
X_columns = X.columns
|
||||
scaler = StandardScaler()
|
||||
scaler.fit(X)
|
||||
X = pd.DataFrame(scaler.transform(X), columns = X_columns)
|
||||
cl = generate_clustering_algorithms(X, n, m)
|
||||
cl.fit(X)
|
||||
if hasattr(cl, 'labels_'):
|
||||
labels = cl.labels_.astype(np.uint8)
|
||||
else:
|
||||
labels = cl.predict(X)
|
||||
clusters=pd.concat([X, pd.DataFrame({'cluster':labels})], axis=1)
|
||||
|
||||
# Inverse Standardization
|
||||
X_inv = pd.DataFrame(scaler.inverse_transform(X), columns = X_columns)
|
||||
clusters_inv=pd.concat([X_inv, pd.DataFrame({'cluster':labels})], axis=1)
|
||||
|
||||
# Number of points in clusters
|
||||
print("Number of points in clusters:\n", clusters['cluster'].value_counts())
|
||||
|
||||
# Data in clusters - thanks to https://www.kaggle.com/sabanasimbutt/clustering-visualization-of-clusters-using-pca
|
||||
if output_hist:
|
||||
for c in clusters:
|
||||
grid = sns.FacetGrid(clusters_inv, col='cluster')
|
||||
grid.map(plt.hist, c)
|
||||
|
||||
plt.savefig(f'{title}_by_method_{m}.png')
|
||||
|
||||
return clusters, clusters_inv
|
||||
|
||||
|
||||
def plot_draw(X, title, m):
|
||||
# Drawing a plot with clusters on the plane (using PCA transformation)
|
||||
# Thanks to https://www.kaggle.com/sabanasimbutt/clustering-visualization-of-clusters-using-pca
|
||||
|
||||
dist = 1 - cosine_similarity(X)
|
||||
|
||||
# PCA transform
|
||||
pca = PCA(2)
|
||||
pca.fit(dist)
|
||||
X_PCA = pca.transform(dist)
|
||||
|
||||
# Generate point numbers and colors for clusters
|
||||
hsv = plt.get_cmap('hsv')
|
||||
n_clusters = max(X['cluster'].value_counts().index)-min(X['cluster'].value_counts().index)+2
|
||||
colors = list(hsv(np.linspace(0, 1, n_clusters)))
|
||||
colors_num = list(np.linspace(min(X['cluster'].value_counts().index), max(X['cluster'].value_counts().index), n_clusters))
|
||||
colors_num = [int(x) for x in colors_num]
|
||||
colors_str = [str(x) for x in colors_num]
|
||||
names_dict = dict(zip(colors_num, colors_str))
|
||||
colors_dict = dict(zip(colors_num, colors))
|
||||
|
||||
# Visualization
|
||||
x, y = X_PCA[:, 0], X_PCA[:, 1]
|
||||
|
||||
df = pd.DataFrame({'x': x, 'y':y, 'label':X['cluster'].tolist()})
|
||||
groups = df.groupby('label')
|
||||
|
||||
fig, ax = plt.subplots(figsize=(12, 8))
|
||||
|
||||
for name, group in groups:
|
||||
ax.plot(group.x, group.y, marker='o', linestyle='', ms=10,
|
||||
color=colors_dict[name],
|
||||
label=names_dict[name],
|
||||
mec='none')
|
||||
ax.set_aspect('auto')
|
||||
ax.tick_params(axis='x',which='both',bottom='off',top='off',labelbottom='off')
|
||||
ax.tick_params(axis= 'y',which='both',left='off',top='off',labelleft='off')
|
||||
|
||||
ax.legend(loc='upper right')
|
||||
ax.set_title(f"{title} by method {m}")
|
||||
plt.savefig(f'{title}_by_method_{m}.png')
|
||||
plt.show()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
data = pd.read_csv("..//heart_disease_uci.csv")
|
||||
data = data.drop_duplicates().reset_index(drop=True)
|
||||
|
||||
print(data.select_dtypes(include='object').columns.tolist())
|
||||
for column in data.select_dtypes(include='object').columns.tolist():
|
||||
data[column] = pd.factorize(data[column])[0]
|
||||
# print(pd.factorize(data[column])[0])
|
||||
|
||||
methods_all = ['KMeans', 'MiniBatchKMeans', 'MeanShift']
|
||||
n_default = 6
|
||||
|
||||
data = data[data.notna().all(axis=1)]
|
||||
|
||||
res = dict(zip(methods_all, [False]*len(methods_all)))
|
||||
n_clust = dict(zip(methods_all, [1]*len(methods_all)))
|
||||
for method in methods_all:
|
||||
print(f"Method - {method}")
|
||||
Y, Y_inv = clustering_df(data.copy(), n_default, method, True)
|
||||
|
||||
# If the number of clusters is less than 2, then the clustering is not successful
|
||||
n_cl = len(Y['cluster'].value_counts())
|
||||
if n_cl > 1:
|
||||
res[method] = True
|
||||
n_clust[method] = n_cl
|
||||
|
||||
plot_draw(Y, "Data clustering", method)
|
||||
else:
|
||||
print('Clustering is not successful because all data is in one cluster!\n')
|
||||
|
||||
BIN
romanova_adelina_lab_5/1.png
Normal file
|
After Width: | Height: | Size: 54 KiB |
79
romanova_adelina_lab_5/README.md
Normal file
@@ -0,0 +1,79 @@
# Laboratory work No. 5. Variant 21

## Topic:
Regression

## Model:

LinearRegression

## How to run the program:
Install *python, numpy, matplotlib, sklearn*
```
python lab.py
```

## Technologies used:
The Python programming language with the numpy, matplotlib and sklearn libraries

Development environment: VSCode

# What the lab does:

Since a patient's resting blood pressure is an important medical indicator, it was chosen as the value to predict from the available features such as age, sex and others.

Using linear regression to predict resting blood pressure brings several key advantages.

Linear regression is a powerful tool in statistics and machine learning, widely used for analysing and modelling the relationship between dependent and independent variables. Its main goal is to build a linear function that best approximates the relationship between the input data and the target variable, which makes it possible to predict the target value for new inputs.

### Description:
```LinearRegression``` is based on the least-squares principle: it minimises the sum of squared differences between the actual and the predicted values (equivalently, the mean squared error). The coefficients of the linear model have an analytical solution, which makes the algorithm efficient and easy to understand.
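
A minimal standalone sketch of this idea on made-up points (not the lab's script, which is shown further below):

```
import numpy as np
from sklearn.linear_model import LinearRegression

# toy 1-D example: made-up points lying near y = 2x + 1
X = np.array([[1.0], [2.0], [3.0], [4.0]])
y = np.array([3.1, 4.9, 7.2, 8.8])

# ordinary least squares: minimises the sum of squared residuals
reg = LinearRegression().fit(X, y)

print(reg.coef_)        # slope, close to 2
print(reg.intercept_)   # bias term, close to 1
print(reg.score(X, y))  # R^2 on the training points
```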

Training a linear regression requires the following steps:

1. Load the source data

2. Choose the target value to be predicted

3. Process the data so that all features are numeric, and add normalisation (in other words, standardisation) of the data

4. Train the chosen model on the prepared data

Data processing is done by the ```str_features_to_numeric``` function:

```
def str_features_to_numeric(data):
    # Converts all string features to numeric ones.

    # Detect categorical features
    categorical_columns = []
    numerics = ['int8', 'int16', 'int32', 'int64', 'float16', 'float32', 'float64']
    features = data.columns.values.tolist()
    for col in features:
        if data[col].dtype in numerics: continue
        categorical_columns.append(col)

    # Encode categorical features
    for col in categorical_columns:
        if col in data.columns:
            le = LabelEncoder()
            le.fit(list(data[col].astype(str).values))
            data[col] = le.transform(list(data[col].astype(str).values))

    return data
```

Normalisation is then performed with ```StandardScaler```.

The target feature is ```trestbps``` - the resting blood pressure (in mm Hg on admission to the hospital). The processed data are fed into the training of the linear regression model:



- reg.score(...) - reports the quality of the model (the R² score)
- reg.coef_ - contains the coefficients of the features, in order
- reg.intercept_ - shows the bias term

## Conclusion

Based on the results obtained, the classical linear regression model is more than adequate for this particular task.

87
romanova_adelina_lab_5/lab.py
Normal file
@@ -0,0 +1,87 @@
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
import matplotlib.pyplot as plt
|
||||
import seaborn as sns
|
||||
import sklearn
|
||||
from sklearn.linear_model import LinearRegression, SGDRegressor, Ridge
|
||||
|
||||
|
||||
from sklearn.preprocessing import (LabelEncoder,
|
||||
StandardScaler,
|
||||
MinMaxScaler,
|
||||
RobustScaler)
|
||||
from sklearn.model_selection import train_test_split, GridSearchCV, StratifiedKFold, learning_curve, ShuffleSplit
|
||||
|
||||
|
||||
def str_features_to_numeric(data):
|
||||
# Преобразовывает все строковые признаки в числовые.
|
||||
|
||||
# Определение категориальных признаков
|
||||
categorical_columns = []
|
||||
numerics = ['int8', 'int16', 'int32', 'int64', 'float16', 'float32', 'float64']
|
||||
features = data.columns.values.tolist()
|
||||
for col in features:
|
||||
if data[col].dtype in numerics: continue
|
||||
categorical_columns.append(col)
|
||||
|
||||
# Кодирование категориальных признаков
|
||||
for col in categorical_columns:
|
||||
if col in data.columns:
|
||||
le = LabelEncoder()
|
||||
le.fit(list(data[col].astype(str).values))
|
||||
data[col] = le.transform(list(data[col].astype(str).values))
|
||||
|
||||
return data
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
data = pd.read_csv("..//heart_disease_uci.csv")
|
||||
data['target'] = data['trestbps']
|
||||
data = data.drop(columns=['id', 'dataset', 'trestbps'])
|
||||
|
||||
data_wo_null = data.dropna()
|
||||
print(len(data_wo_null))
|
||||
|
||||
encoded_data_wo_null = str_features_to_numeric(data_wo_null)
|
||||
print(len(encoded_data_wo_null))
|
||||
|
||||
# Model standartization
|
||||
# The standard score of a sample x is calculated as:
|
||||
# z = (x - мат.ож.) / (стандартное отклонение)
|
||||
scaler = StandardScaler()
|
||||
new_data = pd.DataFrame(scaler.fit_transform(encoded_data_wo_null), columns = encoded_data_wo_null.columns)
|
||||
|
||||
dataset = data_wo_null.copy() # original data
|
||||
target_name = 'target'
|
||||
target = data_wo_null.pop(target_name)
|
||||
|
||||
test_train_split_part = 0.2
|
||||
random_state = 42
|
||||
|
||||
train, valid, train_target, valid_target = train_test_split(new_data, target,
|
||||
test_size=test_train_split_part,
|
||||
random_state=random_state)
|
||||
|
||||
reg = LinearRegression().fit(train, train_target)
|
||||
|
||||
print("---"*15, " LinearRegression ", "---"*15)
|
||||
print(f"Accuracy: {reg.score(valid, valid_target)}")
|
||||
print(f"коэффициенты: {reg.coef_}")
|
||||
print(f"Смещение относительно начала координат (bias): {reg.intercept_}")
|
||||
|
||||
SGD_reg = SGDRegressor(max_iter=1000, tol=1e-3)
|
||||
SGD_reg.fit(train, train_target)
|
||||
|
||||
print("---"*15, " SGDRegressor ", "---"*15)
|
||||
print(f"Accuracy: {SGD_reg.score(valid, valid_target)}")
|
||||
print(f"коэффициенты: {SGD_reg.coef_}")
|
||||
print(f"Смещение относительно начала координат (bias): {SGD_reg.intercept_}")
|
||||
|
||||
Ridge_clf = Ridge(alpha=1.0)
|
||||
Ridge_clf.fit(train, train_target)
|
||||
|
||||
print("---"*15, " Ridge ", "---"*15)
|
||||
print(f"Accuracy: {Ridge_clf.score(valid, valid_target)}")
|
||||
print(f"коэффициенты: {Ridge_clf.coef_}")
|
||||
print(f"Смещение относительно начала координат (bias): {Ridge_clf.intercept_}")
|
||||
|
||||
BIN
romanova_adelina_lab_6/1.png
Normal file
|
After Width: | Height: | Size: 158 KiB |
BIN
romanova_adelina_lab_6/2.png
Normal file
|
After Width: | Height: | Size: 47 KiB |
BIN
romanova_adelina_lab_6/3.png
Normal file
|
After Width: | Height: | Size: 48 KiB |
BIN
romanova_adelina_lab_6/4.png
Normal file
|
After Width: | Height: | Size: 98 KiB |
47
romanova_adelina_lab_6/README.md
Normal file
@@ -0,0 +1,47 @@
# Laboratory work No. 6. Variant 21

## Topic:
Neural network

## Model:

MLPClassifier

## How to run the program:
Install *python, numpy, matplotlib, sklearn*
```
python lab.py
```

## Technologies used:
The Python programming language with the numpy, matplotlib and sklearn libraries

Development environment: VSCode

# What the lab does:

In this study of neural networks, in particular multilayer perceptrons (MLP), the influence of the network architecture on its performance in classifying the stage of heart disease was analysed. Experiments with different layer configurations and sizes give a better understanding of which network parameters have the greatest effect on prediction accuracy.

The MLP used in the code is the ```sklearn.neural_network.MLPClassifier``` class, and the target task is predicting the presence of heart disease (0 - absent, 1, 2, 3, 4 - disease stages).
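
For reference, a minimal runnable sketch of such a setup on synthetic data is shown below; the actual lab script loads ```heart_disease_uci.csv``` and is listed further down, so the synthetic feature generator and its parameters here are only stand-ins:

```
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler

# synthetic stand-in for the preprocessed heart-disease features (5 classes: 0..4)
X, y = make_classification(n_samples=400, n_features=13, n_informative=8,
                           n_classes=5, n_clusters_per_class=1, random_state=42)
X = StandardScaler().fit_transform(X)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# one hidden layer of 100 units, as in the first experiment of the lab
clf = MLPClassifier(hidden_layer_sizes=(100,), max_iter=300, random_state=42)
clf.fit(X_train, y_train)
print(clf.score(X_test, y_test))  # share of correctly classified samples
```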

The data preparation and MLP training process is shown in the image below; the ```resulting score was 0.83```. This number is the accuracy, computed as the ratio of correct answers to the total number of answers. Note that this MLP had only ```one hidden layer of size 100```.



An MLP with two hidden layers of sizes ```300``` and ```100``` reached an ```accuracy of about 0.92```.



An MLP with four hidden layers of sizes ```150, 100, 50 and 50``` reached an ```accuracy of 0.95```.



The MLP with 5 hidden layers of sizes ```100, 400, 600, 400, 100```, i.e. the largest architecture considered, shows the best accuracy.



## Conclusion

Based on the experiments, we can conclude that making the network architecture more complex improves its quality.


86
romanova_adelina_lab_6/lab.py
Normal file
@@ -0,0 +1,86 @@
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
import matplotlib.pyplot as plt
|
||||
import seaborn as sns
|
||||
import sklearn
|
||||
from sklearn.neural_network import MLPClassifier
|
||||
import argparse
|
||||
|
||||
from sklearn.preprocessing import (LabelEncoder,
|
||||
StandardScaler,
|
||||
MinMaxScaler,
|
||||
RobustScaler)
|
||||
from sklearn.model_selection import train_test_split, GridSearchCV, StratifiedKFold, learning_curve, ShuffleSplit
|
||||
|
||||
|
||||
def get_arguments():
|
||||
parser = argparse.ArgumentParser()
|
||||
|
||||
parser.add_argument('--id_pred', type=int, default=1, help='Какой id из тестовой выборки будем предсказывать')
|
||||
|
||||
args = parser.parse_args()
|
||||
return args
|
||||
|
||||
|
||||
def str_features_to_numeric(data):
|
||||
# Преобразовывает все строковые признаки в числовые.
|
||||
|
||||
# Определение категориальных признаков
|
||||
categorical_columns = []
|
||||
numerics = ['int8', 'int16', 'int32', 'int64', 'float16', 'float32', 'float64']
|
||||
features = data.columns.values.tolist()
|
||||
for col in features:
|
||||
if data[col].dtype in numerics: continue
|
||||
categorical_columns.append(col)
|
||||
|
||||
# Кодирование категориальных признаков
|
||||
for col in categorical_columns:
|
||||
if col in data.columns:
|
||||
le = LabelEncoder()
|
||||
le.fit(list(data[col].astype(str).values))
|
||||
data[col] = le.transform(list(data[col].astype(str).values))
|
||||
|
||||
return data
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
args = get_arguments()
|
||||
|
||||
data = pd.read_csv("..//heart_disease_uci.csv")
|
||||
data['target'] = data['num']
|
||||
data = data.drop(columns=['id', 'dataset', 'num'])
|
||||
|
||||
data_wo_null = data.dropna()
|
||||
print(len(data_wo_null))
|
||||
data_wo_null.head(3)
|
||||
|
||||
encoded_data_wo_null = str_features_to_numeric(data_wo_null)
|
||||
|
||||
scaler = StandardScaler()
|
||||
new_data = pd.DataFrame(scaler.fit_transform(encoded_data_wo_null), columns = encoded_data_wo_null.columns)
|
||||
|
||||
dataset = data_wo_null.copy() # original data
|
||||
target_name = 'target'
|
||||
target = data_wo_null.pop(target_name)
|
||||
|
||||
X_train, X_test, y_train, y_test = train_test_split(new_data, target, test_size=0.2, random_state=42)
|
||||
|
||||
clf = MLPClassifier(random_state=42, max_iter=300, hidden_layer_sizes=(100)).fit(X_train, y_train)
|
||||
print("---"*15, " MLPClassifier(100) ", "---"*15)
|
||||
print(f"Accuracy: {clf.score(X_test, y_test)}")
|
||||
|
||||
clf2 = MLPClassifier(random_state=42, max_iter=300, hidden_layer_sizes=(300, 100)).fit(X_train, y_train)
|
||||
print("---"*15, " MLPClassifier(300, 100) ", "---"*15)
|
||||
print(f"Accuracy: {clf2.score(X_test, y_test)}")
|
||||
|
||||
clf3 = MLPClassifier(random_state=42, max_iter=300, hidden_layer_sizes=(150, 100, 50, 50)).fit(X_train, y_train)
|
||||
print("---"*15, " MLPClassifier(150, 100, 50, 50) ", "---"*15)
|
||||
print(f"Accuracy: {clf3.score(X_test, y_test)}")
|
||||
|
||||
clf4 = MLPClassifier(random_state=42, max_iter=300, hidden_layer_sizes=(100, 400, 600, 400, 100)).fit(X_train, y_train)
|
||||
print("---"*15, " MLPClassifier(100, 400, 600, 400, 100) ", "---"*15)
|
||||
print(f"Accuracy: {clf4.score(X_test, y_test)}")
|
||||
|
||||
print("---"*15, f" Предсказание элемента под id = {args.id_pred}", "---"*15)
|
||||
print(f"Предсказанное значение: {clf3.predict(np.array(list(X_test.iloc[args.id_pred])).reshape(1, -1))}")
|
||||
print(f"Настоящее значение {y_test.iloc[args.id_pred]}")
|
||||
BIN
romanova_adelina_lab_6/res.png
Normal file
|
After Width: | Height: | Size: 23 KiB |
52
romanova_adelina_lab_7/README.md
Normal file
@@ -0,0 +1,52 @@
# Laboratory work No. 7. Variant 21

## Topic

Recurrent neural network and the text generation task

## Task

- Choose a literary text and train a recurrent neural network on it to solve the generation task.

- Tune the architecture and the parameters to get as close as possible to a meaningful result.

## Resources used

1. A literary text in English, ```wonderland.txt```

2. Python scripts: ```generate.py```, ```model.py```, ```train.py```.

## Description of the work

### Data preparation:
The ```get_data``` function in ```train.py``` loads the literary text, converts it to lower case and builds a mapping from characters to integer values.

The text is split into sequences of fixed length ```seq_length```, and each sequence is associated with the character that follows it.

The data are converted to PyTorch tensors and normalised for training the model.
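
A minimal sketch of this preparation step on a made-up snippet of text (the real script reads ```wonderland.txt``` and uses ```seq_length = 100```):

```
import torch

# a tiny stand-in for the book text
raw_text = "alice was beginning to get very tired".lower()

# map each character to an integer
chars = sorted(set(raw_text))
char_to_int = {c: i for i, c in enumerate(chars)}

# sliding windows: seq_length characters as input, the next character as the label
seq_length = 10
dataX, dataY = [], []
for i in range(len(raw_text) - seq_length):
    seq_in = raw_text[i:i + seq_length]
    dataX.append([char_to_int[c] for c in seq_in])
    dataY.append(char_to_int[raw_text[i + seq_length]])

# tensors normalised to [0, 1], shaped (samples, seq_length, 1) for the LSTM
X = torch.tensor(dataX, dtype=torch.float32).reshape(-1, seq_length, 1) / len(chars)
y = torch.tensor(dataY)
print(X.shape, y.shape)
```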

### Model architecture:

```model.py``` defines the ```CharModel``` class, which inherits from ```nn.Module``` and represents a recurrent neural network.

The architecture consists of one LSTM layer with a hidden state of size 256, a dropout layer for regularisation, and a linear output layer.

### Model training:

```train.py``` contains the training script. The Adam optimiser is used together with the ```CrossEntropyLoss``` loss function.

Training runs on the GPU if one is available. It lasts several epochs, with validation at every epoch, and the best model is saved.

The training process:



### Text generation:

In ```generate.py``` the model is loaded from the saved state. A random prompt is taken from the source text, and the model is used to predict the next character in a loop.

## Conclusion:



The generated text contains meaningful passages, so we can conclude that the model has indeed trained well.
46
romanova_adelina_lab_7/generate.py
Normal file
@@ -0,0 +1,46 @@
|
||||
import torch
|
||||
from model import CharModel
|
||||
import numpy as np
|
||||
|
||||
if __name__ == "__main__":
|
||||
best_model, char_to_int = torch.load("single-char.pth")
|
||||
n_vocab = len(char_to_int)
|
||||
int_to_char = dict((i, c) for c, i in char_to_int.items())
|
||||
|
||||
|
||||
model = CharModel(n_vocab)
|
||||
model.load_state_dict(best_model)
|
||||
|
||||
# randomly generate a prompt
|
||||
filename = "wonderland.txt"
|
||||
seq_length = 100
|
||||
raw_text = open(filename, 'r', encoding='utf-8').read()
|
||||
raw_text = raw_text.lower()
|
||||
|
||||
start = np.random.randint(0, len(raw_text)-seq_length)
|
||||
prompt = raw_text[start:start+seq_length]
|
||||
pattern = [char_to_int[c] for c in prompt]
|
||||
|
||||
model.eval()
|
||||
print(f'Prompt:\n{prompt}')
|
||||
print("==="*15, "Сгенерированный результ", "==="*15, sep=" ")
|
||||
|
||||
with torch.no_grad():
|
||||
for i in range(1000):
|
||||
# format input array of int into PyTorch tensor
|
||||
x = np.reshape(pattern, (1, len(pattern), 1)) / float(n_vocab)
|
||||
x = torch.tensor(x, dtype=torch.float32)
|
||||
# generate logits as output from the model
|
||||
prediction = model(x)
|
||||
# convert logits into one character
|
||||
index = int(prediction.argmax())
|
||||
result = int_to_char[index]
|
||||
print(result, end="")
|
||||
# append the new character into the prompt for the next iteration
|
||||
pattern.append(index)
|
||||
pattern = pattern[1:]
|
||||
|
||||
print()
|
||||
print("==="*30)
|
||||
print("Done.")
|
||||
|
||||
BIN
romanova_adelina_lab_7/generated_text.png
Normal file
|
After Width: | Height: | Size: 42 KiB |
16
romanova_adelina_lab_7/model.py
Normal file
@@ -0,0 +1,16 @@
|
||||
import torch.nn as nn
|
||||
|
||||
|
||||
class CharModel(nn.Module):
|
||||
def __init__(self, n_vocab):
|
||||
super().__init__()
|
||||
self.lstm = nn.LSTM(input_size=1, hidden_size=256, num_layers=1, batch_first=True)
|
||||
self.dropout = nn.Dropout(0.2)
|
||||
self.linear = nn.Linear(256, n_vocab)
|
||||
def forward(self, x):
|
||||
x, _ = self.lstm(x)
|
||||
# take only the last output
|
||||
x = x[:, -1, :]
|
||||
# produce output
|
||||
x = self.linear(self.dropout(x))
|
||||
return x
|
||||
BIN
romanova_adelina_lab_7/single-char.pth
Normal file
86
romanova_adelina_lab_7/train.py
Normal file
@@ -0,0 +1,86 @@
|
||||
import numpy as np
|
||||
import torch.nn as nn
|
||||
import torch.optim as optim
|
||||
import torch.utils.data as data
|
||||
import torch
|
||||
from model import CharModel
|
||||
|
||||
|
||||
def get_data(filename="wonderland.txt"):
|
||||
# загружаем датасет и приводим к нижнему регистру
|
||||
filename = "wonderland.txt"
|
||||
raw_text = open(filename, 'r', encoding='utf-8').read()
|
||||
raw_text = raw_text.lower()
|
||||
|
||||
# делаем сопоставление текста с соответствующим ему значением
|
||||
chars = sorted(list(set(raw_text)))
|
||||
char_to_int = dict((c, i) for i, c in enumerate(chars))
|
||||
|
||||
# статистика обучаемых данных
|
||||
n_chars = len(raw_text)
|
||||
n_vocab = len(chars)
|
||||
print("Total Characters: ", n_chars)
|
||||
print("Total Vocab: ", n_vocab)
|
||||
|
||||
# подготовка датасета
|
||||
seq_length = 100
|
||||
dataX = []
|
||||
dataY = []
|
||||
for i in range(0, n_chars - seq_length, 1):
|
||||
seq_in = raw_text[i:i + seq_length]
|
||||
seq_out = raw_text[i + seq_length]
|
||||
dataX.append([char_to_int[char] for char in seq_in])
|
||||
dataY.append(char_to_int[seq_out])
|
||||
n_patterns = len(dataX)
|
||||
print("Total Patterns: ", n_patterns)
|
||||
|
||||
# --- переводим данные к тензору, чтобы работать с ними внутри pytorch ---
|
||||
X = torch.tensor(dataX, dtype=torch.float32).reshape(n_patterns, seq_length, 1)
|
||||
X = X / float(n_vocab)
|
||||
y = torch.tensor(dataY)
|
||||
print(X.shape, y.shape)
|
||||
|
||||
return X, y, char_to_int
|
||||
|
||||
|
||||
def main():
|
||||
X, y, char_to_int = get_data()
|
||||
|
||||
n_epochs = 40
|
||||
batch_size = 128
|
||||
model = CharModel(len(char_to_int))
|
||||
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
|
||||
print(f"device: {device}")
|
||||
model.to(device)
|
||||
|
||||
optimizer = optim.Adam(model.parameters())
|
||||
loss_fn = nn.CrossEntropyLoss(reduction="sum")
|
||||
loader = data.DataLoader(data.TensorDataset(X, y), shuffle=True, batch_size=batch_size)
|
||||
|
||||
best_model = None
|
||||
best_loss = np.inf
|
||||
|
||||
for epoch in range(n_epochs):
|
||||
model.train()
|
||||
for X_batch, y_batch in loader:
|
||||
y_pred = model(X_batch.to(device))
|
||||
loss = loss_fn(y_pred, y_batch.to(device))
|
||||
|
||||
optimizer.zero_grad()
|
||||
loss.backward()
|
||||
optimizer.step()
|
||||
|
||||
# Validation
|
||||
model.eval()
|
||||
loss = 0
|
||||
with torch.no_grad():
|
||||
for X_batch, y_batch in loader:
|
||||
y_pred = model(X_batch.to(device))
|
||||
loss += loss_fn(y_pred, y_batch.to(device))
|
||||
if loss < best_loss:
|
||||
best_loss = loss
|
||||
best_model = model.state_dict()
|
||||
print("Epoch %d: Cross-entropy: %.4f" % (epoch, loss))
|
||||
|
||||
torch.save([best_model, char_to_int], "single-char.pth")
|
||||
|
||||
BIN
romanova_adelina_lab_7/train_process.png
Normal file
|
After Width: | Height: | Size: 45 KiB |