Forked from Alexey/DAS_2024_1
Compare commits: kuzarin_ma...main (324 commits)
Commit list: the original page showed the 324 compared commits in an Author | SHA1 | Date table, but only bare SHA1 hashes were captured (no authors, dates, or commit messages), so the listing is omitted here.

.idea/.name (new file, 1 line)
@@ -0,0 +1 @@
main.py

.idea/inspectionProfiles/Project_Default.xml (new file, 12 lines)
@@ -0,0 +1,12 @@
<component name="InspectionProjectProfileManager">
  <profile version="1.0">
    <option name="myName" value="Project Default" />
    <inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true">
      <option name="ignoredIdentifiers">
        <list>
          <option value="str.__pos__" />
        </list>
      </option>
    </inspection_tool>
  </profile>
</component>

.idea/inspectionProfiles/profiles_settings.xml (new file, 6 lines)
@@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
  <settings>
    <option name="USE_PROJECT_PROFILE" value="false" />
    <version value="1.0" />
  </settings>
</component>

.idea/misc.xml (new file, 4 lines)
@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.9 (tukaeva_alfiya_lab_4)" project-jdk-type="Python SDK" />
</project>

aleikin_artem_lab_1/README.md (new file, 32 lines)
@@ -0,0 +1,32 @@
# Lab 1 - Introduction to Docker and Docker Compose
## PIbd-42 || Aleikin Artem

### Description
In this lab we deploy two popular services, MediaWiki and Redmine, using Docker Compose. Each service runs in its own container, and both use a shared PostgreSQL database for storage. Port forwarding is configured for access to the services' web interfaces, and Docker volumes are used to keep the data outside the containers.

### Project goal
Studying modern containerization technologies.

### Steps to run:
1. Clone the repository:
```
git clone <repository URL>
cd <repository folder>
```

2. Start the containers:
```
docker-compose up -d
```

3. After startup the following containers should be available:

MediaWiki: http://localhost:8080
Redmine: http://localhost:8081

4. To stop the containers:
```
docker-compose down
```

Demo video: https://vk.com/video248424990_456239601?list=ln-sCRa9IIiV1VpInn2d1

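A quick smoke test after step 2, not part of the original README; the container names and ports are the ones listed above and in the compose file that follows:

```bash
# List the containers started by the compose file
docker-compose ps

# The web interfaces should answer on the forwarded ports
curl -I http://localhost:8080   # MediaWiki
curl -I http://localhost:8081   # Redmine
```
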
aleikin_artem_lab_1/docker-compose.yml (new file, 45 lines)
@@ -0,0 +1,45 @@
services:
  mediawiki:
    image: mediawiki
    container_name: mediawiki
    ports:
      - "8080:80" # Forward host port 8080 for access to MediaWiki
    volumes:
      - mediawiki_data:/var/www/html/images # Volume for MediaWiki data
    environment:
      - MEDIAWIKI_DB_HOST=db
      - MEDIAWIKI_DB_NAME=mediawiki
      - MEDIAWIKI_DB_USER=root
      - MEDIAWIKI_DB_PASSWORD=example
    depends_on:
      - db

  redmine:
    image: redmine
    container_name: redmine
    ports:
      - "8081:3000" # Forward host port 8081 for access to Redmine
    volumes:
      - redmine_data:/usr/src/redmine/files # Volume for Redmine data
    environment:
      - REDMINE_DB_POSTGRESQL=db
      - REDMINE_DB_DATABASE=redmine
      - REDMINE_DB_USERNAME=root
      - REDMINE_DB_PASSWORD=example
    depends_on:
      - db

  db:
    image: postgres:latest
    container_name: db
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: example
      POSTGRES_DB: postgres
    volumes:
      - db_data:/var/lib/postgresql # Volume for the database

volumes:
  mediawiki_data: # volume for MediaWiki
  redmine_data: # volume for Redmine
  db_data: # volume for the database

artamonova_tatyana_lab_1/README.md (new file, 48 lines)
@@ -0,0 +1,48 @@
## Docker Compose configuration report

### Summary:

This Docker Compose configuration starts the set of services needed to run WordPress and MediaWiki. It includes:

- **WordPress:** web service for blogs and CMS
- **MySQL:** database storing the WordPress data
- **RabbitMQ:** message broker for potential future use
- **MediaWiki:** wiki engine for creating and editing wiki pages

### Running the lab:

1. Install Docker and Docker Compose.
2. Save the configuration to docker-compose.yml.
3. Run docker-compose up --build

### Technologies used:

- **Docker Compose:** a tool for defining and running multi-container applications.
- **Docker:** a platform for building, deploying, and running containers.
- **WordPress:** a popular platform for blogs and CMS.
- **MySQL:** a popular database management system.
- **RabbitMQ:** a message broker used for asynchronous messaging.
- **MediaWiki:** free software for creating and editing wiki pages.

### Functionality:

The configuration starts the following services:

- **WordPress:** runs on port 8080, available at http://localhost:8080.
- **MySQL:** provides the database for WordPress and MediaWiki.
- **RabbitMQ:** runs on port 5672; the management UI is available at http://localhost:15672.
- **MediaWiki:** runs on port 8081, available at http://localhost:8081.

### Additional notes

- **Volumes**: used to store service data so that it is not lost when containers are restarted.
- **Depends_on**: declares dependencies between services, e.g. WordPress depends on MySQL.
- **Restart policy**: defines how services are restarted after a failure.

### Video

https://vk.com/video/@artamonovat?z=video212084908_456239356%2Fpl_212084908_-2

### Conclusion:

This Docker Compose configuration provides a simple and convenient way to start and manage several services related to WordPress and MediaWiki, letting developers easily deploy and manage the applications in an isolated environment.

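As an illustrative check (not part of the original report), the running stack can be inspected with standard Docker Compose commands; the service name, credentials, and ports come from the configuration that follows:

```bash
# Show service status and tail the message broker's log
docker-compose ps
docker-compose logs --tail=20 rabbitmq

# RabbitMQ management UI (guest / password, as set in the compose file)
curl -I http://localhost:15672
```
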
artamonova_tatyana_lab_1/docker-compose.yml (new file, 61 lines)
@@ -0,0 +1,61 @@
version: '3.7'

services:
  wordpress:
    image: wordpress:latest
    ports:
      - "8080:80"
    volumes:
      - wordpress_data:/var/www/html
    environment:
      WORDPRESS_DB_HOST: db
      WORDPRESS_DB_NAME: wordpress
      WORDPRESS_DB_USER: wordpress
      WORDPRESS_DB_PASSWORD: password
    depends_on:
      - db
    restart: unless-stopped

  db:
    image: mysql:latest
    volumes:
      - db_data:/var/lib/mysql
    environment:
      MYSQL_DATABASE: wordpress
      MYSQL_USER: wordpress
      MYSQL_PASSWORD: dbpassword
      MYSQL_ROOT_PASSWORD: rootpassword
    restart: unless-stopped

  rabbitmq:
    image: rabbitmq:3-management
    ports:
      - "5672:5672"
      - "15672:15672"
    volumes:
      - rabbitmq_data:/var/lib/rabbitmq
    environment:
      RABBITMQ_DEFAULT_USER: guest
      RABBITMQ_DEFAULT_PASS: password
    restart: unless-stopped

  mediawiki:
    image: mediawiki:latest
    ports:
      - "8081:80"
    volumes:
      - mediawiki_data:/var/www/html
    environment:
      MW_DB_SERVER: db
      MW_DB_NAME: mediawiki
      MW_DB_USER: mediawiki
      MW_DB_PASSWORD: mediawiki_password
    depends_on:
      - db
    restart: unless-stopped

volumes:
  wordpress_data:
  db_data:
  rabbitmq_data:
  mediawiki_data:

artamonova_tatyana_lab_2/.gitignore (new file, vendored, 5 lines)
@@ -0,0 +1,5 @@
*.pyc
__pycache__
*.egg-info
*.dist-info
.DS_Store

artamonova_tatyana_lab_2/README.md (new file, 22 lines)
@@ -0,0 +1,22 @@
## Lab 2
### Completed by Artamonova Tatyana, PIbd-42

**Variant 1: Program 4 - Number of characters in the file names in /var/data**

- Writes /var/result/data1.txt so that each line of the file is the number of characters in a file name from the /var/data directory.

**Variant 2: Program 3 - Count of numbers in the sequence**

- Finds the largest number in /var/result/data1.txt and saves the count of such numbers in the sequence to /var/result/data2.txt.

**Project structure:**

1. The worker-1 and worker-2 folders contain the executable .py files and the Dockerfiles with the required instructions.
2. The data folder contains the files whose name lengths are counted.
3. The result folder contains the program output: data1.txt is produced by main1.py (worker-1), data2.txt by main2.py (worker-2). The data in data2 is computed from the data in data1.
4. The .gitignore file specifies which files are tracked and which are not.
5. docker-compose.yml defines and manages the Docker containers.

**Command to run** - docker-compose up --build

**Video link:** https://vk.com/artamonovat?z=video212084908_456239357%2Fvideos212084908%2Fpl_212084908_-2

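A minimal end-to-end run might look like this (hypothetical session; the expected numbers match the result files committed later in this change):

```bash
docker-compose up --build

# After both workers exit, the results are in the mounted ./result directory
cat result/data1.txt   # filename lengths, one per line: 15, 18, 18
cat result/data2.txt   # how many times the largest value occurs: 2
```
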
artamonova_tatyana_lab_2/data/exampleFile.txt (new empty file)
artamonova_tatyana_lab_2/data/exampleFile123.txt (new empty file)
artamonova_tatyana_lab_2/data/exampleFile456.txt (new empty file)

artamonova_tatyana_lab_2/docker-compose.yml (new file, 22 lines)
@@ -0,0 +1,22 @@
services:
  worker-1:
    build:
      context: ./worker-1
    volumes:
      - ./worker-1:/app
      - ./data:/var/data
      - ./result:/var/result
    depends_on:
      - worker-2

  worker-2:
    build:
      context: ./worker-2
    volumes:
      - ./worker-2:/app
      - ./data:/var/data
      - ./result:/var/result

volumes:
  data:
  result:

artamonova_tatyana_lab_2/result/data1.txt (new file, 3 lines)
@@ -0,0 +1,3 @@
15
18
18

artamonova_tatyana_lab_2/result/data2.txt (new file, 1 line)
@@ -0,0 +1 @@
2

artamonova_tatyana_lab_2/worker-1/Dockerfile (new file, 14 lines)
@@ -0,0 +1,14 @@
# Use the Python 3.10-slim image as the base for the container.
# The slim variant is more compact, which keeps the container smaller.
FROM python:3.10-slim

# Set the working directory inside the container to /app.
# All subsequent commands run in this directory.
WORKDIR /app

# Copy main1.py from the current directory into /app in the container.
COPY main1.py .

# Define the command executed when the container starts.
# In this case the Python script main1.py is run.
CMD ["python", "main1.py"]

artamonova_tatyana_lab_2/worker-1/main1.py (new file, 21 lines)
@@ -0,0 +1,21 @@
import os
import glob


# Writes data1.txt so that each line is the number of characters
# in a file name from the /var/data directory
def main():
    data_dir = "/var/data"
    result_file = "/var/result/data1.txt"

    result_dir = os.path.dirname(result_file)
    if not os.path.exists(result_dir):
        os.makedirs(result_dir)

    files = glob.glob(os.path.join(data_dir, '*'))

    with open(result_file, 'w') as f:
        for file in files:
            filename = os.path.basename(file)
            f.write(f"{len(filename)}\n")

if __name__ == "__main__":
    main()

artamonova_tatyana_lab_2/worker-2/Dockerfile (new file, 14 lines)
@@ -0,0 +1,14 @@
# Use the Python 3.10-slim image as the base for the container.
# The slim variant is more compact, which keeps the container smaller.
FROM python:3.10-slim

# Set the working directory inside the container to /app.
# All subsequent commands run in this directory.
WORKDIR /app

# Copy main2.py from the current directory into /app in the container.
COPY main2.py .

# Define the command executed when the container starts.
# In this case the Python script main2.py is run.
CMD ["python", "main2.py"]

artamonova_tatyana_lab_2/worker-2/main2.py (new file, 26 lines)
@@ -0,0 +1,26 @@
import os


# Finds the largest number in data1.txt and saves the count of its
# occurrences in the sequence to data2.txt
def main():
    data_file_path = "/var/result/data1.txt"
    result_file_path = "/var/result/data2.txt"

    # Nothing to count if the input file has not been produced yet
    if not os.path.exists(data_file_path):
        return

    # Make sure the directory for the result file exists
    result_dir = os.path.dirname(result_file_path)
    if not os.path.exists(result_dir):
        os.makedirs(result_dir)

    with open(data_file_path, 'r') as f:
        numbers = [int(x.strip()) for x in f.read().splitlines()]

    max_number = max(numbers)
    count = numbers.count(max_number)

    with open(result_file_path, 'w') as f:
        f.write(str(count))

    print(f"Count of the largest numbers: {count}")

if __name__ == "__main__":
    main()

balakhonov_danila_lab_1/README.md (new file, 59 lines)
@@ -0,0 +1,59 @@
# Lab 1

> Hello, my name is Balakhonov Danila, group PIbd-42
>
> *— Balakhonov Danila, PIbd-42*

The video for lab 1 is available at this [link](https://drive.google.com/file/d/1Up_JzDcK_TjYLixpfYXN7PhJmOeg_Uck/view?usp=sharing).

## How to run lab 1?
### Components required to run lab 1
> This section describes installing the required components on the GNU/Linux distribution **Ubuntu**.

The following components are required to run lab 1:
- Git
- Docker
- Docker compose

To install **Git**, run these commands:
``` bash
sudo apt-get update
sudo apt-get install git
```

To install **Docker** and **Docker compose**, run:
``` bash
# Set up the Docker repository
sudo apt-get update
sudo apt-get install ca-certificates curl
sudo install -m 0755 -d /etc/apt/keyrings
sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc
sudo chmod a+r /etc/apt/keyrings/docker.asc

echo \
  "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \
  $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
  sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
sudo apt-get update

# Install Docker and its components
sudo apt-get install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
```
### Running lab 1
To run lab 1, **clone** the repository into any folder and **switch to the branch** balakhonov_danila_lab_1.
Then, in the folder containing `docker-compose.yaml`, run:
``` bash
sudo docker-compose up -d
```
This starts the containers in detached mode.
## Which technologies were used?
Lab 1 uses the following technologies: *git*, *docker*, *docker compose*.

Services chosen for the docker-compose file:
- *Gitea* - a convenient version-control hosting service for the software being developed
- *MediaWiki* - a service for creating and maintaining an online encyclopedia
- *PostgreSQL* - the database used by the services above

The system on which these technologies were installed is Ubuntu 22.
## What does lab 1 do?
Lab 1 consists of writing a docker-compose file for conveniently starting and administering several services in Docker containers at once.

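Because the compose file below publishes PostgreSQL on port 5432 with the gitea/gitea credentials, a quick connectivity check from the host could look like this (assuming the psql client is installed; this is not part of the original lab):

```bash
# Connect to the lab's PostgreSQL instance; the password is "gitea",
# as set in docker-compose.yaml
psql -h localhost -p 5432 -U gitea -d gitea
```
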
balakhonov_danila_lab_1/docker-compose.yaml (new file, 58 lines)
@@ -0,0 +1,58 @@
services:
  # PostgreSQL
  db:
    # PostgreSQL container image, latest version
    image: postgres
    # Container name
    container_name: db
    # Environment variables for configuring the database
    environment:
      - POSTGRES_USER=gitea
      - POSTGRES_PASSWORD=gitea
      - POSTGRES_DB=gitea
    # Mount the directory where the data is stored
    # The host directory is on the left, the container directory on the right
    # Needed so that the data stays on the server after the container is stopped
    volumes:
      - ./postgres:/var/lib/postgresql/data
    # Port used to connect to the database
    ports:
      - 5432:5432
    # Always restart this container after the Docker daemon restarts
    restart: always
  # Gitea
  gitea:
    # The latest version of Gitea is used
    image: gitea/gitea
    container_name: gitea
    # Always restart this container after the Docker daemon restarts
    restart: always
    volumes:
      - ./data:/var/lib/gitea
      - ./config:/etc/gitea
      - /etc/timezone:/etc/timezone:ro
      - /etc/localtime:/etc/localtime:ro
    ports:
      - 3000:3000
      - 2222:2222
    environment:
      - GITEA__database__DB_TYPE=postgres
      - GITEA__database__HOST=db:5432
      - GITEA__database__NAME=gitea
      - GITEA__database__USER=gitea
      - GITEA__database__PASSWD=gitea
    # This container starts only after the db container has started
    depends_on:
      - db
  # MediaWiki
  mediawiki:
    # MediaWiki container image, latest version
    image: mediawiki
    container_name: mediawiki
    restart: always
    ports:
      - 8080:80
    links:
      - db
    volumes:
      - ./images:/var/www/html/images

balakhonov_danila_lab_2/README.md (new file, 64 lines)
@@ -0,0 +1,64 @@
# Lab 2

> Hello, my name is Balakhonov Danila, group PIbd-42
>
> *— Balakhonov Danila, PIbd-42*

The video for lab 2 is available at this [link](https://drive.google.com/file/d/1N4NgWsFLlHY5lGOO3Ps7DPvdJbHNxaqz/view?usp=sharing).

## How to run lab 2?
### Components required to run lab 2
> This section describes installing the required components on the GNU/Linux distribution **Ubuntu**.

The following components are required to run lab 2:
- Git
- Docker
- Docker compose

To install **Git**, run these commands:
``` bash
sudo apt-get update
sudo apt-get install git
```

To install **Docker** and **Docker compose**, run:
``` bash
# Set up the Docker repository
sudo apt-get update
sudo apt-get install ca-certificates curl
sudo install -m 0755 -d /etc/apt/keyrings
sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc
sudo chmod a+r /etc/apt/keyrings/docker.asc

echo \
  "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \
  $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
  sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
sudo apt-get update

# Install Docker and its components
sudo apt-get install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
```
### Running lab 2
To run lab 2, **clone** the repository into any folder and **switch to the branch** balakhonov_danila_lab_2.
Then, in the folder containing `docker-compose.yaml`, run:
``` bash
sudo docker-compose up --build
```
This builds and starts the containers. The output is written inside Docker's data directory; data.txt and result.txt are located at `/var/lib/docker/volumes/balakhonov_danila_lab_2_result/_data/`.

## Which technologies were used?
Lab 2 uses the following technologies:
- Dockerfile
- Docker compose
- Git
- .NET SDK, and F# in particular

The services are written with the .NET SDK in F#.
## What does lab 2 do?
Lab 2 starts two services:

1. A service that takes a random file from the `/var/data` directory and copies it to `/var/result/data.txt`
2. A service that finds the largest number in `/var/result/data.txt` and saves the count of such numbers in the sequence to `/var/result/result.txt`

Lab 2 provided practice in writing Dockerfiles for deploying projects in containers and in wiring them together with docker-compose.yaml.

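To inspect the output after the run, the result files can be read straight from the named volume path given above (root privileges are usually needed to read Docker's data directory):

```bash
# data.txt is the randomly chosen input; result.txt is the count of the maximum value
sudo cat /var/lib/docker/volumes/balakhonov_danila_lab_2_result/_data/data.txt
sudo cat /var/lib/docker/volumes/balakhonov_danila_lab_2_result/_data/result.txt
```
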
balakhonov_danila_lab_2/docker-compose.yaml (new file, 22 lines)
@@ -0,0 +1,22 @@
services:
  app1:
    build: ./sigma_app_1/
    volumes:
      # Create /var/data inside the container
      # and mount the files from ./files into it
      - ./files:/var/data
      # Create /var/result inside the container,
      # backed by the "result" volume in Docker's data directory
      - result:/var/result
  app2:
    build: ./skibidi_app_2/
    # app2 will not start until app1 has started:
    # it DEPENDS on app1
    depends_on:
      - app1
    volumes:
      - result:/var/result
volumes:
  # A "result" volume is created
  # inside Docker's data directory
  result:

balakhonov_danila_lab_2/files/1.txt (new file, 323 lines)
@@ -0,0 +1,323 @@
[323 integers, one per line, used as sample input data for the lab; the full listing is omitted here]

balakhonov_danila_lab_2/files/2.txt (new file, 642 lines)
@@ -0,0 +1,642 @@
[642 integers, one per line, used as sample input data for the lab; the full listing is omitted here]

balakhonov_danila_lab_2/files/3.txt (new file, 489 lines)
@@ -0,0 +1,489 @@
[489 integers, one per line, used as sample input data for the lab; the full listing is omitted here]

balakhonov_danila_lab_2/sigma_app_1/.dockerignore (new file, 4 lines)
@@ -0,0 +1,4 @@
bin/
obj/
Dockerfile
README.md

balakhonov_danila_lab_2/sigma_app_1/.gitignore (new file, vendored, 484 lines)
@@ -0,0 +1,484 @@
[Standard .gitignore template from `dotnet new gitignore`: Visual Studio temporary files, build results (bin/, obj/, Debug/, Release/), NuGet and publish outputs, IDE caches (.vs/, .idea, Rider, VS Code), test and coverage artifacts, and macOS/Windows system files; the full listing is omitted here]

balakhonov_danila_lab_2/sigma_app_1/Dockerfile | 14 lines (new file)
@@ -0,0 +1,14 @@
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
WORKDIR /App

# Copy everything
COPY . ./
# Restore as distinct layers
RUN dotnet restore
# Build and publish a release
RUN dotnet publish -c Release -o out

FROM mcr.microsoft.com/dotnet/runtime:8.0 AS runtime
WORKDIR /App
COPY --from=build /App/out .
ENTRYPOINT ["dotnet", "sigma_app_1.dll"]
balakhonov_danila_lab_2/sigma_app_1/Program.fs | 14 lines (new file)
@@ -0,0 +1,14 @@
// Source directory for the generated files and the target path for the copy.
let PATH = @"/var/data/"
let RESULT_PATH = @"/var/result/data.txt"

// Enumerate all files in the given directory.
let getFiles(path: string): seq<string> =
    System.IO.Directory.EnumerateFiles(path)

// Pick one file from the sequence uniformly at random.
let getRandFile(files: seq<string>) =
    let rand = System.Random()
    let index = rand.Next(Seq.length files)
    Seq.item index files

let files = getFiles(PATH)
let randFile = getRandFile(files)
// Overwrite an existing result so the service can be re-run.
System.IO.File.Copy(randFile, RESULT_PATH, true)
balakhonov_danila_lab_2/sigma_app_1/README.md | 4 lines (new file)
@@ -0,0 +1,4 @@
# First program of lab work No. 2
> Variant 6
>
> Takes a random file from the `/var/data` directory and copies it to `/var/result/data.txt`
balakhonov_danila_lab_2/sigma_app_1/sigma_app_1.fsproj | 12 lines (new file)
@@ -0,0 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net8.0</TargetFramework>
  </PropertyGroup>

  <ItemGroup>
    <Compile Include="Program.fs" />
  </ItemGroup>

</Project>
balakhonov_danila_lab_2/skibidi_app_2/.dockerignore | 4 lines (new file)
@@ -0,0 +1,4 @@
bin/
obj/
Dockerfile
README.md
484
balakhonov_danila_lab_2/skibidi_app_2/.gitignore
vendored
Normal file
484
balakhonov_danila_lab_2/skibidi_app_2/.gitignore
vendored
Normal file
@ -0,0 +1,484 @@
|
||||
## Ignore Visual Studio temporary files, build results, and
|
||||
## files generated by popular Visual Studio add-ons.
|
||||
##
|
||||
## Get latest from `dotnet new gitignore`
|
||||
|
||||
# dotenv files
|
||||
.env
|
||||
|
||||
# User-specific files
|
||||
*.rsuser
|
||||
*.suo
|
||||
*.user
|
||||
*.userosscache
|
||||
*.sln.docstates
|
||||
|
||||
# User-specific files (MonoDevelop/Xamarin Studio)
|
||||
*.userprefs
|
||||
|
||||
# Mono auto generated files
|
||||
mono_crash.*
|
||||
|
||||
# Build results
|
||||
[Dd]ebug/
|
||||
[Dd]ebugPublic/
|
||||
[Rr]elease/
|
||||
[Rr]eleases/
|
||||
x64/
|
||||
x86/
|
||||
[Ww][Ii][Nn]32/
|
||||
[Aa][Rr][Mm]/
|
||||
[Aa][Rr][Mm]64/
|
||||
bld/
|
||||
[Bb]in/
|
||||
[Oo]bj/
|
||||
[Ll]og/
|
||||
[Ll]ogs/
|
||||
|
||||
# Visual Studio 2015/2017 cache/options directory
|
||||
.vs/
|
||||
# Uncomment if you have tasks that create the project's static files in wwwroot
|
||||
#wwwroot/
|
||||
|
||||
# Visual Studio 2017 auto generated files
|
||||
Generated\ Files/
|
||||
|
||||
# MSTest test Results
|
||||
[Tt]est[Rr]esult*/
|
||||
[Bb]uild[Ll]og.*
|
||||
|
||||
# NUnit
|
||||
*.VisualState.xml
|
||||
TestResult.xml
|
||||
nunit-*.xml
|
||||
|
||||
# Build Results of an ATL Project
|
||||
[Dd]ebugPS/
|
||||
[Rr]eleasePS/
|
||||
dlldata.c
|
||||
|
||||
# Benchmark Results
|
||||
BenchmarkDotNet.Artifacts/
|
||||
|
||||
# .NET
|
||||
project.lock.json
|
||||
project.fragment.lock.json
|
||||
artifacts/
|
||||
|
||||
# Tye
|
||||
.tye/
|
||||
|
||||
# ASP.NET Scaffolding
|
||||
ScaffoldingReadMe.txt
|
||||
|
||||
# StyleCop
|
||||
StyleCopReport.xml
|
||||
|
||||
# Files built by Visual Studio
|
||||
*_i.c
|
||||
*_p.c
|
||||
*_h.h
|
||||
*.ilk
|
||||
*.meta
|
||||
*.obj
|
||||
*.iobj
|
||||
*.pch
|
||||
*.pdb
|
||||
*.ipdb
|
||||
*.pgc
|
||||
*.pgd
|
||||
*.rsp
|
||||
*.sbr
|
||||
*.tlb
|
||||
*.tli
|
||||
*.tlh
|
||||
*.tmp
|
||||
*.tmp_proj
|
||||
*_wpftmp.csproj
|
||||
*.log
|
||||
*.tlog
|
||||
*.vspscc
|
||||
*.vssscc
|
||||
.builds
|
||||
*.pidb
|
||||
*.svclog
|
||||
*.scc
|
||||
|
||||
# Chutzpah Test files
|
||||
_Chutzpah*
|
||||
|
||||
# Visual C++ cache files
|
||||
ipch/
|
||||
*.aps
|
||||
*.ncb
|
||||
*.opendb
|
||||
*.opensdf
|
||||
*.sdf
|
||||
*.cachefile
|
||||
*.VC.db
|
||||
*.VC.VC.opendb
|
||||
|
||||
# Visual Studio profiler
|
||||
*.psess
|
||||
*.vsp
|
||||
*.vspx
|
||||
*.sap
|
||||
|
||||
# Visual Studio Trace Files
|
||||
*.e2e
|
||||
|
||||
# TFS 2012 Local Workspace
|
||||
$tf/
|
||||
|
||||
# Guidance Automation Toolkit
|
||||
*.gpState
|
||||
|
||||
# ReSharper is a .NET coding add-in
|
||||
_ReSharper*/
|
||||
*.[Rr]e[Ss]harper
|
||||
*.DotSettings.user
|
||||
|
||||
# TeamCity is a build add-in
|
||||
_TeamCity*
|
||||
|
||||
# DotCover is a Code Coverage Tool
|
||||
*.dotCover
|
||||
|
||||
# AxoCover is a Code Coverage Tool
|
||||
.axoCover/*
|
||||
!.axoCover/settings.json
|
||||
|
||||
# Coverlet is a free, cross platform Code Coverage Tool
|
||||
coverage*.json
|
||||
coverage*.xml
|
||||
coverage*.info
|
||||
|
||||
# Visual Studio code coverage results
|
||||
*.coverage
|
||||
*.coveragexml
|
||||
|
||||
# NCrunch
|
||||
_NCrunch_*
|
||||
.*crunch*.local.xml
|
||||
nCrunchTemp_*
|
||||
|
||||
# MightyMoose
|
||||
*.mm.*
|
||||
AutoTest.Net/
|
||||
|
||||
# Web workbench (sass)
|
||||
.sass-cache/
|
||||
|
||||
# Installshield output folder
|
||||
[Ee]xpress/
|
||||
|
||||
# DocProject is a documentation generator add-in
|
||||
DocProject/buildhelp/
|
||||
DocProject/Help/*.HxT
|
||||
DocProject/Help/*.HxC
|
||||
DocProject/Help/*.hhc
|
||||
DocProject/Help/*.hhk
|
||||
DocProject/Help/*.hhp
|
||||
DocProject/Help/Html2
|
||||
DocProject/Help/html
|
||||
|
||||
# Click-Once directory
|
||||
publish/
|
||||
|
||||
# Publish Web Output
|
||||
*.[Pp]ublish.xml
|
||||
*.azurePubxml
|
||||
# Note: Comment the next line if you want to checkin your web deploy settings,
|
||||
# but database connection strings (with potential passwords) will be unencrypted
|
||||
*.pubxml
|
||||
*.publishproj
|
||||
|
||||
# Microsoft Azure Web App publish settings. Comment the next line if you want to
|
||||
# checkin your Azure Web App publish settings, but sensitive information contained
|
||||
# in these scripts will be unencrypted
|
||||
PublishScripts/
|
||||
|
||||
# NuGet Packages
|
||||
*.nupkg
|
||||
# NuGet Symbol Packages
|
||||
*.snupkg
|
||||
# The packages folder can be ignored because of Package Restore
|
||||
**/[Pp]ackages/*
|
||||
# except build/, which is used as an MSBuild target.
|
||||
!**/[Pp]ackages/build/
|
||||
# Uncomment if necessary however generally it will be regenerated when needed
|
||||
#!**/[Pp]ackages/repositories.config
|
||||
# NuGet v3's project.json files produces more ignorable files
|
||||
*.nuget.props
|
||||
*.nuget.targets
|
||||
|
||||
# Microsoft Azure Build Output
|
||||
csx/
|
||||
*.build.csdef
|
||||
|
||||
# Microsoft Azure Emulator
|
||||
ecf/
|
||||
rcf/
|
||||
|
||||
# Windows Store app package directories and files
|
||||
AppPackages/
|
||||
BundleArtifacts/
|
||||
Package.StoreAssociation.xml
|
||||
_pkginfo.txt
|
||||
*.appx
|
||||
*.appxbundle
|
||||
*.appxupload
|
||||
|
||||
# Visual Studio cache files
|
||||
# files ending in .cache can be ignored
|
||||
*.[Cc]ache
|
||||
# but keep track of directories ending in .cache
|
||||
!?*.[Cc]ache/
|
||||
|
||||
# Others
|
||||
ClientBin/
|
||||
~$*
|
||||
*~
|
||||
*.dbmdl
|
||||
*.dbproj.schemaview
|
||||
*.jfm
|
||||
*.pfx
|
||||
*.publishsettings
|
||||
orleans.codegen.cs
|
||||
|
||||
# Including strong name files can present a security risk
|
||||
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
|
||||
#*.snk
|
||||
|
||||
# Since there are multiple workflows, uncomment next line to ignore bower_components
|
||||
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
|
||||
#bower_components/
|
||||
|
||||
# RIA/Silverlight projects
|
||||
Generated_Code/
|
||||
|
||||
# Backup & report files from converting an old project file
|
||||
# to a newer Visual Studio version. Backup files are not needed,
|
||||
# because we have git ;-)
|
||||
_UpgradeReport_Files/
|
||||
Backup*/
|
||||
UpgradeLog*.XML
|
||||
UpgradeLog*.htm
|
||||
ServiceFabricBackup/
|
||||
*.rptproj.bak
|
||||
|
||||
# SQL Server files
|
||||
*.mdf
|
||||
*.ldf
|
||||
*.ndf
|
||||
|
||||
# Business Intelligence projects
|
||||
*.rdl.data
|
||||
*.bim.layout
|
||||
*.bim_*.settings
|
||||
*.rptproj.rsuser
|
||||
*- [Bb]ackup.rdl
|
||||
*- [Bb]ackup ([0-9]).rdl
|
||||
*- [Bb]ackup ([0-9][0-9]).rdl
|
||||
|
||||
# Microsoft Fakes
|
||||
FakesAssemblies/
|
||||
|
||||
# GhostDoc plugin setting file
|
||||
*.GhostDoc.xml
|
||||
|
||||
# Node.js Tools for Visual Studio
|
||||
.ntvs_analysis.dat
|
||||
node_modules/
|
||||
|
||||
# Visual Studio 6 build log
|
||||
*.plg
|
||||
|
||||
# Visual Studio 6 workspace options file
|
||||
*.opt
|
||||
|
||||
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
|
||||
*.vbw
|
||||
|
||||
# Visual Studio 6 auto-generated project file (contains which files were open etc.)
|
||||
*.vbp
|
||||
|
||||
# Visual Studio 6 workspace and project file (working project files containing files to include in project)
|
||||
*.dsw
|
||||
*.dsp
|
||||
|
||||
# Visual Studio 6 technical files
|
||||
*.ncb
|
||||
*.aps
|
||||
|
||||
# Visual Studio LightSwitch build output
|
||||
**/*.HTMLClient/GeneratedArtifacts
|
||||
**/*.DesktopClient/GeneratedArtifacts
|
||||
**/*.DesktopClient/ModelManifest.xml
|
||||
**/*.Server/GeneratedArtifacts
|
||||
**/*.Server/ModelManifest.xml
|
||||
_Pvt_Extensions
|
||||
|
||||
# Paket dependency manager
|
||||
.paket/paket.exe
|
||||
paket-files/
|
||||
|
||||
# FAKE - F# Make
|
||||
.fake/
|
||||
|
||||
# CodeRush personal settings
|
||||
.cr/personal
|
||||
|
||||
# Python Tools for Visual Studio (PTVS)
|
||||
__pycache__/
|
||||
*.pyc
|
||||
|
||||
# Cake - Uncomment if you are using it
|
||||
# tools/**
|
||||
# !tools/packages.config
|
||||
|
||||
# Tabs Studio
|
||||
*.tss
|
||||
|
||||
# Telerik's JustMock configuration file
|
||||
*.jmconfig
|
||||
|
||||
# BizTalk build output
|
||||
*.btp.cs
|
||||
*.btm.cs
|
||||
*.odx.cs
|
||||
*.xsd.cs
|
||||
|
||||
# OpenCover UI analysis results
|
||||
OpenCover/
|
||||
|
||||
# Azure Stream Analytics local run output
|
||||
ASALocalRun/
|
||||
|
||||
# MSBuild Binary and Structured Log
|
||||
*.binlog
|
||||
|
||||
# NVidia Nsight GPU debugger configuration file
|
||||
*.nvuser
|
||||
|
||||
# MFractors (Xamarin productivity tool) working folder
|
||||
.mfractor/
|
||||
|
||||
# Local History for Visual Studio
|
||||
.localhistory/
|
||||
|
||||
# Visual Studio History (VSHistory) files
|
||||
.vshistory/
|
||||
|
||||
# BeatPulse healthcheck temp database
|
||||
healthchecksdb
|
||||
|
||||
# Backup folder for Package Reference Convert tool in Visual Studio 2017
|
||||
MigrationBackup/
|
||||
|
||||
# Ionide (cross platform F# VS Code tools) working folder
|
||||
.ionide/
|
||||
|
||||
# Fody - auto-generated XML schema
|
||||
FodyWeavers.xsd
|
||||
|
||||
# VS Code files for those working on multiple tools
|
||||
.vscode/*
|
||||
!.vscode/settings.json
|
||||
!.vscode/tasks.json
|
||||
!.vscode/launch.json
|
||||
!.vscode/extensions.json
|
||||
*.code-workspace
|
||||
|
||||
# Local History for Visual Studio Code
|
||||
.history/
|
||||
|
||||
# Windows Installer files from build outputs
|
||||
*.cab
|
||||
*.msi
|
||||
*.msix
|
||||
*.msm
|
||||
*.msp
|
||||
|
||||
# JetBrains Rider
|
||||
*.sln.iml
|
||||
.idea
|
||||
|
||||
##
|
||||
## Visual studio for Mac
|
||||
##
|
||||
|
||||
|
||||
# globs
|
||||
Makefile.in
|
||||
*.userprefs
|
||||
*.usertasks
|
||||
config.make
|
||||
config.status
|
||||
aclocal.m4
|
||||
install-sh
|
||||
autom4te.cache/
|
||||
*.tar.gz
|
||||
tarballs/
|
||||
test-results/
|
||||
|
||||
# Mac bundle stuff
|
||||
*.dmg
|
||||
*.app
|
||||
|
||||
# content below from: https://github.com/github/gitignore/blob/master/Global/macOS.gitignore
|
||||
# General
|
||||
.DS_Store
|
||||
.AppleDouble
|
||||
.LSOverride
|
||||
|
||||
# Icon must end with two \r
|
||||
Icon
|
||||
|
||||
|
||||
# Thumbnails
|
||||
._*
|
||||
|
||||
# Files that might appear in the root of a volume
|
||||
.DocumentRevisions-V100
|
||||
.fseventsd
|
||||
.Spotlight-V100
|
||||
.TemporaryItems
|
||||
.Trashes
|
||||
.VolumeIcon.icns
|
||||
.com.apple.timemachine.donotpresent
|
||||
|
||||
# Directories potentially created on remote AFP share
|
||||
.AppleDB
|
||||
.AppleDesktop
|
||||
Network Trash Folder
|
||||
Temporary Items
|
||||
.apdisk
|
||||
|
||||
# content below from: https://github.com/github/gitignore/blob/master/Global/Windows.gitignore
|
||||
# Windows thumbnail cache files
|
||||
Thumbs.db
|
||||
ehthumbs.db
|
||||
ehthumbs_vista.db
|
||||
|
||||
# Dump file
|
||||
*.stackdump
|
||||
|
||||
# Folder config file
|
||||
[Dd]esktop.ini
|
||||
|
||||
# Recycle Bin used on file shares
|
||||
$RECYCLE.BIN/
|
||||
|
||||
# Windows Installer files
|
||||
*.cab
|
||||
*.msi
|
||||
*.msix
|
||||
*.msm
|
||||
*.msp
|
||||
|
||||
# Windows shortcuts
|
||||
*.lnk
|
||||
|
||||
# Vim temporary swap files
|
||||
*.swp
|
balakhonov_danila_lab_2/skibidi_app_2/Dockerfile | 14 lines (new file)
@@ -0,0 +1,14 @@
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
WORKDIR /App

# Copy everything
COPY . ./
# Restore as distinct layers
RUN dotnet restore
# Build and publish a release
RUN dotnet publish -c Release -o out

FROM mcr.microsoft.com/dotnet/runtime:8.0 AS runtime
WORKDIR /App
COPY --from=build /App/out .
ENTRYPOINT ["dotnet", "skibidi_app_2.dll"]
balakhonov_danila_lab_2/skibidi_app_2/Program.fs | 16 lines (new file)
@@ -0,0 +1,16 @@
// The file produced by the first program and the path for this program's answer.
let INPUT_FILE = @"/var/result/data.txt"
let OUTPUT_FILE = @"/var/result/result.txt"

// Read the file and parse every line as an integer.
let getNumbersFromFile(path: string): seq<int> =
    System.IO.File.ReadLines(path)
    |> Seq.map int

// Count how many times the maximum value occurs in the sequence.
let getCountOfMaxNumber(numbers: seq<int>): int =
    numbers
    |> Seq.max
    |> fun maxNum -> Seq.filter ((=) maxNum) numbers
    |> Seq.length

let numbers = getNumbersFromFile(INPUT_FILE)
let count = getCountOfMaxNumber(numbers)
System.IO.File.WriteAllText(OUTPUT_FILE, string count)
balakhonov_danila_lab_2/skibidi_app_2/README.md | 4 lines (new file)
@@ -0,0 +1,4 @@
# Second program of lab work No. 2
> Variant 3
>
> Finds the largest number in the file `/var/result/data.txt` and saves how many times it occurs in the sequence to `/var/result/result.txt`
balakhonov_danila_lab_2/skibidi_app_2/skibidi_app_2.fsproj | 12 lines (new file)
@@ -0,0 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net8.0</TargetFramework>
  </PropertyGroup>

  <ItemGroup>
    <Compile Include="Program.fs" />
  </ItemGroup>

</Project>
bazunov_andrew_lab_1/.gitignore | 2 lines (new file, vendored)
@@ -0,0 +1,2 @@
ollama
./ollama
bazunov_andrew_lab_1/README.md | 33 lines (new file)
@@ -0,0 +1,33 @@
# Distributed Computing and Applications, Lab 1
## _Author: Bazunov Andrey Igrevich, PIbd-42_

The following services were chosen:
- 1. Ollama (_a service for running LLM models_)
- 2. Open Web Ui (_a service for convenient chatting with the model served by Ollama_)
- 3. Gitea (_a Git service_)

# Docker

> Before running, install Docker and check the version

```sh
docker-compose --version
```

> Next, adjust the docker-compose.yaml file and start the containers

```sh
docker-compose up -d
```

> To shut the containers down, use
```sh
docker-compose down
```
---
> Note: after the containers are up, you need to enter the **ollama** container and pull the [gemma2](https://ollama.com/library/gemma2:2b) model
> ```sh
> docker-compose exec ollama ollama run gemma2:2b
> ```
---
After that, the Open Web Ui service is available at **localhost:8080** for chatting with the model, and Gitea at **localhost:3000** - [demo of the setup](https://vk.com/video/@viltskaa?z=video236673313_456239574%2Fpl_236673313_-2)
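The note above assumes the gemma2:2b model has already been pulled inside the ollama container. Below is a minimal smoke check from the host, sketched under the assumption of the 7869:11434 port mapping from the compose file that follows and Ollama's standard `/api/tags` listing endpoint; the snippet itself is not part of the lab code.

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// Queries the Ollama HTTP API exposed on the host and reports whether
// the gemma2:2b model is present in the local model list.
func main() {
	resp, err := http.Get("http://localhost:7869/api/tags")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var tags struct {
		Models []struct {
			Name string `json:"name"`
		} `json:"models"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&tags); err != nil {
		panic(err)
	}

	for _, m := range tags.Models {
		if m.Name == "gemma2:2b" {
			fmt.Println("gemma2:2b is available")
			return
		}
	}
	fmt.Println("gemma2:2b has not been pulled yet")
}
```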
bazunov_andrew_lab_1/docker-compose.yml | 61 lines (new file)
@@ -0,0 +1,61 @@
services:
  gitea: # Service name
    image: gitea/gitea:latest # Image name
    container_name: gitea # Container name, can be arbitrary
    ports:
      - "3000:3000" # Expose the Gitea port on the host
    volumes: # storage
      - data:/data
    environment: # environment variables
      USER_UID: 1000
      USER_GID: 1000

  ollama:
    image: ollama/ollama:latest
    container_name: ollama
    restart: always
    ports:
      - 7869:11434
    pull_policy: always
    tty: true
    volumes:
      - .:/code
      - ./ollama/ollama:/root/.ollama # Directory for Ollama data
    environment:
      - OLLAMA_KEEP_ALIVE=24h
      - OLLAMA_HOST=0.0.0.0 # Host for the Ollama API
    networks:
      - ollama-docker
    command: ["serve"] # Run Ollama in server mode

  ollama-webui:
    image: ghcr.io/open-webui/open-webui:main # Open Web UI image
    container_name: ollama-webui
    restart: unless-stopped
    volumes:
      - ./ollama/ollama-webui:/app/backend/data
    ports:
      - 8080:8080 # Web UI port
    environment: # https://docs.openwebui.com/getting-started/env-configuration#default_models
      - OLLAMA_BASE_URLS=http://host.docker.internal:7869
      - ENV=dev
      - WEBUI_AUTH=False
      - WEBUI_NAME=Viltskaa AI
      - WEBUI_URL=http://localhost:8080
      - WEBUI_SECRET_KEY=t0p-s3cr3t
    depends_on:
      - ollama
    extra_hosts:
      - host.docker.internal:host-gateway
    networks:
      - ollama-docker

networks:
  ollama-docker:
    external: false

volumes:
  ollama:
    driver: local
  data:
    driver: local
bazunov_andrew_lab_2/FileCreator/Dockerfile | 14 lines (new file)
@@ -0,0 +1,14 @@
# Use the official Go image as the base
FROM golang:1.23

# Set the working directory
WORKDIR /app

# Copy the module files
COPY . .

# Build the module
RUN go build -o /bin/FileCreator

# Run the module
CMD ["/bin/FileCreator"]
bazunov_andrew_lab_2/FileCreator/go.mod | 1 line (new file)
@@ -0,0 +1 @@
module FileCreator
bazunov_andrew_lab_2/FileCreator/main.go | 92 lines (new file)
@@ -0,0 +1,92 @@
package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"math/rand"
	"os"
	"path/filepath"
)

// Fills /var/data with ten files named by an MD5 hash plus data.txt,
// each containing a few hundred random integers, one per line.

const DIR = "/var/data"

func Exists(name string) (bool, error) {
	_, err := os.Stat(name)
	if os.IsNotExist(err) {
		return false, nil
	}
	// The original returned "err != nil" here, which inverted the result.
	return err == nil, err
}

func CreateDirectory(dirs string) error {
	if _, err := os.Stat(dirs); os.IsNotExist(err) {
		// 0755 instead of 0664: directories need the execute bit to be traversable.
		err := os.MkdirAll(dirs, 0755)
		if err != nil {
			return err
		}
	}
	return nil
}

func CreateFileOrOpenIfExist(name string) (*os.File, error) {
	err := CreateDirectory(filepath.Dir(name))
	if err != nil {
		return nil, err
	}
	exists, err := Exists(name)
	if err != nil {
		return nil, err
	}
	if exists {
		// O_TRUNC so a re-run replaces the old contents instead of leaving a stale tail.
		return os.OpenFile(name, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0664)
	}
	return os.Create(name)
}

func CreateFileAndWriteData(filename string) error {
	file, err := CreateFileOrOpenIfExist(filename)
	if err != nil {
		return err
	}
	lines := rand.Intn(1000) + 100

	for i := 0; i < lines; i++ {
		randomValueForLine := rand.Intn(1_000_000)
		_, err = fmt.Fprintf(file, "%d\r\n", randomValueForLine)
		if err != nil {
			return err
		}
	}

	return file.Close()
}

func GetMD5Hash(text string) string {
	hash := md5.Sum([]byte(text))
	return hex.EncodeToString(hash[:])
}

func main() {
	for i := 0; i < 10; i++ {
		filename := fmt.Sprintf("%s/%s.txt", DIR, GetMD5Hash(fmt.Sprintf("%d", i)))
		err := CreateFileAndWriteData(filename)
		if err != nil {
			fmt.Println(err)
		} else {
			fmt.Printf("Created file %s\n", filename)
		}
	}

	err := CreateFileAndWriteData(DIR + "/data.txt")
	if err != nil {
		fmt.Println(err)
	} else {
		fmt.Printf("Created file %s\n", DIR+"/data.txt")
	}
}
bazunov_andrew_lab_2/FirstService/Dockerfile | 14 lines (new file)
@@ -0,0 +1,14 @@
# Use the official Go image as the base
FROM golang:1.23

# Set the working directory
WORKDIR /app

# Copy the module files
COPY . .

# Build the module
RUN go build -o /bin/FirstService

# Run the module
CMD ["/bin/FirstService"]
bazunov_andrew_lab_2/FirstService/go.mod | 1 line (new file)
@@ -0,0 +1 @@
module RVIP2
bazunov_andrew_lab_2/FirstService/main.go | 94 lines (new file)
@@ -0,0 +1,94 @@
package main

import (
	"fmt"
	"os"
)

// Builds the file /var/result/data.txt so that each line holds the number of
// characters in one file name from the /var/data directory.

const INPUT = "/var/data"

// The original constant was "/data/result", which matches neither the comment
// above nor the volume mounted at /var/result in docker-compose.yaml.
const OUTPUT = "/var/result"

func GetListFilesInDirectory(directory string) ([]string, error) {
	f, err := os.Open(directory)
	if err != nil {
		fmt.Println(err)
		return nil, err
	}
	files, err := f.Readdir(0)
	if err != nil {
		fmt.Println(err)
		return nil, err
	}

	var fileNames []string
	for _, file := range files {
		fileName := file.Name()
		fileNames = append(fileNames, fileName)
	}
	return fileNames, nil
}

func Exists(name string) (bool, error) {
	_, err := os.Stat(name)
	if os.IsNotExist(err) {
		return false, nil
	}
	// The original returned "err != nil" here, which inverted the result.
	return err == nil, err
}

func CreateFileOrOpenIfExist(name string) (*os.File, error) {
	exists, err := Exists(name)
	if err != nil {
		return nil, err
	}
	if exists {
		return os.OpenFile(name, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0664)
	}
	return os.Create(name)
}

func CreateFileAndWriteData(filename string, lines []string) error {
	file, err := CreateFileOrOpenIfExist(filename)
	if err != nil {
		return err
	}

	for _, line := range lines {
		// Fprintln: the original passed the data as a format string and never wrote newlines.
		_, err = fmt.Fprintln(file, line)
		if err != nil {
			return err
		}
	}

	return file.Close()
}

func main() {
	filenames, err := GetListFilesInDirectory(INPUT)
	if err != nil {
		fmt.Println(err)
		return
	}

	var lengthsOfFilenames []string
	for _, filename := range filenames {
		fmt.Println(filename)
		lengthsOfFilenames = append(lengthsOfFilenames, fmt.Sprintf("%d", len(filename)))
	}

	// Write the name lengths (the original mistakenly wrote the names themselves).
	err = CreateFileAndWriteData(OUTPUT+"/data.txt", lengthsOfFilenames)
	if err != nil {
		return
	}

	fmt.Println("First Service is done.")
}
bazunov_andrew_lab_2/SecondService/Dockerfile | 14 lines (new file)
@@ -0,0 +1,14 @@
# Use the official Go image as the base
FROM golang:1.23

# Set the working directory
WORKDIR /app

# Copy the module files
COPY . .

# Build the module
RUN go build -o /bin/SecondService

# Run the module
CMD ["/bin/SecondService"]
bazunov_andrew_lab_2/SecondService/go.mod | 1 line (new file)
@@ -0,0 +1 @@
module SecondService
bazunov_andrew_lab_2/SecondService/main.go | 79 lines (new file)
@@ -0,0 +1,79 @@
package main

import (
	"bufio"
	"fmt"
	"math"
	"os"
	"strconv"
	"strings"
)

// Finds the smallest number in the file /var/data/data.txt and saves its third
// power to /var/result/result.txt.

const INPUT = "/var/data/data.txt"
const OUTPUT = "/var/result/result.txt"

func ReadlinesFromFile(filename string) ([]string, error) {
	file, err := os.Open(filename)
	if err != nil {
		return nil, err
	}

	var output []string
	scanner := bufio.NewScanner(file)
	for scanner.Scan() {
		output = append(output, scanner.Text())
	}

	err = file.Close()
	if err != nil {
		return nil, err
	}

	return output, nil
}

func WriteIntToFile(filename string, i int) error {
	file, err := os.Create(filename)
	if err != nil {
		return err
	}

	defer func(file *os.File) {
		_ = file.Close()
	}(file)

	_, err = file.WriteString(fmt.Sprintf("%d\n", i))
	if err != nil {
		return err
	}
	return nil
}

func main() {
	lines, err := ReadlinesFromFile(INPUT)
	if err != nil {
		fmt.Println(err)
		return
	}

	// The original compared against the value returned by fmt.Sscanf (the number
	// of scanned items) and started from 0, so the minimum was always wrong.
	// Parse each line explicitly and track the minimum; TrimSpace drops the
	// trailing \r left by the \r\n line endings the generator writes.
	minValue := math.MaxInt
	for _, line := range lines {
		value, err := strconv.Atoi(strings.TrimSpace(line))
		if err != nil {
			fmt.Println(err)
			continue
		}
		if value < minValue {
			minValue = value
		}
	}

	// The task asks for the third power of the minimum.
	cube := minValue * minValue * minValue

	if err = WriteIntToFile(OUTPUT, cube); err != nil {
		return
	}
	fmt.Printf("Wrote %d to %s\n", cube, OUTPUT)

	fmt.Println("Second Service is done.")
}
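Taken together with the compose file below, the pipeline can be traced by hand; here is a worked example under the assumption of a single run starting from an empty ./data directory. FileCreator names ten files by a 32-character MD5 hash plus ".txt", so each name is 36 characters long, and it also writes data.txt (8 characters). FirstService therefore emits ten lines of 36 and one line of 8, SecondService picks the minimum 8, and result.txt ends up containing 8^3 = 512.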
bazunov_andrew_lab_2/docker-compose.yaml | 27 lines (new file)
@@ -0,0 +1,27 @@
services:
  file_generate:
    build:
      context: ./FileCreator
      dockerfile: Dockerfile
    volumes:
      - ./data:/var/data # Mount the local data folder as /var/data inside the container

  first_service:
    build:
      context: ./FirstService
      dockerfile: Dockerfile
    volumes:
      - ./data:/var/data
      - ./data:/var/result
    depends_on:
      - file_generate

  second_service:
    build:
      context: ./SecondService
      dockerfile: Dockerfile
    volumes:
      - ./data:/var/data
      - ./data:/var/result
    depends_on:
      - first_service
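The compose file wires the three containers into a pipeline over the shared ./data volume: file_generate writes the inputs, first_service produces data.txt with the name lengths, and second_service writes the final answer. Below is a minimal sketch of a host-side smoke check, under the assumption that it is run from the bazunov_andrew_lab_2 directory after `docker-compose up` has finished; it is not part of the lab code.

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// Reads the file produced by second_service through the shared ./data volume
// and prints its contents.
func main() {
	data, err := os.ReadFile("data/result.txt")
	if err != nil {
		fmt.Println("pipeline has not produced a result yet:", err)
		return
	}
	fmt.Println("second_service wrote:", strings.TrimSpace(string(data)))
}
```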
BIN
bazunov_andrew_lab_3/PersonApp/.DS_Store
vendored
Normal file
BIN
bazunov_andrew_lab_3/PersonApp/.DS_Store
vendored
Normal file
Binary file not shown.
4
bazunov_andrew_lab_3/PersonApp/.env
Normal file
4
bazunov_andrew_lab_3/PersonApp/.env
Normal file
@ -0,0 +1,4 @@
|
||||
PORT=8080
|
||||
TASK_APP_URL=http://task-app:8000
|
||||
TIMEOUT=15
|
||||
DATABASE=./database.db
|
14
bazunov_andrew_lab_3/PersonApp/Dockerfile
Normal file
14
bazunov_andrew_lab_3/PersonApp/Dockerfile
Normal file
@ -0,0 +1,14 @@
|
||||
FROM golang:1.23
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY go.mod go.sum ./
|
||||
RUN go mod download
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN go build -o /bin/PersonApp
|
||||
|
||||
EXPOSE 8080
|
||||
|
||||
CMD ["/bin/PersonApp"]
|
BIN
bazunov_andrew_lab_3/PersonApp/database.db
Normal file
BIN
bazunov_andrew_lab_3/PersonApp/database.db
Normal file
Binary file not shown.
10
bazunov_andrew_lab_3/PersonApp/go.mod
Normal file
10
bazunov_andrew_lab_3/PersonApp/go.mod
Normal file
@ -0,0 +1,10 @@
|
||||
module PersonApp
|
||||
|
||||
go 1.23.1
|
||||
|
||||
require (
|
||||
github.com/gorilla/mux v1.8.1
|
||||
github.com/mattn/go-sqlite3 v1.14.24
|
||||
)
|
||||
|
||||
require github.com/joho/godotenv v1.5.1 // indirect
|
6
bazunov_andrew_lab_3/PersonApp/go.sum
Normal file
6
bazunov_andrew_lab_3/PersonApp/go.sum
Normal file
@ -0,0 +1,6 @@
|
||||
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
|
||||
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
|
||||
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
|
||||
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
|
||||
github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
|
||||
github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
157
bazunov_andrew_lab_3/PersonApp/handlers/handlers.go
Normal file
157
bazunov_andrew_lab_3/PersonApp/handlers/handlers.go
Normal file
@ -0,0 +1,157 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"PersonApp/httpClient"
|
||||
"PersonApp/models"
|
||||
"PersonApp/repository"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"github.com/gorilla/mux"
|
||||
"net/http"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
func InitRoutes(r *mux.Router, rep repository.PersonRepository, cln httpClient.Client) {
|
||||
r.HandleFunc("/", GetPersons(rep, cln)).Methods("GET")
|
||||
r.HandleFunc("/{id:[0-9]+}", GetPersonById(rep, cln)).Methods("GET")
|
||||
r.HandleFunc("/", CreatePerson(rep)).Methods("POST")
|
||||
r.HandleFunc("/{id:[0-9]+}", UpdatePerson(rep)).Methods("PUT")
|
||||
r.HandleFunc("/{id:[0-9]+}", DeletePerson(rep)).Methods("DELETE")
|
||||
}
|
||||
|
||||
func GetPersons(rep repository.PersonRepository, cln httpClient.Client) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
fmt.Println("GET PERSONS")
|
||||
|
||||
persons, err := rep.GetAllPersons()
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
for i := 0; i < len(persons); i++ {
|
||||
tasks, _ := cln.GetPersonTasks(persons[i].Id)
|
||||
persons[i].Tasks = tasks
|
||||
}
|
||||
|
||||
err = json.NewEncoder(w).Encode(persons)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func GetPersonById(rep repository.PersonRepository, cln httpClient.Client) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
id, err := strconv.Atoi(mux.Vars(r)["id"])
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
person, err := rep.GetPersonById(id)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
tasks, err := cln.GetPersonTasks(id)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
} else {
|
||||
person.Tasks = tasks
|
||||
}
|
||||
|
||||
err = json.NewEncoder(w).Encode(person)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func CreatePerson(rep repository.PersonRepository) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
|
||||
var person *models.Person
|
||||
|
||||
err := json.NewDecoder(r.Body).Decode(&person)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
person, err = rep.CreatePerson(*person)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusCreated)
|
||||
err = json.NewEncoder(w).Encode(person)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func UpdatePerson(rep repository.PersonRepository) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
|
||||
id, err := strconv.Atoi(mux.Vars(r)["id"])
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
var person *models.Person
|
||||
err = json.NewDecoder(r.Body).Decode(&person)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
person, err = rep.UpdatePerson(models.Person{
|
||||
Id: id,
|
||||
Name: person.Name,
|
||||
Tasks: nil,
|
||||
})
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusAccepted)
|
||||
err = json.NewEncoder(w).Encode(person)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func DeletePerson(rep repository.PersonRepository) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
|
||||
id, err := strconv.Atoi(mux.Vars(r)["id"])
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
err = rep.DeletePerson(id)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}
|
||||
}
|
72
bazunov_andrew_lab_3/PersonApp/httpClient/client.go
Normal file
72
bazunov_andrew_lab_3/PersonApp/httpClient/client.go
Normal file
@ -0,0 +1,72 @@
|
||||
package httpClient
|
||||
|
||||
import (
|
||||
"PersonApp/models"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
type Client interface {
|
||||
GetPersonTasks(id int) ([]models.Task, error)
|
||||
TestConnection() (bool, error)
|
||||
}
|
||||
|
||||
type client struct {
|
||||
BaseUrl string
|
||||
Timeout time.Duration
|
||||
}
|
||||
|
||||
func (c *client) TestConnection() (bool, error) {
|
||||
client := &http.Client{Timeout: c.Timeout}
|
||||
url := fmt.Sprintf("%s/", c.BaseUrl)
|
||||
resp, err := client.Get(url)
|
||||
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
defer func(Body io.ReadCloser) {
|
||||
err := Body.Close()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}(resp.Body)
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return false, fmt.Errorf("bad status code: %d", resp.StatusCode)
|
||||
}
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (c *client) GetPersonTasks(id int) ([]models.Task, error) {
|
||||
client := &http.Client{Timeout: c.Timeout * time.Second}
|
||||
url := fmt.Sprintf("%s/f/%d", c.BaseUrl, id)
|
||||
|
||||
resp, err := client.Get(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func(Body io.ReadCloser) {
|
||||
err := Body.Close()
|
||||
if err != nil {
|
||||
|
||||
}
|
||||
}(resp.Body)
|
||||
|
||||
body, _ := io.ReadAll(resp.Body)
|
||||
|
||||
var tasks []models.Task
|
||||
if err := json.Unmarshal(body, &tasks); err != nil {
|
||||
fmt.Printf("Unmarshal error: %s", err)
|
||||
return []models.Task{}, err
|
||||
}
|
||||
|
||||
return tasks, nil
|
||||
}
|
||||
|
||||
func NewClient(baseUrl string, timeout time.Duration) Client {
|
||||
return &client{BaseUrl: baseUrl, Timeout: timeout}
|
||||
}
|
34
bazunov_andrew_lab_3/PersonApp/httpTests/test.http
Normal file
34
bazunov_andrew_lab_3/PersonApp/httpTests/test.http
Normal file
@ -0,0 +1,34 @@
|
||||
GET http://localhost/person-app/
|
||||
Accept: application/json
|
||||
|
||||
###
|
||||
|
||||
GET http://localhost/person-app/1
|
||||
Accept: application/json
|
||||
|
||||
###
|
||||
|
||||
POST http://localhost/person-app/
|
||||
Accept: application/json
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"name": "TEST3"
|
||||
}
|
||||
|
||||
###
|
||||
|
||||
PUT http://localhost/person-app/3
|
||||
Accept: application/json
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"name": "TEST11"
|
||||
}
|
||||
|
||||
###
|
||||
|
||||
DELETE http://localhost/person-app/3
|
||||
Accept: application/json
|
||||
|
||||
###
|
47
bazunov_andrew_lab_3/PersonApp/main.go
Normal file
47
bazunov_andrew_lab_3/PersonApp/main.go
Normal file
@ -0,0 +1,47 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"PersonApp/handlers"
|
||||
"PersonApp/httpClient"
|
||||
"PersonApp/repository"
|
||||
"PersonApp/storage"
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/joho/godotenv"
|
||||
"net/http"
|
||||
"os"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
func main() {
|
||||
err := godotenv.Load(".env")
|
||||
if err != nil {
|
||||
panic("Error loading .env file")
|
||||
}
|
||||
|
||||
url := os.Getenv("TASK_APP_URL")
|
||||
port := os.Getenv("PORT")
|
||||
databasePath := os.Getenv("DATABASE")
|
||||
timeout, err := strconv.Atoi(os.Getenv("TIMEOUT"))
|
||||
|
||||
if err != nil {
|
||||
panic("Error converting timeout to int")
|
||||
}
|
||||
|
||||
database, err := storage.Init(databasePath)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
cln := httpClient.NewClient(url, time.Duration(timeout))
|
||||
rep := repository.NewPersonRepository(database)
|
||||
router := mux.NewRouter()
|
||||
handlers.InitRoutes(router, rep, cln)
|
||||
|
||||
err = http.ListenAndServe(":"+port, router)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
storage.Close(database)
|
||||
}
|
24
bazunov_andrew_lab_3/PersonApp/models/models.go
Normal file
24
bazunov_andrew_lab_3/PersonApp/models/models.go
Normal file
@ -0,0 +1,24 @@
|
||||
package models
|
||||
|
||||
type Person struct {
|
||||
Id int `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Tasks []Task `json:"tasks"`
|
||||
}
|
||||
|
||||
type PersonCreate struct {
|
||||
Name string `json:"name"`
|
||||
}
|
||||
|
||||
type Task struct {
|
||||
Id int `json:"id"`
|
||||
PersonId int `json:"person_id"`
|
||||
Name string `json:"name"`
|
||||
Date string `json:"date"`
|
||||
}
|
||||
|
||||
type TaskCreate struct {
|
||||
PersonId int `json:"person_id"`
|
||||
Name string `json:"name"`
|
||||
Date string `json:"date"`
|
||||
}
|
@ -0,0 +1,99 @@
|
||||
package repository
|
||||
|
||||
import (
|
||||
"PersonApp/models"
|
||||
"database/sql"
|
||||
)
|
||||
|
||||
type PersonRepository interface {
|
||||
GetAllPersons() ([]models.Person, error)
|
||||
GetPersonById(id int) (*models.Person, error)
|
||||
CreatePerson(person models.Person) (*models.Person, error)
|
||||
UpdatePerson(person models.Person) (*models.Person, error)
|
||||
DeletePerson(id int) error
|
||||
}
|
||||
|
||||
type personRepository struct {
|
||||
DB *sql.DB
|
||||
}
|
||||
|
||||
func NewPersonRepository(db *sql.DB) PersonRepository {
|
||||
return &personRepository{DB: db}
|
||||
}
|
||||
|
||||
func (pr *personRepository) GetAllPersons() ([]models.Person, error) {
|
||||
rows, err := pr.DB.Query("select * from Persons")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
defer func(rows *sql.Rows) {
|
||||
err := rows.Close()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}(rows)
|
||||
|
||||
var persons []models.Person
|
||||
|
||||
for rows.Next() {
|
||||
p := models.Person{}
|
||||
err := rows.Scan(&p.Id, &p.Name)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
persons = append(persons, p)
|
||||
}
|
||||
|
||||
return persons, err
|
||||
}
|
||||
|
||||
func (pr *personRepository) GetPersonById(id int) (*models.Person, error) {
|
||||
row := pr.DB.QueryRow("select * from Persons where id=?", id)
|
||||
|
||||
person := models.Person{}
|
||||
err := row.Scan(&person.Id, &person.Name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &person, err
|
||||
}
|
||||
|
||||
func (pr *personRepository) CreatePerson(p models.Person) (*models.Person, error) {
|
||||
res, err := pr.DB.Exec("INSERT INTO Persons (name) values (?)", p.Name)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if res == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
return &p, err
|
||||
}
|
||||
|
||||
func (pr *personRepository) UpdatePerson(p models.Person) (*models.Person, error) {
|
||||
res, err := pr.DB.Exec("UPDATE Persons SET name = ? WHERE id = ?", p.Name, p.Id)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if res == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
return &p, err
|
||||
}
|
||||
|
||||
func (pr *personRepository) DeletePerson(id int) error {
|
||||
_, err := pr.DB.Exec("DELETE FROM Persons WHERE id = ?", id)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
0
bazunov_andrew_lab_3/PersonApp/storage/database.db
Normal file
0
bazunov_andrew_lab_3/PersonApp/storage/database.db
Normal file
36
bazunov_andrew_lab_3/PersonApp/storage/db.go
Normal file
36
bazunov_andrew_lab_3/PersonApp/storage/db.go
Normal file
@ -0,0 +1,36 @@
|
||||
package storage
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
_ "github.com/mattn/go-sqlite3"
|
||||
)
|
||||
|
||||
func Init(databasePath string) (*sql.DB, error) {
|
||||
db, err := sql.Open("sqlite3", databasePath)
|
||||
|
||||
if err != nil || db == nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := createTableIfNotExists(db); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return db, nil
|
||||
}
|
||||
|
||||
func Close(db *sql.DB) {
|
||||
err := db.Close()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func createTableIfNotExists(db *sql.DB) error {
|
||||
if result, err := db.Exec(
|
||||
"CREATE TABLE IF NOT EXISTS `Persons`(Id integer primary key autoincrement, Name text not null);",
|
||||
); err != nil || result == nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
bazunov_andrew_lab_3/README.md | 25 lines (new file)
@@ -0,0 +1,25 @@
# Distributed Computing and Applications, Lab 3
## _Author: Bazunov Andrey Igrevich, PIbd-42_

GoLang was chosen as the main language. Each service has its own DOCKERFILE describing the conditions and steps for building that module.

# Docker

> Before running, install Docker and check the version

```sh
docker-compose --version
```

> Next, adjust the docker-compose.yaml file and start the containers, building the images

```sh
docker-compose up -d --build
```

> To shut the containers down, use
```sh
docker-compose down
```

[Demo of the setup](https://vk.com/video/@viltskaa?z=video236673313_456239577%2Fpl_236673313_-2)
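The two services call each other over HTTP: PersonApp asks TaskApp for a person's tasks, and TaskApp asks PersonApp whether a person exists before creating a task. Below is a minimal end-to-end sketch against the running stack, under the assumption of the gateway routing used in the bundled .http tests (person-app reachable at http://localhost/person-app/); the snippet and the "TEST" name are illustrative, not part of the lab code.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// Creates a person, then lists all persons together with the tasks that
// person-app fetches from task-app for each of them.
func main() {
	body, _ := json.Marshal(map[string]string{"name": "TEST"})
	resp, err := http.Post("http://localhost/person-app/", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	resp.Body.Close()

	resp, err = http.Get("http://localhost/person-app/")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var persons []struct {
		Id    int    `json:"id"`
		Name  string `json:"name"`
		Tasks []any  `json:"tasks"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&persons); err != nil {
		panic(err)
	}
	for _, p := range persons {
		fmt.Printf("person %d %q has %d tasks\n", p.Id, p.Name, len(p.Tasks))
	}
}
```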
4
bazunov_andrew_lab_3/TaskApp/.env
Normal file
4
bazunov_andrew_lab_3/TaskApp/.env
Normal file
@ -0,0 +1,4 @@
|
||||
PORT=8000
|
||||
PERSON_APP_URL=http://person-app:8080
|
||||
TIMEOUT=15
|
||||
DATABASE=./database.db
|
14
bazunov_andrew_lab_3/TaskApp/Dockerfile
Normal file
14
bazunov_andrew_lab_3/TaskApp/Dockerfile
Normal file
@ -0,0 +1,14 @@
|
||||
FROM golang:1.23
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY go.mod go.sum ./
|
||||
RUN go mod download
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN go build -o /bin/TaskApp
|
||||
|
||||
EXPOSE 8000
|
||||
|
||||
CMD ["/bin/TaskApp"]
|
BIN
bazunov_andrew_lab_3/TaskApp/database.db
Normal file
BIN
bazunov_andrew_lab_3/TaskApp/database.db
Normal file
Binary file not shown.
10
bazunov_andrew_lab_3/TaskApp/go.mod
Normal file
10
bazunov_andrew_lab_3/TaskApp/go.mod
Normal file
@ -0,0 +1,10 @@
|
||||
module TaskApp
|
||||
|
||||
go 1.23.1
|
||||
|
||||
require (
|
||||
github.com/gorilla/mux v1.8.1
|
||||
github.com/mattn/go-sqlite3 v1.14.24
|
||||
)
|
||||
|
||||
require github.com/joho/godotenv v1.5.1
|
6
bazunov_andrew_lab_3/TaskApp/go.sum
Normal file
6
bazunov_andrew_lab_3/TaskApp/go.sum
Normal file
@ -0,0 +1,6 @@
|
||||
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
|
||||
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
|
||||
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
|
||||
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
|
||||
github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
|
||||
github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
177
bazunov_andrew_lab_3/TaskApp/handlers/handlers.go
Normal file
177
bazunov_andrew_lab_3/TaskApp/handlers/handlers.go
Normal file
@ -0,0 +1,177 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"TaskApp/httpClient"
|
||||
"TaskApp/models"
|
||||
"TaskApp/repository"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"github.com/gorilla/mux"
|
||||
"net/http"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
func InitRoutes(r *mux.Router, rep repository.TaskRepository, cln httpClient.Client) {
|
||||
r.HandleFunc("/", GetTasks(rep)).Methods("GET")
|
||||
r.HandleFunc("/{id:[0-9]+}", GetTaskById(rep)).Methods("GET")
|
||||
r.HandleFunc("/", CreateTask(rep, cln)).Methods("POST")
|
||||
r.HandleFunc("/{id:[0-9]+}", UpdateTask(rep)).Methods("PUT")
|
||||
r.HandleFunc("/{id:[0-9]+}", DeleteTask(rep)).Methods("DELETE")
|
||||
r.HandleFunc("/f/{id:[0-9]+}", GetPersonTasks(rep)).Methods("GET")
|
||||
}
|
||||
|
||||
func GetTasks(rep repository.TaskRepository) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
tasks, err := rep.GetAllTasks()
|
||||
if err != nil {
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
err = json.NewEncoder(w).Encode(tasks)
|
||||
if err != nil {
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func GetTaskById(rep repository.TaskRepository) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
id, err := strconv.Atoi(mux.Vars(r)["id"])
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
person, err := rep.GetTaskById(id)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
err = json.NewEncoder(w).Encode(person)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func GetPersonTasks(rep repository.TaskRepository) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
id, err := strconv.Atoi(mux.Vars(r)["id"])
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
tasks, err := rep.GetUserTasks(id)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
err = json.NewEncoder(w).Encode(tasks)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func CreateTask(rep repository.TaskRepository, cln httpClient.Client) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
|
||||
var task *models.TaskCreate
|
||||
|
||||
err := json.NewDecoder(r.Body).Decode(&task)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
		// Taking the address of a struct field is never nil, so the original check could
		// not fail (and formatting a nil err would panic); validate the payload instead.
		if task == nil || task.Name == "" || task.PersonId == 0 {
			http.Error(w, "name and person_id are required", http.StatusBadRequest)
			return
		}
|
||||
|
||||
person, err := cln.GetPerson(task.PersonId)
|
||||
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
http.Error(w, "Connection to PersonApp is confused.", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
		if person == nil {
			// The original formatted person.Id here, which panics when person is nil;
			// report the requested id instead.
			http.Error(w, fmt.Sprintf("Person with id=%d was not found.", task.PersonId), http.StatusNotFound)
			return
		}
|
||||
|
||||
newTask, err := rep.CreateTask(*task)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusCreated)
|
||||
err = json.NewEncoder(w).Encode(newTask)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func UpdateTask(rep repository.TaskRepository) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
|
||||
id, err := strconv.Atoi(mux.Vars(r)["id"])
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
var task *models.TaskCreate
|
||||
|
||||
err = json.NewDecoder(r.Body).Decode(&task)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
newTask, err := rep.UpdateTask(models.Task{Id: id, Name: task.Name, Date: task.Date})
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
err = json.NewEncoder(w).Encode(newTask)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func DeleteTask(rep repository.TaskRepository) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
|
||||
id, err := strconv.Atoi(mux.Vars(r)["id"])
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
err = rep.DeleteTask(id)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}
|
||||
}
|
73
bazunov_andrew_lab_3/TaskApp/httpClient/client.go
Normal file
73
bazunov_andrew_lab_3/TaskApp/httpClient/client.go
Normal file
@ -0,0 +1,73 @@
|
||||
package httpClient
|
||||
|
||||
import (
|
||||
"TaskApp/models"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
type Client interface {
|
||||
GetPerson(id int) (*models.Person, error)
|
||||
TestConnection() (bool, error)
|
||||
}
|
||||
|
||||
type client struct {
|
||||
BaseUrl string
|
||||
Timeout time.Duration
|
||||
}
|
||||
|
||||
func (c *client) TestConnection() (bool, error) {
|
||||
client := &http.Client{Timeout: c.Timeout}
|
||||
url := fmt.Sprintf("%s/", c.BaseUrl)
|
||||
resp, err := client.Get(url)
|
||||
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
defer func(Body io.ReadCloser) {
|
||||
err := Body.Close()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}(resp.Body)
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return false, fmt.Errorf("bad status code: %d", resp.StatusCode)
|
||||
}
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (c *client) GetPerson(id int) (*models.Person, error) {
|
||||
client := &http.Client{Timeout: c.Timeout * time.Second}
|
||||
url := fmt.Sprintf("%s/%d", c.BaseUrl, id)
|
||||
|
||||
resp, err := client.Get(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func(Body io.ReadCloser) {
|
||||
err := Body.Close()
|
||||
if err != nil {
|
||||
|
||||
}
|
||||
}(resp.Body)
|
||||
|
||||
body, _ := io.ReadAll(resp.Body)
|
||||
|
||||
var person models.Person
|
||||
if err := json.Unmarshal(body, &person); err != nil {
|
||||
log.Printf("Unmarshal error: %s", err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &person, nil
|
||||
}
|
||||
|
||||
func NewClient(baseUrl string, timeout time.Duration) Client {
|
||||
return &client{BaseUrl: baseUrl, Timeout: timeout}
|
||||
}
|
37
bazunov_andrew_lab_3/TaskApp/httpTests/tests.http
Normal file
37
bazunov_andrew_lab_3/TaskApp/httpTests/tests.http
Normal file
@ -0,0 +1,37 @@
|
||||
GET http://localhost/task-app/
|
||||
Accept: application/json
|
||||
|
||||
###
|
||||
|
||||
GET http://localhost/task-app/4
|
||||
Accept: application/json
|
||||
|
||||
###
|
||||
|
||||
POST http://localhost/task-app/
|
||||
Accept: application/json
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"name": "TEST2",
|
||||
"person_id": 1,
|
||||
"date": "19.02.2202"
|
||||
}
|
||||
|
||||
###
|
||||
|
||||
PUT http://localhost/task-app/4
|
||||
Accept: application/json
|
||||
Content-Type: application/json
|
||||
|
||||
{
|
||||
"name": "TEST5",
|
||||
"date": "19.02.2202"
|
||||
}
|
||||
|
||||
###
|
||||
|
||||
DELETE http://localhost/task-app/4
|
||||
Accept: application/json
|
||||
|
||||
###
|
47
bazunov_andrew_lab_3/TaskApp/main.go
Normal file
47
bazunov_andrew_lab_3/TaskApp/main.go
Normal file
@ -0,0 +1,47 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"TaskApp/handlers"
|
||||
"TaskApp/httpClient"
|
||||
"TaskApp/repository"
|
||||
"TaskApp/storage"
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/joho/godotenv"
|
||||
"net/http"
|
||||
"os"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
func main() {
|
||||
err := godotenv.Load(".env")
|
||||
if err != nil {
|
||||
panic("Error loading .env file")
|
||||
}
|
||||
|
||||
url := os.Getenv("PERSON_APP_URL")
|
||||
port := os.Getenv("PORT")
|
||||
databasePath := os.Getenv("DATABASE")
|
||||
timeout, err := strconv.Atoi(os.Getenv("TIMEOUT"))
|
||||
|
||||
if err != nil {
|
||||
panic("Error converting timeout to int")
|
||||
}
|
||||
|
||||
database, err := storage.Init(databasePath)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
cln := httpClient.NewClient(url, time.Duration(timeout))
|
||||
rep := repository.NewTaskRepository(database)
|
||||
router := mux.NewRouter()
|
||||
handlers.InitRoutes(router, rep, cln)
|
||||
|
||||
err = http.ListenAndServe(":"+port, router)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
storage.Close(database)
|
||||
}
|
24  bazunov_andrew_lab_3/TaskApp/models/models.go  Normal file
@@ -0,0 +1,24 @@
package models

type Person struct {
	Id    int    `json:"id"`
	Name  string `json:"name"`
	Tasks []Task `json:"tasks"`
}

type PersonCreate struct {
	Name string `json:"name"`
}

type Task struct {
	Id       int    `json:"id"`
	PersonId int    `json:"person_id"`
	Name     string `json:"name"`
	Date     string `json:"date"`
}

type TaskCreate struct {
	PersonId int    `json:"person_id"`
	Name     string `json:"name"`
	Date     string `json:"date"`
}
121  bazunov_andrew_lab_3/TaskApp/repository/taskRepository.go  Normal file
@@ -0,0 +1,121 @@
package repository

import (
	"TaskApp/models"
	"database/sql"
)

type TaskRepository interface {
	GetAllTasks() ([]models.Task, error)
	GetTaskById(id int) (*models.Task, error)
	GetUserTasks(id int) ([]models.Task, error)
	CreateTask(task models.TaskCreate) (*models.Task, error)
	UpdateTask(task models.Task) (*models.Task, error)
	DeleteTask(id int) error
}

type taskRepository struct {
	DB *sql.DB
}

func (t taskRepository) GetUserTasks(id int) ([]models.Task, error) {
	rows, err := t.DB.Query("select * from Tasks where PersonId = ?", id)
	if err != nil {
		return nil, err
	}

	defer func(rows *sql.Rows) {
		err := rows.Close()
		if err != nil {
			panic(err)
		}
	}(rows)

	var tasks []models.Task

	for rows.Next() {
		p := models.Task{}
		err := rows.Scan(&p.Id, &p.Name, &p.PersonId, &p.Date)
		if err != nil {
			panic(err)
		}

		tasks = append(tasks, p)
	}

	return tasks, err
}

func (t taskRepository) GetAllTasks() ([]models.Task, error) {
	rows, err := t.DB.Query("select * from Tasks")
	if err != nil {
		return nil, err
	}

	defer func(rows *sql.Rows) {
		err := rows.Close()
		if err != nil {
			panic(err)
		}
	}(rows)

	var tasks []models.Task

	for rows.Next() {
		p := models.Task{}
		err := rows.Scan(&p.Id, &p.Name, &p.PersonId, &p.Date)
		if err != nil {
			panic(err)
		}

		tasks = append(tasks, p)
	}

	return tasks, err
}

func (t taskRepository) GetTaskById(id int) (*models.Task, error) {
	row := t.DB.QueryRow("select * from Tasks where id=?", id)

	task := models.Task{}
	err := row.Scan(&task.Id, &task.Name, &task.PersonId, &task.Date)
	if err != nil {
		return nil, err
	}

	return &task, err
}

func (t taskRepository) CreateTask(task models.TaskCreate) (*models.Task, error) {
	res, err := t.DB.Exec("INSERT INTO Tasks(Name, PersonId, Date) VALUES (?, ?, ?)", task.Name, task.PersonId, task.Date)
	if err != nil {
		return nil, err
	}

	// Return the auto-generated row id instead of a hard-coded 0.
	id, err := res.LastInsertId()
	if err != nil {
		return nil, err
	}

	return &models.Task{
		Id:       int(id),
		PersonId: task.PersonId,
		Name:     task.Name,
		Date:     task.Date,
	}, nil
}

func (t taskRepository) UpdateTask(task models.Task) (*models.Task, error) {
	_, err := t.DB.Exec("UPDATE Tasks SET name = ?, date = ? WHERE id = ?", task.Name, task.Date, task.Id)
	if err != nil {
		return nil, err
	}
	return &task, err
}

func (t taskRepository) DeleteTask(id int) error {
	_, err := t.DB.Exec("DELETE FROM Tasks WHERE id = ?", id)
	if err != nil {
		return err
	}
	return nil
}

func NewTaskRepository(db *sql.DB) TaskRepository {
	return &taskRepository{DB: db}
}
36  bazunov_andrew_lab_3/TaskApp/storage/db.go  Normal file
@@ -0,0 +1,36 @@
package storage

import (
	"database/sql"
	_ "github.com/mattn/go-sqlite3"
)

func Init(databasePath string) (*sql.DB, error) {
	db, err := sql.Open("sqlite3", databasePath)

	if err != nil || db == nil {
		return nil, err
	}

	if err := createTableIfNotExists(db); err != nil {
		return nil, err
	}

	return db, nil
}

func Close(db *sql.DB) {
	err := db.Close()
	if err != nil {
		return
	}
}

func createTableIfNotExists(db *sql.DB) error {
	if result, err := db.Exec(
		"CREATE TABLE IF NOT EXISTS `Tasks`(Id integer primary key autoincrement, Name text not null, PersonId integer not null, Date text not null);",
	); err != nil || result == nil {
		return err
	}
	return nil
}
34  bazunov_andrew_lab_3/docker-compose.yaml  Normal file
@@ -0,0 +1,34 @@
services:
  person-app:
    build:
      context: ./PersonApp
      dockerfile: Dockerfile
    networks:
      - network
    ports:
      - "8080:8080"

  task-app:
    build:
      context: ./TaskApp
      dockerfile: Dockerfile
    networks:
      - network
    ports:
      - "8000:8000"

  nginx:
    image: nginx
    ports:
      - "80:80"
    volumes:
      - ./nginx.conf:/etc/nginx/nginx.conf
    networks:
      - network
    depends_on:
      - person-app
      - task-app

networks:
  network:
    driver: bridge
59  bazunov_andrew_lab_3/nginx.conf  Normal file
@@ -0,0 +1,59 @@
events {
    worker_connections 1024;
}

http {
    server {
        listen 80;
        server_name localhost;

        location /person-app/ {
            proxy_pass http://person-app:8080/;
            proxy_set_header Host $host;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto $scheme;
            add_header 'Access-Control-Allow-Origin' '*';
            add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS';
            add_header 'Access-Control-Allow-Headers' 'Origin, Content-Type, Accept, Authorization';
        }

        location /task-app/ {
            proxy_pass http://task-app:8000/;
            proxy_set_header Host $host;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto $scheme;
            add_header 'Access-Control-Allow-Origin' '*';
            add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS';
            add_header 'Access-Control-Allow-Headers' 'Origin, Content-Type, Accept, Authorization';
        }

        # Proxy for Swagger (Stream service)
        #location /stream-service/swagger/ {
        #    proxy_pass http://stream-service:8000/swagger/;
        #    proxy_set_header Host $host;
        #    proxy_set_header X-Real-IP $remote_addr;
        #    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        #    proxy_set_header X-Forwarded-Proto $scheme;
        #}

        # Proxy for Swagger (Message service)
        #location /message-service/swagger/ {
        #    proxy_pass http://message-service:8080/swagger/;
        #    proxy_set_header Host $host;
        #    proxy_set_header X-Real-IP $remote_addr;
        #    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        #    proxy_set_header X-Forwarded-Proto $scheme;
        #}

        #location /stream-service/doc.json {
        #    proxy_pass http://stream-service:8000/doc.json;
        #}

        #location /message-service/doc.json {
        #    proxy_pass http://message-service:8080/doc.json;
        #}
    }
}
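Because each `proxy_pass` target above ends with a trailing slash, nginx strips the location prefix before forwarding, so the requests from `tests.http` reach the backends without the `/task-app/` or `/person-app/` prefix. For example (the `/1` path is illustrative; container names and ports are from `docker-compose.yaml`):

```
GET http://localhost/task-app/4     ->  http://task-app:8000/4
GET http://localhost/person-app/1   ->  http://person-app:8080/1
```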
34  bazunov_andrew_lab_4/README.md  Normal file
@@ -0,0 +1,34 @@
# Lab 4: Working with a message broker (RabbitMQ)

## Goal

Learn how to design applications that use the RabbitMQ message broker.

---

## Tasks

> 1. **Install RabbitMQ.** Install RabbitMQ on your local machine (or run it in Docker).
>    - [RabbitMQ downloads](https://www.rabbitmq.com/download.html)
>    - [RabbitMQ releases](https://github.com/rabbitmq/rabbitmq-server/releases/)
> 2. **Work through the RabbitMQ tutorials.**
> 3. Take screenshots showing the `producer` and `consumer` running and messages being delivered.

---
## First tutorial
> ![img.png](static/img1.png)

---
## Second tutorial
> ![img.png](static/img2.png)
> ![img_1.png](static/img3.png)

---
## Third tutorial
> ![img.png](static/img4.png)

---
## Task
> ![img.png](static/img5.png)
> ![img.png](static/img.png)
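The tutorial and task scripts referenced above are included in this lab's directories. Assuming Python 3 with the `pika` package installed and a RabbitMQ instance running locally (for example the one defined in `docker-compose.yaml` below), they can be started roughly like this (invocation commands are illustrative):

```
# first tutorial: simple queue
python first/receive.py
python first/send.py

# second tutorial: work queue
python second/worker.py
python second/new_task.py "A new task..."

# third tutorial: publish/subscribe
python third/receive_logs.py
python third/emit_log.py "info: Hello World!"

# task: fanout example with one fast and one slow reader
python example/vk_reader.py
python example/vk_slow_reader.py
python example/vk_author.py
```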
17  bazunov_andrew_lab_4/docker-compose.yaml  Normal file
@@ -0,0 +1,17 @@
version: "3.2"
services:
  rabbitmq:
    image: rabbitmq:3-management-alpine
    container_name: 'rabbitmq'
    ports:
      - "5672:5672"
      - "15672:15672"
    volumes:
      - ~/.docker-conf/rabbitmq/data/:/var/lib/rabbitmq/
      - ~/.docker-conf/rabbitmq/log/:/var/log/rabbitmq
    networks:
      - rabbitmq_go_net

networks:
  rabbitmq_go_net:
    driver: bridge
47  bazunov_andrew_lab_4/example/vk_author.py  Normal file
@@ -0,0 +1,47 @@
from datetime import datetime
import random
import threading

import pika
import sys

_alphabet = [chr(i) for i in range(97, 123)]


def run_every_n_seconds(seconds, action, *args):
    threading.Timer(seconds, run_every_n_seconds, [seconds, action] + list(args)).start()
    action(*args)


def generate_message():
    now = datetime.now()
    current_time = now.strftime("%H:%M:%S")
    return f"[{current_time}] " + "".join(random.choices(_alphabet, k=random.randint(1, 10)))


def send_message(channel_local):
    message = generate_message()
    channel_local.basic_publish(
        exchange='vk_messages',
        routing_key='vk_messages',
        body=message,
        properties=pika.BasicProperties(
            delivery_mode=pika.DeliveryMode.Persistent
        ))
    print(f"[vkAuthor] Sent {message}")


def main(conn: pika.BlockingConnection):
    channel = conn.channel()
    channel.exchange_declare(exchange='vk_messages', exchange_type='fanout')
    run_every_n_seconds(1, send_message, channel)


if __name__ == '__main__':
    connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))

    try:
        main(connection)
    except KeyboardInterrupt:
        connection.close()
        sys.exit(0)
44  bazunov_andrew_lab_4/example/vk_reader.py  Normal file
@@ -0,0 +1,44 @@
import sys
from datetime import datetime

import pika

_QUEUE_NAME = "vk_messages_queue"
_EXCHANGE_NAME = "vk_messages"


def main():
    connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
    channel = connection.channel()

    channel.exchange_declare(
        exchange=_EXCHANGE_NAME,
        exchange_type='fanout'
    )

    channel.queue_declare(queue=_QUEUE_NAME, exclusive=True)
    channel.queue_bind(exchange=_EXCHANGE_NAME, queue=_QUEUE_NAME)

    def callback(ch, method, properties, body):
        now = datetime.now()
        current_time = now.strftime("%H:%M:%S")

        print(f"[vkReader] Received [{str(body)}] in [{current_time}]")
        ch.basic_ack(delivery_tag=method.delivery_tag)

    channel.basic_consume(
        queue=_QUEUE_NAME,
        on_message_callback=callback,
        auto_ack=False
    )

    print('[*] Waiting for messages. To exit press CTRL+C')
    channel.start_consuming()


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        print('Interrupted')
        sys.exit(0)
47  bazunov_andrew_lab_4/example/vk_slow_reader.py  Normal file
@@ -0,0 +1,47 @@
import time
import random
from datetime import datetime

import pika
import sys

_QUEUE_NAME = "vk_messages_queue_slow"
_EXCHANGE_NAME = "vk_messages"


def main():
    connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
    channel = connection.channel()

    channel.exchange_declare(
        exchange=_EXCHANGE_NAME,
        exchange_type='fanout'
    )
    channel.queue_declare(queue=_QUEUE_NAME, exclusive=True)
    channel.queue_bind(exchange=_EXCHANGE_NAME, queue=_QUEUE_NAME)

    def callback(ch, method, properties, body):
        now = datetime.now()
        current_time = now.strftime("%H:%M:%S")

        print(f"[vkSlowReader] Received [{str(body)}] in [{current_time}]")
        read_time = random.randint(2, 5)
        time.sleep(read_time)
        ch.basic_ack(delivery_tag=method.delivery_tag)

    channel.basic_consume(
        queue=_QUEUE_NAME,
        on_message_callback=callback,
        auto_ack=False
    )

    print('[*] Waiting for messages. To exit press CTRL+C')
    channel.start_consuming()


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        print('Interrupted')
        sys.exit(0)
25  bazunov_andrew_lab_4/first/receive.py  Normal file
@@ -0,0 +1,25 @@
import pika
import sys


def main():
    connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
    channel = connection.channel()

    channel.queue_declare(queue='hello')

    def callback(ch, method, properties, body):
        print(f" [x] Received {body}")

    channel.basic_consume(queue='hello', on_message_callback=callback, auto_ack=True)

    print(' [*] Waiting for messages. To exit press CTRL+C')
    channel.start_consuming()


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        print('Interrupted')
        sys.exit(0)
11  bazunov_andrew_lab_4/first/send.py  Normal file
@@ -0,0 +1,11 @@
import pika

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

channel.queue_declare(queue='hello')

channel.basic_publish(exchange='', routing_key='hello', body='Hello World!')
print(" [x] Sent 'Hello World!'")
connection.close()
19  bazunov_andrew_lab_4/second/new_task.py  Normal file
@@ -0,0 +1,19 @@
import pika
import sys

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

channel.queue_declare(queue='task_queue', durable=True)

message = ' '.join(sys.argv[1:]) or "Hello World!"
channel.basic_publish(
    exchange='',
    routing_key='task_queue',
    body=message,
    properties=pika.BasicProperties(
        delivery_mode=pika.DeliveryMode.Persistent
    ))
print(f" [x] Sent {message}")
connection.close()
22  bazunov_andrew_lab_4/second/worker.py  Normal file
@@ -0,0 +1,22 @@
import pika
import time

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

channel.queue_declare(queue='task_queue', durable=True)
print(' [*] Waiting for messages. To exit press CTRL+C')


def callback(ch, method, properties, body):
    print(f" [x] Received {body.decode()}")
    time.sleep(body.count(b'.'))
    print(" [x] Done")
    ch.basic_ack(delivery_tag=method.delivery_tag)


channel.basic_qos(prefetch_count=1)
channel.basic_consume(queue='task_queue', on_message_callback=callback)

channel.start_consuming()
BIN  bazunov_andrew_lab_4/static/img.png   Normal file (binary, 35 KiB, not shown)
BIN  bazunov_andrew_lab_4/static/img1.png  Normal file (binary, 37 KiB, not shown)
BIN  bazunov_andrew_lab_4/static/img2.png  Normal file (binary, 14 KiB, not shown)
BIN  bazunov_andrew_lab_4/static/img3.png  Normal file (binary, 24 KiB, not shown)
BIN  bazunov_andrew_lab_4/static/img4.png  Normal file (binary, 29 KiB, not shown)
BIN  bazunov_andrew_lab_4/static/img5.png  Normal file (binary, 204 KiB, not shown)
13  bazunov_andrew_lab_4/third/emit_log.py  Normal file
@@ -0,0 +1,13 @@
import pika
import sys

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

channel.exchange_declare(exchange='logs', exchange_type='fanout')

message = ' '.join(sys.argv[1:]) or "info: Hello World!"
channel.basic_publish(exchange='logs', routing_key='', body=message)
print(f" [x] Sent {message}")
connection.close()
24  bazunov_andrew_lab_4/third/receive_logs.py  Normal file
@@ -0,0 +1,24 @@
import pika

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

channel.exchange_declare(exchange='logs', exchange_type='fanout')

result = channel.queue_declare(queue='', exclusive=True)
queue_name = result.method.queue

channel.queue_bind(exchange='logs', queue=queue_name)

print(' [*] Waiting for logs. To exit press CTRL+C')


def callback(ch, method, properties, body):
    print(f" [x] {body}")


channel.basic_consume(
    queue=queue_name, on_message_callback=callback, auto_ack=True)

channel.start_consuming()
35  bogdanov_dmitry_lab_1/README.md  Normal file
@@ -0,0 +1,35 @@
# Lab 1

## Dmitry Bogdanov, PIbd-42

### The following services were deployed:

* PostgreSQL - database
* Mediawiki - wiki engine
* Gitea - git hosting engine

### Using the following technologies:

* git
* docker
* docker-compose


### Running the lab:
Go to the folder containing the docker-compose.yaml file and run the following command:
```
docker-compose up -d
```

## Startup result:
```
[+] Running 4/4
 ✔ Network bogdanov_dmitry_lab_1_default        Created   0.0s
 ✔ Container bogdanov_dmitry_lab_1-mediawiki-1  Started   0.7s
 ✔ Container bogdanov_dmitry_lab_1-git-1        Started   0.8s
 ✔ Container bogdanov_dmitry_lab_1-db-1         Started   0.7s
```

## Video with the startup result:

The video is available at this [link](https://drive.google.com/file/d/1TES58HIeCnnKbtwWgED2oig4N7plBmol/view).
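Once the containers are running, the services are reachable on the host ports mapped in the docker-compose.yaml shown below (assuming they are accessed from the same machine):

```
http://localhost:8080   # Mediawiki
http://localhost:3000   # Gitea
localhost:5432          # PostgreSQL (user/password: admin/admin, per the environment section)
```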
40  bogdanov_dmitry_lab_1/docker-compose.yaml  Normal file
@@ -0,0 +1,40 @@
services:
  # PostgreSQL
  db:
    # Container image
    image: postgres
    # Restart on failure
    restart: always
    # Port for connections
    ports:
      - 5432:5432
    # Data directory. Host directory:container directory
    volumes:
      - ./volumes/postgres:/var/lib/postgresql/data
    # Environment variables to define at least one user at startup
    environment:
      - POSTGRES_USER=admin
      - POSTGRES_PASSWORD=admin
  # Mediawiki
  mediawiki:
    # Image
    image: mediawiki
    # Restart on failure
    restart: always
    # Port for connections
    ports:
      - 8080:80
    # Directories
    volumes:
      - ./volumes/mediawiki:/var/www/html/images
  # Gitea
  git:
    image: gitea/gitea:latest
    restart: always
    ports:
      - "3000:3000"
      - "222:22"
    volumes:
      - ./volumes/gitea:/data
      - ./volumes/timezone:/etc/timezone:ro
      - ./volumes/localtime:/etc/localtime:ro
Some files were not shown because too many files have changed in this diff.