Compare commits

...

203 Commits

Author SHA1 Message Date
528309ab84 Merge pull request 'kashin_maxim_lab_5' (#124) from kashin_maxim_lab_5 into main
Reviewed-on: #124
2024-11-20 22:45:51 +04:00
0814d8533d Merge pull request 'kashin_maxim_lab_4' (#123) from kashin_maxim_lab_4 into main
Reviewed-on: #123
2024-11-20 22:45:28 +04:00
354ee2679e Merge pull request 'yakovleva_yulia_lab_8 is ready' (#122) from yakovleva_yulia_lab_8 into main
Reviewed-on: #122
2024-11-20 22:45:02 +04:00
d302bd2213 Merge pull request 'yakovleva_yulia_lab_7 is ready' (#121) from yakovleva_yulia_lab_7 into main
Reviewed-on: #121
2024-11-20 22:44:39 +04:00
2aed7bf385 Merge pull request 'yakovleva_yulia_lab_6 is ready' (#120) from yakovleva_yulia_lab_6 into main
Reviewed-on: #120
2024-11-20 22:44:06 +04:00
d4e24db25e Merge pull request 'kadyrov_aydar_lab_5' (#119) from kadyrov_aydar_lab_5 into main
Reviewed-on: #119
2024-11-20 22:43:23 +04:00
c0ca1d4bb5 Merge pull request 'kadyrov_aydar_lab_4' (#117) from kadyrov_aydar_lab_4 into main
Reviewed-on: #117
2024-11-20 22:43:05 +04:00
6eeb90ea45 Merge pull request 'tukaeva_alfiya_lab_8' (#116) from tukaeva_alfiya_lab_8 into main
Reviewed-on: #116
2024-11-20 22:38:42 +04:00
bc2d7cb2f6 Merge pull request 'tukaeva_alfiya_lab_7' (#115) from tukaeva_alfiya_lab_7 into main
Reviewed-on: #115
2024-11-20 22:37:46 +04:00
e1da6f26ab Merge pull request 'tukaeva_alfiya_lab_6' (#114) from tukaeva_alfiya_lab_6 into main
Reviewed-on: #114
2024-11-20 22:37:01 +04:00
e5df53b5c2 Merge pull request 'turner_ilya_lab_2' (#113) from turner_ilya_lab_2 into main
Reviewed-on: #113
2024-11-20 22:36:40 +04:00
c98770752e Merge pull request 'mochalov_danila_lab_3' (#112) from mochalov_danila_lab_3 into main
Reviewed-on: #112
2024-11-20 22:36:16 +04:00
a800c3df86 Merge pull request 'Bazunov Andrew Lab 4' (#111) from bazunov_andrew_lab_4 into main
Reviewed-on: #111
2024-11-20 22:35:35 +04:00
a51e33a201 Merge pull request 'turner_ilya_lab_1' (#110) from turner_ilya_lab_1 into main
Reviewed-on: #110
2024-11-20 22:34:54 +04:00
a9af84010a Merge pull request 'Bazunov Andrew lab3' (#109) from bazunov_andrew_lab_3 into main
Reviewed-on: #109
2024-11-20 22:34:26 +04:00
3645d0c1cd Merge pull request 'yakovleva_yulia_lab_5 is ready' (#107) from yakovleva_yulia_lab_5 into main
Reviewed-on: #107
Reviewed-by: Alexey <a.zhelepov@mail.ru>
2024-11-20 22:33:27 +04:00
08f2f63ad4 Готово 2024-10-27 19:42:27 +04:00
e4e3748a3d Выполнено 2024-10-27 19:09:16 +04:00
JulYakJul
5e522fbcc0 yakovleva_yulia_lab_8 is ready 2024-10-27 15:10:30 +04:00
JulYakJul
cae7189c1e fix 2024-10-27 14:06:02 +04:00
JulYakJul
2bfc8a0a43 yakovleva_yulia_lab_7 is ready 2024-10-27 14:02:15 +04:00
JulYakJul
1f89960672 fix 2024-10-27 13:06:24 +04:00
JulYakJul
ffb4c2a8a4 yakovleva_yulia_lab_6 is ready 2024-10-27 13:04:11 +04:00
NAP
1dc621e0be kadyrov_aydar_lab_5 2024-10-27 02:16:28 +04:00
NAP
11c62d9bf7 kadyrov_aydar_lab_5 2024-10-27 02:13:51 +04:00
NAP
03910a9a3f kadyrov_aydar_lab_4 2024-10-27 01:53:34 +04:00
f7d483196c tukaeva_alfiya_lab_8 is ready 2024-10-26 23:16:19 +04:00
545377f948 tukaeva_alfiya_lab_7 fix 2024-10-26 22:58:30 +04:00
bb867da520 tukaeva_alfiya_lab_7 is ready 2024-10-26 22:41:45 +04:00
c4a260ebda tukaeva_alfiya_lab_6 is ready 2024-10-26 22:26:14 +04:00
88392a8041 turner_ilya_lab_2 is ready 2024-10-26 21:09:43 +04:00
JulYakJul
400de30b49 fix 2024-10-26 20:04:39 +04:00
96a4e6ac43 mochalov_danila_lab_3 is ready 2024-10-26 18:18:28 +04:00
Bazunov Andrew Igorevich
03c52d0c76 Complete lab4 2024-10-26 17:51:52 +04:00
6dd4835f54 turner_ilya_lab_1 is ready 2024-10-26 17:34:47 +04:00
Bazunov Andrew Igorevich
5187005e6a complete lab 3 2024-10-26 14:46:33 +04:00
3b9698ac38 Merge pull request 'tsukanova_irina_lab_5' (#108) from tsukanova_irina_lab_5 into main
Reviewed-on: #108
2024-10-26 13:01:34 +04:00
a456344432 Merge pull request 'rogashova_ekaterina_lab_3' (#106) from rogashova_ekaterina_lab_3 into main
Reviewed-on: #106
2024-10-26 13:00:05 +04:00
383a5e3b25 Merge pull request 'kadyrov_aydar_lab_3' (#105) from kadyrov_aydar_lab_3 into main
Reviewed-on: #105
2024-10-26 12:59:18 +04:00
2834efbbce Merge pull request 'kadyrov_aydar_lab_2' (#104) from kadyrov_aydar_lab_2 into main
Reviewed-on: #104
2024-10-26 12:58:55 +04:00
decc46b37c Merge pull request 'tukaeva_alfiya_lab_5' (#103) from tukaeva_alfiya_lab_5 into main
Reviewed-on: #103
2024-10-26 12:58:23 +04:00
a41e76795f Merge pull request 'artamonova_tatyana_lab_2 is ready' (#102) from artamonova_tatyana_lab_2 into main
Reviewed-on: #102
2024-10-26 12:57:40 +04:00
bcfec37329 Merge pull request 'bogdanov_dmitry_lab_5' (#101) from bogdanov_dmitry_lab_5 into main
Reviewed-on: #101
2024-10-26 12:56:47 +04:00
e17b0b0d61 Merge pull request 'bogdanov_dmitry_lab_4' (#100) from bogdanov_dmitry_lab_4 into main
Reviewed-on: #100
2024-10-26 12:56:28 +04:00
62290fc43d Merge pull request 'zhimolostnova_anna_lab_6' (#95) from zhimolostnova_anna_lab_6 into main
Reviewed-on: #95
2024-10-26 12:56:04 +04:00
0b5fb8da2e Merge pull request 'zhimolostnova lab 5 complete' (#94) from zhimolostnova_anna_lab_5 into main
Reviewed-on: #94
2024-10-26 12:53:57 +04:00
9c6ef7e89e Merge pull request 'vaksman_valeria_lab_6' (#91) from vaksman_valeria_lab_6 into main
Reviewed-on: #91
2024-10-26 12:52:19 +04:00
e763cf36e2 Merge pull request 'yakovleva_yulia_lab_4 is ready' (#90) from yakovleva_yulia_lab_4 into main
Reviewed-on: #90
2024-10-26 12:51:56 +04:00
adf3f384a3 Merge pull request 'dozorova_alena_lab_8' (#99) from dozorova_alena_lab_8 into main
Reviewed-on: #99
2024-10-26 12:50:30 +04:00
5ae6cd3cf1 Merge pull request 'dozorova_alena_lab_7' (#98) from dozorova_alena_lab_7 into main
Reviewed-on: #98
2024-10-26 12:42:04 +04:00
daf3742ce6 Merge pull request 'zhimolostnova lab 8 complete' (#97) from zhimolostnova_anna_lab_8 into main
Reviewed-on: #97
2024-10-26 12:35:58 +04:00
fb37a53f66 Merge pull request 'zhimolostnova lab 7 complete' (#96) from zhimolostnova_anna_lab_7 into main
Reviewed-on: #96
2024-10-26 12:35:18 +04:00
23e035f9b2 Merge pull request 'vaksman_valeria_lab_8' (#93) from vaksman_valeria_lab_8 into main
Reviewed-on: #93
2024-10-26 12:31:26 +04:00
556d8cf262 Merge pull request 'vaksman_valeria_lab_7' (#92) from vaksman_valeria_lab_7 into main
Reviewed-on: #92
2024-10-26 12:30:20 +04:00
419790f5df Merge pull request 'borschevskaya_anna_lab_8' (#89) from borschevskaya_anna_lab_8 into main
Reviewed-on: #89
2024-10-26 12:27:12 +04:00
54a9b8a778 Merge pull request 'kadyrov_aydar_lab_1' (#88) from kadyrov_aydar_lab_1 into main
Reviewed-on: #88
2024-10-26 12:23:44 +04:00
3aeae245fa Merge pull request 'lazarev_andrey_lab_3' (#87) from lazarev_andrey_lab_3 into main
Reviewed-on: #87
2024-10-26 12:23:21 +04:00
382273ccb8 Merge pull request 'borschevskaya_anna_lab_7 is ready' (#86) from borschevskaya_anna_lab_7 into main
Reviewed-on: #86
2024-10-26 12:20:06 +04:00
4a37f55328 Merge pull request 'rogashova_ekaterina_lab_2' (#85) from rogashova_ekaterina_lab_2 into main
Reviewed-on: #85
2024-10-26 12:14:14 +04:00
4e32398903 Merge pull request 'artamonova_tatyana_lab_1' (#84) from artamonova_tatyana_lab_1 into main
Reviewed-on: #84
2024-10-26 12:13:43 +04:00
e69819aedd Merge pull request 'tukaeva_alfiya_lab_4 is ready' (#83) from tukaeva_alfiya_lab_4 into main
Reviewed-on: #83
2024-10-26 12:12:59 +04:00
d9c4402ec9 Merge pull request 'kuzarin_maxim_lab_8' (#81) from kuzarin_maxim_lab_8 into main
Reviewed-on: #81
2024-10-26 12:07:16 +04:00
93687ad850 Merge pull request 'kuzarin_maxim_lab_7' (#80) from kuzarin_maxim_lab_7 into main
Reviewed-on: #80
2024-10-26 11:37:45 +04:00
4528bcd22c Merge pull request 'emelyanov_artem_lab_8' (#79) from emelyanov_artem_lab_8 into main
Reviewed-on: #79
2024-10-26 11:35:53 +04:00
eef1d03249 Merge pull request 'emelyanov_artem_lab_7' (#78) from emelyanov_artem_lab_7 into main
Reviewed-on: #78
2024-10-26 11:34:21 +04:00
7e09109cd2 Merge pull request 'emelyanov_artem_lab_6' (#77) from emelyanov_artem_lab_6 into main
Reviewed-on: #77
2024-10-26 11:33:34 +04:00
f46724e5cf Merge pull request 'dozorova_alena_lab_6' (#76) from dozorova_alena_lab_6 into main
Reviewed-on: #76
2024-10-26 11:27:22 +04:00
72b0b63e58 видео 2024-10-25 21:03:05 +04:00
fd54e426b5 осталось видео 2024-10-25 20:52:46 +04:00
JulYakJul
a5f0403627 yakovleva_yulia_lab_5 is ready 2024-10-25 18:12:36 +04:00
ad8894c0ca сделано, осталось дописать ридми 2024-10-25 16:56:56 +04:00
edea94a4f2 Готово 2024-10-25 14:10:33 +04:00
NAP
5700e75965 kadyrov_aydar_lab_3 2024-10-25 01:10:46 +04:00
NAP
9e9711f004 kadyrov_aydar_lab_2 2024-10-24 20:07:47 +04:00
014845df45 tukaeva_alfiya_lab_5 is ready 2024-10-24 15:46:08 +04:00
636592bbac artamonova_tatyana_lab_2 is ready 2024-10-23 21:29:01 +04:00
the
6711e8b0f6 Lab5 2024-10-23 18:36:31 +04:00
the
c91aa6e1f3 Исправление 2024-10-23 15:58:26 +04:00
the
d340d34c0b README, исправления, изображения 2024-10-23 15:56:48 +04:00
the
aaff3b8183 Lab4 2024-10-23 14:11:04 +04:00
06a7114499 lab 8 complete 2024-10-22 20:52:00 +03:00
0246f32bcf lab 7 complete 2024-10-22 20:19:09 +03:00
417368d25e fix readme 2024-10-22 19:31:03 +03:00
20a39fa9a5 lab 6 complete 2024-10-22 19:30:00 +03:00
fb15f87160 lab 5 complete 2024-10-22 18:23:30 +03:00
f86dfba785 lab8 wow 2024-10-21 21:21:28 +04:00
e874c69b62 lab7 is ready 2024-10-21 21:17:41 +04:00
6f0726185a lab six is ready yep 2024-10-21 21:10:24 +04:00
JulYakJul
b4b0ef7730 fix readme 2024-10-21 14:35:40 +04:00
JulYakJul
4d51941016 fix readme 2024-10-21 14:34:02 +04:00
JulYakJul
a07b272c79 yakovleva_yulia_lab_4 is ready 2024-10-21 14:31:58 +04:00
7cb94c14b0 borschevskaya_anna_lab_8 is ready 2024-10-21 08:52:52 +04:00
NAP
506d544060 kadyrov_aydar_lab_1 2024-10-21 02:36:18 +04:00
1ef9e02d32 lazarev_andrey_lab_3 is ready 2024-10-20 23:32:58 +04:00
ff8a87ebb8 borschevskaya_anna_lab_7 is ready 2024-10-20 22:37:56 +04:00
740d49d368 Готово 2024-10-20 21:59:10 +04:00
df1b8bd8ce artamonova_tatyana_lab_1 is ready 2024-10-20 19:29:17 +04:00
7549429b6b artamonova_tatyana_lab_1 is ready 2024-10-20 19:25:21 +04:00
00d9e2409a tukaeva_alfiya_lab_4 is ready 2024-10-20 19:04:32 +04:00
098cb9b9ad Обновить kuzarin_maxim_lab_8/README.md
маленькая стилистическая доработка
2024-10-19 19:44:12 +04:00
af39fdc505 Текст написан, нужно проверить отображение Md 2024-10-19 18:42:32 +03:00
ef603a8056 Обновить kuzarin_maxim_lab_7/README.md
Небольшая проблема с двум \n
2024-10-19 19:36:31 +04:00
c8b3124074 Добавлено эссе в виде MD файла 2024-10-19 18:34:37 +03:00
ce853de348 feature: completed lab 8 2024-10-19 18:40:59 +04:00
c3ac60eaa2 fix: delete .idea 2024-10-19 18:03:24 +04:00
e12438b727 feature: completed lab 7 2024-10-19 18:00:48 +04:00
aa54f9187f написали эссе 2024-10-19 14:47:04 +04:00
b1d8660774 + 2024-10-19 14:18:21 +04:00
6c66654acc feature: deleted lab 6 2024-10-19 14:17:40 +04:00
1d9c308bb4 правим в принципе разметку 2024-10-19 14:17:25 +04:00
a64b6c7329 feature: completed lab 6 2024-10-19 14:17:22 +04:00
7ec5c45faa попытка выравнивания 2024-10-19 14:06:09 +04:00
340dc6aa19 добавляем эссе 2024-10-19 14:04:46 +04:00
a152275bb7 Merge branch 'main' into dozorova_alena_lab_6 2024-10-19 13:17:33 +04:00
6e3ec51fe7 Merge branch 'main' into dozorova_alena_lab_6 2024-10-19 13:17:09 +04:00
131dc39f6c Merge pull request 'borschevskaya_anna_lab_6 is ready' (#75) from borschevskaya_anna_lab_6 into main
Reviewed-on: #75
2024-10-19 13:08:27 +04:00
d82f47e04c Merge pull request 'emelyanov_artem_lab_5' (#74) from emelyanov_artem_lab_5 into main
Reviewed-on: #74
2024-10-19 12:56:19 +04:00
3175352d02 Merge pull request 'emelyanov_artem_lab_4' (#73) from emelyanov_artem_lab_4 into main
Reviewed-on: #73
2024-10-19 12:49:26 +04:00
2e86e68e12 Merge pull request 'aleikin_artem_lab_1' (#72) from aleikin_artem_lab_1 into main
Reviewed-on: #72
2024-10-19 12:46:13 +04:00
63dd60f20e Merge pull request 'bondarenko_max_lab_1' (#71) from bondarenko_max_lab_1 into main
Reviewed-on: #71
2024-10-19 12:43:27 +04:00
63e031ef17 Merge pull request 'vaksman_valeria_lab_5' (#70) from vaksman_valeria_lab_5 into main
Reviewed-on: #70
2024-10-19 12:33:16 +04:00
5fdabedcd6 Merge pull request 'kuzarin_maxim_lab_6' (#69) from kuzarin_maxim_lab_6 into main
Reviewed-on: #69
2024-10-19 12:30:30 +04:00
9eadb70f85 fix link 2024-10-19 12:28:06 +04:00
5fd241a980 Merge pull request 'dozorova_alena_lab_5' (#67) from dozorova_alena_lab_5 into main
Reviewed-on: #67
2024-10-19 12:26:46 +04:00
4f53dff75f Merge branch 'dozorova_alena_lab_5' of https://git.is.ulstu.ru/Alexey/DAS_2024_1 into dozorova_alena_lab_5 2024-10-19 12:24:25 +04:00
57b7675030 fix link 2024-10-19 12:24:07 +04:00
b1c16dc76c Merge pull request 'bogdanov_dmitry_lab_3' (#68) from bogdanov_dmitry_lab_3 into main
Reviewed-on: #68
2024-10-19 12:23:02 +04:00
309911ed75 Merge pull request 'rogashova_ekaterina_lab_1 is ready' (#66) from rogashova_ekaterina_lab_1 into main
Reviewed-on: #66
2024-10-19 12:08:44 +04:00
d23e808325 Merge pull request 'lazarev_andrey_lab_2' (#65) from lazarev_andrey_lab_2 into main
Reviewed-on: #65
2024-10-19 12:05:03 +04:00
4c974bfb51 Merge pull request 'tsukanova_irina_lab_4' (#64) from tsukanova_irina_lab_4 into main
Reviewed-on: #64
2024-10-19 12:00:40 +04:00
b573569a97 borschevskaya_anna_lab_6 is ready 2024-10-19 10:46:15 +04:00
60c79b64fb feature: deleted lab 5 2024-10-18 17:42:21 +04:00
07105e81a0 feature: completed lab 5 2024-10-18 17:41:49 +04:00
JulYakJul
0ebd562be2 Merge branch 'main' into yakovleva_yulia_lab_4 2024-10-18 17:02:27 +04:00
JulYakJul
22a3917d28 work 3 done 2024-10-18 16:59:19 +04:00
46b94ea885 feature: completed lab 4 2024-10-18 16:27:06 +04:00
JulYakJul
94b8ba783c work 2.2 done 2024-10-18 16:24:18 +04:00
JulYakJul
060bd2321e work 2 done 2024-10-18 16:11:22 +04:00
JulYakJul
a8f1b39dd7 work 1 done 2024-10-18 15:42:10 +04:00
d3a7046f97 aleikin_artem_lab1 is ready 2024-10-18 00:09:32 +04:00
06d65650ab aleikin_artem_lab1 is ready 2024-10-18 00:05:35 +04:00
992a169c9b Merge branch 'main' into bondarenko_max_lab_1 2024-10-17 23:22:03 +04:00
b82a13c106 bondarenko_max_lab_1 is ready 2024-10-17 23:20:01 +04:00
e33ffef85e fix 2024-10-17 22:43:46 +04:00
9362e62999 йееес lab 5 is ready 2024-10-17 20:01:43 +04:00
430fad9ef4 Merge pull request 'borschevskaya_anna_lab_5 is ready' (#63) from borschevskaya_anna_lab_5 into main
Reviewed-on: #63
2024-10-16 16:50:14 +04:00
d0aedf8495 Merge pull request 'klyushenkova_ksenia_lab_1 is ready' (#62) from klyushenkova_ksenia_lab_1 into main
Reviewed-on: #62
2024-10-16 16:49:29 +04:00
effd849042 Merge pull request 'emelaynov_artem_lab_3' (#61) from emelaynov_artem_lab_3 into main
Reviewed-on: #61
2024-10-16 16:48:48 +04:00
55e18b6a64 Merge pull request 'vaksman_valeria_lab_3' (#60) from vaksman_valeria_lab_3 into main
Reviewed-on: #60
2024-10-16 16:47:28 +04:00
5a7409d60c Merge pull request 'mochalov_danila_lab_2' (#59) from mochalov_danila_lab_2 into main
Reviewed-on: #59
2024-10-16 16:46:55 +04:00
265cf478bf Merge pull request 'tukaeva_alfiya_lab_3 is ready' (#58) from tukaeva_alfiya_lab_3 into main
Reviewed-on: #58
2024-10-16 16:45:44 +04:00
c6f29a13a1 Merge pull request 'vaksman_valeria_lab_4' (#57) from vaksman_valeria_lab_4 into main
Reviewed-on: #57
2024-10-16 16:45:11 +04:00
4103a23984 Merge pull request 'Presnyakova Victoria Lab2' (#56) from presnyakova_victoria_lab_2 into main
Reviewed-on: #56
2024-10-16 16:20:58 +04:00
f8ac151629 Merge pull request 'zhimolostnova_anna_lab 4 complete' (#55) from zhimolostnova_anna_lab_4 into main
Reviewed-on: #55
2024-10-16 15:02:32 +04:00
13b5dfc707 Merge branch 'main' into dozorova_alena_lab_5 2024-10-16 14:31:12 +04:00
5d3517c2b0 Merge pull request 'dozorova_alena_lab_4' (#49) from dozorova_alena_lab_4 into main
Reviewed-on: #49
2024-10-16 14:26:29 +04:00
f3bbfb2efd rogashova_ekaterina_lab_1 is ready 2024-10-14 23:20:57 +04:00
3c6c7f47e8 second lab done 2024-10-14 16:27:36 +04:00
dc7c2c9694 видео 2024-10-14 16:09:46 +04:00
481631cda5 Merge pull request 'yakovleva_yulia_lab_3' (#54) from yakovleva_yulia_lab_3 into main
Reviewed-on: #54
2024-10-14 15:48:22 +04:00
9b4f9b608c все готово, осталось сделать видео 2024-10-14 15:37:29 +04:00
3b842c2228 Merge pull request 'kalyshev_yan_lab_2 is ready' (#53) from kalyshev_yan_lab_2 into main
Reviewed-on: #53
2024-10-14 15:18:08 +04:00
c4b8f4b4de Merge pull request 'kuzarin_maxim_lab_5' (#52) from kuzarin_maxim_lab_5 into main
Reviewed-on: #52
2024-10-14 12:29:54 +04:00
85567eea48 Merge pull request 'bogdanov_dmitry_lab_2' (#51) from bogdanov_dmitry_lab_2 into main
Reviewed-on: #51
2024-10-14 12:19:26 +04:00
ea8da8c665 Merge pull request 'borschevskaya_anna_lab_4 is ready' (#50) from borschevskaya_anna_lab_4 into main
Reviewed-on: #50
2024-10-14 11:03:34 +04:00
2497e3c742 borschevskaya_anna_lab_5 is ready 2024-10-13 11:03:08 +04:00
Pineapple
a628469960 klyushenkova_ksenia_lab_1 is ready 2024-10-12 23:40:16 +04:00
f107797a2d fix: deleted trash 2024-10-12 16:47:05 +04:00
98e9047b45 feature: completed lab 3 2024-10-12 16:45:56 +04:00
53f96303bc commit for commit 2024-10-11 19:32:26 +04:00
eb7211c6f9 Init. 2024-10-11 19:19:17 +04:00
a0209b612e tukaeva_alfiya_lab_3 is ready 2024-10-11 01:01:25 +04:00
1f72d4dc70 ох уж этот редми 2024-10-10 21:03:09 +04:00
b351431f51 lab4 now is ready 2024-10-10 21:02:25 +04:00
56baf52b61 lab4 ready 2024-10-10 21:00:15 +04:00
f5ec3f1767 lab2 2024-10-10 18:52:01 +04:00
77790c37fb lab 4 complete 2024-10-09 17:12:11 +03:00
the
735a403027 Добавлен README 2024-10-09 16:38:42 +04:00
the
c67049687b Done 2024-10-09 16:17:26 +04:00
022e2dc49e + 2024-10-08 23:46:12 +04:00
8f24aad349 + 2024-10-08 23:45:44 +04:00
a54e13f7ee + 2024-10-08 23:45:00 +04:00
1bb988ea2f dozorova_alena_lab_6 2024-10-08 23:43:24 +04:00
f7668394b0 dozorova_alena_lab_5 2024-10-08 22:46:32 +04:00
JulYakJul
a6a247cabf delete trash 2024-10-08 17:02:29 +04:00
JulYakJul
f5194bf885 Create README.md 2024-10-08 16:56:07 +04:00
JulYakJul
12cd98aa7d yakovleva_yulia_lab_3 is ready 2024-10-08 16:30:55 +04:00
JulYakJul
3db4a0fcd4 Admin 2024-10-07 11:28:34 +04:00
a4f9cf13cc borschevskaya_anna_lab_4 is ready 2024-10-06 17:16:44 +04:00
the
75b118ba6e Исправлен README, готово 2024-10-04 15:52:10 +04:00
the
d8441a0989 Чистый неподкупный рабочий код 2-й лабораторной 2024-10-04 15:49:14 +04:00
the
1213b5db3c Чистый неподкупный рабочий код 2-й лабораторной 2024-10-04 14:33:05 +04:00
Zyzf
f0b48bba28 kalyshev_yan_lab_2 is ready 2024-09-29 20:05:33 +04:00
23087c87ea Обновить kuzarin_maxim_lab_6/README.md 2024-09-26 23:14:31 +04:00
5a6580ff8c Фикс README 2024-09-26 22:14:06 +03:00
5f6472b5ff Лаба реализована. Нужно всё проверить в контексте описания 2024-09-26 22:12:33 +03:00
e1950c80ea поправили readme 2024-09-26 22:33:36 +04:00
5586bec4b8 готовая работа 2024-09-26 22:31:01 +04:00
4c74a16753 туториал 3 2024-09-23 22:45:22 +04:00
a830cb2198 туториал 2 2024-09-23 22:42:39 +04:00
9d0fa199f7 first work 2024-09-23 21:35:24 +04:00
6de5160da9 ЛР 5 готова. 2024-09-22 22:14:00 +03:00
f0083bc4cd feature: add .yml and .env files, start readme 2024-09-20 01:42:54 +04:00
2316 changed files with 322209 additions and 7 deletions

.idea/.name Normal file
View File

@ -0,0 +1 @@
main.py

View File

@ -0,0 +1,12 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredIdentifiers">
<list>
<option value="str.__pos__" />
</list>
</option>
</inspection_tool>
</profile>
</component>

View File

@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

.idea/misc.xml Normal file
View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.9 (tukaeva_alfiya_lab_4)" project-jdk-type="Python SDK" />
</project>

View File

@ -0,0 +1,32 @@
# Lab 1 - Introduction to Docker and Docker Compose
## PIbd-42 || Aleikin Artem
### Description
In this lab we deploy two popular services, MediaWiki and Redmine, using Docker Compose. Each service runs in its own container and uses a shared PostgreSQL database for storage. We also configure port forwarding to reach the services' web interfaces and use Docker volumes to keep data outside the containers.
### Project goal
Studying modern containerization technologies.
### Steps to run:
1. Clone the repository:
```
git clone <repository-url>
cd <repository-folder>
```
2. Start the containers:
```
docker-compose up -d
```
3. After startup the following containers should be available (a small smoke-test sketch follows this README):
MediaWiki: http://localhost:8080
Redmine: http://localhost:8081
4. To stop the containers:
```
docker-compose down
```
Demo video: https://vk.com/video248424990_456239601?list=ln-sCRa9IIiV1VpInn2d1
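
A small smoke-test sketch (not part of the lab) for checking the two published web UIs once `docker-compose up -d` has finished. The localhost URLs are assumptions taken from the port mappings in the compose file that follows; the script only reports HTTP reachability.

```python
from urllib.request import urlopen
from urllib.error import URLError

# Host-side URLs assumed from the compose port mappings below (8080:80, 8081:3000).
services = {
    "MediaWiki": "http://localhost:8080",
    "Redmine": "http://localhost:8081",
}

for name, url in services.items():
    try:
        with urlopen(url, timeout=5) as response:
            print(f"{name}: HTTP {response.status} at {url}")
    except URLError as exc:
        print(f"{name}: not reachable at {url} ({exc.reason})")
```

If either request fails, `docker-compose ps` is the first place to look for the container state.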

View File

@ -0,0 +1,45 @@
services:
  mediawiki:
    image: mediawiki
    container_name: mediawiki
    ports:
      - "8080:80" # Publish port 8080 on the host for access to MediaWiki
    volumes:
      - mediawiki_data:/var/www/html/images # Volume for MediaWiki data
    environment:
      - MEDIAWIKI_DB_HOST=db
      - MEDIAWIKI_DB_NAME=mediawiki
      - MEDIAWIKI_DB_USER=root
      - MEDIAWIKI_DB_PASSWORD=example
    depends_on:
      - db

  redmine:
    image: redmine
    container_name: redmine
    ports:
      - "8081:3000" # Publish port 8081 on the host for access to Redmine
    volumes:
      - redmine_data:/usr/src/redmine/files # Volume for Redmine data
    environment:
      - REDMINE_DB_POSTGRESQL=db
      - REDMINE_DB_DATABASE=redmine
      - REDMINE_DB_USERNAME=root
      - REDMINE_DB_PASSWORD=example
    depends_on:
      - db

  db:
    image: postgres:latest
    container_name: db
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: example
      POSTGRES_DB: postgres
    volumes:
      - db_data:/var/lib/postgresql # Volume for the database

volumes:
  mediawiki_data: # volume for MediaWiki
  redmine_data: # volume for Redmine
  db_data: # volume for the database

View File

@ -0,0 +1,48 @@
## Docker Compose configuration report
### Summary:
This Docker Compose configuration starts the set of services needed to run WordPress and MediaWiki. It includes:
- **WordPress:** a web service for blogs and CMS
- **MySQL:** the database that stores the WordPress data
- **RabbitMQ:** a message broker for potential future use
- **MediaWiki:** a wiki engine for creating and editing wiki pages
### Running the lab:
1. Install Docker and Docker Compose.
2. Save the configuration to a docker-compose.yml file.
3. Run docker-compose up --build
### Technologies used:
- **Docker Compose:** a tool for defining and running multi-container applications.
- **Docker:** a platform for building, shipping and running containers.
- **WordPress:** a popular blogging and CMS platform.
- **MySQL:** a popular database management system.
- **RabbitMQ:** a message broker used for asynchronous messaging.
- **MediaWiki:** free software for creating and editing wiki pages.
### Functionality:
The configuration starts the following services:
- **WordPress:** runs on port 8080, available at http://localhost:8080.
- **MySQL:** provides the database for WordPress and MediaWiki.
- **RabbitMQ:** runs on port 5672; the management UI is available at http://localhost:15672.
- **MediaWiki:** runs on port 8081, available at http://localhost:8081.
### Additional notes
- **Volumes**: used to persist service data so it is not lost when containers are restarted.
- **depends_on**: declares a dependency between services, e.g. WordPress depends on MySQL (it only controls start order; see the readiness sketch after this report).
- **Restart policy**: defines how services are restarted after a failure.
### Video
https://vk.com/video/@artamonovat?z=video212084908_456239356%2Fpl_212084908_-2
### Conclusion:
This Docker Compose configuration provides a simple and convenient way to start and manage several services around WordPress and MediaWiki, letting developers deploy and manage the applications in an isolated environment.
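
One caveat behind the **depends_on** note above: Compose only orders container startup, it does not wait for a service to actually accept connections. The sketch below (not part of the lab) polls the host-side ports published by this configuration until they answer; the localhost host and the port list are assumptions taken from the report.

```python
import socket
import time


def wait_for_port(host: str, port: int, timeout: float = 60.0) -> None:
    """Poll a TCP port until it accepts connections or the timeout expires."""
    deadline = time.monotonic() + timeout
    while True:
        try:
            with socket.create_connection((host, port), timeout=2):
                return
        except OSError:
            if time.monotonic() > deadline:
                raise TimeoutError(f"{host}:{port} did not become reachable in time")
            time.sleep(1)


# Host-side ports published by the configuration described above.
for service, port in [("WordPress", 8080), ("RabbitMQ", 5672), ("MediaWiki", 8081)]:
    wait_for_port("localhost", port)
    print(f"{service} is accepting connections on port {port}")
```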

View File

@ -0,0 +1,61 @@
version: '3.7'

services:
  wordpress:
    image: wordpress:latest
    ports:
      - "8080:80"
    volumes:
      - wordpress_data:/var/www/html
    environment:
      WORDPRESS_DB_HOST: db
      WORDPRESS_DB_NAME: wordpress
      WORDPRESS_DB_USER: wordpress
      WORDPRESS_DB_PASSWORD: password
    depends_on:
      - db
    restart: unless-stopped

  db:
    image: mysql:latest
    volumes:
      - db_data:/var/lib/mysql
    environment:
      MYSQL_DATABASE: wordpress
      MYSQL_USER: wordpress
      MYSQL_PASSWORD: dbpassword
      MYSQL_ROOT_PASSWORD: rootpassword
    restart: unless-stopped

  rabbitmq:
    image: rabbitmq:3-management
    ports:
      - "5672:5672"
      - "15672:15672"
    volumes:
      - rabbitmq_data:/var/lib/rabbitmq
    environment:
      RABBITMQ_DEFAULT_USER: guest
      RABBITMQ_DEFAULT_PASS: password
    restart: unless-stopped

  mediawiki:
    image: mediawiki:latest
    ports:
      - "8081:80"
    volumes:
      - mediawiki_data:/var/www/html
    environment:
      MW_DB_SERVER: db
      MW_DB_NAME: mediawiki
      MW_DB_USER: mediawiki
      MW_DB_PASSWORD: mediawiki_password
    depends_on:
      - db
    restart: unless-stopped

volumes:
  wordpress_data:
  db_data:
  rabbitmq_data:
  mediawiki_data:

artamonova_tatyana_lab_2/.gitignore vendored Normal file
View File

@ -0,0 +1,5 @@
*.pyc
__pycache__
*.egg-info
*.dist-info
.DS_Store

View File

@ -0,0 +1,22 @@
## Lab 2
### Completed by Artamonova Tatyana, PIbd-42
**Variant 1: Program 4 - Number of characters in the file names from the /var/data directory**
- Writes /var/result/data1.txt so that each line of the file is the number of characters in a file name from /var/data.
**Variant 2: Program 3 - Count of numbers in a sequence**
- Finds the largest number in /var/result/data1.txt and saves how many times it occurs in the sequence to /var/result/data2.txt.
**Project structure:**
1. The worker-1 and worker-2 folders contain the executable .py files and the Dockerfiles with the required set of instructions.
2. The data folder contains the files whose name lengths are counted.
3. The result folder contains the program outputs: data1.txt is produced by main1.py (worker-1) and data2.txt by main2.py (worker-2). The data in data2 is computed from data1 (a small dry-run sketch of these two steps follows this README).
4. The .gitignore file specifies which files are tracked and which are not.
5. docker-compose.yml defines and manages the Docker containers.
**Run command** - docker-compose up --build
**Video link:** https://vk.com/artamonovat?z=video212084908_456239357%2Fvideos212084908%2Fpl_212084908_-2
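
To make the data flow concrete, here is a minimal dry-run of the two steps on made-up file names, with no Docker involved; the real implementations are main1.py and main2.py further down in this diff.

```python
# Hypothetical file names standing in for the contents of /var/data.
filenames = ["example.txt", "another_file.csv", "data_set_01.csv"]

# Step 1 (worker-1 / main1.py): one line per file with the length of its name.
lengths = [len(name) for name in filenames]            # [11, 16, 15]
data1 = "\n".join(str(n) for n in lengths)

# Step 2 (worker-2 / main2.py): how many times the largest value occurs.
numbers = [int(line) for line in data1.splitlines()]
data2 = str(numbers.count(max(numbers)))               # "1"

print(data1)
print(data2)
```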

View File

@ -0,0 +1,22 @@
services:
  worker-1:
    build:
      context: ./worker-1
    volumes:
      - ./worker-1:/app
      - ./data:/var/data
      - ./result:/var/result
    depends_on:
      - worker-2

  worker-2:
    build:
      context: ./worker-2
    volumes:
      - ./worker-2:/app
      - ./data:/var/data
      - ./result:/var/result

volumes:
  data:
  result:

View File

@ -0,0 +1,3 @@
15
18
18

View File

@ -0,0 +1 @@
2

View File

@ -0,0 +1,14 @@
# Use the Python 3.10-slim image as the base for our container.
# The slim variant is more compact, which keeps the container smaller.
FROM python:3.10-slim
# Set the working directory inside the container to /app.
# All subsequent commands run in this directory.
WORKDIR /app
# Copy main1.py from the current directory into /app in the container.
COPY main1.py .
# Define the command executed when the container starts.
# In this case the Python script main1.py is run.
CMD ["python", "main1.py"]

View File

@ -0,0 +1,21 @@
import os
import glob


# Writes data1.txt so that each line is the number of characters in a file name from /var/data
def main():
    data_dir = "/var/data"
    result_file = "/var/result/data1.txt"

    result_dir = os.path.dirname(result_file)
    if not os.path.exists(result_dir):
        os.makedirs(result_dir)

    files = glob.glob(os.path.join(data_dir, '*'))
    with open(result_file, 'w') as f:
        for file in files:
            filename = os.path.basename(file)
            f.write(f"{len(filename)}\n")


if __name__ == "__main__":
    main()

View File

@ -0,0 +1,14 @@
# Use the Python 3.10-slim image as the base for our container.
# The slim variant is more compact, which keeps the container smaller.
FROM python:3.10-slim
# Set the working directory inside the container to /app.
# All subsequent commands run in this directory.
WORKDIR /app
# Copy main2.py from the current directory into /app in the container.
COPY main2.py .
# Define the command executed when the container starts.
# In this case the Python script main2.py is run.
CMD ["python", "main2.py"]

View File

@ -0,0 +1,26 @@
import os


# Finds the largest number in data1.txt and writes how many times it occurs in the sequence to data2.txt
def main():
    data_file_path = "/var/result/data1.txt"
    result_file_path = "/var/result/data2.txt"

    if not os.path.exists(data_file_path):
        raise FileNotFoundError(f"Input file not found: {data_file_path}")

    result_dir = os.path.dirname(result_file_path)
    if not os.path.exists(result_dir):
        os.makedirs(result_dir)

    with open(data_file_path, 'r') as f:
        numbers = [int(x.strip()) for x in f.read().splitlines()]

    max_number = max(numbers)
    count = numbers.count(max_number)

    with open(result_file_path, 'w') as f:
        f.write(str(count))

    print(f"Count of the largest numbers: {count}")


if __name__ == "__main__":
    main()

bazunov_andrew_lab_3/PersonApp/.DS_Store vendored Normal file

Binary file not shown.

View File

@ -0,0 +1,4 @@
PORT=8080
TASK_APP_URL=http://task-app:8000
TIMEOUT=15
DATABASE=./database.db

View File

@ -0,0 +1,14 @@
FROM golang:1.23
WORKDIR /app
COPY go.mod go.sum ./
RUN go mod download
COPY . .
RUN go build -o /bin/PersonApp
EXPOSE 8080
CMD ["/bin/PersonApp"]

Binary file not shown.

View File

@ -0,0 +1,10 @@
module PersonApp
go 1.23.1
require (
github.com/gorilla/mux v1.8.1
github.com/mattn/go-sqlite3 v1.14.24
)
require github.com/joho/godotenv v1.5.1 // indirect

View File

@ -0,0 +1,6 @@
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=

View File

@ -0,0 +1,157 @@
package handlers
import (
"PersonApp/httpClient"
"PersonApp/models"
"PersonApp/repository"
"encoding/json"
"fmt"
"github.com/gorilla/mux"
"net/http"
"strconv"
)
func InitRoutes(r *mux.Router, rep repository.PersonRepository, cln httpClient.Client) {
r.HandleFunc("/", GetPersons(rep, cln)).Methods("GET")
r.HandleFunc("/{id:[0-9]+}", GetPersonById(rep, cln)).Methods("GET")
r.HandleFunc("/", CreatePerson(rep)).Methods("POST")
r.HandleFunc("/{id:[0-9]+}", UpdatePerson(rep)).Methods("PUT")
r.HandleFunc("/{id:[0-9]+}", DeletePerson(rep)).Methods("DELETE")
}
func GetPersons(rep repository.PersonRepository, cln httpClient.Client) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
fmt.Println("GET PERSONS")
persons, err := rep.GetAllPersons()
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
for i := 0; i < len(persons); i++ {
tasks, _ := cln.GetPersonTasks(persons[i].Id)
persons[i].Tasks = tasks
}
err = json.NewEncoder(w).Encode(persons)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
}
}
func GetPersonById(rep repository.PersonRepository, cln httpClient.Client) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
id, err := strconv.Atoi(mux.Vars(r)["id"])
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
person, err := rep.GetPersonById(id)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
tasks, err := cln.GetPersonTasks(id)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
} else {
person.Tasks = tasks
}
err = json.NewEncoder(w).Encode(person)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
}
}
func CreatePerson(rep repository.PersonRepository) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
var person *models.Person
err := json.NewDecoder(r.Body).Decode(&person)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
person, err = rep.CreatePerson(*person)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.WriteHeader(http.StatusCreated)
err = json.NewEncoder(w).Encode(person)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
}
}
func UpdatePerson(rep repository.PersonRepository) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
id, err := strconv.Atoi(mux.Vars(r)["id"])
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
var person *models.Person
err = json.NewDecoder(r.Body).Decode(&person)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
person, err = rep.UpdatePerson(models.Person{
Id: id,
Name: person.Name,
Tasks: nil,
})
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.WriteHeader(http.StatusAccepted)
err = json.NewEncoder(w).Encode(person)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
}
}
}
func DeletePerson(rep repository.PersonRepository) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
id, err := strconv.Atoi(mux.Vars(r)["id"])
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
err = rep.DeletePerson(id)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.WriteHeader(http.StatusOK)
}
}

View File

@ -0,0 +1,72 @@
package httpClient
import (
"PersonApp/models"
"encoding/json"
"fmt"
"io"
"net/http"
"time"
)
type Client interface {
GetPersonTasks(id int) ([]models.Task, error)
TestConnection() (bool, error)
}
type client struct {
BaseUrl string
Timeout time.Duration
}
func (c *client) TestConnection() (bool, error) {
client := &http.Client{Timeout: c.Timeout}
url := fmt.Sprintf("%s/", c.BaseUrl)
resp, err := client.Get(url)
if err != nil {
return false, err
}
defer func(Body io.ReadCloser) {
err := Body.Close()
if err != nil {
return
}
}(resp.Body)
if resp.StatusCode != http.StatusOK {
return false, fmt.Errorf("bad status code: %d", resp.StatusCode)
}
return true, nil
}
func (c *client) GetPersonTasks(id int) ([]models.Task, error) {
client := &http.Client{Timeout: c.Timeout * time.Second}
url := fmt.Sprintf("%s/f/%d", c.BaseUrl, id)
resp, err := client.Get(url)
if err != nil {
return nil, err
}
defer func(Body io.ReadCloser) {
err := Body.Close()
if err != nil {
}
}(resp.Body)
body, _ := io.ReadAll(resp.Body)
var tasks []models.Task
if err := json.Unmarshal(body, &tasks); err != nil {
fmt.Printf("Unmarshal error: %s", err)
return []models.Task{}, err
}
return tasks, nil
}
func NewClient(baseUrl string, timeout time.Duration) Client {
return &client{BaseUrl: baseUrl, Timeout: timeout}
}

View File

@ -0,0 +1,34 @@
GET http://localhost/person-app/
Accept: application/json
###
GET http://localhost/person-app/1
Accept: application/json
###
POST http://localhost/person-app/
Accept: application/json
Content-Type: application/json
{
"name": "TEST3"
}
###
PUT http://localhost/person-app/3
Accept: application/json
Content-Type: application/json
{
"name": "TEST11"
}
###
DELETE http://localhost/person-app/3
Accept: application/json
###

View File

@ -0,0 +1,47 @@
package main
import (
"PersonApp/handlers"
"PersonApp/httpClient"
"PersonApp/repository"
"PersonApp/storage"
"github.com/gorilla/mux"
"github.com/joho/godotenv"
"net/http"
"os"
"strconv"
"time"
)
func main() {
err := godotenv.Load(".env")
if err != nil {
panic("Error loading .env file")
}
url := os.Getenv("TASK_APP_URL")
port := os.Getenv("PORT")
databasePath := os.Getenv("DATABASE")
timeout, err := strconv.Atoi(os.Getenv("TIMEOUT"))
if err != nil {
panic("Error converting timeout to int")
}
database, err := storage.Init(databasePath)
if err != nil {
panic(err)
}
cln := httpClient.NewClient(url, time.Duration(timeout))
rep := repository.NewPersonRepository(database)
router := mux.NewRouter()
handlers.InitRoutes(router, rep, cln)
err = http.ListenAndServe(":"+port, router)
if err != nil {
panic(err)
}
storage.Close(database)
}

View File

@ -0,0 +1,24 @@
package models
type Person struct {
Id int `json:"id"`
Name string `json:"name"`
Tasks []Task `json:"tasks"`
}
type PersonCreate struct {
Name string `json:"name"`
}
type Task struct {
Id int `json:"id"`
PersonId int `json:"person_id"`
Name string `json:"name"`
Date string `json:"date"`
}
type TaskCreate struct {
PersonId int `json:"person_id"`
Name string `json:"name"`
Date string `json:"date"`
}

View File

@ -0,0 +1,99 @@
package repository
import (
"PersonApp/models"
"database/sql"
)
type PersonRepository interface {
GetAllPersons() ([]models.Person, error)
GetPersonById(id int) (*models.Person, error)
CreatePerson(person models.Person) (*models.Person, error)
UpdatePerson(person models.Person) (*models.Person, error)
DeletePerson(id int) error
}
type personRepository struct {
DB *sql.DB
}
func NewPersonRepository(db *sql.DB) PersonRepository {
return &personRepository{DB: db}
}
func (pr *personRepository) GetAllPersons() ([]models.Person, error) {
rows, err := pr.DB.Query("select * from Persons")
if err != nil {
return nil, err
}
defer func(rows *sql.Rows) {
err := rows.Close()
if err != nil {
panic(err)
}
}(rows)
var persons []models.Person
for rows.Next() {
p := models.Person{}
err := rows.Scan(&p.Id, &p.Name)
if err != nil {
panic(err)
}
persons = append(persons, p)
}
return persons, err
}
func (pr *personRepository) GetPersonById(id int) (*models.Person, error) {
row := pr.DB.QueryRow("select * from Persons where id=?", id)
person := models.Person{}
err := row.Scan(&person.Id, &person.Name)
if err != nil {
return nil, err
}
return &person, err
}
func (pr *personRepository) CreatePerson(p models.Person) (*models.Person, error) {
res, err := pr.DB.Exec("INSERT INTO Persons (name) values (?)", p.Name)
if err != nil {
return nil, err
}
if res == nil {
return nil, nil
}
return &p, err
}
func (pr *personRepository) UpdatePerson(p models.Person) (*models.Person, error) {
res, err := pr.DB.Exec("UPDATE Persons SET name = ? WHERE id = ?", p.Name, p.Id)
if err != nil {
return nil, err
}
if res == nil {
return nil, nil
}
return &p, err
}
func (pr *personRepository) DeletePerson(id int) error {
_, err := pr.DB.Exec("DELETE FROM Persons WHERE id = ?", id)
if err != nil {
return err
}
return nil
}

View File

@ -0,0 +1,36 @@
package storage
import (
"database/sql"
_ "github.com/mattn/go-sqlite3"
)
func Init(databasePath string) (*sql.DB, error) {
db, err := sql.Open("sqlite3", databasePath)
if err != nil || db == nil {
return nil, err
}
if err := createTableIfNotExists(db); err != nil {
return nil, err
}
return db, nil
}
func Close(db *sql.DB) {
err := db.Close()
if err != nil {
return
}
}
func createTableIfNotExists(db *sql.DB) error {
if result, err := db.Exec(
"CREATE TABLE IF NOT EXISTS `Persons`(Id integer primary key autoincrement, Name text not null);",
); err != nil || result == nil {
return err
}
return nil
}

View File

@ -1,11 +1,6 @@
# Распределенные вычисления и приложения Л2
# Распределенные вычисления и приложения Л3
## _Автор Базунов Андрей Игревич ПИбд-42_
Сервисы ( _порядок исполнения сервисов соблюден_ ):
- 1.FileCreator - (_Создание тестовых данных_)
- 2.FirstService - (_Выполнение 1.4 варианта задания_)
- 3.SecondService - (_Выполнение 2.2 варианта задания_)
В качестве основного языка был выбран GoLang. Для каждого сервиса был создан DOCKERFILE где были прописаны условия и действия для сборки каждого из модулей
# Docker
@ -27,4 +22,4 @@ docker-compose up -d --build
docker-compose down
```
[Демонстрация работы](https://vk.com/video236673313_456239575)
[Демонстрация работы](https://vk.com/video/@viltskaa?z=video236673313_456239577%2Fpl_236673313_-2)
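
As a rough usage illustration (not part of the lab), the sketch below drives the two services through the nginx gateway defined later in this diff, using the third-party requests library. The localhost base URL, the /person-app/ and /task-app/ path prefixes, and reading the generated id back from the person list are assumptions about the running stack rather than documented behaviour.

```python
import requests  # third-party HTTP client, assumed to be installed on the host

BASE = "http://localhost"  # nginx gateway from the compose file and nginx.conf below

# Create a person, then read the generated id back from the list
# (the create handler echoes the posted payload rather than the stored row).
requests.post(f"{BASE}/person-app/", json={"name": "Alice"}, timeout=5).raise_for_status()
person = requests.get(f"{BASE}/person-app/", timeout=5).json()[-1]

# Attach a task to that person through TaskApp, then fetch the aggregated view.
requests.post(
    f"{BASE}/task-app/",
    json={"name": "write report", "person_id": person["id"], "date": "2024-10-26"},
    timeout=5,
).raise_for_status()
print(requests.get(f"{BASE}/person-app/{person['id']}", timeout=5).json())
```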

View File

@ -0,0 +1,4 @@
PORT=8000
PERSON_APP_URL=http://person-app:8080
TIMEOUT=15
DATABASE=./database.db

View File

@ -0,0 +1,14 @@
FROM golang:1.23
WORKDIR /app
COPY go.mod go.sum ./
RUN go mod download
COPY . .
RUN go build -o /bin/TaskApp
EXPOSE 8000
CMD ["/bin/TaskApp"]

Binary file not shown.

View File

@ -0,0 +1,10 @@
module TaskApp
go 1.23.1
require (
github.com/gorilla/mux v1.8.1
github.com/mattn/go-sqlite3 v1.14.24
)
require github.com/joho/godotenv v1.5.1

View File

@ -0,0 +1,6 @@
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=

View File

@ -0,0 +1,177 @@
package handlers
import (
"TaskApp/httpClient"
"TaskApp/models"
"TaskApp/repository"
"encoding/json"
"fmt"
"github.com/gorilla/mux"
"net/http"
"strconv"
)
func InitRoutes(r *mux.Router, rep repository.TaskRepository, cln httpClient.Client) {
r.HandleFunc("/", GetTasks(rep)).Methods("GET")
r.HandleFunc("/{id:[0-9]+}", GetTaskById(rep)).Methods("GET")
r.HandleFunc("/", CreateTask(rep, cln)).Methods("POST")
r.HandleFunc("/{id:[0-9]+}", UpdateTask(rep)).Methods("PUT")
r.HandleFunc("/{id:[0-9]+}", DeleteTask(rep)).Methods("DELETE")
r.HandleFunc("/f/{id:[0-9]+}", GetPersonTasks(rep)).Methods("GET")
}
func GetTasks(rep repository.TaskRepository) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
tasks, err := rep.GetAllTasks()
if err != nil {
w.WriteHeader(http.StatusInternalServerError)
return
}
err = json.NewEncoder(w).Encode(tasks)
if err != nil {
w.WriteHeader(http.StatusInternalServerError)
}
}
}
func GetTaskById(rep repository.TaskRepository) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
id, err := strconv.Atoi(mux.Vars(r)["id"])
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
person, err := rep.GetTaskById(id)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
err = json.NewEncoder(w).Encode(person)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
}
}
func GetPersonTasks(rep repository.TaskRepository) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
id, err := strconv.Atoi(mux.Vars(r)["id"])
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
tasks, err := rep.GetUserTasks(id)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
err = json.NewEncoder(w).Encode(tasks)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
}
}
func CreateTask(rep repository.TaskRepository, cln httpClient.Client) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
var task *models.TaskCreate
err := json.NewDecoder(r.Body).Decode(&task)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
if task == nil || task.Name == "" || task.PersonId == 0 {
http.Error(w, "name and person_id are required", http.StatusBadRequest)
return
}
person, err := cln.GetPerson(task.PersonId)
if err != nil {
fmt.Println(err)
http.Error(w, "Connection to PersonApp is confused.", http.StatusInternalServerError)
return
}
if person == nil {
http.Error(w, fmt.Sprintf("Person with id=%d is't founded.", person.Id), http.StatusBadGateway)
return
}
newTask, err := rep.CreateTask(*task)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.WriteHeader(http.StatusCreated)
err = json.NewEncoder(w).Encode(newTask)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
}
}
func UpdateTask(rep repository.TaskRepository) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
id, err := strconv.Atoi(mux.Vars(r)["id"])
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
var task *models.TaskCreate
err = json.NewDecoder(r.Body).Decode(&task)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
newTask, err := rep.UpdateTask(models.Task{Id: id, Name: task.Name, Date: task.Date})
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
err = json.NewEncoder(w).Encode(newTask)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
}
}
}
func DeleteTask(rep repository.TaskRepository) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
id, err := strconv.Atoi(mux.Vars(r)["id"])
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
err = rep.DeleteTask(id)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.WriteHeader(http.StatusOK)
}
}

View File

@ -0,0 +1,73 @@
package httpClient
import (
"TaskApp/models"
"encoding/json"
"fmt"
"io"
"log"
"net/http"
"time"
)
type Client interface {
GetPerson(id int) (*models.Person, error)
TestConnection() (bool, error)
}
type client struct {
BaseUrl string
Timeout time.Duration
}
func (c *client) TestConnection() (bool, error) {
client := &http.Client{Timeout: c.Timeout}
url := fmt.Sprintf("%s/", c.BaseUrl)
resp, err := client.Get(url)
if err != nil {
return false, err
}
defer func(Body io.ReadCloser) {
err := Body.Close()
if err != nil {
return
}
}(resp.Body)
if resp.StatusCode != http.StatusOK {
return false, fmt.Errorf("bad status code: %d", resp.StatusCode)
}
return true, nil
}
func (c *client) GetPerson(id int) (*models.Person, error) {
client := &http.Client{Timeout: c.Timeout * time.Second}
url := fmt.Sprintf("%s/%d", c.BaseUrl, id)
resp, err := client.Get(url)
if err != nil {
return nil, err
}
defer func(Body io.ReadCloser) {
err := Body.Close()
if err != nil {
}
}(resp.Body)
body, _ := io.ReadAll(resp.Body)
var person models.Person
if err := json.Unmarshal(body, &person); err != nil {
log.Printf("Unmarshal error: %s", err)
return nil, err
}
return &person, nil
}
func NewClient(baseUrl string, timeout time.Duration) Client {
return &client{BaseUrl: baseUrl, Timeout: timeout}
}

View File

@ -0,0 +1,37 @@
GET http://localhost/task-app/
Accept: application/json
###
GET http://localhost/task-app/4
Accept: application/json
###
POST http://localhost/task-app/
Accept: application/json
Content-Type: application/json
{
"name": "TEST2",
"person_id": 1,
"date": "19.02.2202"
}
###
PUT http://localhost/task-app/4
Accept: application/json
Content-Type: application/json
{
"name": "TEST5",
"date": "19.02.2202"
}
###
DELETE http://localhost/task-app/4
Accept: application/json
###

View File

@ -0,0 +1,47 @@
package main
import (
"TaskApp/handlers"
"TaskApp/httpClient"
"TaskApp/repository"
"TaskApp/storage"
"github.com/gorilla/mux"
"github.com/joho/godotenv"
"net/http"
"os"
"strconv"
"time"
)
func main() {
err := godotenv.Load(".env")
if err != nil {
panic("Error loading .env file")
}
url := os.Getenv("PERSON_APP_URL")
port := os.Getenv("PORT")
databasePath := os.Getenv("DATABASE")
timeout, err := strconv.Atoi(os.Getenv("TIMEOUT"))
if err != nil {
panic("Error converting timeout to int")
}
database, err := storage.Init(databasePath)
if err != nil {
panic(err)
}
cln := httpClient.NewClient(url, time.Duration(timeout))
rep := repository.NewTaskRepository(database)
router := mux.NewRouter()
handlers.InitRoutes(router, rep, cln)
err = http.ListenAndServe(":"+port, router)
if err != nil {
panic(err)
}
storage.Close(database)
}

View File

@ -0,0 +1,24 @@
package models
type Person struct {
Id int `json:"id"`
Name string `json:"name"`
Tasks []Task `json:"tasks"`
}
type PersonCreate struct {
Name string `json:"name"`
}
type Task struct {
Id int `json:"id"`
PersonId int `json:"person_id"`
Name string `json:"name"`
Date string `json:"date"`
}
type TaskCreate struct {
PersonId int `json:"person_id"`
Name string `json:"name"`
Date string `json:"date"`
}

View File

@ -0,0 +1,121 @@
package repository
import (
"TaskApp/models"
"database/sql"
)
type TaskRepository interface {
GetAllTasks() ([]models.Task, error)
GetTaskById(id int) (*models.Task, error)
GetUserTasks(id int) ([]models.Task, error)
CreateTask(task models.TaskCreate) (*models.Task, error)
UpdateTask(task models.Task) (*models.Task, error)
DeleteTask(id int) error
}
type taskRepository struct {
DB *sql.DB
}
func (t taskRepository) GetUserTasks(id int) ([]models.Task, error) {
rows, err := t.DB.Query("select * from Tasks where PersonId = ?", id)
if err != nil {
return nil, err
}
defer func(rows *sql.Rows) {
err := rows.Close()
if err != nil {
panic(err)
}
}(rows)
var tasks []models.Task
for rows.Next() {
p := models.Task{}
err := rows.Scan(&p.Id, &p.Name, &p.PersonId, &p.Date)
if err != nil {
panic(err)
}
tasks = append(tasks, p)
}
return tasks, err
}
func (t taskRepository) GetAllTasks() ([]models.Task, error) {
rows, err := t.DB.Query("select * from Tasks")
if err != nil {
return nil, err
}
defer func(rows *sql.Rows) {
err := rows.Close()
if err != nil {
panic(err)
}
}(rows)
var tasks []models.Task
for rows.Next() {
p := models.Task{}
err := rows.Scan(&p.Id, &p.Name, &p.PersonId, &p.Date)
if err != nil {
panic(err)
}
tasks = append(tasks, p)
}
return tasks, err
}
func (t taskRepository) GetTaskById(id int) (*models.Task, error) {
row := t.DB.QueryRow("select * from Tasks where id=?", id)
task := models.Task{}
err := row.Scan(&task.Id, &task.Name, &task.PersonId, &task.Date)
if err != nil {
return nil, err
}
return &task, err
}
func (t taskRepository) CreateTask(task models.TaskCreate) (*models.Task, error) {
_, err := t.DB.Exec("INSERT INTO Tasks(Name, PersonId, Date) VALUES (?, ?, ?)", task.Name, task.PersonId, task.Date)
if err != nil {
return nil, err
}
return &models.Task{
Id: 0,
PersonId: task.PersonId,
Name: task.Name,
Date: task.Date,
}, err
}
func (t taskRepository) UpdateTask(task models.Task) (*models.Task, error) {
_, err := t.DB.Exec("UPDATE Tasks SET name = ?, date = ? WHERE id = ?", task.Name, task.Date, task.Id)
if err != nil {
return nil, err
}
return &task, err
}
func (t taskRepository) DeleteTask(id int) error {
_, err := t.DB.Exec("DELETE FROM Tasks WHERE id = ?", id)
if err != nil {
return err
}
return nil
}
func NewTaskRepository(db *sql.DB) TaskRepository {
return &taskRepository{DB: db}
}

View File

@ -0,0 +1,36 @@
package storage
import (
"database/sql"
_ "github.com/mattn/go-sqlite3"
)
func Init(databasePath string) (*sql.DB, error) {
db, err := sql.Open("sqlite3", databasePath)
if err != nil || db == nil {
return nil, err
}
if err := createTableIfNotExists(db); err != nil {
return nil, err
}
return db, nil
}
func Close(db *sql.DB) {
err := db.Close()
if err != nil {
return
}
}
func createTableIfNotExists(db *sql.DB) error {
if result, err := db.Exec(
"CREATE TABLE IF NOT EXISTS `Tasks`(Id integer primary key autoincrement, Name text not null, PersonId integer not null, Date text not null);",
); err != nil || result == nil {
return err
}
return nil
}

View File

@ -0,0 +1,34 @@
services:
  person-app:
    build:
      context: ./PersonApp
      dockerfile: Dockerfile
    networks:
      - network
    ports:
      - "8080:8080"

  task-app:
    build:
      context: ./TaskApp
      dockerfile: Dockerfile
    networks:
      - network
    ports:
      - "8000:8000"

  nginx:
    image: nginx
    ports:
      - "80:80"
    volumes:
      - ./nginx.conf:/etc/nginx/nginx.conf
    networks:
      - network
    depends_on:
      - person-app
      - task-app

networks:
  network:
    driver: bridge

View File

@ -0,0 +1,59 @@
events {
worker_connections 1024;
}
http {
server {
listen 80;
server_name localhost;
location /person-app/ {
proxy_pass http://person-app:8080/;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
add_header 'Access-Control-Allow-Origin' '*';
add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS';
add_header 'Access-Control-Allow-Headers' 'Origin, Content-Type, Accept, Authorization';
}
location /task-app/ {
proxy_pass http://task-app:8000/;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
add_header 'Access-Control-Allow-Origin' '*';
add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS';
add_header 'Access-Control-Allow-Headers' 'Origin, Content-Type, Accept, Authorization';
}
# Прокси для Swagger (Stream-сервис)
#location /stream-service/swagger/ {
# proxy_pass http://stream-service:8000/swagger/;
# proxy_set_header Host $host;
# proxy_set_header X-Real-IP $remote_addr;
# proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
# proxy_set_header X-Forwarded-Proto $scheme;
#}
# Прокси для Swagger (Message-сервис)
#location /message-service/swagger/ {
# proxy_pass http://message-service:8080/swagger/;
# proxy_set_header Host $host;
# proxy_set_header X-Real-IP $remote_addr;
# proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
# proxy_set_header X-Forwarded-Proto $scheme;
#}
#location /stream-service/doc.json {
# proxy_pass http://stream-service:8000/doc.json;
#}
#location /message-service/doc.json {
# proxy_pass http://message-service:8080/doc.json;
#}
}
}

View File

@ -0,0 +1,34 @@
# Lab 4: Working with a message broker (RabbitMQ)
## Goal
Learn to design applications that use the RabbitMQ message broker.
---
## Tasks
> 1. **Install RabbitMQ**
Install RabbitMQ on a local machine (or use Docker).
>- [RabbitMQ downloads](https://www.rabbitmq.com/download.html)
>- [RabbitMQ releases](https://github.com/rabbitmq/rabbitmq-server/releases/)
>- **Complete the RabbitMQ tutorials**
>- Take screenshots showing the `producer` and `consumer` running and messages being delivered (a minimal publish sketch follows this README).
---
## First tutorial
> ![img.png](static/img1.png)
---
## Second tutorial
>![img.png](static/img2.png)
>![img_1.png](static/img3.png)
---
## Third tutorial
> ![img.png](static/img4.png)
---
## Task
>![img.png](static/img5.png)
> ![img.png](static/img.png)
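
A quick way to see the fanout behaviour from the task above: the sketch below (not part of the lab) publishes one extra message to the same exchange the scripts further down use. It assumes the broker from the compose file below is running locally with default credentials, in which case vkReader and vkSlowReader should each print the message once.

```python
import pika

# Connect the same way as the lab scripts below (local broker, default credentials).
connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

# Declaring the exchange is idempotent; the routing key is ignored by a fanout exchange.
channel.exchange_declare(exchange='vk_messages', exchange_type='fanout')
channel.basic_publish(exchange='vk_messages', routing_key='', body='manual test message')
print("Sent a test message to the 'vk_messages' exchange")

connection.close()
```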

View File

@ -0,0 +1,17 @@
version: "3.2"
services:
rabbitmq:
image: rabbitmq:3-management-alpine
container_name: 'rabbitmq'
ports:
- "5672:5672"
- "15672:15672"
volumes:
- ~/.docker-conf/rabbitmq/data/:/var/lib/rabbitmq/
- ~/.docker-conf/rabbitmq/log/:/var/log/rabbitmq
networks:
- rabbitmq_go_net
networks:
rabbitmq_go_net:
driver: bridge

View File

@ -0,0 +1,47 @@
from datetime import datetime
import random
import threading
import pika
import sys

_alphabet = [chr(i) for i in range(97, 123)]


def run_every_n_seconds(seconds, action, *args):
    threading.Timer(seconds, run_every_n_seconds, [seconds, action] + list(args)).start()
    action(*args)


def generate_message():
    now = datetime.now()
    current_time = now.strftime("%H:%M:%S")
    return f"[{current_time}] " + "".join(random.choices(_alphabet, k=random.randint(1, 10)))


def send_message(channel_local):
    message = generate_message()
    channel_local.basic_publish(
        exchange='vk_messages',
        routing_key='vk_messages',
        body=message,
        properties=pika.BasicProperties(
            delivery_mode=pika.DeliveryMode.Persistent
        ))
    print(f"[vkAuthor] Sent {message}")


def main(conn: pika.BlockingConnection):
    channel = conn.channel()
    channel.exchange_declare(exchange='vk_messages', exchange_type='fanout')
    run_every_n_seconds(1, send_message, channel)


if __name__ == '__main__':
    connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
    try:
        main(connection)
    except KeyboardInterrupt:
        connection.close()
        sys.exit(0)

View File

@ -0,0 +1,44 @@
import sys
from datetime import datetime

import pika

_QUEUE_NAME = "vk_messages_queue"
_EXCHANGE_NAME = "vk_messages"


def main():
    connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
    channel = connection.channel()

    channel.exchange_declare(
        exchange=_EXCHANGE_NAME,
        exchange_type='fanout'
    )

    channel.queue_declare(queue=_QUEUE_NAME, exclusive=True)
    channel.queue_bind(exchange=_EXCHANGE_NAME, queue=_QUEUE_NAME)

    def callback(ch, method, properties, body):
        now = datetime.now()
        current_time = now.strftime("%H:%M:%S")
        print(f"[vkReader] Received [{str(body)}] in [{current_time}]")
        ch.basic_ack(delivery_tag=method.delivery_tag)

    channel.basic_consume(
        queue=_QUEUE_NAME,
        on_message_callback=callback,
        auto_ack=False
    )

    print('[*] Waiting for messages. To exit press CTRL+C')
    channel.start_consuming()


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        print('Interrupted')
        sys.exit(0)

View File

@ -0,0 +1,47 @@
import time
import random
from datetime import datetime

import pika
import sys

_QUEUE_NAME = "vk_messages_queue_slow"
_EXCHANGE_NAME = "vk_messages"


def main():
    connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
    channel = connection.channel()

    channel.exchange_declare(
        exchange=_EXCHANGE_NAME,
        exchange_type='fanout'
    )

    channel.queue_declare(queue=_QUEUE_NAME, exclusive=True)
    channel.queue_bind(exchange=_EXCHANGE_NAME, queue=_QUEUE_NAME)

    def callback(ch, method, properties, body):
        now = datetime.now()
        current_time = now.strftime("%H:%M:%S")
        print(f"[vkSlowReader] Received [{str(body)}] in [{current_time}]")
        read_time = random.randint(2, 5)
        time.sleep(read_time)
        ch.basic_ack(delivery_tag=method.delivery_tag)

    channel.basic_consume(
        queue=_QUEUE_NAME,
        on_message_callback=callback,
        auto_ack=False
    )

    print('[*] Waiting for messages. To exit press CTRL+C')
    channel.start_consuming()


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        print('Interrupted')
        sys.exit(0)

View File

@ -0,0 +1,25 @@
import pika
import sys


def main():
    connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
    channel = connection.channel()

    channel.queue_declare(queue='hello')

    def callback(ch, method, properties, body):
        print(f" [x] Received {body}")

    channel.basic_consume(queue='hello', on_message_callback=callback, auto_ack=True)

    print(' [*] Waiting for messages. To exit press CTRL+C')
    channel.start_consuming()


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        print('Interrupted')
        sys.exit(0)

View File

@ -0,0 +1,11 @@
import pika

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

channel.queue_declare(queue='hello')

channel.basic_publish(exchange='', routing_key='hello', body='Hello World!')
print(" [x] Sent 'Hello World!'")

connection.close()

View File

@ -0,0 +1,19 @@
import pika
import sys

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

channel.queue_declare(queue='task_queue', durable=True)

message = ' '.join(sys.argv[1:]) or "Hello World!"
channel.basic_publish(
    exchange='',
    routing_key='task_queue',
    body=message,
    properties=pika.BasicProperties(
        delivery_mode=pika.DeliveryMode.Persistent
    ))
print(f" [x] Sent {message}")

connection.close()

View File

@ -0,0 +1,22 @@
import pika
import time

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

channel.queue_declare(queue='task_queue', durable=True)
print(' [*] Waiting for messages. To exit press CTRL+C')


def callback(ch, method, properties, body):
    print(f" [x] Received {body.decode()}")
    time.sleep(body.count(b'.'))
    print(" [x] Done")
    ch.basic_ack(delivery_tag=method.delivery_tag)


channel.basic_qos(prefetch_count=1)
channel.basic_consume(queue='task_queue', on_message_callback=callback)

channel.start_consuming()

Binary image files not shown (added: 35 KiB, 37 KiB, 14 KiB, 24 KiB, 29 KiB, 204 KiB).

View File

@ -0,0 +1,13 @@
import pika
import sys

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

channel.exchange_declare(exchange='logs', exchange_type='fanout')

message = ' '.join(sys.argv[1:]) or "info: Hello World!"
channel.basic_publish(exchange='logs', routing_key='', body=message)
print(f" [x] Sent {message}")

connection.close()

View File

@ -0,0 +1,24 @@
import pika

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

channel.exchange_declare(exchange='logs', exchange_type='fanout')

result = channel.queue_declare(queue='', exclusive=True)
queue_name = result.method.queue

channel.queue_bind(exchange='logs', queue=queue_name)

print(' [*] Waiting for logs. To exit press CTRL+C')


def callback(ch, method, properties, body):
    print(f" [x] {body}")


channel.basic_consume(
    queue=queue_name, on_message_callback=callback, auto_ack=True)

channel.start_consuming()

bogdanov_dmitry_lab_2/.gitignore
View File

@ -0,0 +1,2 @@
data/
result/

View File

@ -0,0 +1,41 @@
# Lab 2
## Dmitry Bogdanov, PIbd-42
### Work performed:
Three services were written and deployed: a file generator and two applications that process the generated files according to variants 2 and 1 respectively.

Variant 2 (first application):
```Builds the file /var/result/data.txt from the first line of every file in the /var/data directory.```

Variant 1 (second application):
```Finds the largest number in the file /var/data/data.txt and saves its square to /var/result/result.txt.```

The applications work together through a shared mounted volume used to write and read data:
```
volumes:
  - ./data:/var/data
```
```
volumes:
  - ./data:/var/data
  - ./result:/var/result
```
```
volumes:
  - ./result:/var/result
```
### Running the lab:
Go to the folder containing docker-compose.yaml and run:
```
docker compose up --build
```
The generator service creates the data and result folders, which hold the generated input files and the resulting output file respectively.
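
After the run finishes, the output can be checked directly in the mounted folders (a quick sketch, assuming the compose file above is used as-is):

```
cat result/data.txt    # first lines collected by the first application
cat result/result.txt  # square of the largest number, written by the second application
```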
## Video of the run:
A video demonstration of the run is available at this [link](https://drive.google.com/file/d/1CmVZjJuMStqNFFKbsMLjw4ihTiMnR7it/view).

View File

@ -0,0 +1,7 @@
FROM python:latest
WORKDIR /app
COPY app.py /app/
CMD ["python", "app.py"]

View File

@ -0,0 +1,30 @@
import os


# Variant 2 - Builds the file /var/result/data.txt from the first line of every file in /var/data.
def solve(dir_files, dir_result, filename_result):
    # Get the list of files in the directory
    filenames = os.listdir(dir_files)
    result = ''

    # Go through every file
    for filename in filenames:
        filepath = os.path.join(dir_files, filename)
        file = open(filepath, "r")
        # Read the first line and append it to the result
        result += f"{file.readline()}"
        file.close()

    # Create the result directory if it does not exist
    if not os.path.exists(dir_result):
        os.makedirs(dir_result)

    # If the result directory is not empty, stop
    if os.listdir(dir_result):
        return

    # Write the result to a file
    filepath_result = os.path.join(dir_result, filename_result)
    result_file = open(filepath_result, "w")
    result_file.write(result)
    print(f"Result written to {filepath_result}")
    result_file.close()


if __name__ == "__main__":
    solve('/var/data', '/var/result', 'data.txt')

View File

@ -0,0 +1,7 @@
FROM python:latest
WORKDIR /app
COPY app.py /app/
CMD ["python", "app.py"]

View File

@ -0,0 +1,21 @@
import os


# Variant 1 - Finds the largest number in the file /var/data/data.txt and saves its square to /var/result/result.txt.
def solve(dir_input, dir_result, filename_result):
    file_input = open(os.path.join(dir_input, 'data.txt'))
    # Read all numbers from the file
    inputs = [int(line) for line in file_input.readlines()]
    file_input.close()

    if inputs:
        # Largest number
        max_num = max(inputs)
        print(f"Largest number: {max_num}")

        # Raise it to the power of 2
        result = max_num ** 2
        file_result = open(os.path.join(dir_result, filename_result), "w")
        # Write the result to a file
        file_result.write(str(result))
        print(f"Result: {result}")
        file_result.close()


if __name__ == "__main__":
    solve("/var/result", '/var/result', 'result.txt')

View File

@ -0,0 +1,7 @@
FROM python:latest

WORKDIR /app

COPY generator.py /app/

CMD ["python", "generator.py"]

View File

@ -0,0 +1,30 @@
import os
import random as rnd
import string


# File name generator
def generate_filename(l):
    return ''.join(rnd.choices(string.ascii_lowercase + string.digits, k=l)) + '.txt'


def generate_files(dir, num_files, num_lines):
    # Create the target directory if it does not exist
    if not os.path.exists(dir):
        os.makedirs(dir)

    # If the target directory is not empty, stop
    if os.listdir(dir):
        return

    # Create the files
    for i in range(num_files):
        filename = generate_filename(20)
        filepath = os.path.join(dir, filename)
        file = open(filepath, "w")
        # Write the lines to the file
        for j in range(num_lines):
            file.write(f"{rnd.randint(-1000, 1000)}\n")
        file.close()


if __name__ == "__main__":
    generate_files('/var/data', 50, 50)

View File

@ -0,0 +1,27 @@
services:
  # File generator
  generator:
    build:
      context: ./app-generator  # Build context (Dockerfile + script)
    volumes:
      - ./data:/var/data  # host folder : container folder
    entrypoint: python generator.py  # Entry point
  # First application
  app1:
    build:
      context: ./app-1  # Build context
    volumes:
      - ./data:/var/data  # Mounted folders
      - ./result:/var/result
    depends_on:
      - generator  # Starts only after the generator service has been started
  # Second application, configured the same way as the services above
  app2:
    build:
      context: ./app-2
    volumes:
      - ./result:/var/result
    depends_on:
      - app1

bogdanov_dmitry_lab_3/.gitignore
View File

@ -0,0 +1,2 @@
/.idea
/.venv

View File

@ -0,0 +1,22 @@
# Lab 3
## Dmitry Bogdanov, PIbd-42
### The following entities were chosen:
* Message - contains a uuid (generated), text, datetime_sent, user_id
* User - contains a uuid (generated), name, surname

A single user can have several messages assigned to them.

Accordingly, two services were deployed to manage these entities.
### Running the lab:
Go to the folder containing compose.yaml and run:
```
docker-compose up --build -d
```
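
Once the containers are up, both services are reachable through the nginx reverse proxy on port 80. A quick smoke test, assuming the stack runs on localhost (the user uuid below is one of the seeded test users):

```
curl http://localhost/userService/          # list all users
curl http://localhost/messageService/info   # list all messages with author info
curl -X POST http://localhost/messageService/ \
  -H 'Content-Type: application/json' \
  -d '{"text": "Hi there", "user_id": "94b171ea-39f6-4a67-9c67-061743f67cfd"}'
```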
## Video of the run and tests...
...is available at this [link](https://drive.google.com/file/d/1cJz0z4KduSz1oltmAuieUW7GxxVLNPNo/view).

View File

@ -0,0 +1,27 @@
services:
  user_service:
    container_name: userService
    build:
      context: .
      dockerfile: ./userService/Dockerfile
    expose:
      - 20001
  message_service:
    container_name: messageService
    build:
      context: .
      dockerfile: ./messageService/Dockerfile
    expose:
      - 20002
  nginx:
    image: nginx:latest
    ports:
      - "80:80"
    volumes:
      - ./nginx.conf:/etc/nginx/nginx.conf
    depends_on:
      - user_service
      - message_service

View File

@ -0,0 +1,11 @@
FROM python:latest
WORKDIR /app
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY messageService/messageService.py .
CMD ["python", "messageService.py"]

View File

@ -0,0 +1,138 @@
from flask import Flask, request, jsonify
from uuid import uuid4
import uuid
import datetime
import requests


class Message:
    def __init__(self, text: str, datetime_sent: datetime, uuid_: uuid, user_id: uuid):
        if uuid_ is None:
            self.uuid_ = uuid4()
        else:
            self.uuid_ = uuid.UUID(uuid_)
        self.text = text
        self.datetime_sent = datetime_sent
        self.user_id = uuid.UUID(user_id)

    def to_dict(self):
        return {
            'text': self.text,
            'datetime_sent': self.datetime_sent,
            'user_id': self.user_id,
            'uuid': self.uuid_
        }

    def to_dict_for_users(self):
        return {
            'title': self.text,
            'datetime_sent': self.datetime_sent,
            'uuid': self.uuid_
        }

    def to_dict_with_info(self, user: dict):
        return {
            'title': self.text,
            'datetime_sent': self.datetime_sent,
            'user_id': self.user_id,
            'user_info': user,
            'uuid': self.uuid_
        }


messages = [
    Message(text='Hi!', datetime_sent=datetime.datetime.now(), uuid_='4add0525-1857-477d-ad35-56790d400b72', user_id='94b171ea-39f6-4a67-9c67-061743f67cfd'),
    Message(text='Hello this is a message', datetime_sent=datetime.datetime.now(), uuid_='dd69758d-89e8-49b5-86bf-54ae2adb64e8', user_id='724a3192-70dd-4909-9b0f-c9060a4ab1bd'),
    Message(text='Test', datetime_sent=datetime.datetime.now(), uuid_='92389e8d-4365-457e-b37e-78abbc07f194', user_id='94b171ea-39f6-4a67-9c67-061743f67cfd'),
    Message(text='Anyone here?', datetime_sent=datetime.datetime.now(), uuid_='f3a1c526-aca2-47e2-afd3-a1c2eac92458', user_id='724a3192-70dd-4909-9b0f-c9060a4ab1bd'),
    Message(text='Mambo', datetime_sent=datetime.datetime.now(), uuid_='00abbdb5-e480-4842-bc32-f916894757eb', user_id='46672ea5-3d7b-4137-a0ac-efd898ca4db6')
]


def list_jsonify():
    return jsonify([message.to_dict() for message in messages])


app = Flask(__name__)

users_url = 'http://userService:20001/'


@app.route('/', methods=['GET'])
def get_all():
    return list_jsonify(), 200


@app.route('/info', methods=['GET'])
def get_all_full():
    users: list[dict] = requests.get(users_url).json()
    response = []
    for message in messages:
        for user in users:
            if message.user_id == uuid.UUID(user.get('uuid')):
                response.append(message.to_dict_with_info(user))
    return response, 200


@app.route('/by-user/<uuid:user_uuid>', methods=['GET'])
def get_by_user_id(user_uuid):
    return [message.to_dict_for_users() for message in messages if message.user_id == user_uuid], 200


@app.route('/info/<uuid:uuid_>', methods=['GET'])
def get_one_full(uuid_):
    for message in messages:
        if message.uuid_ == uuid_:
            response = requests.get(users_url + str(message.user_id))
            return message.to_dict_with_info(response.json()), 200
    return f'Message with uuid {uuid_} not found', 404


@app.route('/', methods=['POST'])
def create():
    data = request.json
    text = data.get('text', None)
    datetime_sent = datetime.datetime.now()
    user_id = data.get('user_id', None)

    # users_url already ends with '/', so no extra leading slash is needed
    checking = requests.get(users_url + f'check/{user_id}')
    print(checking)
    if checking.status_code == 200:
        new_message = Message(text, datetime_sent, None, user_id)
        messages.append(new_message)
        return get_one(new_message.uuid_)
    if checking.status_code == 404:
        return f'User with uuid {user_id} does not exist', 404
    return 'Unknown error', 500


@app.route('/<uuid:uuid_>', methods=['PUT'])
def update_by_id(uuid_):
    data = request.json
    new_text = data.get('text', None)

    for message in messages:
        if message.uuid_ == uuid_:
            if new_text is not None:
                message.text = new_text
            return get_one(message.uuid_)
    return f'Message with uuid {uuid_} not found', 404


@app.route('/<uuid:uuid_>', methods=['DELETE'])
def delete(uuid_):
    for message in messages:
        if message.uuid_ == uuid_:
            messages.remove(message)
            return 'Message deleted', 200
    return f'Message with uuid {uuid_} not found', 404


@app.route('/<uuid:uuid_>', methods=['GET'])
def get_one(uuid_):
    for message in messages:
        if message.uuid_ == uuid_:
            return message.to_dict(), 200
    return f'Message with uuid {uuid_} not found', 404


if __name__ == '__main__':
    app.run(host='0.0.0.0', port=20002, debug=True)

View File

@ -0,0 +1,25 @@
events { worker_connections 1024; }

http {
    server {
        listen 80;
        listen [::]:80;
        server_name localhost;

        location /userService/ {
            proxy_pass http://userService:20001/;
            proxy_set_header Host $host;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto $scheme;
        }

        location /messageService/ {
            proxy_pass http://messageService:20002/;
            proxy_set_header Host $host;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto $scheme;
        }
    }
}

View File

@ -0,0 +1,2 @@
Flask==3.0.3
requests==2.32.3

View File

@ -0,0 +1,11 @@
FROM python:latest
WORKDIR /app
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY userService/userService.py .
CMD ["python", "userService.py"]

View File

@ -0,0 +1,115 @@
from flask import Flask, jsonify, request
from uuid import uuid4
import uuid
import requests


class User:
    def __init__(self, name, surname, uuid_: uuid):
        if uuid_ is None:
            self.uuid_: uuid = uuid4()
        else:
            self.uuid_: uuid = uuid.UUID(uuid_)
        self.name: str = name
        self.surname: str = surname

    def to_dict(self):
        return {
            "uuid": self.uuid_,
            "name": self.name,
            "surname": self.surname
        }

    def to_dict_with_messages(self, messages: list):
        return {
            "uuid": self.uuid_,
            "name": self.name,
            "surname": self.surname,
            "messages": messages
        }


app = Flask(__name__)

users: list[User] = [
    User(name='Dr.', surname='Kino', uuid_='94b171ea-39f6-4a67-9c67-061743f67cfd'),
    User(name='Caspian', surname='Holstrom', uuid_='724a3192-70dd-4909-9b0f-c9060a4ab1bd'),
    User(name='Admin', surname='Admin', uuid_='46672ea5-3d7b-4137-a0ac-efd898ca4db6')
]

messages_url = 'http://messageService:20002/'


def list_jsonify():
    return jsonify([user.to_dict() for user in users])


@app.route('/', methods=['GET'])
def get_all():
    return list_jsonify(), 200


@app.route('/<uuid:uuid_>', methods=['GET'])
def get_one(uuid_):
    for user in users:
        if user.uuid_ == uuid_:
            return user.to_dict(), 200
    return f'User with uuid {uuid_} not found', 404


@app.route('/info/<uuid:uuid_>', methods=['GET'])
def get_one_with_messages(uuid_):
    for user in users:
        if user.uuid_ == uuid_:
            response = requests.get(messages_url + f'by-user/{uuid_}')
            return user.to_dict_with_messages(response.json()), 200
    return f'User with uuid {uuid_} not found', 404


@app.route('/check/<uuid:uuid_>', methods=['GET'])
def check_exist(uuid_):
    for user in users:
        if user.uuid_ == uuid_:
            return '', 200
    return '', 404


@app.route('/', methods=['POST'])
def create():
    data = request.json
    name = data.get('name', None)
    surname = data.get('surname', None)
    if name is None or surname is None:
        return 'Not enough information to create a user', 404
    new_user = User(name, surname, None)
    users.append(new_user)
    return get_one(new_user.uuid_)


@app.route('/<uuid:uuid_>', methods=['PUT'])
def update_by_id(uuid_):
    data = request.json
    new_name = data.get('name', None)
    new_surname = data.get('surname', None)
    for user in users:
        if user.uuid_ == uuid_:
            if new_name is not None:
                user.name = new_name
            if new_surname is not None:
                user.surname = new_surname
            return get_one(user.uuid_)
    return f'User with uuid {uuid_} not found', 404


@app.route('/<uuid:uuid_>', methods=['DELETE'])
def delete(uuid_):
    for user in users:
        if user.uuid_ == uuid_:
            users.remove(user)
            return 'User deleted', 200
    return f'User with uuid {uuid_} not found', 404


if __name__ == '__main__':
    app.run(host='0.0.0.0', port=20001, debug=True)

View File

@ -0,0 +1,34 @@
# Dmitry Bogdanov, PIbd-42
# Lab 4
## Domain:
Greenhouse automation
## Tutorial results:
- First tutorial:
![image 1](./images/tut1.png)
- Second tutorial:
![image 2](./images/tut2.png)
- Third tutorial:
![image 3](./images/tut3.png)
## Data from RabbitMQ:
![image 1](./images/rmq1.png)
![image 2](./images/rmq2.png)
![image 3](./images/rmq3.png)
![image 4](./images/rmq4.png)
### Conclusion:
Because Consumer2 processes messages instantly, its queue never builds up.
Consumer1 spends 3 seconds on each message, so with a single running instance its queue fills up noticeably.
With several Consumer1 instances running, the queue grows much more slowly and stops growing altogether once enough instances are started (a sketch of how to run the experiment is shown below).
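
A minimal way to reproduce the experiment, assuming the publisher and consumer scripts are saved as producer.py, consumer1.py and consumer2.py next to the compose file (these file names are an assumption; the diff does not show them):

```
# start RabbitMQ with the management UI (user/password as in the compose file)
docker compose up -d

# publisher and the fast consumer
python producer.py &
python consumer2.py &

# several instances of the slow consumer drain its queue faster
python consumer1.py &
python consumer1.py &
```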
## [Video](https://drive.google.com/file/d/1KWHHYWiK8OX48OfhDnEKDtMz-Umfs0uj/view?usp=sharing)

View File

@ -0,0 +1,27 @@
import pika
import time


def callback(ch, method, properties, body):
    print(f'Receiver 1: message received. {body.decode()}')
    time.sleep(3)
    print('Receiver 1 finished processing')


def consume_events_1():
    connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost', port=5672, credentials=pika.PlainCredentials("user", "password")))
    channel = connection.channel()

    channel.queue_declare(queue='receiver1_queue')
    channel.queue_bind(exchange='greenhouse_events', queue='receiver1_queue')

    channel.basic_consume(queue='receiver1_queue', on_message_callback=callback, auto_ack=True)

    print('Waiting for messages...')
    channel.start_consuming()


if __name__ == "__main__":
    consume_events_1()

View File

@ -0,0 +1,24 @@
import pika


def callback(ch, method, properties, body):
    print(f'Receiver 2: message received. {body.decode()}')
    print('Receiver 2 finished processing')


def consume_events_2():
    connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost', port=5672, credentials=pika.PlainCredentials("user", "password")))
    channel = connection.channel()

    channel.queue_declare(queue='receiver2_queue')
    channel.queue_bind(exchange='greenhouse_events', queue='receiver2_queue')

    channel.basic_consume(queue='receiver2_queue', on_message_callback=callback, auto_ack=True)

    print('Waiting for messages...')
    channel.start_consuming()


if __name__ == "__main__":
    consume_events_2()

View File

@ -0,0 +1,25 @@
import pika
import time


def publish_events():
    connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost', port=5672, credentials=pika.PlainCredentials("user", "password")))
    channel = connection.channel()

    channel.exchange_declare(exchange='greenhouse_events', exchange_type='fanout')

    events = [
        "Humidity exceeded the upper limit",
        "Humidity fell below the lower limit",
        "Watering started",
        "Watering stopped"
    ]

    while True:
        event = events[int(time.time()) % len(events)]
        channel.basic_publish(exchange='greenhouse_events', routing_key='', body=event)
        print(f'Sent: {event}')
        time.sleep(1)


if __name__ == "__main__":
    publish_events()

View File

@ -0,0 +1,25 @@
import pika, sys, os


def main():
    connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost', port=5672, credentials=pika.PlainCredentials("user", "password")))
    channel = connection.channel()

    channel.queue_declare(queue='hello')

    def callback(ch, method, properties, body):
        print(f" [x] Received {body}")

    channel.basic_consume(queue='hello', on_message_callback=callback, auto_ack=True)

    print(' [*] Waiting for messages. To exit press CTRL+C')
    channel.start_consuming()


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        print('Interrupted')
        try:
            sys.exit(0)
        except SystemExit:
            os._exit(0)

View File

@ -0,0 +1,13 @@
import pika

connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost', port=5672, credentials=pika.PlainCredentials("user", "password")))
channel = connection.channel()

channel.queue_declare(queue='hello')

channel.basic_publish(exchange='',
                      routing_key='hello',
                      body='Hello world!')
print(" [x] Sent 'Hello world!'")

connection.close()

View File

@ -0,0 +1,19 @@
import pika
import sys

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost', port=5672, credentials=pika.PlainCredentials("user", "password")))
channel = connection.channel()

channel.queue_declare(queue='task_queue', durable=True)

message = ' '.join(sys.argv[1:]) or "Hello World!"
channel.basic_publish(
    exchange='',
    routing_key='task_queue',
    body=message,
    properties=pika.BasicProperties(
        delivery_mode=pika.DeliveryMode.Persistent
    ))
print(f" [x] Sent {message}")

connection.close()

View File

@ -0,0 +1,22 @@
import pika
import time

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost', port=5672, credentials=pika.PlainCredentials("user", "password")))
channel = connection.channel()

channel.queue_declare(queue='task_queue', durable=True)
print(' [*] Waiting for messages. To exit press CTRL+C')


def callback(ch, method, properties, body):
    print(f" [x] Received {body.decode()}")
    time.sleep(body.count(b'.'))
    print(" [x] Done")
    ch.basic_ack(delivery_tag=method.delivery_tag)


channel.basic_qos(prefetch_count=1)
channel.basic_consume(queue='task_queue', on_message_callback=callback)

channel.start_consuming()

View File

@ -0,0 +1,13 @@
import pika
import sys

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost', port=5672, credentials=pika.PlainCredentials("user", "password")))
channel = connection.channel()

channel.exchange_declare(exchange='logs', exchange_type='fanout')

message = ' '.join(sys.argv[1:]) or "info: Hello World!"
channel.basic_publish(exchange='logs', routing_key='', body=message)
print(f" [x] Sent {message}")

connection.close()

View File

@ -0,0 +1,22 @@
import pika

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost', port=5672, credentials=pika.PlainCredentials("user", "password")))
channel = connection.channel()

channel.exchange_declare(exchange='logs', exchange_type='fanout')

result = channel.queue_declare(queue='', exclusive=True)
queue_name = result.method.queue

channel.queue_bind(exchange='logs', queue=queue_name)

print(' [*] Waiting for logs. To exit press CTRL+C')


def callback(ch, method, properties, body):
    print(f" [x] {body}")


channel.basic_consume(
    queue=queue_name, on_message_callback=callback, auto_ack=True)

channel.start_consuming()

View File

@ -0,0 +1,12 @@
version: '3.8'

services:
  rabbitmq:
    image: rabbitmq:3-management
    container_name: rabbitmq
    environment:
      RABBITMQ_DEFAULT_USER: user
      RABBITMQ_DEFAULT_PASS: password
    ports:
      - "5672:5672"
      - "15672:15672"

Binary image files not shown (added: 38 KiB, 33 KiB, 28 KiB, 29 KiB, 27 KiB).

Some files were not shown because too many files have changed in this diff.