Compare commits

...

270 Commits

Author SHA1 Message Date
4c2e646f1a Merge pull request 'lazarev_andrey_lab_6' (#149) from lazarev_andrey_lab_6 into main
Reviewed-on: Alexey/DAS_2024_1#149
2024-11-25 21:25:30 +04:00
1cca5bf31f Merge pull request 'kalyshev_yan_lab_4' (#148) from kalyshev_yan_lab_4 into main
Reviewed-on: Alexey/DAS_2024_1#148
2024-11-25 21:25:11 +04:00
9e3caec178 Merge pull request 'afanasev_dmitry_lab_3 is ready' (#147) from afanasev_dmitry_lab_3 into main
Reviewed-on: Alexey/DAS_2024_1#147
2024-11-25 21:24:01 +04:00
eaaeff7389 Merge pull request 'mochalov_danila_lab_5' (#146) from mochalov_danila_lab_5 into main
Reviewed-on: Alexey/DAS_2024_1#146
2024-11-25 21:23:26 +04:00
ce8583c049 Merge pull request 'afanasev_dmitry_lab_2 is ready' (#145) from afanasev_dmitry_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#145
2024-11-25 21:22:02 +04:00
ec1cd5f98c Merge pull request 'lazarev_andrey_lab_5' (#144) from lazarev_andrey_lab_5 into main
Reviewed-on: Alexey/DAS_2024_1#144
2024-11-25 21:21:30 +04:00
2e0ffdec18 Merge pull request 'morozov_vladimir_lab_2 is ready' (#143) from morozov_vladimir_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#143
2024-11-25 21:21:12 +04:00
358ae2153c Merge pull request 'afanasev_dmitry_lab_1 is ready' (#142) from afanasev_dmitry_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#142
2024-11-25 21:20:30 +04:00
17594169bf Merge pull request 'lazarev_andrey_lab_4' (#141) from lazarev_andrey_lab_4 into main
Reviewed-on: Alexey/DAS_2024_1#141
2024-11-25 21:19:34 +04:00
e065588c61 Merge pull request 'balakhonov_danila_lab_3' (#140) from balakhonov_danila_lab_3 into main
Reviewed-on: Alexey/DAS_2024_1#140
2024-11-25 21:18:43 +04:00
94f5db29dc Merge pull request 'artamonova_tatyana_lab_4 is ready' (#139) from artamonova_tatyana_lab_4 into main
Reviewed-on: Alexey/DAS_2024_1#139
2024-11-25 21:17:40 +04:00
22cb8b936f Merge pull request 'artamonova_tatyana_lab_3 is ready' (#138) from artamonova_tatyana_lab_3 into main
Reviewed-on: Alexey/DAS_2024_1#138
2024-11-25 21:17:24 +04:00
7bd8910fcb Merge pull request 'kalyshev_yan_lab_3' (#137) from kalyshev_yan_lab_3 into main
Reviewed-on: Alexey/DAS_2024_1#137
2024-11-25 21:14:13 +04:00
f951a6547f Merge pull request 'presnyakova_victoria_lab5' (#136) from presnyakova_victoria_lab5 into main
Reviewed-on: Alexey/DAS_2024_1#136
2024-11-25 21:13:38 +04:00
65de84d901 Merge pull request 'presnyakova_victoria_lab4' (#135) from presnyakova_victoria_lab4 into main
Reviewed-on: Alexey/DAS_2024_1#135
2024-11-25 21:13:18 +04:00
5c234ec865 Merge pull request 'presnyakova_victoria_lab_3' (#133) from presnyakova_victoria_lab_3 into main
Reviewed-on: Alexey/DAS_2024_1#133
2024-11-25 21:12:53 +04:00
a4dfbe6a81 Merge pull request 'morozov_vladimir_lab_1 is ready' (#132) from morozov_vladimir_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#132
2024-11-25 21:06:31 +04:00
06990fa209 Merge pull request 'mochalov_danila_lab_4' (#131) from mochalov_danila_lab_4 into main
Reviewed-on: Alexey/DAS_2024_1#131
2024-11-25 21:05:54 +04:00
f9f85d023f Merge pull request 'Bazunov Andrew Lab 5' (#130) from bazunov_andrew_lab_5 into main
Reviewed-on: Alexey/DAS_2024_1#130
2024-11-25 21:05:34 +04:00
3de9ba03c2 Merge pull request 'ismailov_rovshan_lab_1 is ready' (#129) from ismailov_rovshan_lab_1_fix into main
Reviewed-on: Alexey/DAS_2024_1#129
2024-11-25 20:59:16 +04:00
a3938f3051 Merge pull request 'kashin_maxim_lab_8' (#128) from kashin_maxim_lab_8 into main
Reviewed-on: Alexey/DAS_2024_1#128
2024-11-25 20:58:18 +04:00
f08e5d0ea2 Merge pull request 'kashin_maxim_lab_7' (#127) from kashin_maxim_lab_7 into main
Reviewed-on: Alexey/DAS_2024_1#127
2024-11-25 20:57:52 +04:00
77bd667129 Merge pull request 'kashin_maxim_lab_6' (#126) from kashin_maxim_lab_6 into main
Reviewed-on: Alexey/DAS_2024_1#126
2024-11-25 20:57:10 +04:00
b92c0ca599 Merge pull request 'turner_ilya_lab_3' (#125) from turner_ilya_lab_3 into main
Reviewed-on: Alexey/DAS_2024_1#125
2024-11-25 20:56:30 +04:00
528309ab84 Merge pull request 'kashin_maxim_lab_5' (#124) from kashin_maxim_lab_5 into main
Reviewed-on: Alexey/DAS_2024_1#124
2024-11-20 22:45:51 +04:00
0814d8533d Merge pull request 'kashin_maxim_lab_4' (#123) from kashin_maxim_lab_4 into main
Reviewed-on: Alexey/DAS_2024_1#123
2024-11-20 22:45:28 +04:00
354ee2679e Merge pull request 'yakovleva_yulia_lab_8 is ready' (#122) from yakovleva_yulia_lab_8 into main
Reviewed-on: Alexey/DAS_2024_1#122
2024-11-20 22:45:02 +04:00
d302bd2213 Merge pull request 'yakovleva_yulia_lab_7 is ready' (#121) from yakovleva_yulia_lab_7 into main
Reviewed-on: Alexey/DAS_2024_1#121
2024-11-20 22:44:39 +04:00
2aed7bf385 Merge pull request 'yakovleva_yulia_lab_6 is ready' (#120) from yakovleva_yulia_lab_6 into main
Reviewed-on: Alexey/DAS_2024_1#120
2024-11-20 22:44:06 +04:00
d4e24db25e Merge pull request 'kadyrov_aydar_lab_5' (#119) from kadyrov_aydar_lab_5 into main
Reviewed-on: Alexey/DAS_2024_1#119
2024-11-20 22:43:23 +04:00
c0ca1d4bb5 Merge pull request 'kadyrov_aydar_lab_4' (#117) from kadyrov_aydar_lab_4 into main
Reviewed-on: Alexey/DAS_2024_1#117
2024-11-20 22:43:05 +04:00
6eeb90ea45 Merge pull request 'tukaeva_alfiya_lab_8' (#116) from tukaeva_alfiya_lab_8 into main
Reviewed-on: Alexey/DAS_2024_1#116
2024-11-20 22:38:42 +04:00
bc2d7cb2f6 Merge pull request 'tukaeva_alfiya_lab_7' (#115) from tukaeva_alfiya_lab_7 into main
Reviewed-on: Alexey/DAS_2024_1#115
2024-11-20 22:37:46 +04:00
e1da6f26ab Merge pull request 'tukaeva_alfiya_lab_6' (#114) from tukaeva_alfiya_lab_6 into main
Reviewed-on: Alexey/DAS_2024_1#114
2024-11-20 22:37:01 +04:00
e5df53b5c2 Merge pull request 'turner_ilya_lab_2' (#113) from turner_ilya_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#113
2024-11-20 22:36:40 +04:00
c98770752e Merge pull request 'mochalov_danila_lab_3' (#112) from mochalov_danila_lab_3 into main
Reviewed-on: Alexey/DAS_2024_1#112
2024-11-20 22:36:16 +04:00
a800c3df86 Merge pull request 'Bazunov Andrew Lab 4' (#111) from bazunov_andrew_lab_4 into main
Reviewed-on: Alexey/DAS_2024_1#111
2024-11-20 22:35:35 +04:00
a51e33a201 Merge pull request 'turner_ilya_lab_1' (#110) from turner_ilya_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#110
2024-11-20 22:34:54 +04:00
a9af84010a Merge pull request 'Bazunov Andrew lab3' (#109) from bazunov_andrew_lab_3 into main
Reviewed-on: Alexey/DAS_2024_1#109
2024-11-20 22:34:26 +04:00
3645d0c1cd Merge pull request 'yakovleva_yulia_lab_5 is ready' (#107) from yakovleva_yulia_lab_5 into main
Reviewed-on: Alexey/DAS_2024_1#107
Reviewed-by: Alexey <a.zhelepov@mail.ru>
2024-11-20 22:33:27 +04:00
Zyzf
e4080abb6a clean branch 2024-11-08 21:31:13 +04:00
059a3cc811 lazarev_andrey_lab_6 2024-11-08 01:13:53 +04:00
Zyzf
07eb35b6b9 done 2024-11-08 01:04:56 +04:00
0e7013bf28 afanasev_dmitry_lab_2 is ready 2024-11-07 02:54:45 +04:00
a939a84307 afanasev_dmitry_lab_3 is ready 2024-11-07 02:52:34 +04:00
6fdc64cc1f readme fix 2024-11-06 02:59:28 +04:00
4ab74fbd57 mochalov_danila_lab_5_is_ready 2024-11-06 02:56:57 +04:00
e28a89e71e readme fix 2024-11-05 22:41:01 +04:00
8186f826c3 lazarev_andrey_lab_5 2024-11-05 22:38:45 +04:00
cb141cc4e7 morozov_vladimir_lab_2 is ready 2024-11-05 20:48:50 +04:00
cd438c3d61 afanasev_dmitry_lab_1 is ready 2024-11-05 16:21:13 +04:00
08b8a6f487 lazarev_andrey_lab_4 2024-11-04 02:01:55 +04:00
56e663d772 added readme 2024-11-04 01:34:14 +04:00
27daaeb0aa fixed nginx config 2024-11-04 01:34:00 +04:00
a9461abeb5 artamonova_tatyana_lab_4 is ready 2024-11-04 01:31:41 +04:00
54157ec2bf a small addition just to understand what is going on at all... 2024-11-04 00:42:54 +04:00
54e0f8d8f3 maybe... now? 2024-11-03 23:47:31 +04:00
4b5baf4272 sorry, my head is already swimming 2024-11-03 22:47:46 +04:00
e295c3fa5f so Cheburashka walks into a bar with a suitcase 2024-11-03 22:38:30 +04:00
8f2a9c126c work!!!!!!!! 2024-11-03 22:31:00 +04:00
97c7b71067 well, well 2024-11-03 22:17:59 +04:00
1ee9e2c3fb one last push 2024-11-03 22:15:01 +04:00
df3af5e2cd I give up 2024-11-03 21:41:01 +04:00
14338ff5ac let's try our luck one more time 2024-11-03 20:51:32 +04:00
c89b6ebb57 artamonova_tatyana_lab_3 is ready 2024-11-03 20:37:42 +04:00
74e61987dc one more time 2024-11-03 20:30:29 +04:00
4fc879212e err... 2024-11-03 20:15:43 +04:00
e8cf19d932 oh no 2024-11-03 19:31:46 +04:00
c46ade29f9 yikes, what a disgrace 2024-11-03 19:22:37 +04:00
048453ed2f first attempt 2024-11-03 19:18:40 +04:00
Zyzf
a02eb5e9d9 done 2024-11-02 20:14:47 +04:00
1331b40dcf lab 5 done 2024-11-02 18:57:36 +04:00
fe8a57dc0d lab 4 done 2024-11-01 21:02:03 +04:00
9c9dfde6dc lab 3 done 2024-11-01 19:10:39 +04:00
018a64ff09 morozov_vladimir_lab_1 is ready 2024-11-01 15:45:20 +04:00
f0b4e6d172 mochalov_danila_lab_4 is ready 2024-10-30 20:28:03 +04:00
Zyzf
e3390d53dd init 2024-10-30 13:57:42 +04:00
Bazunov Andrew Igorevich
4b86dfd750 complete lab 2024-10-28 22:27:53 +04:00
9d12336abe ismailov_rovshan_lab_1 is ready
Conflict resolved
2024-10-27 22:20:11 +04:00
eddd99b94d Done 2024-10-27 20:19:04 +04:00
aa62ce56db Done 2024-10-27 20:11:39 +04:00
6baa4ddabe Done 2024-10-27 20:01:01 +04:00
f698c1fc72 turner_ilya_lab_3 is ready 2024-10-27 19:51:36 +04:00
08f2f63ad4 Done 2024-10-27 19:42:27 +04:00
e4e3748a3d Completed 2024-10-27 19:09:16 +04:00
JulYakJul
5e522fbcc0 yakovleva_yulia_lab_8 is ready 2024-10-27 15:10:30 +04:00
JulYakJul
cae7189c1e fix 2024-10-27 14:06:02 +04:00
JulYakJul
2bfc8a0a43 yakovleva_yulia_lab_7 is ready 2024-10-27 14:02:15 +04:00
JulYakJul
1f89960672 fix 2024-10-27 13:06:24 +04:00
JulYakJul
ffb4c2a8a4 yakovleva_yulia_lab_6 is ready 2024-10-27 13:04:11 +04:00
NAP
1dc621e0be kadyrov_aydar_lab_5 2024-10-27 02:16:28 +04:00
NAP
11c62d9bf7 kadyrov_aydar_lab_5 2024-10-27 02:13:51 +04:00
NAP
03910a9a3f kadyrov_aydar_lab_4 2024-10-27 01:53:34 +04:00
f7d483196c tukaeva_alfiya_lab_8 is ready 2024-10-26 23:16:19 +04:00
545377f948 tukaeva_alfiya_lab_7 fix 2024-10-26 22:58:30 +04:00
bb867da520 tukaeva_alfiya_lab_7 is ready 2024-10-26 22:41:45 +04:00
c4a260ebda tukaeva_alfiya_lab_6 is ready 2024-10-26 22:26:14 +04:00
88392a8041 turner_ilya_lab_2 is ready 2024-10-26 21:09:43 +04:00
JulYakJul
400de30b49 fix 2024-10-26 20:04:39 +04:00
96a4e6ac43 mochalov_danila_lab_3 is ready 2024-10-26 18:18:28 +04:00
Bazunov Andrew Igorevich
03c52d0c76 Complete lab4 2024-10-26 17:51:52 +04:00
6dd4835f54 turner_ilya_lab_1 is ready 2024-10-26 17:34:47 +04:00
Bazunov Andrew Igorevich
5187005e6a complete lab 3 2024-10-26 14:46:33 +04:00
3b9698ac38 Merge pull request 'tsukanova_irina_lab_5' (#108) from tsukanova_irina_lab_5 into main
Reviewed-on: Alexey/DAS_2024_1#108
2024-10-26 13:01:34 +04:00
a456344432 Merge pull request 'rogashova_ekaterina_lab_3' (#106) from rogashova_ekaterina_lab_3 into main
Reviewed-on: Alexey/DAS_2024_1#106
2024-10-26 13:00:05 +04:00
383a5e3b25 Merge pull request 'kadyrov_aydar_lab_3' (#105) from kadyrov_aydar_lab_3 into main
Reviewed-on: Alexey/DAS_2024_1#105
2024-10-26 12:59:18 +04:00
2834efbbce Merge pull request 'kadyrov_aydar_lab_2' (#104) from kadyrov_aydar_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#104
2024-10-26 12:58:55 +04:00
decc46b37c Merge pull request 'tukaeva_alfiya_lab_5' (#103) from tukaeva_alfiya_lab_5 into main
Reviewed-on: Alexey/DAS_2024_1#103
2024-10-26 12:58:23 +04:00
a41e76795f Merge pull request 'artamonova_tatyana_lab_2 is ready' (#102) from artamonova_tatyana_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#102
2024-10-26 12:57:40 +04:00
bcfec37329 Merge pull request 'bogdanov_dmitry_lab_5' (#101) from bogdanov_dmitry_lab_5 into main
Reviewed-on: Alexey/DAS_2024_1#101
2024-10-26 12:56:47 +04:00
e17b0b0d61 Merge pull request 'bogdanov_dmitry_lab_4' (#100) from bogdanov_dmitry_lab_4 into main
Reviewed-on: Alexey/DAS_2024_1#100
2024-10-26 12:56:28 +04:00
62290fc43d Merge pull request 'zhimolostnova_anna_lab_6' (#95) from zhimolostnova_anna_lab_6 into main
Reviewed-on: Alexey/DAS_2024_1#95
2024-10-26 12:56:04 +04:00
0b5fb8da2e Merge pull request 'zhimolostnova lab 5 complete' (#94) from zhimolostnova_anna_lab_5 into main
Reviewed-on: Alexey/DAS_2024_1#94
2024-10-26 12:53:57 +04:00
9c6ef7e89e Merge pull request 'vaksman_valeria_lab_6' (#91) from vaksman_valeria_lab_6 into main
Reviewed-on: Alexey/DAS_2024_1#91
2024-10-26 12:52:19 +04:00
e763cf36e2 Merge pull request 'yakovleva_yulia_lab_4 is ready' (#90) from yakovleva_yulia_lab_4 into main
Reviewed-on: Alexey/DAS_2024_1#90
2024-10-26 12:51:56 +04:00
adf3f384a3 Merge pull request 'dozorova_alena_lab_8' (#99) from dozorova_alena_lab_8 into main
Reviewed-on: Alexey/DAS_2024_1#99
2024-10-26 12:50:30 +04:00
5ae6cd3cf1 Merge pull request 'dozorova_alena_lab_7' (#98) from dozorova_alena_lab_7 into main
Reviewed-on: Alexey/DAS_2024_1#98
2024-10-26 12:42:04 +04:00
daf3742ce6 Merge pull request 'zhimolostnova lab 8 complete' (#97) from zhimolostnova_anna_lab_8 into main
Reviewed-on: Alexey/DAS_2024_1#97
2024-10-26 12:35:58 +04:00
fb37a53f66 Merge pull request 'zhimolostnova lab 7 complete' (#96) from zhimolostnova_anna_lab_7 into main
Reviewed-on: Alexey/DAS_2024_1#96
2024-10-26 12:35:18 +04:00
23e035f9b2 Merge pull request 'vaksman_valeria_lab_8' (#93) from vaksman_valeria_lab_8 into main
Reviewed-on: Alexey/DAS_2024_1#93
2024-10-26 12:31:26 +04:00
556d8cf262 Merge pull request 'vaksman_valeria_lab_7' (#92) from vaksman_valeria_lab_7 into main
Reviewed-on: Alexey/DAS_2024_1#92
2024-10-26 12:30:20 +04:00
419790f5df Merge pull request 'borschevskaya_anna_lab_8' (#89) from borschevskaya_anna_lab_8 into main
Reviewed-on: Alexey/DAS_2024_1#89
2024-10-26 12:27:12 +04:00
54a9b8a778 Merge pull request 'kadyrov_aydar_lab_1' (#88) from kadyrov_aydar_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#88
2024-10-26 12:23:44 +04:00
3aeae245fa Merge pull request 'lazarev_andrey_lab_3' (#87) from lazarev_andrey_lab_3 into main
Reviewed-on: Alexey/DAS_2024_1#87
2024-10-26 12:23:21 +04:00
382273ccb8 Merge pull request 'borschevskaya_anna_lab_7 is ready' (#86) from borschevskaya_anna_lab_7 into main
Reviewed-on: Alexey/DAS_2024_1#86
2024-10-26 12:20:06 +04:00
4a37f55328 Merge pull request 'rogashova_ekaterina_lab_2' (#85) from rogashova_ekaterina_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#85
2024-10-26 12:14:14 +04:00
4e32398903 Merge pull request 'artamonova_tatyana_lab_1' (#84) from artamonova_tatyana_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#84
2024-10-26 12:13:43 +04:00
e69819aedd Merge pull request 'tukaeva_alfiya_lab_4 is ready' (#83) from tukaeva_alfiya_lab_4 into main
Reviewed-on: Alexey/DAS_2024_1#83
2024-10-26 12:12:59 +04:00
d9c4402ec9 Merge pull request 'kuzarin_maxim_lab_8' (#81) from kuzarin_maxim_lab_8 into main
Reviewed-on: Alexey/DAS_2024_1#81
2024-10-26 12:07:16 +04:00
93687ad850 Merge pull request 'kuzarin_maxim_lab_7' (#80) from kuzarin_maxim_lab_7 into main
Reviewed-on: Alexey/DAS_2024_1#80
2024-10-26 11:37:45 +04:00
4528bcd22c Merge pull request 'emelyanov_artem_lab_8' (#79) from emelyanov_artem_lab_8 into main
Reviewed-on: Alexey/DAS_2024_1#79
2024-10-26 11:35:53 +04:00
eef1d03249 Merge pull request 'emelyanov_artem_lab_7' (#78) from emelyanov_artem_lab_7 into main
Reviewed-on: Alexey/DAS_2024_1#78
2024-10-26 11:34:21 +04:00
7e09109cd2 Merge pull request 'emelyanov_artem_lab_6' (#77) from emelyanov_artem_lab_6 into main
Reviewed-on: Alexey/DAS_2024_1#77
2024-10-26 11:33:34 +04:00
f46724e5cf Merge pull request 'dozorova_alena_lab_6' (#76) from dozorova_alena_lab_6 into main
Reviewed-on: Alexey/DAS_2024_1#76
2024-10-26 11:27:22 +04:00
72b0b63e58 video 2024-10-25 21:03:05 +04:00
fd54e426b5 only the video left 2024-10-25 20:52:46 +04:00
JulYakJul
a5f0403627 yakovleva_yulia_lab_5 is ready 2024-10-25 18:12:36 +04:00
ad8894c0ca done, just need to finish the readme 2024-10-25 16:56:56 +04:00
edea94a4f2 Done 2024-10-25 14:10:33 +04:00
NAP
5700e75965 kadyrov_aydar_lab_3 2024-10-25 01:10:46 +04:00
NAP
9e9711f004 kadyrov_aydar_lab_2 2024-10-24 20:07:47 +04:00
014845df45 tukaeva_alfiya_lab_5 is ready 2024-10-24 15:46:08 +04:00
636592bbac artamonova_tatyana_lab_2 is ready 2024-10-23 21:29:01 +04:00
the
6711e8b0f6 Lab5 2024-10-23 18:36:31 +04:00
the
c91aa6e1f3 Fix 2024-10-23 15:58:26 +04:00
the
d340d34c0b README, fixes, images 2024-10-23 15:56:48 +04:00
the
aaff3b8183 Lab4 2024-10-23 14:11:04 +04:00
06a7114499 lab 8 complete 2024-10-22 20:52:00 +03:00
0246f32bcf lab 7 complete 2024-10-22 20:19:09 +03:00
417368d25e fix readme 2024-10-22 19:31:03 +03:00
20a39fa9a5 lab 6 complete 2024-10-22 19:30:00 +03:00
fb15f87160 lab 5 complete 2024-10-22 18:23:30 +03:00
f86dfba785 lab8 wow 2024-10-21 21:21:28 +04:00
e874c69b62 lab7 is ready 2024-10-21 21:17:41 +04:00
6f0726185a lab six is ready yep 2024-10-21 21:10:24 +04:00
JulYakJul
b4b0ef7730 fix readme 2024-10-21 14:35:40 +04:00
JulYakJul
4d51941016 fix readme 2024-10-21 14:34:02 +04:00
JulYakJul
a07b272c79 yakovleva_yulia_lab_4 is ready 2024-10-21 14:31:58 +04:00
7cb94c14b0 borschevskaya_anna_lab_8 is ready 2024-10-21 08:52:52 +04:00
NAP
506d544060 kadyrov_aydar_lab_1 2024-10-21 02:36:18 +04:00
1ef9e02d32 lazarev_andrey_lab_3 is ready 2024-10-20 23:32:58 +04:00
ff8a87ebb8 borschevskaya_anna_lab_7 is ready 2024-10-20 22:37:56 +04:00
740d49d368 Done 2024-10-20 21:59:10 +04:00
df1b8bd8ce artamonova_tatyana_lab_1 is ready 2024-10-20 19:29:17 +04:00
7549429b6b artamonova_tatyana_lab_1 is ready 2024-10-20 19:25:21 +04:00
00d9e2409a tukaeva_alfiya_lab_4 is ready 2024-10-20 19:04:32 +04:00
098cb9b9ad Update kuzarin_maxim_lab_8/README.md
small stylistic touch-up
2024-10-19 19:44:12 +04:00
af39fdc505 Text is written, need to check the Md rendering 2024-10-19 18:42:32 +03:00
ef603a8056 Update kuzarin_maxim_lab_7/README.md
A small problem with two \n
2024-10-19 19:36:31 +04:00
c8b3124074 Added the essay as an MD file 2024-10-19 18:34:37 +03:00
ce853de348 feature: completed lab 8 2024-10-19 18:40:59 +04:00
c3ac60eaa2 fix: delete .idea 2024-10-19 18:03:24 +04:00
e12438b727 feature: completed lab 7 2024-10-19 18:00:48 +04:00
aa54f9187f wrote the essay 2024-10-19 14:47:04 +04:00
b1d8660774 + 2024-10-19 14:18:21 +04:00
6c66654acc feature: deleted lab 6 2024-10-19 14:17:40 +04:00
1d9c308bb4 generally fixing the markup 2024-10-19 14:17:25 +04:00
a64b6c7329 feature: completed lab 6 2024-10-19 14:17:22 +04:00
7ec5c45faa alignment attempt 2024-10-19 14:06:09 +04:00
340dc6aa19 adding the essay 2024-10-19 14:04:46 +04:00
a152275bb7 Merge branch 'main' into dozorova_alena_lab_6 2024-10-19 13:17:33 +04:00
6e3ec51fe7 Merge branch 'main' into dozorova_alena_lab_6 2024-10-19 13:17:09 +04:00
131dc39f6c Merge pull request 'borschevskaya_anna_lab_6 is ready' (#75) from borschevskaya_anna_lab_6 into main
Reviewed-on: Alexey/DAS_2024_1#75
2024-10-19 13:08:27 +04:00
d82f47e04c Merge pull request 'emelyanov_artem_lab_5' (#74) from emelyanov_artem_lab_5 into main
Reviewed-on: Alexey/DAS_2024_1#74
2024-10-19 12:56:19 +04:00
3175352d02 Merge pull request 'emelyanov_artem_lab_4' (#73) from emelyanov_artem_lab_4 into main
Reviewed-on: Alexey/DAS_2024_1#73
2024-10-19 12:49:26 +04:00
2e86e68e12 Merge pull request 'aleikin_artem_lab_1' (#72) from aleikin_artem_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#72
2024-10-19 12:46:13 +04:00
63dd60f20e Merge pull request 'bondarenko_max_lab_1' (#71) from bondarenko_max_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#71
2024-10-19 12:43:27 +04:00
63e031ef17 Merge pull request 'vaksman_valeria_lab_5' (#70) from vaksman_valeria_lab_5 into main
Reviewed-on: Alexey/DAS_2024_1#70
2024-10-19 12:33:16 +04:00
5fdabedcd6 Merge pull request 'kuzarin_maxim_lab_6' (#69) from kuzarin_maxim_lab_6 into main
Reviewed-on: Alexey/DAS_2024_1#69
2024-10-19 12:30:30 +04:00
9eadb70f85 fix link 2024-10-19 12:28:06 +04:00
5fd241a980 Merge pull request 'dozorova_alena_lab_5' (#67) from dozorova_alena_lab_5 into main
Reviewed-on: Alexey/DAS_2024_1#67
2024-10-19 12:26:46 +04:00
4f53dff75f Merge branch 'dozorova_alena_lab_5' of https://git.is.ulstu.ru/Alexey/DAS_2024_1 into dozorova_alena_lab_5 2024-10-19 12:24:25 +04:00
57b7675030 fix link 2024-10-19 12:24:07 +04:00
b1c16dc76c Merge pull request 'bogdanov_dmitry_lab_3' (#68) from bogdanov_dmitry_lab_3 into main
Reviewed-on: Alexey/DAS_2024_1#68
2024-10-19 12:23:02 +04:00
309911ed75 Merge pull request 'rogashova_ekaterina_lab_1 is ready' (#66) from rogashova_ekaterina_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#66
2024-10-19 12:08:44 +04:00
d23e808325 Merge pull request 'lazarev_andrey_lab_2' (#65) from lazarev_andrey_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#65
2024-10-19 12:05:03 +04:00
4c974bfb51 Merge pull request 'tsukanova_irina_lab_4' (#64) from tsukanova_irina_lab_4 into main
Reviewed-on: Alexey/DAS_2024_1#64
2024-10-19 12:00:40 +04:00
b573569a97 borschevskaya_anna_lab_6 is ready 2024-10-19 10:46:15 +04:00
60c79b64fb feature: deleted lab 5 2024-10-18 17:42:21 +04:00
07105e81a0 feature: completed lab 5 2024-10-18 17:41:49 +04:00
JulYakJul
0ebd562be2 Merge branch 'main' into yakovleva_yulia_lab_4 2024-10-18 17:02:27 +04:00
JulYakJul
22a3917d28 work 3 done 2024-10-18 16:59:19 +04:00
46b94ea885 feature: completed lab 4 2024-10-18 16:27:06 +04:00
JulYakJul
94b8ba783c work 2.2 done 2024-10-18 16:24:18 +04:00
JulYakJul
060bd2321e work 2 done 2024-10-18 16:11:22 +04:00
JulYakJul
a8f1b39dd7 work 1 done 2024-10-18 15:42:10 +04:00
d3a7046f97 aleikin_artem_lab1 is ready 2024-10-18 00:09:32 +04:00
06d65650ab aleikin_artem_lab1 is ready 2024-10-18 00:05:35 +04:00
992a169c9b Merge branch 'main' into bondarenko_max_lab_1 2024-10-17 23:22:03 +04:00
b82a13c106 bondarenko_max_lab_1 is ready 2024-10-17 23:20:01 +04:00
e33ffef85e fix 2024-10-17 22:43:46 +04:00
9362e62999 yesss lab 5 is ready 2024-10-17 20:01:43 +04:00
430fad9ef4 Merge pull request 'borschevskaya_anna_lab_5 is ready' (#63) from borschevskaya_anna_lab_5 into main
Reviewed-on: Alexey/DAS_2024_1#63
2024-10-16 16:50:14 +04:00
d0aedf8495 Merge pull request 'klyushenkova_ksenia_lab_1 is ready' (#62) from klyushenkova_ksenia_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#62
2024-10-16 16:49:29 +04:00
effd849042 Merge pull request 'emelaynov_artem_lab_3' (#61) from emelaynov_artem_lab_3 into main
Reviewed-on: Alexey/DAS_2024_1#61
2024-10-16 16:48:48 +04:00
55e18b6a64 Merge pull request 'vaksman_valeria_lab_3' (#60) from vaksman_valeria_lab_3 into main
Reviewed-on: Alexey/DAS_2024_1#60
2024-10-16 16:47:28 +04:00
5a7409d60c Merge pull request 'mochalov_danila_lab_2' (#59) from mochalov_danila_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#59
2024-10-16 16:46:55 +04:00
265cf478bf Merge pull request 'tukaeva_alfiya_lab_3 is ready' (#58) from tukaeva_alfiya_lab_3 into main
Reviewed-on: Alexey/DAS_2024_1#58
2024-10-16 16:45:44 +04:00
c6f29a13a1 Merge pull request 'vaksman_valeria_lab_4' (#57) from vaksman_valeria_lab_4 into main
Reviewed-on: Alexey/DAS_2024_1#57
2024-10-16 16:45:11 +04:00
4103a23984 Merge pull request 'Presnyakova Victoria Lab2' (#56) from presnyakova_victoria_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#56
2024-10-16 16:20:58 +04:00
f8ac151629 Merge pull request 'zhimolostnova_anna_lab 4 complete' (#55) from zhimolostnova_anna_lab_4 into main
Reviewed-on: Alexey/DAS_2024_1#55
2024-10-16 15:02:32 +04:00
13b5dfc707 Merge branch 'main' into dozorova_alena_lab_5 2024-10-16 14:31:12 +04:00
5d3517c2b0 Merge pull request 'dozorova_alena_lab_4' (#49) from dozorova_alena_lab_4 into main
Reviewed-on: Alexey/DAS_2024_1#49
2024-10-16 14:26:29 +04:00
f3bbfb2efd rogashova_ekaterina_lab_1 is ready 2024-10-14 23:20:57 +04:00
3c6c7f47e8 second lab done 2024-10-14 16:27:36 +04:00
dc7c2c9694 video 2024-10-14 16:09:46 +04:00
481631cda5 Merge pull request 'yakovleva_yulia_lab_3' (#54) from yakovleva_yulia_lab_3 into main
Reviewed-on: Alexey/DAS_2024_1#54
2024-10-14 15:48:22 +04:00
9b4f9b608c everything is done, only the video remains 2024-10-14 15:37:29 +04:00
3b842c2228 Merge pull request 'kalyshev_yan_lab_2 is ready' (#53) from kalyshev_yan_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#53
2024-10-14 15:18:08 +04:00
c4b8f4b4de Merge pull request 'kuzarin_maxim_lab_5' (#52) from kuzarin_maxim_lab_5 into main
Reviewed-on: Alexey/DAS_2024_1#52
2024-10-14 12:29:54 +04:00
85567eea48 Merge pull request 'bogdanov_dmitry_lab_2' (#51) from bogdanov_dmitry_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#51
2024-10-14 12:19:26 +04:00
ea8da8c665 Merge pull request 'borschevskaya_anna_lab_4 is ready' (#50) from borschevskaya_anna_lab_4 into main
Reviewed-on: Alexey/DAS_2024_1#50
2024-10-14 11:03:34 +04:00
2497e3c742 borschevskaya_anna_lab_5 is ready 2024-10-13 11:03:08 +04:00
Pineapple
a628469960 klyushenkova_ksenia_lab_1 is ready 2024-10-12 23:40:16 +04:00
f107797a2d fix: deleted trash 2024-10-12 16:47:05 +04:00
98e9047b45 feature: completed lab 3 2024-10-12 16:45:56 +04:00
66ffe827f8 mochalov_danila_lab_2 is ready 2024-10-11 05:18:46 +04:00
a0209b612e tukaeva_alfiya_lab_3 is ready 2024-10-11 01:01:25 +04:00
5ff5bf22b1 lab 3 no swagger 2024-10-10 22:53:30 +04:00
1f72d4dc70 oh, this readme 2024-10-10 21:03:09 +04:00
b351431f51 lab4 now is ready 2024-10-10 21:02:25 +04:00
56baf52b61 lab4 ready 2024-10-10 21:00:15 +04:00
f5ec3f1767 lab2 2024-10-10 18:52:01 +04:00
77790c37fb lab 4 complete 2024-10-09 17:12:11 +03:00
the
735a403027 Added README 2024-10-09 16:38:42 +04:00
the
c67049687b Done 2024-10-09 16:17:26 +04:00
022e2dc49e + 2024-10-08 23:46:12 +04:00
8f24aad349 + 2024-10-08 23:45:44 +04:00
a54e13f7ee + 2024-10-08 23:45:00 +04:00
1bb988ea2f dozorova_alena_lab_6 2024-10-08 23:43:24 +04:00
f7668394b0 dozorova_alena_lab_5 2024-10-08 22:46:32 +04:00
JulYakJul
a6a247cabf delete trash 2024-10-08 17:02:29 +04:00
JulYakJul
f5194bf885 Create README.md 2024-10-08 16:56:07 +04:00
JulYakJul
12cd98aa7d yakovleva_yulia_lab_3 is ready 2024-10-08 16:30:55 +04:00
JulYakJul
3db4a0fcd4 Admin 2024-10-07 11:28:34 +04:00
a4f9cf13cc borschevskaya_anna_lab_4 is ready 2024-10-06 17:16:44 +04:00
the
75b118ba6e Fixed README, done 2024-10-04 15:52:10 +04:00
the
d8441a0989 Clean, incorruptible working code for lab 2 2024-10-04 15:49:14 +04:00
the
1213b5db3c Clean, incorruptible working code for lab 2 2024-10-04 14:33:05 +04:00
Zyzf
f0b48bba28 kalyshev_yan_lab_2 is ready 2024-09-29 20:05:33 +04:00
23087c87ea Update kuzarin_maxim_lab_6/README.md 2024-09-26 23:14:31 +04:00
5a6580ff8c README fix 2024-09-26 22:14:06 +03:00
5f6472b5ff Lab implemented. Everything still needs to be checked against the description 2024-09-26 22:12:33 +03:00
e1950c80ea fixed the readme 2024-09-26 22:33:36 +04:00
5586bec4b8 finished work 2024-09-26 22:31:01 +04:00
4c74a16753 tutorial 3 2024-09-23 22:45:22 +04:00
a830cb2198 tutorial 2 2024-09-23 22:42:39 +04:00
9d0fa199f7 first work 2024-09-23 21:35:24 +04:00
6de5160da9 Lab 5 is done. 2024-09-22 22:14:00 +03:00
f0083bc4cd feature: add .yml and .env files, start readme 2024-09-20 01:42:54 +04:00
2625 changed files with 332452 additions and 7 deletions

.idea/.name

@ -0,0 +1 @@
main.py


@ -0,0 +1,12 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredIdentifiers">
<list>
<option value="str.__pos__" />
</list>
</option>
</inspection_tool>
</profile>
</component>


@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

.idea/misc.xml

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.9 (tukaeva_alfiya_lab_4)" project-jdk-type="Python SDK" />
</project>


@ -0,0 +1,44 @@
# Lab 1
## Description
This lab sets up 3 services (**Gitea**, **Redmine**, and a **MySQL** database) using Docker Compose. **Gitea** is a lightweight version control system with a web interface, **Redmine** is a project and task management system as well as a bug tracker, and **MySQL** is the database Redmine uses to store its data.
## Running the project
1. Make sure **Docker** and **Docker Compose** are installed.
2. Clone the repository with this project, or create a `docker-compose.yml` file with the configuration provided there.
3. In a terminal, go to the directory containing `docker-compose.yml`.
4. Run:
```bash
docker-compose up -d
```
This command starts the containers in detached mode.
5. After startup (a quick availability check is sketched after the compose file below):
- Gitea is available at: [http://localhost:3000](http://localhost:3000)
- Redmine is available at: [http://localhost:8080](http://localhost:8080)
## Configuration
The `docker-compose.yml` file defines the following services:
- **Gitea**:
- Runs from the official `gitea/gitea:latest` image.
- Uses `SQLite` for data storage.
- Exposes port 3000 for web access and 2222 for SSH.
- **Redmine**:
- Runs from the official `redmine` image.
- Connects to the MySQL database.
- Available on port 8080.
- **MySQL**:
- Runs from the `mysql:8.0` image.
- Used by Redmine for data storage.
- Configured with a default user, database, and password.
## Stopping the project
To stop the containers, run:
```bash
docker-compose down
```
This shuts down all containers and frees the ports.
## Notes
- If needed, you can change the ports or other parameters by editing `docker-compose.yml`.
- Data is stored in the named volumes `gitea_data` and `db_data`, so it persists across container restarts.
- Link to a demo of the program: https://vk.com/video215756667_456239451?list=ln-AMZSRDejYptijuOt9u


@ -0,0 +1,46 @@
version: '3.9' # Docker Compose version
services:
  # Gitea service
  gitea:
    image: gitea/gitea:latest # Gitea image used to run the service
    container_name: gitea # Container name for convenience
    environment: # Environment variables
      USER_UID: 1000 # UID of the user inside the container
      USER_GID: 1000 # GID of the user inside the container
      GITEA__database__DB_TYPE: sqlite3 # Database type (SQLite for simplicity)
      GITEA__database__PATH: /data/gitea/gitea.db # Path to the database
      GITEA__server__ROOT_URL: http://localhost:3000 # Access URL
      GITEA__server__HTTP_PORT: 3000 # Web interface port
    volumes:
      - gitea_data:/data # Mount the data directory to persist data
    ports:
      - "3000:3000" # Port for the Gitea web interface
      - "2222:22" # SSH port for cloning repositories
    restart: always # Automatically restart the container on failure
  # Redmine service
  redmine:
    image: redmine # Redmine image used to run the service
    restart: always # Automatically restart the container
    ports:
      - 8080:3000 # Port for the web interface
    environment: # Environment variables
      REDMINE_DB_MYSQL: db # Database host name to connect to
      REDMINE_DB_PASSWORD: example # Database connection password
  # MySQL database for Redmine
  db:
    image: mysql:8.0 # MySQL image for the database
    restart: always # Automatically restart the container
    environment: # Environment variables
      MYSQL_ROOT_PASSWORD: example # MySQL root password
      MYSQL_DATABASE: redmine # Database name for Redmine
      MYSQL_USER: user # MySQL user
      MYSQL_PASSWORD: password # MySQL user password
    volumes:
      - db_data:/var/lib/mysql # Mount to persist database data
volumes: # Named volumes
  gitea_data: # Volume for Gitea data
  db_data: # Volume for MySQL data
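The README above says Gitea and Redmine should answer on ports 3000 and 8080 once the stack is up. A quick, hedged way to verify that from the host, using standard Docker Compose and curl commands (exact output will vary):

```bash
# List the containers of this compose project and their state
docker-compose ps

# Probe the two web UIs; any HTTP status line (200 or a redirect) means the service is reachable
curl -I http://localhost:3000   # Gitea
curl -I http://localhost:8080   # Redmine
```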

afanasev_dmitry_lab_2/.gitignore

@ -0,0 +1,92 @@
data/
##############################
## Java
##############################
.mtj.tmp/
*.class
*.jar
*.war
*.ear
*.nar
hs_err_pid*
replay_pid*
##############################
## Maven
##############################
target/
pom.xml.tag
pom.xml.releaseBackup
pom.xml.versionsBackup
pom.xml.next
pom.xml.bak
release.properties
dependency-reduced-pom.xml
buildNumber.properties
.mvn/timing.properties
.mvn/wrapper/maven-wrapper.jar
##############################
## Gradle
##############################
bin/
build/
.gradle
.gradletasknamecache
gradle-app.setting
!gradle-wrapper.jar
##############################
## IntelliJ
##############################
out/
.idea/
.idea_modules/
*.iml
*.ipr
*.iws
##############################
## Eclipse
##############################
.settings/
bin/
tmp/
.metadata
.classpath
.project
*.tmp
*.bak
*.swp
*~.nib
local.properties
.loadpath
.factorypath
##############################
## NetBeans
##############################
nbproject/private/
build/
nbbuild/
dist/
nbdist/
nbactions.xml
nb-configuration.xml
##############################
## Visual Studio Code
##############################
.vscode/
.code-workspace
##############################
## OS X
##############################
.DS_Store
##############################
## Miscellaneous
##############################
*.log


@ -0,0 +1,38 @@
# Lab 2
## Description
This lab sets up 2 services (a minimal distributed application) using Docker Compose. **FirstService** looks in the /var/data directory for the file with the largest number of lines and copies it to /var/result/data.txt. **SecondService** finds the smallest number in /var/result/data.txt (generated by the first service) and saves its third power to /var/result/result.txt.
## Running the project
1. Make sure **Docker** and **Docker Compose** are installed.
2. Clone the repository with this project.
3. In a terminal, go to the directory containing `docker-compose.yml`.
4. Run:
```bash
docker-compose up -d
```
This command starts the containers in detached mode.
5. After startup (example commands are sketched after the compose file below):
- Check the first service's logs to confirm the file was created successfully.
- Check the second service's logs to confirm it processed the file created by the first service.
## Configuration
The `docker-compose.yml` file defines the following services:
- **FirstService**:
- Builds its image from the `firstService` directory.
- Uses the local `/var/data` directory and the shared `/var/result` directory for data.
- **SecondService**:
- Builds its image from the `secondService` directory.
- Uses the shared `/var/result` directory for data.
- Starts after the first service.
## Stopping the project
To stop the containers, run:
```bash
docker-compose down
```
This shuts down all containers.
## Notes
- If needed, you can change the data directory or other parameters by editing `docker-compose.yml`.
- Link to a demo of the program: https://vk.com/video215756667_456239452?list=ln-rAyQWJj8q7ezqCaZzL


@ -0,0 +1,17 @@
version: '3.9'
services:
  first-service:
    build: ./firstService # Path to the Dockerfile of the 1st application
    volumes:
      - D:/java/DAS_2024_1/afanasev_dmitry_lab_2/data:/var/data # Mount the host data directory
      - common-volume:/var/result # Mount the shared directory (the 2nd service needs it)
  second-service:
    build: ./secondService # Path to the Dockerfile of the 2nd application
    volumes:
      - common-volume:/var/result # Mount the shared directory (the 2nd service needs it)
    depends_on:
      - first-service # Start after the first service
volumes: # Named volumes
  common-volume: # Shared between the two services
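The Lab 2 README says to check both services' logs after startup. A hedged example of how that might look, using the service names from the compose file above; the full name of the shared volume depends on the Compose project name (by default the directory name), so the volume name below is an assumption, not something shown in this diff:

```bash
# Logs of the two services (service names as defined in the compose file)
docker-compose logs first-service
docker-compose logs second-service

# Inspect the result file on the shared volume from a throwaway container.
# NOTE: "afanasev_dmitry_lab_2_common-volume" assumes the default project name; check `docker volume ls`.
docker run --rm -v afanasev_dmitry_lab_2_common-volume:/var/result alpine cat /var/result/result.txt
```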


@ -0,0 +1,17 @@
# Use a Java 17 image
FROM bellsoft/liberica-openjdk-alpine:17.0.8
# Create the directory for source data files
RUN mkdir /var/data
# Create the application directory
WORKDIR /app
# Copy the application files into the container
COPY src /app/src
# Compile the application
RUN javac /app/src/FirstService.java
# Define the command to run the application
CMD ["java", "-cp", "/app/src", "FirstService"]


@ -0,0 +1,52 @@
import java.io.*;
import java.nio.file.*;

public class FirstService {
    // 1. Finds the file with the largest number of lines in /var/data and copies it to /var/result/data.txt.
    public static void main(String[] args) {
        Path sourceDir = Paths.get("/var/data");
        Path destinationDir = Paths.get("/var/result");
        Path destinationFile = destinationDir.resolve("data.txt");
        Path largestFile = null;
        long maxLineCount = 0;
        try {
            // if the /var/result directory does not exist, create it
            if (!Files.exists(destinationDir)) {
                Files.createDirectories(destinationDir);
            } else {
                // otherwise clean it out
                try (DirectoryStream<Path> stream = Files.newDirectoryStream(destinationDir)) {
                    for (Path file : stream) {
                        Files.delete(file);
                    }
                }
            }
            // find the file with the largest number of lines in /var/data
            try (DirectoryStream<Path> stream = Files.newDirectoryStream(sourceDir)) {
                for (Path file : stream) {
                    if (Files.isRegularFile(file)) {
                        long lineCount = Files.lines(file).count();
                        if (lineCount > maxLineCount) {
                            maxLineCount = lineCount;
                            largestFile = file;
                        }
                    }
                }
            }
            // copy the file with the largest number of lines to /var/result/data.txt
            if (largestFile != null) {
                Files.copy(largestFile, destinationFile, StandardCopyOption.REPLACE_EXISTING);
                System.out.println("File " + largestFile + " copied to " + destinationFile);
            } else {
                System.out.println("Directory " + sourceDir + " contains no files.");
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}


@ -0,0 +1,17 @@
# Use a Java 17 image
FROM bellsoft/liberica-openjdk-alpine:17.0.8
# Create the directory for source data files
RUN mkdir /var/data
# Create the application directory
WORKDIR /app
# Copy the application files into the container
COPY src /app/src
# Compile the application
RUN javac /app/src/SecondService.java
# Define the command to run the application
CMD ["java", "-cp", "/app/src", "SecondService"]


@ -0,0 +1,51 @@
import java.io.*;
import java.nio.file.*;
import java.util.*;

public class SecondService {
    // 2. Finds the smallest number in /var/result/data.txt and saves its third power to /var/result/result.txt.
    public static void main(String[] args) {
        Path sourceFile = Paths.get("/var/result/data.txt");
        Path destinationDir = Paths.get("/var/result");
        Path destinationFile = destinationDir.resolve("result.txt");
        try {
            // create /var/result if it does not exist
            if (!Files.exists(destinationDir)) {
                Files.createDirectories(destinationDir);
            }
            // read the numbers from the file and find the smallest one
            List<Integer> numbers = new ArrayList<>();
            try (BufferedReader reader = Files.newBufferedReader(sourceFile)) {
                String line;
                while ((line = reader.readLine()) != null) {
                    try {
                        numbers.add(Integer.parseInt(line.trim()));
                    } catch (NumberFormatException e) {
                        System.out.println("Invalid line: " + line);
                    }
                }
            }
            if (!numbers.isEmpty()) {
                // find the smallest number and its third power
                int minNumber = Collections.min(numbers);
                int minNumberCubed = (int) Math.pow(minNumber, 3);
                // write the result to /var/result/result.txt
                try (BufferedWriter writer = Files.newBufferedWriter(destinationFile)) {
                    writer.write(String.valueOf(minNumberCubed));
                    System.out.println("The third power of the smallest number " + minNumber + " (" + minNumberCubed +
                            ") has been saved to " + destinationFile);
                }
            } else {
                System.out.println("File " + sourceFile + " is empty or contains no numbers.");
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}


@ -0,0 +1,41 @@
# Lab 3
## Description
This lab sets up 3 services (**Melon**, **Water**, and the **Nginx** proxy server) using Docker Compose. **Melon** is a service for melons, each with its own watermelon, **Water** is a service for water entries and the watermelons associated with them, and **Nginx** is a proxy server that runs on Unix-like operating systems.
## Running the project
1. Make sure **Docker** and **Docker Compose** are installed.
2. Clone the repository with this project.
3. In a terminal, go to the directory containing `docker-compose.yml`.
4. Run:
```bash
docker-compose up -d
```
This command starts the containers in detached mode.
5. After startup:
- Melon is available at: [http://localhost:8080](http://localhost:8080)
- Water is available at: [http://localhost:8081](http://localhost:8081)
## Configuration
The `docker-compose.yml` file defines the following services:
- **Melon**:
- Exposes port 8080 for web access.
- Goes through **Nginx** to reach the **Water** service.
- Implements basic CRUD operations (a hedged controller sketch follows this README).
- **Water**:
- Exposes port 8081 for web access.
- Implements basic CRUD operations.
- **Nginx**:
- Runs from the `nginx` image.
- Used to proxy requests.
## Stopping the project
To stop the containers, run:
```bash
docker-compose down
```
This shuts down all containers and frees the ports.
## Notes
- If needed, you can change the ports or other parameters by editing `docker-compose.yml`.
- Link to a demo of the program: https://vk.com/video215756667_456239453?list=ln-6zVfNOSwMQtpVWKkGe
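The configuration above notes that Melon and Water expose basic CRUD operations, but their controller code is not part of this excerpt; the pom.xml at the bottom only shows that Melon is a Spring Boot project. A minimal sketch of what such a CRUD controller could look like, assuming spring-boot-starter-web is on the classpath and using an in-memory map; the package, class name, route, and data shape are illustrative, not taken from the repository:

```java
package ru.ulstu.melon; // package guessed from the pom's groupId, not confirmed by this diff

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import org.springframework.web.bind.annotation.*;

// Hypothetical controller: stores melon names in memory and exposes CRUD endpoints.
@RestController
@RequestMapping("/melon")
public class MelonController {

    private final Map<Long, String> melons = new ConcurrentHashMap<>();
    private final AtomicLong ids = new AtomicLong();

    @GetMapping // read all melons
    public Map<Long, String> getAll() {
        return melons;
    }

    @PostMapping // create a melon and return its id
    public Long create(@RequestBody String name) {
        long id = ids.incrementAndGet();
        melons.put(id, name);
        return id;
    }

    @PutMapping("/{id}") // update a melon by id
    public void update(@PathVariable Long id, @RequestBody String name) {
        melons.put(id, name);
    }

    @DeleteMapping("/{id}") // delete a melon by id
    public void delete(@PathVariable Long id) {
        melons.remove(id);
    }
}
```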


@ -0,0 +1,25 @@
version: '3.9'
services:
  melon:
    build: ./melon
    ports:
      - "8080:8080"
    expose: # Specifies which port is exposed inside the container network
      - 8080
  water:
    build: ./water
    ports:
      - "8081:8081"
    expose: # Specifies which port is exposed inside the container network
      - 8081
  nginx:
    image: nginx
    ports:
      - "80:80"
    volumes:
      - ./nginx.conf:/etc/nginx/nginx.conf # proxy configuration (a hedged sketch of this file follows below)
    depends_on:
      - melon
      - water
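The compose file above mounts ./nginx.conf into the Nginx container, but that file is not included in this excerpt. A hedged sketch of what it could look like if Nginx simply proxies path prefixes to the two services; the /melon and /water prefixes and the use of the Compose service names as upstream hosts are assumptions, not taken from the repository:

```nginx
# Hypothetical nginx.conf: route /melon/... and /water/... to the two services via their Compose DNS names.
events {}

http {
    server {
        listen 80;

        location /melon/ {
            proxy_pass http://melon:8080/;   # "melon" resolves on the Compose network
            proxy_set_header Host $host;
        }

        location /water/ {
            proxy_pass http://water:8081/;   # "water" resolves on the Compose network
            proxy_set_header Host $host;
        }
    }
}
```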


@ -0,0 +1,2 @@
/mvnw text eol=lf
*.cmd text eol=crlf

afanasev_dmitry_lab_3/melon/.gitignore

@ -0,0 +1,33 @@
HELP.md
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**/target/
!**/src/test/**/target/
### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/
### VS Code ###
.vscode/


@ -0,0 +1,19 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
wrapperVersion=3.3.2
distributionType=only-script
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip


@ -0,0 +1,4 @@
FROM bellsoft/liberica-openjdk-alpine:17.0.8
ADD target/melon-0.0.1-SNAPSHOT.jar /app/
CMD ["java", "-Xmx200m", "-jar", "/app/melon-0.0.1-SNAPSHOT.jar"]
WORKDIR /app

afanasev_dmitry_lab_3/melon/mvnw

@ -0,0 +1,259 @@
#!/bin/sh
# ----------------------------------------------------------------------------
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------
# Apache Maven Wrapper startup batch script, version 3.3.2
#
# Optional ENV vars
# -----------------
# JAVA_HOME - location of a JDK home dir, required when download maven via java source
# MVNW_REPOURL - repo url base for downloading maven distribution
# MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven
# MVNW_VERBOSE - true: enable verbose log; debug: trace the mvnw script; others: silence the output
# ----------------------------------------------------------------------------
set -euf
[ "${MVNW_VERBOSE-}" != debug ] || set -x
# OS specific support.
native_path() { printf %s\\n "$1"; }
case "$(uname)" in
CYGWIN* | MINGW*)
[ -z "${JAVA_HOME-}" ] || JAVA_HOME="$(cygpath --unix "$JAVA_HOME")"
native_path() { cygpath --path --windows "$1"; }
;;
esac
# set JAVACMD and JAVACCMD
set_java_home() {
# For Cygwin and MinGW, ensure paths are in Unix format before anything is touched
if [ -n "${JAVA_HOME-}" ]; then
if [ -x "$JAVA_HOME/jre/sh/java" ]; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
JAVACCMD="$JAVA_HOME/jre/sh/javac"
else
JAVACMD="$JAVA_HOME/bin/java"
JAVACCMD="$JAVA_HOME/bin/javac"
if [ ! -x "$JAVACMD" ] || [ ! -x "$JAVACCMD" ]; then
echo "The JAVA_HOME environment variable is not defined correctly, so mvnw cannot run." >&2
echo "JAVA_HOME is set to \"$JAVA_HOME\", but \"\$JAVA_HOME/bin/java\" or \"\$JAVA_HOME/bin/javac\" does not exist." >&2
return 1
fi
fi
else
JAVACMD="$(
'set' +e
'unset' -f command 2>/dev/null
'command' -v java
)" || :
JAVACCMD="$(
'set' +e
'unset' -f command 2>/dev/null
'command' -v javac
)" || :
if [ ! -x "${JAVACMD-}" ] || [ ! -x "${JAVACCMD-}" ]; then
echo "The java/javac command does not exist in PATH nor is JAVA_HOME set, so mvnw cannot run." >&2
return 1
fi
fi
}
# hash string like Java String::hashCode
hash_string() {
str="${1:-}" h=0
while [ -n "$str" ]; do
char="${str%"${str#?}"}"
h=$(((h * 31 + $(LC_CTYPE=C printf %d "'$char")) % 4294967296))
str="${str#?}"
done
printf %x\\n $h
}
verbose() { :; }
[ "${MVNW_VERBOSE-}" != true ] || verbose() { printf %s\\n "${1-}"; }
die() {
printf %s\\n "$1" >&2
exit 1
}
trim() {
# MWRAPPER-139:
# Trims trailing and leading whitespace, carriage returns, tabs, and linefeeds.
# Needed for removing poorly interpreted newline sequences when running in more
# exotic environments such as mingw bash on Windows.
printf "%s" "${1}" | tr -d '[:space:]'
}
# parse distributionUrl and optional distributionSha256Sum, requires .mvn/wrapper/maven-wrapper.properties
while IFS="=" read -r key value; do
case "${key-}" in
distributionUrl) distributionUrl=$(trim "${value-}") ;;
distributionSha256Sum) distributionSha256Sum=$(trim "${value-}") ;;
esac
done <"${0%/*}/.mvn/wrapper/maven-wrapper.properties"
[ -n "${distributionUrl-}" ] || die "cannot read distributionUrl property in ${0%/*}/.mvn/wrapper/maven-wrapper.properties"
case "${distributionUrl##*/}" in
maven-mvnd-*bin.*)
MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/
case "${PROCESSOR_ARCHITECTURE-}${PROCESSOR_ARCHITEW6432-}:$(uname -a)" in
*AMD64:CYGWIN* | *AMD64:MINGW*) distributionPlatform=windows-amd64 ;;
:Darwin*x86_64) distributionPlatform=darwin-amd64 ;;
:Darwin*arm64) distributionPlatform=darwin-aarch64 ;;
:Linux*x86_64*) distributionPlatform=linux-amd64 ;;
*)
echo "Cannot detect native platform for mvnd on $(uname)-$(uname -m), use pure java version" >&2
distributionPlatform=linux-amd64
;;
esac
distributionUrl="${distributionUrl%-bin.*}-$distributionPlatform.zip"
;;
maven-mvnd-*) MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ ;;
*) MVN_CMD="mvn${0##*/mvnw}" _MVNW_REPO_PATTERN=/org/apache/maven/ ;;
esac
# apply MVNW_REPOURL and calculate MAVEN_HOME
# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-<version>,maven-mvnd-<version>-<platform>}/<hash>
[ -z "${MVNW_REPOURL-}" ] || distributionUrl="$MVNW_REPOURL$_MVNW_REPO_PATTERN${distributionUrl#*"$_MVNW_REPO_PATTERN"}"
distributionUrlName="${distributionUrl##*/}"
distributionUrlNameMain="${distributionUrlName%.*}"
distributionUrlNameMain="${distributionUrlNameMain%-bin}"
MAVEN_USER_HOME="${MAVEN_USER_HOME:-${HOME}/.m2}"
MAVEN_HOME="${MAVEN_USER_HOME}/wrapper/dists/${distributionUrlNameMain-}/$(hash_string "$distributionUrl")"
exec_maven() {
unset MVNW_VERBOSE MVNW_USERNAME MVNW_PASSWORD MVNW_REPOURL || :
exec "$MAVEN_HOME/bin/$MVN_CMD" "$@" || die "cannot exec $MAVEN_HOME/bin/$MVN_CMD"
}
if [ -d "$MAVEN_HOME" ]; then
verbose "found existing MAVEN_HOME at $MAVEN_HOME"
exec_maven "$@"
fi
case "${distributionUrl-}" in
*?-bin.zip | *?maven-mvnd-?*-?*.zip) ;;
*) die "distributionUrl is not valid, must match *-bin.zip or maven-mvnd-*.zip, but found '${distributionUrl-}'" ;;
esac
# prepare tmp dir
if TMP_DOWNLOAD_DIR="$(mktemp -d)" && [ -d "$TMP_DOWNLOAD_DIR" ]; then
clean() { rm -rf -- "$TMP_DOWNLOAD_DIR"; }
trap clean HUP INT TERM EXIT
else
die "cannot create temp dir"
fi
mkdir -p -- "${MAVEN_HOME%/*}"
# Download and Install Apache Maven
verbose "Couldn't find MAVEN_HOME, downloading and installing it ..."
verbose "Downloading from: $distributionUrl"
verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName"
# select .zip or .tar.gz
if ! command -v unzip >/dev/null; then
distributionUrl="${distributionUrl%.zip}.tar.gz"
distributionUrlName="${distributionUrl##*/}"
fi
# verbose opt
__MVNW_QUIET_WGET=--quiet __MVNW_QUIET_CURL=--silent __MVNW_QUIET_UNZIP=-q __MVNW_QUIET_TAR=''
[ "${MVNW_VERBOSE-}" != true ] || __MVNW_QUIET_WGET='' __MVNW_QUIET_CURL='' __MVNW_QUIET_UNZIP='' __MVNW_QUIET_TAR=v
# normalize http auth
case "${MVNW_PASSWORD:+has-password}" in
'') MVNW_USERNAME='' MVNW_PASSWORD='' ;;
has-password) [ -n "${MVNW_USERNAME-}" ] || MVNW_USERNAME='' MVNW_PASSWORD='' ;;
esac
if [ -z "${MVNW_USERNAME-}" ] && command -v wget >/dev/null; then
verbose "Found wget ... using wget"
wget ${__MVNW_QUIET_WGET:+"$__MVNW_QUIET_WGET"} "$distributionUrl" -O "$TMP_DOWNLOAD_DIR/$distributionUrlName" || die "wget: Failed to fetch $distributionUrl"
elif [ -z "${MVNW_USERNAME-}" ] && command -v curl >/dev/null; then
verbose "Found curl ... using curl"
curl ${__MVNW_QUIET_CURL:+"$__MVNW_QUIET_CURL"} -f -L -o "$TMP_DOWNLOAD_DIR/$distributionUrlName" "$distributionUrl" || die "curl: Failed to fetch $distributionUrl"
elif set_java_home; then
verbose "Falling back to use Java to download"
javaSource="$TMP_DOWNLOAD_DIR/Downloader.java"
targetZip="$TMP_DOWNLOAD_DIR/$distributionUrlName"
cat >"$javaSource" <<-END
public class Downloader extends java.net.Authenticator
{
protected java.net.PasswordAuthentication getPasswordAuthentication()
{
return new java.net.PasswordAuthentication( System.getenv( "MVNW_USERNAME" ), System.getenv( "MVNW_PASSWORD" ).toCharArray() );
}
public static void main( String[] args ) throws Exception
{
setDefault( new Downloader() );
java.nio.file.Files.copy( java.net.URI.create( args[0] ).toURL().openStream(), java.nio.file.Paths.get( args[1] ).toAbsolutePath().normalize() );
}
}
END
# For Cygwin/MinGW, switch paths to Windows format before running javac and java
verbose " - Compiling Downloader.java ..."
"$(native_path "$JAVACCMD")" "$(native_path "$javaSource")" || die "Failed to compile Downloader.java"
verbose " - Running Downloader.java ..."
"$(native_path "$JAVACMD")" -cp "$(native_path "$TMP_DOWNLOAD_DIR")" Downloader "$distributionUrl" "$(native_path "$targetZip")"
fi
# If specified, validate the SHA-256 sum of the Maven distribution zip file
if [ -n "${distributionSha256Sum-}" ]; then
distributionSha256Result=false
if [ "$MVN_CMD" = mvnd.sh ]; then
echo "Checksum validation is not supported for maven-mvnd." >&2
echo "Please disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2
exit 1
elif command -v sha256sum >/dev/null; then
if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | sha256sum -c >/dev/null 2>&1; then
distributionSha256Result=true
fi
elif command -v shasum >/dev/null; then
if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | shasum -a 256 -c >/dev/null 2>&1; then
distributionSha256Result=true
fi
else
echo "Checksum validation was requested but neither 'sha256sum' or 'shasum' are available." >&2
echo "Please install either command, or disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2
exit 1
fi
if [ $distributionSha256Result = false ]; then
echo "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised." >&2
echo "If you updated your Maven version, you need to update the specified distributionSha256Sum property." >&2
exit 1
fi
fi
# unzip and move
if command -v unzip >/dev/null; then
unzip ${__MVNW_QUIET_UNZIP:+"$__MVNW_QUIET_UNZIP"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -d "$TMP_DOWNLOAD_DIR" || die "failed to unzip"
else
tar xzf${__MVNW_QUIET_TAR:+"$__MVNW_QUIET_TAR"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -C "$TMP_DOWNLOAD_DIR" || die "failed to untar"
fi
printf %s\\n "$distributionUrl" >"$TMP_DOWNLOAD_DIR/$distributionUrlNameMain/mvnw.url"
mv -- "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" "$MAVEN_HOME" || [ -d "$MAVEN_HOME" ] || die "fail to move MAVEN_HOME"
clean || :
exec_maven "$@"

afanasev_dmitry_lab_3/melon/mvnw.cmd

@ -0,0 +1,149 @@
<# : batch portion
@REM ----------------------------------------------------------------------------
@REM Licensed to the Apache Software Foundation (ASF) under one
@REM or more contributor license agreements. See the NOTICE file
@REM distributed with this work for additional information
@REM regarding copyright ownership. The ASF licenses this file
@REM to you under the Apache License, Version 2.0 (the
@REM "License"); you may not use this file except in compliance
@REM with the License. You may obtain a copy of the License at
@REM
@REM http://www.apache.org/licenses/LICENSE-2.0
@REM
@REM Unless required by applicable law or agreed to in writing,
@REM software distributed under the License is distributed on an
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@REM KIND, either express or implied. See the License for the
@REM specific language governing permissions and limitations
@REM under the License.
@REM ----------------------------------------------------------------------------
@REM ----------------------------------------------------------------------------
@REM Apache Maven Wrapper startup batch script, version 3.3.2
@REM
@REM Optional ENV vars
@REM MVNW_REPOURL - repo url base for downloading maven distribution
@REM MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven
@REM MVNW_VERBOSE - true: enable verbose log; others: silence the output
@REM ----------------------------------------------------------------------------
@IF "%__MVNW_ARG0_NAME__%"=="" (SET __MVNW_ARG0_NAME__=%~nx0)
@SET __MVNW_CMD__=
@SET __MVNW_ERROR__=
@SET __MVNW_PSMODULEP_SAVE=%PSModulePath%
@SET PSModulePath=
@FOR /F "usebackq tokens=1* delims==" %%A IN (`powershell -noprofile "& {$scriptDir='%~dp0'; $script='%__MVNW_ARG0_NAME__%'; icm -ScriptBlock ([Scriptblock]::Create((Get-Content -Raw '%~f0'))) -NoNewScope}"`) DO @(
IF "%%A"=="MVN_CMD" (set __MVNW_CMD__=%%B) ELSE IF "%%B"=="" (echo %%A) ELSE (echo %%A=%%B)
)
@SET PSModulePath=%__MVNW_PSMODULEP_SAVE%
@SET __MVNW_PSMODULEP_SAVE=
@SET __MVNW_ARG0_NAME__=
@SET MVNW_USERNAME=
@SET MVNW_PASSWORD=
@IF NOT "%__MVNW_CMD__%"=="" (%__MVNW_CMD__% %*)
@echo Cannot start maven from wrapper >&2 && exit /b 1
@GOTO :EOF
: end batch / begin powershell #>
$ErrorActionPreference = "Stop"
if ($env:MVNW_VERBOSE -eq "true") {
$VerbosePreference = "Continue"
}
# calculate distributionUrl, requires .mvn/wrapper/maven-wrapper.properties
$distributionUrl = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionUrl
if (!$distributionUrl) {
Write-Error "cannot read distributionUrl property in $scriptDir/.mvn/wrapper/maven-wrapper.properties"
}
switch -wildcard -casesensitive ( $($distributionUrl -replace '^.*/','') ) {
"maven-mvnd-*" {
$USE_MVND = $true
$distributionUrl = $distributionUrl -replace '-bin\.[^.]*$',"-windows-amd64.zip"
$MVN_CMD = "mvnd.cmd"
break
}
default {
$USE_MVND = $false
$MVN_CMD = $script -replace '^mvnw','mvn'
break
}
}
# apply MVNW_REPOURL and calculate MAVEN_HOME
# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-<version>,maven-mvnd-<version>-<platform>}/<hash>
if ($env:MVNW_REPOURL) {
$MVNW_REPO_PATTERN = if ($USE_MVND) { "/org/apache/maven/" } else { "/maven/mvnd/" }
$distributionUrl = "$env:MVNW_REPOURL$MVNW_REPO_PATTERN$($distributionUrl -replace '^.*'+$MVNW_REPO_PATTERN,'')"
}
$distributionUrlName = $distributionUrl -replace '^.*/',''
$distributionUrlNameMain = $distributionUrlName -replace '\.[^.]*$','' -replace '-bin$',''
$MAVEN_HOME_PARENT = "$HOME/.m2/wrapper/dists/$distributionUrlNameMain"
if ($env:MAVEN_USER_HOME) {
$MAVEN_HOME_PARENT = "$env:MAVEN_USER_HOME/wrapper/dists/$distributionUrlNameMain"
}
$MAVEN_HOME_NAME = ([System.Security.Cryptography.MD5]::Create().ComputeHash([byte[]][char[]]$distributionUrl) | ForEach-Object {$_.ToString("x2")}) -join ''
$MAVEN_HOME = "$MAVEN_HOME_PARENT/$MAVEN_HOME_NAME"
if (Test-Path -Path "$MAVEN_HOME" -PathType Container) {
Write-Verbose "found existing MAVEN_HOME at $MAVEN_HOME"
Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD"
exit $?
}
if (! $distributionUrlNameMain -or ($distributionUrlName -eq $distributionUrlNameMain)) {
Write-Error "distributionUrl is not valid, must end with *-bin.zip, but found $distributionUrl"
}
# prepare tmp dir
$TMP_DOWNLOAD_DIR_HOLDER = New-TemporaryFile
$TMP_DOWNLOAD_DIR = New-Item -Itemtype Directory -Path "$TMP_DOWNLOAD_DIR_HOLDER.dir"
$TMP_DOWNLOAD_DIR_HOLDER.Delete() | Out-Null
trap {
if ($TMP_DOWNLOAD_DIR.Exists) {
try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null }
catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" }
}
}
New-Item -Itemtype Directory -Path "$MAVEN_HOME_PARENT" -Force | Out-Null
# Download and Install Apache Maven
Write-Verbose "Couldn't find MAVEN_HOME, downloading and installing it ..."
Write-Verbose "Downloading from: $distributionUrl"
Write-Verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName"
$webclient = New-Object System.Net.WebClient
if ($env:MVNW_USERNAME -and $env:MVNW_PASSWORD) {
$webclient.Credentials = New-Object System.Net.NetworkCredential($env:MVNW_USERNAME, $env:MVNW_PASSWORD)
}
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
$webclient.DownloadFile($distributionUrl, "$TMP_DOWNLOAD_DIR/$distributionUrlName") | Out-Null
# If specified, validate the SHA-256 sum of the Maven distribution zip file
$distributionSha256Sum = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionSha256Sum
if ($distributionSha256Sum) {
if ($USE_MVND) {
Write-Error "Checksum validation is not supported for maven-mvnd. `nPlease disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties."
}
Import-Module $PSHOME\Modules\Microsoft.PowerShell.Utility -Function Get-FileHash
if ((Get-FileHash "$TMP_DOWNLOAD_DIR/$distributionUrlName" -Algorithm SHA256).Hash.ToLower() -ne $distributionSha256Sum) {
Write-Error "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised. If you updated your Maven version, you need to update the specified distributionSha256Sum property."
}
}
# unzip and move
Expand-Archive "$TMP_DOWNLOAD_DIR/$distributionUrlName" -DestinationPath "$TMP_DOWNLOAD_DIR" | Out-Null
Rename-Item -Path "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" -NewName $MAVEN_HOME_NAME | Out-Null
try {
Move-Item -Path "$TMP_DOWNLOAD_DIR/$MAVEN_HOME_NAME" -Destination $MAVEN_HOME_PARENT | Out-Null
} catch {
if (! (Test-Path -Path "$MAVEN_HOME" -PathType Container)) {
Write-Error "fail to move MAVEN_HOME"
}
} finally {
try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null }
catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" }
}
Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD"


@ -0,0 +1,71 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>3.3.5</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<groupId>ru.ulstu</groupId>
<artifactId>melon</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>melon</name>
<description>Demo project for Spring Boot</description>
<url/>
<licenses>
<license/>
</licenses>
<developers>
<developer/>
</developers>
<scm>
<connection/>
<developerConnection/>
<tag/>
<url/>
</scm>
<properties>
<java.version>17</java.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springdoc</groupId>
<artifactId>springdoc-openapi-starter-webmvc-ui</artifactId>
<version>2.5.0</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<configuration>
<excludes>
<exclude>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
</project>


@ -0,0 +1,13 @@
package ru.ulstu.melon;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class MelonApplication {
public static void main(String[] args) {
SpringApplication.run(MelonApplication.class, args);
}
}


@ -0,0 +1,13 @@
package ru.ulstu.melon.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.client.RestTemplate;
@Configuration
public class RestTemplateConfig {
@Bean
public RestTemplate restTemplate() {
return new RestTemplate();
}
}


@ -0,0 +1,54 @@
package ru.ulstu.melon.controller;
import lombok.RequiredArgsConstructor;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import ru.ulstu.melon.dto.CreateMelonDto;
import ru.ulstu.melon.dto.MelonDto;
import ru.ulstu.melon.dto.UpdateMelonDto;
import ru.ulstu.melon.model.Melon;
import ru.ulstu.melon.service.MelonService;
import java.util.Collection;
import java.util.UUID;
@RestController
@RequiredArgsConstructor
@RequestMapping("/melon")
public class MelonController {
private final MelonService melonService;
@GetMapping
public ResponseEntity<Collection<Melon>> get() {
return new ResponseEntity<>(melonService.get(), HttpStatus.OK);
}
@GetMapping("/{id}")
public ResponseEntity<MelonDto> get(@PathVariable UUID id) {
return new ResponseEntity<>(melonService.get(id), HttpStatus.OK);
}
@PostMapping
public ResponseEntity<MelonDto> add(@RequestBody CreateMelonDto dto) {
return new ResponseEntity<>(melonService.add(dto), HttpStatus.OK);
}
@PutMapping("/{id}")
public ResponseEntity<MelonDto> update(@PathVariable UUID id, @RequestBody UpdateMelonDto dto) {
return new ResponseEntity<>(melonService.update(id, dto), HttpStatus.OK);
}
@DeleteMapping("/{id}")
public ResponseEntity<Void> delete(@PathVariable UUID id) {
melonService.delete(id);
return new ResponseEntity<>(HttpStatus.OK);
}
}


@ -0,0 +1,14 @@
package ru.ulstu.melon.dto;
import lombok.AllArgsConstructor;
import lombok.Getter;
import java.util.UUID;
@AllArgsConstructor
@Getter
public class CreateMelonDto {
private Boolean isRipe;
private Double weight;
private UUID waterMelonId;
}


@ -0,0 +1,28 @@
package ru.ulstu.melon.dto;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.Setter;
import ru.ulstu.melon.model.Melon;
import ru.ulstu.melon.model.Water;
import java.util.UUID;
@AllArgsConstructor
@Getter
@Setter
public class MelonDto {
private UUID id;
private Boolean isRipe;
private Double weight;
private UUID waterMelonId;
private WaterDto waterMelon;
public MelonDto(Melon melon, Water water) {
this.id = melon.getId();
this.isRipe = melon.getIsRipe();
this.weight = melon.getWeight();
this.waterMelonId = melon.getWaterMelonId();
this.waterMelon = new WaterDto(water);
}
}


@ -0,0 +1,11 @@
package ru.ulstu.melon.dto;
import lombok.AllArgsConstructor;
import lombok.Getter;
@AllArgsConstructor
@Getter
public class UpdateMelonDto {
private Boolean isRipe;
private Double weight;
}


@ -0,0 +1,23 @@
package ru.ulstu.melon.dto;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.Setter;
import ru.ulstu.melon.model.Water;
import java.util.UUID;
@Getter
@Setter
@AllArgsConstructor
public class WaterDto {
private UUID id;
private Boolean isSweetBottom;
private Double volume;
public WaterDto(Water water) {
this.id = water.getId();
this.isSweetBottom = water.getIsSweetBottom();
this.volume = water.getVolume();
}
}


@ -0,0 +1,19 @@
package ru.ulstu.melon.model;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import java.util.UUID;
@AllArgsConstructor
@NoArgsConstructor
@Getter
@Setter
public class Melon {
private UUID id;
private Boolean isRipe;
private Double weight;
private UUID waterMelonId;
}


@ -0,0 +1,18 @@
package ru.ulstu.melon.model;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import java.util.List;
import java.util.UUID;
@AllArgsConstructor
@NoArgsConstructor
@Getter
public class Water {
private UUID id;
private Boolean isSweetBottom;
private Double volume;
private List<Melon> waterMelons;
}


@ -0,0 +1,88 @@
package ru.ulstu.melon.service;
import lombok.RequiredArgsConstructor;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Service;
import org.springframework.web.client.RestClientException;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.server.ResponseStatusException;
import ru.ulstu.melon.dto.CreateMelonDto;
import ru.ulstu.melon.dto.MelonDto;
import ru.ulstu.melon.dto.UpdateMelonDto;
import ru.ulstu.melon.model.Melon;
import ru.ulstu.melon.model.Water;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
@Service
@RequiredArgsConstructor
public class MelonService {
private final Map<UUID, Melon> melons = new HashMap<>();
private final RestTemplate restTemplate;
private static final String WATER_SERVICE_PATH = "http://nginx/water/water/";
public Collection<Melon> get() {
return melons.values();
}
public MelonDto get(UUID id) {
if (!melons.containsKey(id)) {
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Melon not found");
}
final Melon melon = melons.get(id);
return new MelonDto(melon, getWater(melon.getWaterMelonId()));
}
public MelonDto add(CreateMelonDto dto) {
Melon melon = new Melon(UUID.randomUUID(),
dto.getIsRipe(),
dto.getWeight(),
dto.getWaterMelonId());
melons.put(melon.getId(), melon);
Water water;
try {
String baseUrl = WATER_SERVICE_PATH + melon.getWaterMelonId() + "/addMelon";
water = restTemplate.postForObject(
baseUrl,
melon,
Water.class
);
} catch (RestClientException e) {
throw new RuntimeException("Failed to add melon to waterMelons: " + e.getMessage(), e);
}
return new MelonDto(melon, water);
}
public MelonDto update(UUID id, UpdateMelonDto dto) {
if (!melons.containsKey(id)) {
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Melon not found");
}
Melon melon = melons.get(id);
if (dto.getWeight() != null)
melon.setWeight(dto.getWeight());
if (dto.getIsRipe() != null)
melon.setIsRipe(dto.getIsRipe());
return new MelonDto(melon, getWater(melon.getWaterMelonId()));
}
public void delete(UUID id) {
if (!melons.containsKey(id)) {
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Melon not found");
}
melons.remove(id);
}
private Water getWater(UUID id) {
Water water;
try {
String baseUrl = WATER_SERVICE_PATH + id;
water = restTemplate.getForEntity(baseUrl, Water.class).getBody();
} catch (RestClientException e) {
throw new RuntimeException("Failed to get waterMelon for melon: " + e.getMessage(), e);
}
return water;
}
}


@ -0,0 +1 @@
spring.application.name=melon


@ -0,0 +1,13 @@
package ru.ulstu.melon;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
@SpringBootTest
class MelonApplicationTests {
@Test
void contextLoads() {
}
}


@ -0,0 +1,19 @@
events {
worker_connections 1024;
}
http {
server {
listen 80;
listen [::]:80;
server_name localhost;
location /melon/ {
proxy_pass http://melon:8080/;
}
location /water/ {
proxy_pass http://water:8081/;
}
}
}


@ -0,0 +1,2 @@
/mvnw text eol=lf
*.cmd text eol=crlf

afanasev_dmitry_lab_3/water/.gitignore (33 lines, vendored)

@ -0,0 +1,33 @@
HELP.md
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**/target/
!**/src/test/**/target/
### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/
### VS Code ###
.vscode/


@ -0,0 +1,19 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
wrapperVersion=3.3.2
distributionType=only-script
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip


@ -0,0 +1,4 @@
FROM bellsoft/liberica-openjdk-alpine:17.0.8
ADD target/water-0.0.1-SNAPSHOT.jar /app/
CMD ["java", "-Xmx200m", "-jar", "/app/water-0.0.1-SNAPSHOT.jar"]
WORKDIR /app

afanasev_dmitry_lab_3/water/mvnw (259 lines, vendored)

@ -0,0 +1,259 @@
#!/bin/sh
# ----------------------------------------------------------------------------
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# ----------------------------------------------------------------------------
# ----------------------------------------------------------------------------
# Apache Maven Wrapper startup batch script, version 3.3.2
#
# Optional ENV vars
# -----------------
# JAVA_HOME - location of a JDK home dir, required when download maven via java source
# MVNW_REPOURL - repo url base for downloading maven distribution
# MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven
# MVNW_VERBOSE - true: enable verbose log; debug: trace the mvnw script; others: silence the output
# ----------------------------------------------------------------------------
set -euf
[ "${MVNW_VERBOSE-}" != debug ] || set -x
# OS specific support.
native_path() { printf %s\\n "$1"; }
case "$(uname)" in
CYGWIN* | MINGW*)
[ -z "${JAVA_HOME-}" ] || JAVA_HOME="$(cygpath --unix "$JAVA_HOME")"
native_path() { cygpath --path --windows "$1"; }
;;
esac
# set JAVACMD and JAVACCMD
set_java_home() {
# For Cygwin and MinGW, ensure paths are in Unix format before anything is touched
if [ -n "${JAVA_HOME-}" ]; then
if [ -x "$JAVA_HOME/jre/sh/java" ]; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
JAVACCMD="$JAVA_HOME/jre/sh/javac"
else
JAVACMD="$JAVA_HOME/bin/java"
JAVACCMD="$JAVA_HOME/bin/javac"
if [ ! -x "$JAVACMD" ] || [ ! -x "$JAVACCMD" ]; then
echo "The JAVA_HOME environment variable is not defined correctly, so mvnw cannot run." >&2
echo "JAVA_HOME is set to \"$JAVA_HOME\", but \"\$JAVA_HOME/bin/java\" or \"\$JAVA_HOME/bin/javac\" does not exist." >&2
return 1
fi
fi
else
JAVACMD="$(
'set' +e
'unset' -f command 2>/dev/null
'command' -v java
)" || :
JAVACCMD="$(
'set' +e
'unset' -f command 2>/dev/null
'command' -v javac
)" || :
if [ ! -x "${JAVACMD-}" ] || [ ! -x "${JAVACCMD-}" ]; then
echo "The java/javac command does not exist in PATH nor is JAVA_HOME set, so mvnw cannot run." >&2
return 1
fi
fi
}
# hash string like Java String::hashCode
hash_string() {
str="${1:-}" h=0
while [ -n "$str" ]; do
char="${str%"${str#?}"}"
h=$(((h * 31 + $(LC_CTYPE=C printf %d "'$char")) % 4294967296))
str="${str#?}"
done
printf %x\\n $h
}
verbose() { :; }
[ "${MVNW_VERBOSE-}" != true ] || verbose() { printf %s\\n "${1-}"; }
die() {
printf %s\\n "$1" >&2
exit 1
}
trim() {
# MWRAPPER-139:
# Trims trailing and leading whitespace, carriage returns, tabs, and linefeeds.
# Needed for removing poorly interpreted newline sequences when running in more
# exotic environments such as mingw bash on Windows.
printf "%s" "${1}" | tr -d '[:space:]'
}
# parse distributionUrl and optional distributionSha256Sum, requires .mvn/wrapper/maven-wrapper.properties
while IFS="=" read -r key value; do
case "${key-}" in
distributionUrl) distributionUrl=$(trim "${value-}") ;;
distributionSha256Sum) distributionSha256Sum=$(trim "${value-}") ;;
esac
done <"${0%/*}/.mvn/wrapper/maven-wrapper.properties"
[ -n "${distributionUrl-}" ] || die "cannot read distributionUrl property in ${0%/*}/.mvn/wrapper/maven-wrapper.properties"
case "${distributionUrl##*/}" in
maven-mvnd-*bin.*)
MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/
case "${PROCESSOR_ARCHITECTURE-}${PROCESSOR_ARCHITEW6432-}:$(uname -a)" in
*AMD64:CYGWIN* | *AMD64:MINGW*) distributionPlatform=windows-amd64 ;;
:Darwin*x86_64) distributionPlatform=darwin-amd64 ;;
:Darwin*arm64) distributionPlatform=darwin-aarch64 ;;
:Linux*x86_64*) distributionPlatform=linux-amd64 ;;
*)
echo "Cannot detect native platform for mvnd on $(uname)-$(uname -m), use pure java version" >&2
distributionPlatform=linux-amd64
;;
esac
distributionUrl="${distributionUrl%-bin.*}-$distributionPlatform.zip"
;;
maven-mvnd-*) MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ ;;
*) MVN_CMD="mvn${0##*/mvnw}" _MVNW_REPO_PATTERN=/org/apache/maven/ ;;
esac
# apply MVNW_REPOURL and calculate MAVEN_HOME
# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-<version>,maven-mvnd-<version>-<platform>}/<hash>
[ -z "${MVNW_REPOURL-}" ] || distributionUrl="$MVNW_REPOURL$_MVNW_REPO_PATTERN${distributionUrl#*"$_MVNW_REPO_PATTERN"}"
distributionUrlName="${distributionUrl##*/}"
distributionUrlNameMain="${distributionUrlName%.*}"
distributionUrlNameMain="${distributionUrlNameMain%-bin}"
MAVEN_USER_HOME="${MAVEN_USER_HOME:-${HOME}/.m2}"
MAVEN_HOME="${MAVEN_USER_HOME}/wrapper/dists/${distributionUrlNameMain-}/$(hash_string "$distributionUrl")"
exec_maven() {
unset MVNW_VERBOSE MVNW_USERNAME MVNW_PASSWORD MVNW_REPOURL || :
exec "$MAVEN_HOME/bin/$MVN_CMD" "$@" || die "cannot exec $MAVEN_HOME/bin/$MVN_CMD"
}
if [ -d "$MAVEN_HOME" ]; then
verbose "found existing MAVEN_HOME at $MAVEN_HOME"
exec_maven "$@"
fi
case "${distributionUrl-}" in
*?-bin.zip | *?maven-mvnd-?*-?*.zip) ;;
*) die "distributionUrl is not valid, must match *-bin.zip or maven-mvnd-*.zip, but found '${distributionUrl-}'" ;;
esac
# prepare tmp dir
if TMP_DOWNLOAD_DIR="$(mktemp -d)" && [ -d "$TMP_DOWNLOAD_DIR" ]; then
clean() { rm -rf -- "$TMP_DOWNLOAD_DIR"; }
trap clean HUP INT TERM EXIT
else
die "cannot create temp dir"
fi
mkdir -p -- "${MAVEN_HOME%/*}"
# Download and Install Apache Maven
verbose "Couldn't find MAVEN_HOME, downloading and installing it ..."
verbose "Downloading from: $distributionUrl"
verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName"
# select .zip or .tar.gz
if ! command -v unzip >/dev/null; then
distributionUrl="${distributionUrl%.zip}.tar.gz"
distributionUrlName="${distributionUrl##*/}"
fi
# verbose opt
__MVNW_QUIET_WGET=--quiet __MVNW_QUIET_CURL=--silent __MVNW_QUIET_UNZIP=-q __MVNW_QUIET_TAR=''
[ "${MVNW_VERBOSE-}" != true ] || __MVNW_QUIET_WGET='' __MVNW_QUIET_CURL='' __MVNW_QUIET_UNZIP='' __MVNW_QUIET_TAR=v
# normalize http auth
case "${MVNW_PASSWORD:+has-password}" in
'') MVNW_USERNAME='' MVNW_PASSWORD='' ;;
has-password) [ -n "${MVNW_USERNAME-}" ] || MVNW_USERNAME='' MVNW_PASSWORD='' ;;
esac
if [ -z "${MVNW_USERNAME-}" ] && command -v wget >/dev/null; then
verbose "Found wget ... using wget"
wget ${__MVNW_QUIET_WGET:+"$__MVNW_QUIET_WGET"} "$distributionUrl" -O "$TMP_DOWNLOAD_DIR/$distributionUrlName" || die "wget: Failed to fetch $distributionUrl"
elif [ -z "${MVNW_USERNAME-}" ] && command -v curl >/dev/null; then
verbose "Found curl ... using curl"
curl ${__MVNW_QUIET_CURL:+"$__MVNW_QUIET_CURL"} -f -L -o "$TMP_DOWNLOAD_DIR/$distributionUrlName" "$distributionUrl" || die "curl: Failed to fetch $distributionUrl"
elif set_java_home; then
verbose "Falling back to use Java to download"
javaSource="$TMP_DOWNLOAD_DIR/Downloader.java"
targetZip="$TMP_DOWNLOAD_DIR/$distributionUrlName"
cat >"$javaSource" <<-END
public class Downloader extends java.net.Authenticator
{
protected java.net.PasswordAuthentication getPasswordAuthentication()
{
return new java.net.PasswordAuthentication( System.getenv( "MVNW_USERNAME" ), System.getenv( "MVNW_PASSWORD" ).toCharArray() );
}
public static void main( String[] args ) throws Exception
{
setDefault( new Downloader() );
java.nio.file.Files.copy( java.net.URI.create( args[0] ).toURL().openStream(), java.nio.file.Paths.get( args[1] ).toAbsolutePath().normalize() );
}
}
END
# For Cygwin/MinGW, switch paths to Windows format before running javac and java
verbose " - Compiling Downloader.java ..."
"$(native_path "$JAVACCMD")" "$(native_path "$javaSource")" || die "Failed to compile Downloader.java"
verbose " - Running Downloader.java ..."
"$(native_path "$JAVACMD")" -cp "$(native_path "$TMP_DOWNLOAD_DIR")" Downloader "$distributionUrl" "$(native_path "$targetZip")"
fi
# If specified, validate the SHA-256 sum of the Maven distribution zip file
if [ -n "${distributionSha256Sum-}" ]; then
distributionSha256Result=false
if [ "$MVN_CMD" = mvnd.sh ]; then
echo "Checksum validation is not supported for maven-mvnd." >&2
echo "Please disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2
exit 1
elif command -v sha256sum >/dev/null; then
if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | sha256sum -c >/dev/null 2>&1; then
distributionSha256Result=true
fi
elif command -v shasum >/dev/null; then
if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | shasum -a 256 -c >/dev/null 2>&1; then
distributionSha256Result=true
fi
else
echo "Checksum validation was requested but neither 'sha256sum' or 'shasum' are available." >&2
echo "Please install either command, or disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2
exit 1
fi
if [ $distributionSha256Result = false ]; then
echo "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised." >&2
echo "If you updated your Maven version, you need to update the specified distributionSha256Sum property." >&2
exit 1
fi
fi
# unzip and move
if command -v unzip >/dev/null; then
unzip ${__MVNW_QUIET_UNZIP:+"$__MVNW_QUIET_UNZIP"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -d "$TMP_DOWNLOAD_DIR" || die "failed to unzip"
else
tar xzf${__MVNW_QUIET_TAR:+"$__MVNW_QUIET_TAR"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -C "$TMP_DOWNLOAD_DIR" || die "failed to untar"
fi
printf %s\\n "$distributionUrl" >"$TMP_DOWNLOAD_DIR/$distributionUrlNameMain/mvnw.url"
mv -- "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" "$MAVEN_HOME" || [ -d "$MAVEN_HOME" ] || die "fail to move MAVEN_HOME"
clean || :
exec_maven "$@"

afanasev_dmitry_lab_3/water/mvnw.cmd (149 lines, vendored)

@ -0,0 +1,149 @@
<# : batch portion
@REM ----------------------------------------------------------------------------
@REM Licensed to the Apache Software Foundation (ASF) under one
@REM or more contributor license agreements. See the NOTICE file
@REM distributed with this work for additional information
@REM regarding copyright ownership. The ASF licenses this file
@REM to you under the Apache License, Version 2.0 (the
@REM "License"); you may not use this file except in compliance
@REM with the License. You may obtain a copy of the License at
@REM
@REM http://www.apache.org/licenses/LICENSE-2.0
@REM
@REM Unless required by applicable law or agreed to in writing,
@REM software distributed under the License is distributed on an
@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@REM KIND, either express or implied. See the License for the
@REM specific language governing permissions and limitations
@REM under the License.
@REM ----------------------------------------------------------------------------
@REM ----------------------------------------------------------------------------
@REM Apache Maven Wrapper startup batch script, version 3.3.2
@REM
@REM Optional ENV vars
@REM MVNW_REPOURL - repo url base for downloading maven distribution
@REM MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven
@REM MVNW_VERBOSE - true: enable verbose log; others: silence the output
@REM ----------------------------------------------------------------------------
@IF "%__MVNW_ARG0_NAME__%"=="" (SET __MVNW_ARG0_NAME__=%~nx0)
@SET __MVNW_CMD__=
@SET __MVNW_ERROR__=
@SET __MVNW_PSMODULEP_SAVE=%PSModulePath%
@SET PSModulePath=
@FOR /F "usebackq tokens=1* delims==" %%A IN (`powershell -noprofile "& {$scriptDir='%~dp0'; $script='%__MVNW_ARG0_NAME__%'; icm -ScriptBlock ([Scriptblock]::Create((Get-Content -Raw '%~f0'))) -NoNewScope}"`) DO @(
IF "%%A"=="MVN_CMD" (set __MVNW_CMD__=%%B) ELSE IF "%%B"=="" (echo %%A) ELSE (echo %%A=%%B)
)
@SET PSModulePath=%__MVNW_PSMODULEP_SAVE%
@SET __MVNW_PSMODULEP_SAVE=
@SET __MVNW_ARG0_NAME__=
@SET MVNW_USERNAME=
@SET MVNW_PASSWORD=
@IF NOT "%__MVNW_CMD__%"=="" (%__MVNW_CMD__% %*)
@echo Cannot start maven from wrapper >&2 && exit /b 1
@GOTO :EOF
: end batch / begin powershell #>
$ErrorActionPreference = "Stop"
if ($env:MVNW_VERBOSE -eq "true") {
$VerbosePreference = "Continue"
}
# calculate distributionUrl, requires .mvn/wrapper/maven-wrapper.properties
$distributionUrl = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionUrl
if (!$distributionUrl) {
Write-Error "cannot read distributionUrl property in $scriptDir/.mvn/wrapper/maven-wrapper.properties"
}
switch -wildcard -casesensitive ( $($distributionUrl -replace '^.*/','') ) {
"maven-mvnd-*" {
$USE_MVND = $true
$distributionUrl = $distributionUrl -replace '-bin\.[^.]*$',"-windows-amd64.zip"
$MVN_CMD = "mvnd.cmd"
break
}
default {
$USE_MVND = $false
$MVN_CMD = $script -replace '^mvnw','mvn'
break
}
}
# apply MVNW_REPOURL and calculate MAVEN_HOME
# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-<version>,maven-mvnd-<version>-<platform>}/<hash>
if ($env:MVNW_REPOURL) {
$MVNW_REPO_PATTERN = if ($USE_MVND) { "/org/apache/maven/" } else { "/maven/mvnd/" }
$distributionUrl = "$env:MVNW_REPOURL$MVNW_REPO_PATTERN$($distributionUrl -replace '^.*'+$MVNW_REPO_PATTERN,'')"
}
$distributionUrlName = $distributionUrl -replace '^.*/',''
$distributionUrlNameMain = $distributionUrlName -replace '\.[^.]*$','' -replace '-bin$',''
$MAVEN_HOME_PARENT = "$HOME/.m2/wrapper/dists/$distributionUrlNameMain"
if ($env:MAVEN_USER_HOME) {
$MAVEN_HOME_PARENT = "$env:MAVEN_USER_HOME/wrapper/dists/$distributionUrlNameMain"
}
$MAVEN_HOME_NAME = ([System.Security.Cryptography.MD5]::Create().ComputeHash([byte[]][char[]]$distributionUrl) | ForEach-Object {$_.ToString("x2")}) -join ''
$MAVEN_HOME = "$MAVEN_HOME_PARENT/$MAVEN_HOME_NAME"
if (Test-Path -Path "$MAVEN_HOME" -PathType Container) {
Write-Verbose "found existing MAVEN_HOME at $MAVEN_HOME"
Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD"
exit $?
}
if (! $distributionUrlNameMain -or ($distributionUrlName -eq $distributionUrlNameMain)) {
Write-Error "distributionUrl is not valid, must end with *-bin.zip, but found $distributionUrl"
}
# prepare tmp dir
$TMP_DOWNLOAD_DIR_HOLDER = New-TemporaryFile
$TMP_DOWNLOAD_DIR = New-Item -Itemtype Directory -Path "$TMP_DOWNLOAD_DIR_HOLDER.dir"
$TMP_DOWNLOAD_DIR_HOLDER.Delete() | Out-Null
trap {
if ($TMP_DOWNLOAD_DIR.Exists) {
try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null }
catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" }
}
}
New-Item -Itemtype Directory -Path "$MAVEN_HOME_PARENT" -Force | Out-Null
# Download and Install Apache Maven
Write-Verbose "Couldn't find MAVEN_HOME, downloading and installing it ..."
Write-Verbose "Downloading from: $distributionUrl"
Write-Verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName"
$webclient = New-Object System.Net.WebClient
if ($env:MVNW_USERNAME -and $env:MVNW_PASSWORD) {
$webclient.Credentials = New-Object System.Net.NetworkCredential($env:MVNW_USERNAME, $env:MVNW_PASSWORD)
}
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
$webclient.DownloadFile($distributionUrl, "$TMP_DOWNLOAD_DIR/$distributionUrlName") | Out-Null
# If specified, validate the SHA-256 sum of the Maven distribution zip file
$distributionSha256Sum = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionSha256Sum
if ($distributionSha256Sum) {
if ($USE_MVND) {
Write-Error "Checksum validation is not supported for maven-mvnd. `nPlease disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties."
}
Import-Module $PSHOME\Modules\Microsoft.PowerShell.Utility -Function Get-FileHash
if ((Get-FileHash "$TMP_DOWNLOAD_DIR/$distributionUrlName" -Algorithm SHA256).Hash.ToLower() -ne $distributionSha256Sum) {
Write-Error "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised. If you updated your Maven version, you need to update the specified distributionSha256Sum property."
}
}
# unzip and move
Expand-Archive "$TMP_DOWNLOAD_DIR/$distributionUrlName" -DestinationPath "$TMP_DOWNLOAD_DIR" | Out-Null
Rename-Item -Path "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" -NewName $MAVEN_HOME_NAME | Out-Null
try {
Move-Item -Path "$TMP_DOWNLOAD_DIR/$MAVEN_HOME_NAME" -Destination $MAVEN_HOME_PARENT | Out-Null
} catch {
if (! (Test-Path -Path "$MAVEN_HOME" -PathType Container)) {
Write-Error "fail to move MAVEN_HOME"
}
} finally {
try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null }
catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" }
}
Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD"


@ -0,0 +1,71 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>3.3.5</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<groupId>ru.ulstu</groupId>
<artifactId>water</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>water</name>
<description>Demo project for Spring Boot</description>
<url/>
<licenses>
<license/>
</licenses>
<developers>
<developer/>
</developers>
<scm>
<connection/>
<developerConnection/>
<tag/>
<url/>
</scm>
<properties>
<java.version>17</java.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.springdoc</groupId>
<artifactId>springdoc-openapi-starter-webmvc-ui</artifactId>
<version>2.5.0</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<configuration>
<excludes>
<exclude>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</exclude>
</excludes>
</configuration>
</plugin>
</plugins>
</build>
</project>


@ -0,0 +1,13 @@
package ru.ulstu.water;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class WaterApplication {
public static void main(String[] args) {
SpringApplication.run(WaterApplication.class, args);
}
}


@ -0,0 +1,58 @@
package ru.ulstu.water.controller;
import lombok.RequiredArgsConstructor;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import ru.ulstu.water.dto.WaterDto;
import ru.ulstu.water.model.Melon;
import ru.ulstu.water.model.Water;
import ru.ulstu.water.service.WaterService;
import java.util.Collection;
import java.util.UUID;
@RestController
@RequiredArgsConstructor
@RequestMapping("/water")
public class WaterController {
private final WaterService waterService;
@GetMapping
public ResponseEntity<Collection<Water>> get() {
return new ResponseEntity<>(waterService.get(), HttpStatus.OK);
}
@GetMapping("/{id}")
public ResponseEntity<Water> get(@PathVariable UUID id) {
return new ResponseEntity<>(waterService.get(id), HttpStatus.OK);
}
@PostMapping
public ResponseEntity<Water> add(@RequestBody WaterDto dto) {
return new ResponseEntity<>(waterService.add(dto), HttpStatus.OK);
}
@PutMapping("/{id}")
public ResponseEntity<Water> update(@PathVariable UUID id, @RequestBody WaterDto dto) {
return new ResponseEntity<>(waterService.update(id, dto), HttpStatus.OK);
}
@DeleteMapping("/{id}")
public ResponseEntity<Void> delete(@PathVariable UUID id) {
waterService.delete(id);
return new ResponseEntity<>(HttpStatus.OK);
}
@PostMapping("/{id}/addMelon")
public ResponseEntity<Water> addMelon(@PathVariable UUID id, @RequestBody Melon melon) {
return new ResponseEntity<>(waterService.addMelon(id, melon), HttpStatus.OK);
}
}


@ -0,0 +1,11 @@
package ru.ulstu.water.dto;
import lombok.AllArgsConstructor;
import lombok.Getter;
@AllArgsConstructor
@Getter
public class WaterDto {
private Boolean isSweetBottom;
private Double volume;
}


@ -0,0 +1,19 @@
package ru.ulstu.water.model;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import java.util.UUID;
@AllArgsConstructor
@NoArgsConstructor
@Getter
@Setter
public class Melon {
private UUID id;
private Boolean isRipe;
private Double weight;
private UUID waterMelonId;
}


@ -0,0 +1,20 @@
package ru.ulstu.water.model;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import java.util.List;
import java.util.UUID;
@AllArgsConstructor
@NoArgsConstructor
@Getter
@Setter
public class Water {
private UUID id;
private Boolean isSweetBottom;
private Double volume;
private List<Melon> waterMelons;
}


@ -0,0 +1,64 @@
package ru.ulstu.water.service;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Service;
import org.springframework.web.server.ResponseStatusException;
import ru.ulstu.water.dto.WaterDto;
import ru.ulstu.water.model.Melon;
import ru.ulstu.water.model.Water;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
@Service
public class WaterService {
private final Map<UUID, Water> waters = new HashMap<>();
public Collection<Water> get() {
return waters.values();
}
public Water get(UUID id) {
if (!waters.containsKey(id)) {
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Water not found");
}
return waters.get(id);
}
public Water add(WaterDto dto) {
Water water = new Water(UUID.randomUUID(),
dto.getIsSweetBottom(),
dto.getVolume(),
new ArrayList<>());
waters.put(water.getId(), water);
return water;
}
public Water update(UUID id, WaterDto dto) {
Water water = waters.get(id);
if (dto.getIsSweetBottom() != null)
water.setIsSweetBottom(dto.getIsSweetBottom());
if (dto.getVolume() != null)
water.setVolume(dto.getVolume());
return water;
}
public void delete(UUID id) {
if (!waters.containsKey(id)) {
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Water not found");
}
waters.remove(id);
}
public Water addMelon(UUID id, Melon melon) {
if (!waters.containsKey(id)) {
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Water not found");
}
final Water water = waters.get(id);
water.getWaterMelons().add(melon);
return water;
}
}


@ -0,0 +1,2 @@
spring.application.name=water
server.port=8081


@ -0,0 +1,13 @@
package ru.ulstu.water;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
@SpringBootTest
class WaterApplicationTests {
@Test
void contextLoads() {
}
}


@ -0,0 +1,32 @@
# Laboratory work 1 - Getting started with Docker and Docker Compose
## ПИбд-42 || Алейкин Артем
### Description
In this lab we deploy two popular services - MediaWiki and Redmine - with Docker Compose, backed by a shared PostgreSQL database. Each service runs in its own container, the ports are forwarded so that the web interfaces are reachable from the host, and Docker volumes keep the data outside the containers.
### Project goal
To get acquainted with modern containerization technologies.
### Steps to run:
1. Clone the repository:
```
git clone <repository-url>
cd <repository-directory>
```
2. Start the containers:
```
docker-compose up -d
```
3. After startup the following services should be available:
MediaWiki: http://localhost:8080
Redmine: http://localhost:8081
4. To stop the containers:
```
docker-compose down
```
Video demonstration: https://vk.com/video248424990_456239601?list=ln-sCRa9IIiV1VpInn2d1


@ -0,0 +1,45 @@
services:
mediawiki:
image: mediawiki
container_name: mediawiki
ports:
- "8080:80" # Forward port 8080 on the host for access to MediaWiki
volumes:
- mediawiki_data:/var/www/html/images # Volume for storing MediaWiki data
environment:
- MEDIAWIKI_DB_HOST=db
- MEDIAWIKI_DB_NAME=mediawiki
- MEDIAWIKI_DB_USER=root
- MEDIAWIKI_DB_PASSWORD=example
depends_on:
- db
redmine:
image: redmine
container_name: redmine
ports:
- "8081:3000" # Forward port 8081 on the host for access to Redmine
volumes:
- redmine_data:/usr/src/redmine/files # Volume for storing Redmine data
environment:
- REDMINE_DB_POSTGRESQL=db
- REDMINE_DB_DATABASE=redmine
- REDMINE_DB_USERNAME=root
- REDMINE_DB_PASSWORD=example
depends_on:
- db
db:
image: postgres:latest
container_name: db
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: example
POSTGRES_DB: postgres
volumes:
- db_data:/var/lib/postgresql # Volume for the database
volumes:
mediawiki_data: # volume for MediaWiki
redmine_data: # volume for Redmine
db_data: # volume for the database


@ -0,0 +1,48 @@
## Report on the Docker Compose configuration
### Summary:
This Docker Compose configuration starts the set of services needed to run WordPress and MediaWiki. It includes:
- **WordPress:** a web service for blogs and CMS
- **MySQL:** the database that stores the WordPress data
- **RabbitMQ:** a message broker for potential future use
- **MediaWiki:** a wiki engine for creating and editing wiki pages
### Running the lab:
1. Install Docker and Docker Compose.
2. Save the configuration to a docker-compose.yml file.
3. Run docker-compose up --build
### Technologies used:
- **Docker Compose:** a tool for defining and running multi-container applications.
- **Docker:** a platform for building, shipping, and running containers.
- **WordPress:** a popular platform for blogs and CMS.
- **MySQL:** a popular database management system.
- **RabbitMQ:** a message broker used for asynchronous messaging.
- **MediaWiki:** free software for creating and editing wiki pages.
### Functionality:
The configuration starts the following services:
- **WordPress:** runs on port 8080, available at http://localhost:8080.
- **MySQL:** provides the database for WordPress and MediaWiki.
- **RabbitMQ:** runs on port 5672; the management UI is available at http://localhost:15672.
- **MediaWiki:** runs on port 8081, available at http://localhost:8081.
### Additional notes
- **Volumes**: used to persist service data so that it is not lost when containers are restarted.
- **Depends_on**: declares dependencies between services, e.g. WordPress depends on MySQL.
- **Restart policy**: defines how services are restarted after a failure.
### Video
https://vk.com/video/@artamonovat?z=video212084908_456239356%2Fpl_212084908_-2
### Conclusion:
This Docker Compose configuration provides a simple and convenient way to start and manage several services related to WordPress and MediaWiki, letting developers deploy and run the applications in an isolated environment.


@ -0,0 +1,61 @@
version: '3.7'
services:
wordpress:
image: wordpress:latest
ports:
- "8080:80"
volumes:
- wordpress_data:/var/www/html
environment:
WORDPRESS_DB_HOST: db
WORDPRESS_DB_NAME: wordpress
WORDPRESS_DB_USER: wordpress
WORDPRESS_DB_PASSWORD: password
depends_on:
- db
restart: unless-stopped
db:
image: mysql:latest
volumes:
- db_data:/var/lib/mysql
environment:
MYSQL_DATABASE: wordpress
MYSQL_USER: wordpress
MYSQL_PASSWORD: dbpassword
MYSQL_ROOT_PASSWORD: rootpassword
restart: unless-stopped
rabbitmq:
image: rabbitmq:3-management
ports:
- "5672:5672"
- "15672:15672"
volumes:
- rabbitmq_data:/var/lib/rabbitmq
environment:
RABBITMQ_DEFAULT_USER: guest
RABBITMQ_DEFAULT_PASS: password
restart: unless-stopped
mediawiki:
image: mediawiki:latest
ports:
- "8081:80"
volumes:
- mediawiki_data:/var/www/html
environment:
MW_DB_SERVER: db
MW_DB_NAME: mediawiki
MW_DB_USER: mediawiki
MW_DB_PASSWORD: mediawiki_password
depends_on:
- db
restart: unless-stopped
volumes:
wordpress_data:
db_data:
rabbitmq_data:
mediawiki_data:

artamonova_tatyana_lab_2/.gitignore (5 lines, vendored)

@ -0,0 +1,5 @@
*.pyc
__pycache__
*.egg-info
*.dist-info
.DS_Store


@ -0,0 +1,22 @@
## Laboratory work No. 2
### Completed by Артамонова Татьяна, ПИбд-42
**Variant 1: Program 4 - Number of characters in the file names from the /var/data directory**
- Builds the file /var/result/data1.txt so that each line of the file is the number of characters in a file name from the /var/data directory.
**Variant 2: Program 3 - Count of numbers in a sequence**
- Finds the largest number in the file /var/result/data1.txt and writes how many times it occurs in the sequence to /var/result/data2.txt (a short sketch of this step is given after this section).
**Project structure:**
1. The worker-1 and worker-2 folders contain the executable .py files and the Dockerfiles with the required set of instructions.
2. The data folder contains the files whose name lengths are counted.
3. The result folder contains the program outputs: data1.txt is the result of main1.py (worker-1), data2.txt is the result of main2.py (worker-2). The data in data2 is computed from the data in data1.
4. The .gitignore file specifies which files are tracked and which are not.
5. docker-compose.yml defines and manages the Docker containers.
**Command to run** - docker-compose up --build
**Video link:** https://vk.com/artamonovat?z=video212084908_456239357%2Fvideos212084908%2Fpl_212084908_-2
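**Sketch of the second program's step:** a minimal illustration of the max-and-count idea, with sample values standing in for the lines of /var/result/data1.txt (not a replacement for main2.py):
```
# Sample values standing in for the lines of /var/result/data1.txt
numbers = [15, 18, 18]          # name lengths produced by worker-1
largest = max(numbers)          # 18
count = numbers.count(largest)  # 2 -> this is what ends up in data2.txt
print(count)
```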


@ -0,0 +1,22 @@
services:
worker-1:
build:
context: ./worker-1
volumes:
- ./worker-1:/app
- ./data:/var/data
- ./result:/var/result
depends_on:
- worker-2
worker-2:
build:
context: ./worker-2
volumes:
- ./worker-2:/app
- ./data:/var/data
- ./result:/var/result
volumes:
data:
result:


@ -0,0 +1,3 @@
15
18
18


@ -0,0 +1 @@
2


@ -0,0 +1,14 @@
# Use the Python 3.10-slim image as the base for this container.
# The slim variant is more compact, which keeps the container smaller.
FROM python:3.10-slim
# Set the working directory inside the container to /app.
# All subsequent commands run in this directory.
WORKDIR /app
# Copy main1.py from the current directory into /app inside the container.
COPY main1.py .
# Define the command executed when the container starts.
# In this case it runs the Python script main1.py.
CMD ["python", "main1.py"]


@ -0,0 +1,21 @@
import os
import glob
# Builds data1.txt so that each line of the file is the number of characters in a file name from the /var/data directory
def main():
data_dir = "/var/data"
result_file = "/var/result/data1.txt"
result_dir = os.path.dirname(result_file)
if not os.path.exists(result_dir):
os.makedirs(result_dir)
files = glob.glob(os.path.join(data_dir, '*'))
with open(result_file, 'w') as f:
for file in files:
filename = os.path.basename(file)
f.write(f"{len(filename)}\n")
if __name__ == "__main__":
main()


@ -0,0 +1,14 @@
# Use the Python 3.10-slim image as the base for this container.
# The slim variant is more compact, which keeps the container smaller.
FROM python:3.10-slim
# Set the working directory inside the container to /app.
# All subsequent commands run in this directory.
WORKDIR /app
# Copy main2.py from the current directory into /app inside the container.
COPY main2.py .
# Define the command executed when the container starts.
# In this case it runs the Python script main2.py.
CMD ["python", "main2.py"]


@ -0,0 +1,26 @@
import os
# Finds the largest number in data1.txt and writes how many times it occurs in the sequence to data2.txt
def main():
data_file_path = "/var/result/data1.txt"
result_file_path = "/var/result/data2.txt"
if not os.path.exists(data_file_path):
raise FileNotFoundError(f"Input file not found: {data_file_path}")
result_dir = os.path.dirname(result_file_path)
if not os.path.exists(result_dir):
os.makedirs(result_dir)
with open(data_file_path, 'r') as f:
numbers = [int(x.strip()) for x in f.read().splitlines()]
max_number = max(numbers)
count = numbers.count(max_number)
with open(result_file_path, 'w') as f:
f.write(str(count))
print(f"Count of the largest number: {count}")
if __name__ == "__main__":
main()


@ -0,0 +1,15 @@
services:
vacancies-service:
build: ./vacancy-service
ports:
- "5000:5000"
resumes-service:
build: ./resume-service
ports:
- "5001:5001"
gateway:
image: nginx:latest
ports:
- "80:80"
volumes:
- ./nginx.conf:/etc/nginx/conf.d/default.conf


@ -0,0 +1,19 @@
upstream vacancies {
server vacancies-service:5000;
}
upstream resumes {
server resumes-service:5001;
}
server {
listen 80;
location /vacancies {
proxy_pass http://vacancies;
}
location /resumes {
proxy_pass http://resumes;
}
}


@ -0,0 +1,35 @@
## Laboratory work No. 3, ПИбд-42, Артамонова Татьяна
### Goal:
* Implement two microservices that interact with each other via synchronous message exchange (HTTP requests). Access to the microservices goes through an Nginx gateway set up with Docker Compose.
### Technologies:
* Python: the programming language used to implement the microservices.
* Flask: a Python framework for building web applications, used to create the REST APIs of the microservices.
* requests: a Python library for sending HTTP requests, used for synchronous message exchange between the microservices.
* flask_cors: a Flask extension that lets the microservices accept requests from other origins.
* Docker: containerization technology for packaging and running the microservices.
* Docker Compose: a tool for defining and managing multi-container applications, used to run the microservices and the Nginx gateway.
* Nginx: a reverse proxy server, used as the gateway to the microservices.
### Functionality:
#### vacancies-service microservice:
* Implements CRUD operations for vacancies (GET, POST, PUT, DELETE).
* Stores vacancy data in memory (in the vacancies dictionary).
* Fetches resume information from the resumes-service microservice via an HTTP request.
* Includes the resume information in the JSON response for a vacancy.
#### resumes-service microservice:
* Implements CRUD operations for resumes (GET, POST, PUT, DELETE).
* Stores resume data in memory (in the resumes dictionary).
#### Nginx gateway:
* Forwards HTTP requests to the corresponding microservice.
* Provides a single entry point for accessing the microservices.
### Running the program:
* Run docker-compose up -d (a usage example is given at the end of this file).
### Video link:
https://vk.com/artamonovat?z=video212084908_456239358%2Fvideos212084908%2Fpl_212084908_-2
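### Usage example
A minimal end-to-end sketch of calling the services through the Nginx gateway (assumptions: the stack was started with docker-compose up -d, the gateway is published on localhost:80 as in docker-compose.yml, and the resume/vacancy field values below are made up):
```
# Hypothetical client script; the field values are illustrative only.
import requests

BASE = "http://localhost"

# Create a resume (the gateway proxies /resumes to resumes-service:5001).
resume = requests.post(f"{BASE}/resumes", json={
    "full_name": "Ivan Ivanov",
    "skills": "Python, Flask",
    "experience": "3 years",
    "phone": "+7-900-000-00-00",
    "email": "ivan@example.com",
}).json()

# Create a vacancy that references the resume
# (the gateway proxies /vacancies to vacancies-service:5000).
vacancy = requests.post(f"{BASE}/vacancies", json={
    "title": "Backend developer",
    "company": "Example LLC",
    "description": "Flask microservices",
    "salary": 100000,
    "resume_uuid": resume["uuid"],
}).json()

# Read the vacancy back; resume_info is filled in by the synchronous
# HTTP call from vacancies-service to resumes-service.
print(requests.get(f"{BASE}/vacancies/{vacancy['uuid']}").json())
```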


@ -0,0 +1,11 @@
FROM python:3.9
WORKDIR /app
COPY requirements.txt .
RUN pip install -r requirements.txt
COPY . .
CMD ["python", "resume.py"]


@ -0,0 +1,3 @@
flask
requests
flask_cors


@ -0,0 +1,79 @@
import uuid
import json
from flask import Flask, request, jsonify
from flask_cors import CORS
class Resume:
def __init__(self, uuid, full_name, skills, experience, phone, email):
self.uuid = uuid
self.full_name = full_name
self.skills = skills
self.experience = experience
self.phone = phone
self.email = email
app = Flask(__name__)
CORS(app)
resumes = {}
@app.route("/resumes", methods=["GET"])
def get_resumes():
return jsonify([resume.__dict__ for resume in resumes.values()])
@app.route("/resumes/<resume_uuid>", methods=["GET"])
def get_resume(resume_uuid):
resume = resumes.get(resume_uuid)
if resume:
return jsonify(resume.__dict__)
else:
return jsonify({"error": "Resume not found"}), 404
@app.route("/resumes", methods=["POST"])
def create_resume():
data = request.get_json()
resume_uuid = str(uuid.uuid4())
resume = Resume(
resume_uuid,
data["full_name"],
data["skills"],
data["experience"],
data["phone"],
data["email"],
)
resumes[resume_uuid] = resume
return jsonify(resume.__dict__), 201
@app.route("/resumes/<resume_uuid>", methods=["PUT"])
def update_resume(resume_uuid):
resume = resumes.get(resume_uuid)
if resume:
data = request.get_json()
resume.full_name = data.get("full_name", resume.full_name)
resume.skills = data.get("skills", resume.skills)
resume.experience = data.get("experience", resume.experience)
resume.phone = data.get("phone", resume.phone)
resume.email = data.get("email", resume.email)
return jsonify(resume.__dict__)
else:
return jsonify({"error": "Resume not found"}), 404
@app.route("/resumes/<resume_uuid>", methods=["DELETE"])
def delete_resume(resume_uuid):
resume = resumes.get(resume_uuid)
if resume:
del resumes[resume_uuid]
return "", 200
else:
return jsonify({"error": "Resume not found"}), 404
if __name__ == "__main__":
app.run(debug=True, host="0.0.0.0", port=5001)


@ -0,0 +1,11 @@
FROM python:3.9
WORKDIR /app
COPY requirements.txt .
RUN pip install -r requirements.txt
COPY . .
CMD ["python", "vacancy.py"]


@ -0,0 +1,3 @@
flask
requests
flask_cors


@ -0,0 +1,124 @@
import uuid
import json
from flask import Flask, request, jsonify
from flask_cors import CORS
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
class Vacancy:
def __init__(self, uuid, title, company, description, salary, resume_uuid):
self.uuid = uuid
self.title = title
self.company = company
self.description = description
self.salary = salary
self.resume_uuid = resume_uuid
self.resume_info = None
def to_dict(self):
if self.resume_info:
return {
"uuid": self.uuid,
"title": self.title,
"company": self.company,
"description": self.description,
"salary": self.salary,
"resume_uuid": self.resume_uuid,
"resume_info": self.resume_info
}
else:
return {
"uuid": self.uuid,
"title": self.title,
"company": self.company,
"description": self.description,
"salary": self.salary,
"resume_uuid": self.resume_uuid
}
app = Flask(__name__)
CORS(app)
vacancies = {}
@app.route("/vacancies", methods=["GET"])
def get_vacancies():
return jsonify([vacancy.to_dict() for vacancy in vacancies.values()])
@app.route("/vacancies/<vacancy_uuid>", methods=["GET"])
def get_vacancy(vacancy_uuid):
vacancy = vacancies.get(vacancy_uuid)
if vacancy:
if not vacancy.resume_info:
vacancy.resume_info = get_resume_info(vacancy.resume_uuid)
return jsonify(vacancy.to_dict())
else:
return jsonify({"error": "Vacancy not found"}), 404
@app.route("/vacancies", methods=["POST"])
def create_vacancy():
data = request.get_json()
vacancy_uuid = str(uuid.uuid4())
vacancy = Vacancy(
vacancy_uuid,
data["title"],
data["company"],
data["description"],
data["salary"],
data["resume_uuid"],
)
vacancies[vacancy_uuid] = vacancy
vacancy.resume_info = get_resume_info(vacancy.resume_uuid)
return jsonify(vacancy.to_dict()), 201
@app.route("/vacancies/<vacancy_uuid>", methods=["PUT"])
def update_vacancy(vacancy_uuid):
vacancy = vacancies.get(vacancy_uuid)
if vacancy:
data = request.get_json()
vacancy.title = data.get("title", vacancy.title)
vacancy.company = data.get("company", vacancy.company)
vacancy.description = data.get("description", vacancy.description)
vacancy.salary = data.get("salary", vacancy.salary)
vacancy.resume_uuid = data.get("resume_uuid", vacancy.resume_uuid)
vacancy.resume_info = get_resume_info(vacancy.resume_uuid)
return jsonify(vacancy.to_dict())
else:
return jsonify({"error": "Vacancy not found"}), 404
@app.route("/vacancies/<vacancy_uuid>", methods=["DELETE"])
def delete_vacancy(vacancy_uuid):
vacancy = vacancies.get(vacancy_uuid)
if vacancy:
del vacancies[vacancy_uuid]
return "", 200
else:
return jsonify({"error": "Vacancy not found"}), 404
def get_resume_info(resume_uuid):
url = f'http://resumes-service:5001/resumes/{resume_uuid}'
# Configure the retry mechanism
retries = Retry(
total=3, # Maximum number of retry attempts
status_forcelist=[429, 500, 502, 503, 504], # Status codes that trigger a retry
backoff_factor=0.3, # Back-off delay before the next attempt
)
adapter = HTTPAdapter(max_retries=retries)
http = requests.Session()
http.mount("https://", adapter)
http.mount("http://", adapter)
try:
response = http.get(url)
if response.status_code == 200:
return response.json()
else:
return None
except requests.exceptions.RequestException as e:
print(f"Error while requesting resumes-service: {e}")
return None
if __name__ == "__main__":
app.run(debug=True, host="0.0.0.0", port=5000)


@ -0,0 +1,23 @@
import pika
import json
import time
credentials = pika.PlainCredentials('guest', 'guest')
connection = pika.BlockingConnection(
pika.ConnectionParameters(host='localhost', credentials=credentials))
channel = connection.channel()
channel.queue_declare(queue='order_queue_1')
channel.queue_bind(exchange='order_events', queue='order_queue_1')
def callback(ch, method, properties, body):
event = json.loads(body.decode('utf-8'))
print(f'Received event (queue 1): {event}')
print(f'Processing order {event["order_id"]}...')
time.sleep(2)
channel.basic_consume(queue='order_queue_1', on_message_callback=callback, auto_ack=True)
print('Waiting for messages (queue 1)...')
channel.start_consuming()


@ -0,0 +1,22 @@
import pika
import json
import time

credentials = pika.PlainCredentials('guest', 'guest')
connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost', credentials=credentials))
channel = connection.channel()

# Declare the exchange as well (idempotent), so the consumer can start before the publisher
channel.exchange_declare(exchange='order_events', exchange_type='fanout')
channel.queue_declare(queue='order_queue_2')
channel.queue_bind(exchange='order_events', queue='order_queue_2')


def callback(ch, method, properties, body):
    event = json.loads(body.decode('utf-8'))
    print(f'Received event (queue 2): {event}')
    print(f'Order {event["order_id"]} processed.')


channel.basic_consume(queue='order_queue_2', on_message_callback=callback, auto_ack=True)
print('Waiting for messages (queue 2)...')
channel.start_consuming()

View File

@ -0,0 +1,31 @@
import pika
import json
import time
import random

credentials = pika.PlainCredentials('guest', 'guest')
connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost', credentials=credentials))
channel = connection.channel()

channel.exchange_declare(exchange='order_events', exchange_type='fanout')

try:
    while True:
        event = {
            'event_type': 'order_created',
            'order_id': random.randint(1000, 9999),
            'customer_name': f'Customer {random.randint(1, 100)}',
            'product_name': f'Product {random.randint(1, 10)}',
            'quantity': random.randint(1, 10),
            'timestamp': time.time()
        }
        channel.basic_publish(
            exchange='order_events',
            routing_key='',
            body=json.dumps(event)
        )
        print(f'Published event: {event}')
        time.sleep(1)
finally:
    connection.close()  # close the connection cleanly when the loop is interrupted

Binary files not shown: 7 new image files (44, 28, 46, 44, 89, 125 and 109 KiB).

View File

@ -0,0 +1,26 @@
## Laboratory work No. 4, ПИбд-42, Artamonova Tatyana
### Completing the tutorial
1. ![tutorial-1.png](images/tutorial-1.png)
2. ![tutorial-2.png](images/tutorial-2.png)
3. ![tutorial-3.png](images/tutorial-3.png)
### Running the applications and analysing processing speed
1. Start Publisher, Consumer 1 and Consumer 2.
    * Consumer 1: the order_queue_1 queue accumulates a backlog of messages, because Consumer 1 processes each message with a 2-second delay.
    * Consumer 2: the order_queue_2 queue stays practically empty, because Consumer 2 processes messages instantly.
2. Start several copies of Consumer 1 (a launcher sketch is given below the screenshots).
    * order_queue_1 holds fewer messages, because three consumers drain it faster than a single one.
![queue1.png](images/queue1.png)
![queue2.png](images/queue2.png)
![exchange.png](images/exchange.png)
![consumers.png](images/consumers.png)
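To run step 2 without opening several terminals by hand, a small launcher can start multiple copies of the slow consumer in parallel. This is only a sketch: the script name `consumer_1.py` is an assumption, since file names are not shown in this diff.

```python
# launch_consumers.py - hypothetical helper, not part of the lab submission
import subprocess
import sys

NUM_COPIES = 3            # how many Consumer 1 processes to run
SCRIPT = "consumer_1.py"  # assumed file name of the slow consumer

# Start NUM_COPIES identical consumer processes with the current interpreter
processes = [
    subprocess.Popen([sys.executable, SCRIPT])
    for _ in range(NUM_COPIES)
]

try:
    for p in processes:
        p.wait()          # block until the consumers exit (Ctrl+C to stop)
except KeyboardInterrupt:
    for p in processes:
        p.terminate()     # stop all copies together
```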
### Video
https://vk.com/video/@artamonovat?section=upload&z=video212084908_456239359

View File

@ -0,0 +1,25 @@
import pika, sys, os


def main():
    connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
    channel = connection.channel()

    channel.queue_declare(queue='hello')

    def callback(ch, method, properties, body):
        print(f" [x] Received {body}")

    channel.basic_consume(queue='hello', on_message_callback=callback, auto_ack=True)

    print(' [*] Waiting for messages. To exit press CTRL+C')
    channel.start_consuming()


if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        print('Interrupted')
        try:
            sys.exit(0)
        except SystemExit:
            os._exit(0)

View File

@ -0,0 +1,11 @@
import pika

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

channel.queue_declare(queue='hello')

channel.basic_publish(exchange='', routing_key='hello', body='Hello World!')
print(" [x] Sent 'Hello World!'")

connection.close()

View File

@ -0,0 +1,19 @@
import pika
import sys

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

channel.queue_declare(queue='task_queue', durable=True)

message = ' '.join(sys.argv[1:]) or "Hello World!"
channel.basic_publish(
    exchange='',
    routing_key='task_queue',
    body=message,
    properties=pika.BasicProperties(
        delivery_mode=pika.DeliveryMode.Persistent
    ))
print(f" [x] Sent {message}")

connection.close()

View File

@ -0,0 +1,22 @@
import pika
import time

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

channel.queue_declare(queue='task_queue', durable=True)
print(' [*] Waiting for messages. To exit press CTRL+C')


def callback(ch, method, properties, body):
    print(f" [x] Received {body.decode()}")
    time.sleep(body.count(b'.'))
    print(" [x] Done")
    ch.basic_ack(delivery_tag=method.delivery_tag)


channel.basic_qos(prefetch_count=1)
channel.basic_consume(queue='task_queue', on_message_callback=callback)

channel.start_consuming()

View File

@ -0,0 +1,13 @@
import pika
import sys

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

channel.exchange_declare(exchange='logs', exchange_type='fanout')

message = ' '.join(sys.argv[1:]) or "info: Hello World!"
channel.basic_publish(exchange='logs', routing_key='', body=message)
print(f" [x] Sent {message}")

connection.close()

View File

@ -0,0 +1,22 @@
import pika

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host='localhost'))
channel = connection.channel()

channel.exchange_declare(exchange='logs', exchange_type='fanout')

result = channel.queue_declare(queue='', exclusive=True)
queue_name = result.method.queue

channel.queue_bind(exchange='logs', queue=queue_name)

print(' [*] Waiting for logs. To exit press CTRL+C')


def callback(ch, method, properties, body):
    print(f" [x] {body}")


channel.basic_consume(
    queue=queue_name, on_message_callback=callback, auto_ack=True)

channel.start_consuming()

View File

@ -0,0 +1,30 @@
**/.classpath
**/.dockerignore
**/.env
**/.git
**/.gitignore
**/.project
**/.settings
**/.toolstarget
**/.vs
**/.vscode
**/*.*proj.user
**/*.dbmdl
**/*.jfm
**/azds.yaml
**/bin
**/charts
**/docker-compose*
**/Dockerfile*
**/node_modules
**/npm-debug.log
**/obj
**/secrets.dev.yaml
**/values.dev.yaml
LICENSE
README.md
!**/.gitignore
!.git/HEAD
!.git/config
!.git/packed-refs
!.git/refs/heads/**

View File

@ -0,0 +1,484 @@
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from `dotnet new gitignore`
# dotenv files
.env
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
[Ll]ogs/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET
project.lock.json
project.fragment.lock.json
artifacts/
# Tye
.tye/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.tlog
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio 6 auto-generated project file (contains which files were open etc.)
*.vbp
# Visual Studio 6 workspace and project file (working project files containing files to include in project)
*.dsw
*.dsp
# Visual Studio 6 technical files
*.ncb
*.aps
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# Visual Studio History (VSHistory) files
.vshistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd
# VS Code files for those working on multiple tools
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
*.code-workspace
# Local History for Visual Studio Code
.history/
# Windows Installer files from build outputs
*.cab
*.msi
*.msix
*.msm
*.msp
# JetBrains Rider
*.sln.iml
.idea
##
## Visual studio for Mac
##
# globs
Makefile.in
*.userprefs
*.usertasks
config.make
config.status
aclocal.m4
install-sh
autom4te.cache/
*.tar.gz
tarballs/
test-results/
# Mac bundle stuff
*.dmg
*.app
# content below from: https://github.com/github/gitignore/blob/master/Global/macOS.gitignore
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
# content below from: https://github.com/github/gitignore/blob/master/Global/Windows.gitignore
# Windows thumbnail cache files
Thumbs.db
ehthumbs.db
ehthumbs_vista.db
# Dump file
*.stackdump
# Folder config file
[Dd]esktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp
# Windows shortcuts
*.lnk
# Vim temporary swap files
*.swp

View File

@ -0,0 +1,18 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<UserSecretsId>9589d6f5-875c-4d7d-8e68-37a2077e80be</UserSecretsId>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
<DockerfileContext>.</DockerfileContext>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" Version="8.0.10" />
<PackageReference Include="Microsoft.VisualStudio.Azure.Containers.Tools.Targets" Version="1.21.0" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.4.0" />
</ItemGroup>
</Project>

View File

@ -0,0 +1,6 @@
@ApiRestaurant_HostAddress = http://localhost:5089
GET {{ApiRestaurant_HostAddress}}/weatherforecast/
Accept: application/json
###

View File

@ -0,0 +1,25 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.11.35222.181
MinimumVisualStudioVersion = 10.0.40219.1
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ApiRestaurant", "ApiRestaurant.csproj", "{6E19ADDC-7351-4145-9C49-B0CC87BD1206}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{6E19ADDC-7351-4145-9C49-B0CC87BD1206}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{6E19ADDC-7351-4145-9C49-B0CC87BD1206}.Debug|Any CPU.Build.0 = Debug|Any CPU
{6E19ADDC-7351-4145-9C49-B0CC87BD1206}.Release|Any CPU.ActiveCfg = Release|Any CPU
{6E19ADDC-7351-4145-9C49-B0CC87BD1206}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {C62DE3C6-63AA-4075-9729-9F5ECD4E7B51}
EndGlobalSection
EndGlobal

View File

@ -0,0 +1,59 @@
using ApiRestaurant.Models;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;

namespace ApiRestaurant.Controllers;

[Route("api/[controller]")]
[ApiController]
public class RestaurantController : ControllerBase
{
    private readonly RestaurantContext _context;

    public RestaurantController(RestaurantContext context)
    {
        _context = context;
    }

    [HttpGet]
    public Task<List<Restaurant>> GetAll() {
        return _context.Restaurants.ToListAsync();
    }

    [HttpGet("{id}")]
    public async Task<ActionResult<Restaurant?>> GetOne(Guid id) {
        var restaurant = await _context.Restaurants.FindAsync(id);
        if (restaurant == null) return NotFound($"Restaurant with [id: '{id}'] not found");
        var waiters = await WaiterApiClient.GetAllForRestaurant(id);
        restaurant.Waiters = waiters;
        return restaurant;
    }

    [HttpPost]
    public ActionResult<Restaurant> Create(RestaurantDTO restaurant) {
        var newRestaurant = new Restaurant { Name = restaurant.Name };
        var res = _context.Restaurants.Add(newRestaurant);
        _context.SaveChanges();
        return res.Entity;
    }

    [HttpPut("{id}")]
    public async Task<ActionResult<Restaurant>> Update(Guid id, RestaurantDTO restaurant) {
        var oldRestaurant = _context.Restaurants.FirstOrDefault(r => r.Id == id);
        if (oldRestaurant == null) return NotFound();
        oldRestaurant.Name = restaurant.Name;
        var res = _context.Restaurants.Update(oldRestaurant);
        await _context.SaveChangesAsync();
        return res.Entity;
    }

    [HttpDelete("{id}")]
    public async Task<ActionResult> Delete(Guid id) {
        var restaurant = _context.Restaurants.FirstOrDefault(r => r.Id == id);
        if (restaurant is null) return NotFound();
        _context.Restaurants.Remove(restaurant);
        await _context.SaveChangesAsync();
        return Ok();
    }
}

public record RestaurantDTO(string Name);
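For reference, a hypothetical client sketch for these endpoints. Assumptions (not stated in the source): the service listens on http://localhost:5089 as in ApiRestaurant.http, the default camelCase JSON serialization is in effect, and the waiter service behind WaiterApiClient is also running.

```python
# Hypothetical client for the Restaurant API (assumes http://localhost:5089).
import requests

BASE_URL = "http://localhost:5089/api/Restaurant"

# Create a restaurant (the body matches RestaurantDTO; binding is case-insensitive)
created = requests.post(BASE_URL, json={"name": "Trattoria"}).json()
restaurant_id = created["id"]  # assumes default camelCase property names

# Fetch it back; GetOne also pulls waiters from the waiter service
print(requests.get(f"{BASE_URL}/{restaurant_id}").json())

# Rename and then delete it
requests.put(f"{BASE_URL}/{restaurant_id}", json={"name": "Osteria"})
requests.delete(f"{BASE_URL}/{restaurant_id}")
```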

View File

@ -0,0 +1,30 @@
# See https://aka.ms/customizecontainer to learn how to customize the debug container and how Visual Studio uses this Dockerfile to build images for faster debugging.

# This stage is used when running from VS in fast mode (default for the Debug configuration)
FROM mcr.microsoft.com/dotnet/aspnet:8.0 AS base
USER app
WORKDIR /app
EXPOSE 8080
EXPOSE 8081

# This stage is used to build the service project
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
COPY ["ApiRestaurant.csproj", "."]
RUN dotnet restore "./ApiRestaurant.csproj"
COPY . .
WORKDIR "/src/."
RUN dotnet build "./ApiRestaurant.csproj" -c $BUILD_CONFIGURATION -o /app/build

# This stage is used to publish the service project, which is copied into the final stage
FROM build AS publish
ARG BUILD_CONFIGURATION=Release
RUN dotnet publish "./ApiRestaurant.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false

# This stage is used in production, or when running from VS in regular mode (default when not using the Debug configuration)
FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
ENTRYPOINT ["dotnet", "ApiRestaurant.dll"]

View File

@ -0,0 +1,8 @@
namespace ApiRestaurant.Models;

public class Restaurant
{
    public Guid Id { get; set; }
    public string Name { get; set; } = string.Empty;
    public List<Waiter>? Waiters { get; set; }
}

Some files were not shown because too many files have changed in this diff.