Compare commits

...

46 Commits

Author SHA1 Message Date
80c666d6b0 fast fix: main README tweaked slightly 2024-10-02 22:08:32 +04:00
a589994db5 fix: main README updated
Added a link and a note on which folder holds the output data
2024-10-02 22:03:34 +04:00
38ce2bb347 fix: first service updated
it was saving to the wrong file...
2024-10-02 21:40:44 +04:00
f25af86d9c add: added .dockerignore files 2024-10-02 21:40:15 +04:00
45eb2b72c5 fix: docker compose file
wrong tabs...
2024-10-02 21:07:18 +04:00
77bdc1d8e9 fix: updated the Dockerfile for the second service and edited the top-level README 2024-10-02 20:53:57 +04:00
da6593c4d0 Created the projects, Dockerfiles, and docker compose 2024-10-02 20:46:38 +04:00
8a96320fd5 Merge pull request 'bazunov_andrew_lab_1' (#33) from bazunov_andrew_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#33
2024-09-30 22:18:50 +04:00
bd25930973 Merge pull request 'tsukanova_irina_lab_2' (#32) from tsukanova_irina_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#32
2024-09-30 22:18:28 +04:00
37996c249a Merge pull request 'dolgov_dmitriy_lab_1' (#29) from dolgov_dmitriy_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#29
2024-09-26 10:25:37 +04:00
9456d4fe01 Merge pull request 'borschevskaya_anna_lab_2 is ready' (#25) from borschevskaya_anna_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#25
2024-09-26 10:20:55 +04:00
c14e105db5 Merge pull request 'presnyakova_victoria_lab_1' (#24) from presnyakova_victoria_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#24
2024-09-26 09:59:12 +04:00
4d1e900721 Merge pull request 'yakovleva_yulia_lab_2' (#20) from yakovleva_yulia_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#20
Reviewed-by: Alexey <a.zhelepov@mail.ru>
2024-09-26 08:45:08 +04:00
7184d6d728 Update bazunov_andrew_lab_1/README.md 2024-09-25 15:44:25 +04:00
Bazunov Andrew Igorevich
6e7055efa4 update readme 2024-09-25 15:37:57 +04:00
Bazunov Andrew Igorevich
9e40adc53c edit docker compose 2024-09-25 15:19:28 +04:00
Bazunov Andrew Igorevich
4a36528cc7 Complete docker compose 2024-09-25 12:35:39 +04:00
ad3988e5fc added video 2024-09-25 10:58:41 +04:00
780b4b2924 add readme and fix 2024-09-25 10:51:07 +04:00
5047b16cde files 2024-09-24 16:56:39 +04:00
2b87427299 something is here 2024-09-24 16:55:37 +04:00
6b55b7b0fc Merge pull request 'minhasapov_ruslan_lab_1' (#23) from minhasapov_ruslan_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#23
2024-09-24 13:43:10 +04:00
47193155d9 Merge pull request 'kashin_maxim_lab_1' (#22) from kashin_maxim_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#22
2024-09-24 13:21:02 +04:00
bc8c4c887e Merge pull request 'zhimolostnova_anna_lab_2' (#21) from zhimolostnova_anna_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#21
2024-09-24 13:17:26 +04:00
4a2adcc35a Merge pull request 'yakovleva_yulia_lab_1' (#19) from yakovleva_yulia_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#19
2024-09-24 11:59:06 +04:00
d7cb666a0d Merge pull request 'kuzarin_maxim_lab_3' (#17) from kuzarin_maxim_lab_3 into main
Reviewed-on: Alexey/DAS_2024_1#17
2024-09-24 11:58:22 +04:00
6c642384c1 Merge pull request 'zhimolostnova_anna_lab_1' (#16) from zhimolostnova_anna_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#16
2024-09-24 11:52:56 +04:00
bdb5cc07ed Update dolgov_dmitriy_lab_1/README.md 2024-09-24 01:30:02 +04:00
e761e33201 Update dolgov_dmitriy_lab_1/README.md 2024-09-24 01:28:51 +04:00
Аришина)
ceee500b95 Lab 1 is ready 2024-09-24 01:20:27 +04:00
2be2c71b69 moved 2024-09-23 20:19:10 +04:00
520337f92d borschevskaya_anna_lab_2 is ready 2024-09-23 08:40:17 +04:00
06d1d8cdd4 lab1 2024-09-22 18:06:51 +04:00
4c76a9dea6 minhasapov_ruslan_lab_1 is ready 2024-09-21 22:14:08 +04:00
e5d0aa0b3d Done 2024-09-21 16:19:03 +04:00
d326e64f24 fix readme again 2024-09-21 16:15:48 +04:00
1a118ae71f fix readme 2024-09-21 16:13:24 +04:00
e9b06b1f27 complete lab 2 2024-09-21 16:11:07 +04:00
JulYakJul
5e9e2600f3 yakovleva_yulia_lab_1 is ready 2024-09-19 16:14:05 +04:00
b6e311755e add branch + readme 2024-09-19 15:54:13 +04:00
8eedde24a1 Lab 3 is ready. A couple of points still need checking, but overall everything should be fine 2024-09-19 10:53:49 +03:00
57970b3333 fix readme 2024-09-19 02:08:16 +04:00
1c77ba3272 fix readme 2024-09-19 02:05:34 +04:00
ce9527b1c9 fix comments 2024-09-19 02:02:41 +04:00
a1419f21ec changes readme 2024-09-19 02:00:03 +04:00
aac01e9f48 complete lab 1 2024-09-19 01:56:40 +04:00
133 changed files with 6848 additions and 0 deletions

View File

@@ -0,0 +1,64 @@
# Laboratory work No. 2
> Hello, my name is Balakhonov Danila, group PIbd-42
>
> *— Balakhonov Danila, PIbd-42*
A video of laboratory work No. 2 is available at this [link](https://drive.google.com/file/d/1N4NgWsFLlHY5lGOO3Ps7DPvdJbHNxaqz/view?usp=sharing).
## How to run laboratory work No. 2?
### Components required to run laboratory work No. 2
> This section describes how to install the components required to run laboratory work No. 2 on the GNU/Linux distribution **Ubuntu**.
Running laboratory work No. 2 requires the following components:
- Git
- Docker
- Docker compose
To install **Git**, enter the following commands in the terminal:
``` bash
sudo apt-get update
sudo apt-get install git
```
To install **Docker** and **Docker compose**, run the following commands:
``` bash
# Set up the Docker repository
sudo apt-get update
sudo apt-get install ca-certificates curl
sudo install -m 0755 -d /etc/apt/keyrings
sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc
sudo chmod a+r /etc/apt/keyrings/docker.asc
echo \
"deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \
$(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
sudo apt-get update
# Install Docker and its components
sudo apt-get install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
```
### Running laboratory work No. 2
To run laboratory work No. 2, **clone** the repository into any folder and **switch to the branch** balakhonov_danila_lab_2, for example as sketched below.
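A minimal sketch of those two steps (the repository URL is a placeholder; the folder names assume this repository's layout):
``` bash
# Clone the course repository (substitute the real URL) and enter it
git clone <repository-url> DAS_2024_1
cd DAS_2024_1
# Switch to the lab branch and go to the folder with docker-compose.yaml
git checkout balakhonov_danila_lab_2
cd balakhonov_danila_lab_2
```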
Then, in the folder containing `docker-compose.yaml`, run the following command:
``` bash
sudo docker-compose up --build
```
This builds and starts the containers. The execution results are stored inside Docker's data directory; the files data.txt and result.txt are located at `/var/lib/docker/volumes/balakhonov_danila_lab_2_result/_data/`.
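For example, the output can be checked from the host like this (a sketch; root access is assumed because Docker's data directory is not world-readable):
``` bash
# List the files produced by the services and print the final result
sudo ls /var/lib/docker/volumes/balakhonov_danila_lab_2_result/_data/
sudo cat /var/lib/docker/volumes/balakhonov_danila_lab_2_result/_data/result.txt
```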
## Which technologies were used?
The following technologies were used for laboratory work No. 2:
- Dockerfile
- Docker compose
- Git
- .NET SDK, and F# in particular
The services were written in F# using the .NET SDK.
## What does laboratory work No. 2 do?
Laboratory work No. 2 starts two services:
1. A service that takes a random file from the `/var/data` directory and copies it to `/var/result/data.txt`
2. A service that finds the largest number in the file `/var/result/data.txt` and saves the count of its occurrences in the sequence to `/var/result/result.txt`
Laboratory work No. 2 provided practice in writing Dockerfiles for deploying projects in containers and in wiring them together with docker-compose.yaml.
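As a sanity check, the second service's result can be reproduced on the host with standard shell tools (a sketch, assuming the volume path mentioned above and that data.txt contains one number per line):
``` bash
DATA=/var/lib/docker/volumes/balakhonov_danila_lab_2_result/_data/data.txt
# Sort the numbers, count duplicates, and show the largest value together with its count
sudo sort -n "$DATA" | uniq -c | tail -n 1
```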

View File

@@ -0,0 +1,22 @@
services:
  app1:
    build: ./sigma_app_1/
    volumes:
      # Creates the /var/data folder inside the container
      # and maps the files from ./files into it
      - ./files:/var/data
      # Creates the /var/result folder inside the container
      # and a result volume inside Docker's data directory
      - result:/var/result
  app2:
    build: ./skibidi_app_2/
    # app2 will not start until app1 has been started:
    # it DEPENDS on app1
    depends_on:
      - app1
    volumes:
      - result:/var/result
volumes:
  # Declares the result volume, which is created
  # inside Docker's data directory
  result:

View File

@@ -0,0 +1,323 @@
245
678
12
987
456
234
789
345
678
123
456
789
234
567
890
12
34
56
78
90
123
456
789
321
654
987
432
876
543
210
987
654
321
456
789
12
34
56
78
90
123
456
789
234
567
890
123
456
789
987
654
321
432
876
543
210
678
345
678
123
456
789
234
567
890
12
34
56
78
90
123
456
789
321
654
987
432
876
543
210
678
345
678
123
456
789
234
567
890
12
34
56
78
90
123
456
789
321
654
987
432
876
543
210
678
345
678
123
456
789
234
567
890
12
34
56
78
90
123
456
789
321
654
987
432
876
543
210
678
345
678
123
456
789
234
567
890
12
34
56
78
90
123
456
789
321
654
987
432
876
543
210
678
345
678
123
456
789
234
567
890
12
34
56
78
90
123
456
789
321
654
987
432
876
543
210
678
345
678
123
456
789
234
567
890
12
34
56
78
90
123
456
789
321
654
987
432
876
543
210
678
345
678
123
456
789
234
567
890
12
34
56
78
90
123
456
789
321
654
987
432
876
543
210
678
345
678
123
456
789
234
567
890
12
34
56
78
90
123
456
789
321
654
987
432
876
543
210
678
345
678
123
456
789
234
567
890
12
34
56
78
90
123
456
789
321
654
987
432
876
543
210
678
345
678
123
456
789
234
567
890
12
34
56
78
90
123
456
789
321
654
987
432
876
543
210
678
345
678
123
456
789
234
567
890
12
34
56
78
90
123
456
789
321
654
987
432
876
543
210
678
345
678

View File

@@ -0,0 +1,642 @@
873
62
455
879
235
941
267
811
174
517
382
399
460
221
640
915
384
622
897
212
798
109
477
546
29
995
678
342
135
804
890
453
726
891
664
290
872
190
526
304
12
587
234
753
980
197
824
579
458
15
999
614
704
205
860
537
842
491
668
210
920
477
811
350
731
95
639
287
127
423
1000
394
521
8
267
154
431
715
266
834
173
268
947
582
157
367
882
737
305
472
481
651
960
843
701
122
514
92
658
884
371
458
637
620
793
285
611
785
495
822
849
708
592
465
469
78
734
667
606
241
666
474
569
543
918
68
906
123
501
330
947
111
365
734
249
429
296
16
511
974
317
764
230
542
920
821
718
281
556
575
900
632
720
462
88
275
403
100
418
684
600
119
863
781
225
971
670
80
643
220
176
588
58
202
850
537
934
748
378
817
505
696
21
630
324
117
420
257
493
826
688
305
772
654
927
208
525
511
256
650
447
163
99
74
99
487
306
754
510
132
201
392
785
778
512
258
904
932
589
694
204
884
110
673
152
649
295
387
758
927
538
619
904
651
174
712
104
641
474
198
322
764
204
407
550
42
879
716
368
316
43
600
893
370
137
631
244
571
663
551
907
211
166
746
583
708
771
215
90
829
653
494
563
334
794
745
936
718
126
923
451
668
966
532
935
886
646
75
858
693
859
284
315
679
133
878
292
340
716
128
250
554
482
789
677
308
494
931
144
337
982
713
535
893
939
932
905
805
236
991
781
686
572
951
335
58
303
335
145
608
794
862
792
619
54
292
878
585
293
959
379
20
484
144
678
67
363
946
566
106
442
820
562
109
201
759
481
289
698
25
847
648
733
613
776
989
257
864
32
703
989
465
103
963
515
829
30
303
926
159
586
268
852
953
321
306
978
909
177
835
458
994
885
213
775
385
598
267
754
448
1000
555
354
657
231
979
265
374
68
197
953
648
153
523
761
827
819
63
782
766
882
404
258
672
883
80
111
212
681
812
911
837
194
161
143
427
981
132
357
605
810
414
20
210
772
882
313
186
578
154
523
339
383
903
29
172
62
314
491
289
550
521
327
794
299
678
769
415
266
77
33
438
233
160
11
523
623
254
29
327
924
938
588
444
976
547
775
638
35
23
203
203
927
149
198
150
370
728
775
818
768
99
40
969
435
49
276
360
964
277
283
825
479
331
471
381
652
264
564
891
638
470
291
101
143
93
663
328
841
881
94
327
2
628
474
905
545
421
453
282
276
24
655
295
48
102
49
676
187
773
169
170
165
405
348
4
654
276
343
153
381
756
753
816
474
186
652
67
689
69
920
880
363
637
524
171
753
12
634
648
668
220
408
348
887
341
738
681
408
377
693
234
83
982
417
222
322
253
494
868
951
344
60
23
41
99
944
723
156
813
5
44
62
899
835
482
469
157
637
295
929
992
234
66
31
170
333
92
185
117
627
82
292
796
840
768
532
981
300
125
958
4

View File

@@ -0,0 +1,489 @@
522
173
815
671
284
903
477
639
732
143
928
564
812
109
397
249
868
301
848
376
794
99
506
217
645
12
187
930
811
583
684
455
94
499
118
722
603
267
772
947
845
210
495
632
372
930
908
546
327
685
883
235
613
579
762
491
328
672
156
739
1000
421
731
215
867
610
847
732
204
411
515
150
438
651
174
590
725
963
530
889
577
694
417
261
767
480
934
125
558
282
899
96
653
908
303
774
617
407
482
538
239
472
766
118
920
206
797
420
853
205
340
123
387
497
640
24
999
476
77
920
382
405
55
834
371
167
290
300
611
53
470
81
232
14
451
678
623
564
787
99
648
873
803
888
504
186
256
405
102
999
673
721
434
814
305
582
436
90
774
216
706
855
702
307
59
835
812
234
736
168
523
219
868
365
294
500
207
927
450
521
851
703
992
327
916
554
846
658
88
659
628
764
84
45
10
870
779
320
882
942
93
792
836
137
489
862
391
337
887
114
237
178
874
569
135
919
931
231
50
995
215
658
139
484
292
903
113
755
333
829
942
360
172
689
42
127
799
191
455
533
234
15
404
636
373
884
921
977
113
227
703
173
297
440
604
575
971
855
82
252
589
276
826
206
166
482
375
174
612
818
854
832
809
569
306
993
931
289
148
943
421
784
441
536
426
548
49
687
415
505
951
583
368
172
974
47
173
570
264
754
701
693
796
914
809
310
512
725
963
829
614
220
410
631
860
270
158
168
595
62
715
913
517
157
5
660
274
414
139
300
698
675
263
872
292
142
375
696
895
302
75
576
899
524
362
721
916
883
347
980
29
392
839
971
593
708
804
678
234
719
659
418
914
437
550
418
576
776
293
737
348
292
48
975
547
205
831
783
587
657
132
733
53
700
785
292
332
771
849
994
905
460
420
923
663
134
658
673
618
779
951
244
425
312
436
878
538
236
805
457
897
799
134
469
56
724
370
521
654
20
260
315
525
501
433
90
368
192
162
198
65
652
613
222
160
76
755
541
305
257
669
179
849
878
249
224
4
1
860
967
738
712
281
834
908
774
964
880
902
234
635
138
305
532
585
956
68
21
278
639
622
473
769
161
580
285
204
410
115
430
953
968
593
703
704
469
835
623
991

View File

@@ -0,0 +1,4 @@
bin/
obj/
Dockerfile
README.md

View File

@@ -0,0 +1,484 @@
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from `dotnet new gitignore`
# dotenv files
.env
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
[Ll]ogs/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET
project.lock.json
project.fragment.lock.json
artifacts/
# Tye
.tye/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.tlog
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio 6 auto-generated project file (contains which files were open etc.)
*.vbp
# Visual Studio 6 workspace and project file (working project files containing files to include in project)
*.dsw
*.dsp
# Visual Studio 6 technical files
*.ncb
*.aps
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# Visual Studio History (VSHistory) files
.vshistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd
# VS Code files for those working on multiple tools
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
*.code-workspace
# Local History for Visual Studio Code
.history/
# Windows Installer files from build outputs
*.cab
*.msi
*.msix
*.msm
*.msp
# JetBrains Rider
*.sln.iml
.idea
##
## Visual studio for Mac
##
# globs
Makefile.in
*.userprefs
*.usertasks
config.make
config.status
aclocal.m4
install-sh
autom4te.cache/
*.tar.gz
tarballs/
test-results/
# Mac bundle stuff
*.dmg
*.app
# content below from: https://github.com/github/gitignore/blob/master/Global/macOS.gitignore
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
# content below from: https://github.com/github/gitignore/blob/master/Global/Windows.gitignore
# Windows thumbnail cache files
Thumbs.db
ehthumbs.db
ehthumbs_vista.db
# Dump file
*.stackdump
# Folder config file
[Dd]esktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp
# Windows shortcuts
*.lnk
# Vim temporary swap files
*.swp

View File

@@ -0,0 +1,14 @@
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
WORKDIR /App
# Copy everything
COPY . ./
# Restore as distinct layers
RUN dotnet restore
# Build and publish a release
RUN dotnet publish -c Release -o out
FROM mcr.microsoft.com/dotnet/runtime:8.0 AS runtime
WORKDIR /App
COPY --from=build /App/out .
ENTRYPOINT ["dotnet", "sigma_app_1.dll"]

View File

@@ -0,0 +1,14 @@
let PATH = @"/var/data/"
let RESULT_PATH = @"/var/result/data.txt"

let getFiles(path: string): seq<string> =
    System.IO.Directory.EnumerateFiles(path)

let getRandFile(files: seq<string>) =
    let rand = System.Random()
    let index = rand.Next(Seq.length files)
    Seq.item index files

let files = getFiles(PATH)
let randFile = getRandFile(files)
System.IO.File.Copy(randFile, RESULT_PATH)

View File

@@ -0,0 +1,4 @@
# First program of laboratory work No. 2
> Variant 6
>
> Takes a random file from the `/var/data` directory and copies it to `/var/result/data.txt`

View File

@@ -0,0 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net8.0</TargetFramework>
  </PropertyGroup>
  <ItemGroup>
    <Compile Include="Program.fs" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,4 @@
bin/
obj/
Dockerfile
README.md

View File

@@ -0,0 +1,484 @@
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from `dotnet new gitignore`
# dotenv files
.env
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
[Ll]ogs/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET
project.lock.json
project.fragment.lock.json
artifacts/
# Tye
.tye/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.tlog
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio 6 auto-generated project file (contains which files were open etc.)
*.vbp
# Visual Studio 6 workspace and project file (working project files containing files to include in project)
*.dsw
*.dsp
# Visual Studio 6 technical files
*.ncb
*.aps
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# Visual Studio History (VSHistory) files
.vshistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd
# VS Code files for those working on multiple tools
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
*.code-workspace
# Local History for Visual Studio Code
.history/
# Windows Installer files from build outputs
*.cab
*.msi
*.msix
*.msm
*.msp
# JetBrains Rider
*.sln.iml
.idea
##
## Visual studio for Mac
##
# globs
Makefile.in
*.userprefs
*.usertasks
config.make
config.status
aclocal.m4
install-sh
autom4te.cache/
*.tar.gz
tarballs/
test-results/
# Mac bundle stuff
*.dmg
*.app
# content below from: https://github.com/github/gitignore/blob/master/Global/macOS.gitignore
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
# content below from: https://github.com/github/gitignore/blob/master/Global/Windows.gitignore
# Windows thumbnail cache files
Thumbs.db
ehthumbs.db
ehthumbs_vista.db
# Dump file
*.stackdump
# Folder config file
[Dd]esktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp
# Windows shortcuts
*.lnk
# Vim temporary swap files
*.swp

View File

@@ -0,0 +1,14 @@
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
WORKDIR /App
# Copy everything
COPY . ./
# Restore as distinct layers
RUN dotnet restore
# Build and publish a release
RUN dotnet publish -c Release -o out
FROM mcr.microsoft.com/dotnet/runtime:8.0 AS runtime
WORKDIR /App
COPY --from=build /App/out .
ENTRYPOINT ["dotnet", "skibidi_app_2.dll"]

View File

@@ -0,0 +1,16 @@
let INPUT_FILE = @"/var/result/data.txt"
let OUTPUT_FILE = @"/var/result/result.txt"

let getNumbersFromFile(path: string): seq<int> =
    System.IO.File.ReadLines(path)
    |> Seq.map int

let getCountOfMaxNumber(numbers: seq<int>): int =
    numbers
    |> Seq.max
    |> fun maxNum -> Seq.filter ((=) maxNum) numbers
    |> Seq.length

let numbers = getNumbersFromFile(INPUT_FILE)
let count = getCountOfMaxNumber(numbers)
System.IO.File.WriteAllText(OUTPUT_FILE, string count)

View File

@@ -0,0 +1,4 @@
# Second program of laboratory work No. 2
> Variant 3
>
> Finds the largest number in the file `/var/result/data.txt` and saves the count of such numbers in the sequence to `/var/result/result.txt`

View File

@@ -0,0 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net8.0</TargetFramework>
  </PropertyGroup>
  <ItemGroup>
    <Compile Include="Program.fs" />
  </ItemGroup>
</Project>

bazunov_andrew_lab_1/.gitignore
View File

@@ -0,0 +1,2 @@
ollama
./ollama

View File

@@ -0,0 +1,33 @@
# Distributed Computing and Applications, Lab 1
## _Author: Bazunov Andrew Igorevich, PIbd-42_
The following services were chosen:
1. Ollama (_a service for running LLM models_)
2. Open Web UI (_a service for convenient chatting with the model from the Ollama service_)
3. Gitea (_a Git service_)
# Docker
> Before running, install Docker and check its version
```sh
docker-compose --version
```
> Next, configure the docker-compose.yaml file and start the containers
```sh
docker-compose up -d
```
> To shut the containers down, use the command
```sh
docker-compose down
```
---
> Note: after the containers are up, you need to enter the **ollama** container and install the [gemma2](https://ollama.com/library/gemma2:2b) model
> ```sh
> docker-compose exec ollama ollama run gemma2:2b
> ```
---
After that, the Open Web UI web service is available at **localhost:8080** for chatting with the model, and Gitea at **localhost:3000** - [demo of it working](https://vk.com/video/@viltskaa?z=video236673313_456239574%2Fpl_236673313_-2)
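A quick way to confirm that both web services are reachable once the containers are up (a sketch; `curl` is assumed to be installed on the host):
```sh
# Both commands should print an HTTP status code such as 200 or 302
curl -s -o /dev/null -w "Open Web UI: %{http_code}\n" http://localhost:8080
curl -s -o /dev/null -w "Gitea: %{http_code}\n" http://localhost:3000
# List the containers of this compose project
docker-compose ps
```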

View File

@@ -0,0 +1,61 @@
services:
  gitea: # Service name
    image: gitea/gitea:latest # Image name
    container_name: gitea # Container name, can be arbitrary
    ports:
      - "3000:3000" # Expose the Gitea port on the host
    volumes: # storage
      - data:/data
    environment: # environment variables
      USER_UID: 1000
      USER_GID: 1000
  ollama:
    image: ollama/ollama:latest
    container_name: ollama
    restart: always
    ports:
      - 7869:11434
    pull_policy: always
    tty: true
    volumes:
      - .:/code
      - ./ollama/ollama:/root/.ollama # Directory for Ollama data
    environment:
      - OLLAMA_KEEP_ALIVE=24h
      - OLLAMA_HOST=0.0.0.0 # Host for the Ollama API
    networks:
      - ollama-docker
    command: ["serve"] # Run Ollama in server mode
  ollama-webui:
    image: ghcr.io/open-webui/open-webui:main # Open Web UI image
    container_name: ollama-webui
    restart: unless-stopped
    volumes:
      - ./ollama/ollama-webui:/app/backend/data
    ports:
      - 8080:8080 # Port for the web interface
    environment: # https://docs.openwebui.com/getting-started/env-configuration#default_models
      - OLLAMA_BASE_URLS=http://host.docker.internal:7869
      - ENV=dev
      - WEBUI_AUTH=False
      - WEBUI_NAME=Viltskaa AI
      - WEBUI_URL=http://localhost:8080
      - WEBUI_SECRET_KEY=t0p-s3cr3t
    depends_on:
      - ollama
    extra_hosts:
      - host.docker.internal:host-gateway
    networks:
      - ollama-docker
networks:
  ollama-docker:
    external: false
volumes:
  ollama:
    driver: local
  data:
    driver: local

borschevskaya_anna_lab_2/.gitignore
View File

@@ -0,0 +1,38 @@
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**/target/
!**/src/test/**/target/
### IntelliJ IDEA ###
.idea/modules.xml
.idea/jarRepositories.xml
.idea/compiler.xml
.idea/libraries/
*.iws
*.iml
*.ipr
### Eclipse ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/
### VS Code ###
.vscode/
### Mac OS ###
.DS_Store

View File

@@ -0,0 +1,43 @@
# Report. Laboratory work 2
As part of laboratory work No. 2, two services that work with text files were written.
Task variant No. 5 was chosen for the first service:
```
Finds the file with the shortest name in the /var/data directory and moves it to /var/result/data.txt.
```
And variant No. 2 for the second:
```
Finds the smallest number in the file /var/data/data.txt and saves its third power to /var/result/result.txt.
```
## Description
First, the first service moves the file with the shortest name from the specified mounted directory into the output folder.
The second service has access to that output folder; it takes the smallest number from the file placed there by the first service
and writes its third power to the output file.
The output file is located in a mounted directory and is accessible on the machine where the services run.
The Dockerfile uses a multi-stage build with a separate base image for each stage.
The meaning of each line is described in the Dockerfile of the first service.
The docker-compose.yml file contains descriptions of the new lines related to mounting volumes.
Note that the services "communicate" through a shared volume named common, which is mounted into the containers at /var/result. This allows the results
of the first service to be saved for use by the second service.
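For reference, the shared volume can also be inspected from the host after a run (a sketch; the volume name assumes the default compose project name, which matches the startup log shown below):
```bash
# Print where Docker stores the shared volume, then list its contents
docker volume inspect borschevskaya_anna_lab_2_common --format '{{ .Mountpoint }}'
sudo ls /var/lib/docker/volumes/borschevskaya_anna_lab_2_common/_data
```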
## How to run
To start the services, do the following:
1. Install and start Docker Engine or Docker Desktop
2. In a terminal, change to the folder containing docker-compose.yml
3. Run the command:
```
docker compose up --build
```
If all containers start successfully, the console prints the following message:
```
✔ Network borschevskaya_anna_lab_2_default Created 0.1s
✔ Container borschevskaya_anna_lab_2-first-1 Created 0.1s
✔ Container borschevskaya_anna_lab_2-second-1 Created 0.1s
Attaching to borschevskaya_anna_lab_2-first-1, borschevskaya_anna_lab_2-second-1
```
After that, each service prints messages to its console about how the file processing went.
If the INPUT_PATH and OUTPUT_PATH environment variables are not set, or in other exceptional situations,
a corresponding message is printed.
## Video report
The lab's operation can be seen in the following [video](https://disk.yandex.ru/i/LFxdyRUFQDwXEQ).

View File

@@ -0,0 +1,22 @@
services:
  first:
    build: ./first # directory in which to look for the Dockerfile that builds the first service
    environment:
      INPUT_PATH: /var/data/ # directory with the input data to process
      OUTPUT_PATH: /var/result/ # directory for the processing output
    volumes:
      - ./volumes/input:/var/data # mounts the local folder with input data into a folder inside the container
      - common:/var/result # mounts the volume shared by both services, into which first puts its processing results
  second:
    build: ./second # directory in which to look for the Dockerfile that builds the second service
    depends_on: # the second service depends on the first and is started after it
      - first
    environment:
      INPUT_PATH: /var/result/
      OUTPUT_PATH: /var/data/
    volumes:
      - ./volumes/output:/var/data
      - common:/var/result # mounts the shared volume from which second takes the results of first and applies its own logic
volumes:
  common:

View File

@@ -0,0 +1,25 @@
# Use a Maven image for the build stage
FROM maven:3.8-eclipse-temurin-21-alpine AS build
# Set the working directory
WORKDIR /app
# Copy only pom.xml and download the dependencies:
# this way Docker caches the dependency layers, so when only the code changes the cached layers are reused and builds are faster
COPY pom.xml .
RUN mvn dependency:go-offline
# Copy the remaining sources
COPY src ./src
# Build the whole project
RUN mvn clean package -DskipTests
# Use an official JDK image to run the built jar
FROM eclipse-temurin:21-jdk-alpine
# Copy the jar file from the previous stage
COPY --from=build /app/target/*.jar /app.jar
# Command to start the application
CMD ["java", "-jar", "app.jar"]

View File

@@ -0,0 +1,37 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>ru.first</groupId>
<artifactId>first</artifactId>
<version>1.0.0-SNAPSHOT</version>
<properties>
<maven.compiler.source>21</maven.compiler.source>
<maven.compiler.target>21</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<build>
<plugins>
<plugin>
<!-- Build an executable JAR -->
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>3.1.0</version>
<configuration>
<archive>
<manifest>
<addClasspath>true</addClasspath>
<classpathPrefix>lib/</classpathPrefix>
<mainClass>ru.first.Main</mainClass>
</manifest>
</archive>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,50 @@
package ru.first;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Comparator;
import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
import static java.util.Objects.isNull;
public class Main {
public static final String INPUT_PATH = System.getenv("INPUT_PATH");
public static final String OUTPUT_PATH = System.getenv("OUTPUT_PATH");
public static final String RESULT_FILE_NAME = "data.txt";
public static void main(String[] args) throws IOException {
if (isNull(INPUT_PATH) || INPUT_PATH.isEmpty() || isNull(OUTPUT_PATH) || OUTPUT_PATH.isEmpty()) {
System.out.printf("Отсутствуют переменные окружения INPUT_PATH = '%s' или OUTPUT_PATH = '%s'%n",
INPUT_PATH, OUTPUT_PATH);
return;
}
var inputPathDir = Path.of(INPUT_PATH);
if (!Files.exists(inputPathDir)) {
Files.createDirectory(inputPathDir);
}
var inputDirectory = new File(INPUT_PATH);
var allDirFiles = inputDirectory.listFiles();
if (isNull(allDirFiles) || allDirFiles.length == 0) {
System.out.println("Директория пуста");
return;
}
var dirFiles = Arrays.stream(allDirFiles).filter(File::isFile).toList();
if (dirFiles.isEmpty()) {
System.out.println("В указанной директории нет подходящих для обработки файлов");
return;
}
var shortestName = dirFiles.stream().min(Comparator.comparing(file -> file.getName().length())).get();
var outputPathDir = Path.of(OUTPUT_PATH);
if (!Files.exists(outputPathDir)) {
Files.createDirectory(outputPathDir);
}
var resultFilePath = Path.of(OUTPUT_PATH + File.separator + RESULT_FILE_NAME);
Files.move(Path.of(INPUT_PATH + File.separator + shortestName.getName()), resultFilePath, REPLACE_EXISTING);
}
}

View File

@@ -0,0 +1,16 @@
FROM maven:3.8-eclipse-temurin-21-alpine AS build
WORKDIR /app
COPY pom.xml .
RUN mvn dependency:go-offline
COPY src ./src
RUN mvn clean package -DskipTests
FROM eclipse-temurin:21-jdk-alpine
COPY --from=build /app/target/*.jar /app.jar
CMD ["java", "-jar", "app.jar"]

View File

@@ -0,0 +1,36 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>ru.second</groupId>
<artifactId>second</artifactId>
<version>1.0.0-SNAPSHOT</version>
<properties>
<maven.compiler.source>21</maven.compiler.source>
<maven.compiler.target>21</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>3.1.0</version>
<configuration>
<archive>
<manifest>
<addClasspath>true</addClasspath>
<classpathPrefix>lib/</classpathPrefix>
<mainClass>ru.second.Main</mainClass>
</manifest>
</archive>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,51 @@
package ru.second;
import java.io.File;
import java.io.FileWriter;
import java.nio.file.Files;
import static java.util.Objects.isNull;
public class Main {
public static final String INPUT_PATH = System.getenv("INPUT_PATH");
public static final String INPUT_FILE_NAME = "data.txt";
public static final String OUTPUT_PATH = System.getenv("OUTPUT_PATH");
public static final String RESULT_FILE_NAME = "result.txt";
public static void main(String[] args) {
if (isNull(INPUT_PATH) || INPUT_PATH.isEmpty() || isNull(OUTPUT_PATH) || OUTPUT_PATH.isEmpty()) {
System.out.printf("Отсутствуют переменные окружения INPUT_PATH = '%s' или OUTPUT_PATH = '%s'%n",
INPUT_PATH, OUTPUT_PATH);
return;
}
var inputFile = new File(INPUT_PATH + File.separator + INPUT_FILE_NAME);
if (!inputFile.exists()) {
System.out.println("Входной файл не существует");
return;
}
try (var stream = Files.lines(inputFile.toPath());
var writer = new FileWriter(OUTPUT_PATH + File.separator + RESULT_FILE_NAME);
) {
var min = stream.map(Main::parseInt).reduce(Integer::min);
if (min.isEmpty()) {
System.out.println("Не найдено минимальное значение среди строк файла");
return;
}
var minValue = Math.pow(min.get(), 3);
System.out.printf("Get min value = '%d'%n", min.get());
writer.append(Double.toString(minValue));
System.out.printf("To file %s was written value %f%n", RESULT_FILE_NAME, minValue);
} catch (Exception ex) {
System.out.println(ex.getMessage());
}
}
private static Integer parseInt(String line) {
line = line.replace("\\n", "");
return Integer.parseInt(line);
}
}

dolgov_dmitriy_lab_1/.gitignore
View File

@@ -0,0 +1,4 @@
data/
log/
wordpress/
custom/

View File

@@ -0,0 +1,34 @@
# Laboratory work No. 1
## Completed by: Dolgov Dmitriy, group PIbd-42
### The following services were deployed:
* mediawiki (a wiki engine)
* wordpress (a popular content management system)
* gitea (a service for hosting git repositories)
* mariaDB
### The following technologies were used:
* git
* docker
* docker-compose
### To run the lab, enter the following command in the console:
```
docker compose up -d
```
## Startup result:
```
[+] Running 4/4
✔ Container dolgov_dmitriy_lab_1-wordpress-1 Running 0.0s
✔ Container dolgov_dmitriy_lab_1-database-1 Running 0.0s
✔ Container dolgov_dmitriy_lab_1-mediawiki-1 Running 0.0s
✔ Container gitea Running
```
## Video of the startup result:
The video is available at this [link](https://drive.google.com/file/d/1hC6HhNvYBRuYVClobXyDMReA4ngwxhwc/view?usp=drive_link).
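A quick reachability check after startup (a sketch; the ports are the ones published in the docker-compose.yml shown below, and `curl` is assumed to be available):
```
curl -s -o /dev/null -w "MediaWiki: %{http_code}\n" http://localhost:8080
curl -s -o /dev/null -w "WordPress: %{http_code}\n" http://localhost:8082
curl -s -o /dev/null -w "Gitea: %{http_code}\n" http://localhost:8081
```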

View File

@@ -0,0 +1,73 @@
# Services for the assignment
services:
  # MediaWiki service
  mediawiki:
    # MediaWiki image
    image: mediawiki
    # Automatic restart on failure
    restart: always
    # forward port 80 of the container to port 8080 on the host
    ports:
      - "8080:80"
    # link to the database service
    links:
      - database
    # mount a volume for data storage
    volumes:
      - images:/var/www/html/images
  # WordPress service
  wordpress:
    # WordPress image
    image: wordpress:latest
    ports:
      - "8082:80"
    # Automatic restart on failure
    restart: always
    volumes:
      - ./wordpress:/var/www/html
  # Gitea service
  server:
    image: gitea/gitea:latest
    container_name: gitea
    restart: always
    environment:
      - USER_UID=1000
      - USER_GID=1000
    volumes:
      - ./data:/data
      - ./custom:/app/gitea/custom
      - ./log:/app/gitea/log
    ports:
      - "8081:3000"
    links:
      - database
    depends_on:
      - database
  # MariaDB service
  database:
    # MariaDB image
    image: mariadb
    # Automatic restart on failure
    restart: always
    # environment variables for configuring the database
    environment:
      # database name
      MYSQL_DATABASE: my_wiki
      # user name
      MYSQL_USER: user
      # user password
      MYSQL_PASSWORD: user
      # random password for the root user
      MYSQL_RANDOM_ROOT_PASSWORD: 'yes'
    # mount a volume for data storage
    volumes:
      - db:/var/lib/mysql
# volumes for data storage
volumes:
  images:
  db:

Binary file not shown (image, 275 KiB).

View File

@@ -0,0 +1,93 @@
# Kashin Maxim, PIbd-42
## Description
This project uses Docker Compose to deploy an environment with a MySQL database, the Redmine project management system, and the WordPress site-building platform. The steps for launching and configuring it are given below.
## 0. Preliminary steps
Before running Docker Compose, I did the following:
1. Installed [Docker Desktop](https://www.docker.com/products/docker-desktop) for my operating system.
2. Configured the Windows firewall and allowed Docker access.
3. Opened PowerShell as administrator and ran the following commands:
```bash
net start vmcompute
wsl --set-default-version 2
```
## 1. Structure and launching the services
The `docker-compose.yml` file describes three services:
- **db (MySQL)**: the database;
- **redmine (Redmine)**: the project management system;
- **wordpress (WordPress)**: the site-building platform.
All services are started with:
```bash
docker-compose up -d
```
## 2. MySQL service
The `db` service is a container running MySQL 8.0.
- **Image**: `mysql:8.0`
- **Environment variables**:
  - `MYSQL_ROOT_PASSWORD`: the password for the root user.
  - `MYSQL_DATABASE`: the name of the database to create (exampledb).
  - `MYSQL_USER`: the database user name (exampleuser).
  - `MYSQL_PASSWORD`: the database user's password (examplepass).
- **Volumes**:
  - `db-data:/var/lib/mysql`: a volume for storing the database data.
The MySQL service is required by both Redmine and WordPress.
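To verify the database is up, a MySQL shell can be opened inside the `db` container (a sketch; the credentials are the ones defined in docker-compose.yml, and the services must already be running):
```bash
docker-compose exec db mysql -uexampleuser -pexamplepass exampledb -e "SHOW DATABASES;"
```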
## 3. Redmine service
The `redmine` service is a container running the Redmine project management system.
- **Image**: `redmine`
- **Ports**:
  - `8080:3000`: Redmine will be available at http://localhost:8080.
- **Environment variables**:
  - `REDMINE_DB_DATABASE`: the database name (exampledb).
  - `REDMINE_DB_MYSQL`: the database host name (db).
  - `REDMINE_DB_PASSWORD`: the password for connecting to the database (example).
  - `REDMINE_SECRET_KEY_BASE`: the secret key used for sessions.
- **Volumes**:
  - `redmine:/usr/src/redmine/files`: a volume for storing Redmine files.
## 4. WordPress service
The `wordpress` service is a container running the WordPress platform.
- **Image**: `wordpress`
- **Ports**:
  - `8081:80`: WordPress will be available at http://localhost:8081.
- **Environment variables**:
  - `WORDPRESS_DB_HOST`: the database host (db).
  - `WORDPRESS_DB_USER`: the database user name (exampleuser).
  - `WORDPRESS_DB_PASSWORD`: the password for connecting to the database (examplepass).
  - `WORDPRESS_DB_NAME`: the database name (exampledb).
- **Volumes**:
  - `wordpress:/var/www/html`: a volume for storing WordPress files.
## 5. Data volumes
Three volumes are used to persist the containers' data:
- `wordpress`: WordPress data.
- `db-data`: MySQL data.
- `redmine`: Redmine files.
## 6. Stopping the services
To stop and remove all containers, run:
```bash
docker-compose down
```
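If the stored data should be removed as well, the named volumes can be deleted together with the containers (a sketch; note that this permanently deletes the MySQL, Redmine, and WordPress data):
```bash
docker-compose down -v
```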
## 7. Video link
[Video report by Kashin Maxim, PIbd-42](https://disk.yandex.ru/i/O8L1qmk4PIbCvA)

View File

@@ -0,0 +1,45 @@
version: '3.1' # Docker Compose file version
services: # Services to be started
  db: # MySQL database service
    image: mysql:8.0 # Image
    restart: always # Automatic container restart
    environment: # Environment variables for configuring the database
      MYSQL_ROOT_PASSWORD: example
      MYSQL_DATABASE: exampledb
      MYSQL_USER: exampleuser
      MYSQL_PASSWORD: examplepass
    volumes: # Volumes for persisting the database data
      - db-data:/var/lib/mysql
  redmine: # Redmine, an issue-tracking system (bug tracker)
    image: redmine # Image
    restart: always # Automatic container restart
    ports:
      - 8080:3000 # Forward local port 8080 to port 3000 in the container
    environment: # Environment variables for configuring Redmine (database connection)
      REDMINE_DB_DATABASE: exampledb
      REDMINE_DB_MYSQL: db
      REDMINE_DB_PASSWORD: example
      REDMINE_SECRET_KEY_BASE: supersecretkey
    volumes: # Volume for Redmine data
      - redmine:/usr/src/redmine/files
  wordpress: # WordPress, a popular content management system
    image: wordpress # Image
    restart: always # Automatic container restart
    ports:
      - 8081:80 # Forward local port 8081 to port 80 in the container
    environment: # Environment variables for configuring WordPress (database connection)
      WORDPRESS_DB_HOST: db
      WORDPRESS_DB_USER: exampleuser
      WORDPRESS_DB_PASSWORD: examplepass
      WORDPRESS_DB_NAME: exampledb
    volumes: # Volume for WordPress data
      - wordpress:/var/www/html
volumes: # Volumes for data storage
  wordpress:
  db-data:
  redmine:

View File

@@ -0,0 +1,30 @@
**/.classpath
**/.dockerignore
**/.env
**/.git
**/.gitignore
**/.project
**/.settings
**/.toolstarget
**/.vs
**/.vscode
**/*.*proj.user
**/*.dbmdl
**/*.jfm
**/azds.yaml
**/bin
**/charts
**/docker-compose*
**/Dockerfile*
**/node_modules
**/npm-debug.log
**/obj
**/secrets.dev.yaml
**/values.dev.yaml
LICENSE
README.md
!**/.gitignore
!.git/HEAD
!.git/config
!.git/packed-refs
!.git/refs/heads/**

kuzarin_maxim_lab_3/.gitattributes
View File

@@ -0,0 +1,63 @@
###############################################################################
# Set default behavior to automatically normalize line endings.
###############################################################################
* text=auto
###############################################################################
# Set default behavior for command prompt diff.
#
# This is need for earlier builds of msysgit that does not have it on by
# default for csharp files.
# Note: This is only used by command line
###############################################################################
#*.cs diff=csharp
###############################################################################
# Set the merge driver for project and solution files
#
# Merging from the command prompt will add diff markers to the files if there
# are conflicts (Merging from VS is not affected by the settings below, in VS
# the diff markers are never inserted). Diff markers may cause the following
# file extensions to fail to load in VS. An alternative would be to treat
# these files as binary and thus will always conflict and require user
# intervention with every merge. To do so, just uncomment the entries below
###############################################################################
#*.sln merge=binary
#*.csproj merge=binary
#*.vbproj merge=binary
#*.vcxproj merge=binary
#*.vcproj merge=binary
#*.dbproj merge=binary
#*.fsproj merge=binary
#*.lsproj merge=binary
#*.wixproj merge=binary
#*.modelproj merge=binary
#*.sqlproj merge=binary
#*.wwaproj merge=binary
###############################################################################
# behavior for image files
#
# image files are treated as binary by default.
###############################################################################
#*.jpg binary
#*.png binary
#*.gif binary
###############################################################################
# diff behavior for common document formats
#
# Convert binary document formats to text before diffing them. This feature
# is only available from the command line. Turn it on by uncommenting the
# entries below.
###############################################################################
#*.doc diff=astextplain
#*.DOC diff=astextplain
#*.docx diff=astextplain
#*.DOCX diff=astextplain
#*.dot diff=astextplain
#*.DOT diff=astextplain
#*.pdf diff=astextplain
#*.PDF diff=astextplain
#*.rtf diff=astextplain
#*.RTF diff=astextplain

kuzarin_maxim_lab_3/.gitignore
View File

@@ -0,0 +1,363 @@
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Oo]ut/
[Ll]og/
[Ll]ogs/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd

View File

@@ -0,0 +1,31 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.10.34916.146
MinimumVisualStudioVersion = 10.0.40219.1
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "DSaC", "DSaC\DSaC.csproj", "{C1051C12-D7D0-4C77-AFBC-4F5FFD8EE367}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DSaC_second", "DSaC_second\DSaC_second.csproj", "{64F78585-2BBC-4656-BC50-41FBB8917719}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{C1051C12-D7D0-4C77-AFBC-4F5FFD8EE367}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C1051C12-D7D0-4C77-AFBC-4F5FFD8EE367}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C1051C12-D7D0-4C77-AFBC-4F5FFD8EE367}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C1051C12-D7D0-4C77-AFBC-4F5FFD8EE367}.Release|Any CPU.Build.0 = Release|Any CPU
{64F78585-2BBC-4656-BC50-41FBB8917719}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{64F78585-2BBC-4656-BC50-41FBB8917719}.Debug|Any CPU.Build.0 = Debug|Any CPU
{64F78585-2BBC-4656-BC50-41FBB8917719}.Release|Any CPU.ActiveCfg = Release|Any CPU
{64F78585-2BBC-4656-BC50-41FBB8917719}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {30D05708-634E-4FDE-9BCA-5A1B7A5EFF59}
EndGlobalSection
EndGlobal

View File

@@ -0,0 +1,88 @@
using DSaC.Models.DTOs;
using DSaC.Models.Internal.Queries;
using DSaC.Models.Internal.Сommands;
using MediatR;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.RazorPages;
namespace DSaC.Controllers
{
[Route("api/[controller]")]
[ApiController]
public class CounterpartiesController : ControllerBase
{
private readonly IMediator mediator;
public CounterpartiesController(IMediator mediator)
{
this.mediator = mediator;
}
[HttpGet("")]
public async Task<IActionResult> GetCounterparties(
[FromQuery] int page = 0,
[FromQuery] int pageSize = 10,
[FromQuery] List<Guid>? ids = null
)
{
var request = new GetCounterpartiesQuery
{
Page = page,
PageSize = pageSize,
Ids = ids
};
var response = await mediator.Send(request);
return !response.IsError ? Ok(response.Value) : StatusCode(response.ErrorCode!.Value, response.ErrorText);
}
[HttpGet("{uuid:guid}")]
public async Task<IActionResult> GetFullCounterparty([FromRoute] Guid uuid)
{
var request = new GetCounterpartyQuery
{
Id = uuid
};
var response = await mediator.Send(request);
return !response.IsError ? Ok(response.Value) : StatusCode(response.ErrorCode!.Value, response.ErrorText);
}
[HttpPost("")]
public async Task<IActionResult> CreateCounterparty([FromBody] CounterpartyBaseDto dto)
{
var response = await mediator.Send(new CreateCounterpartyCommand()
{
Model = dto
});
return !response.IsError ? Ok(response.Value) : StatusCode(response.ErrorCode!.Value, response.ErrorText);
}
[HttpPut("{uuid:guid}")]
public async Task<IActionResult> UpdateRecord([FromRoute] Guid uuid, [FromBody] CounterpartyViewDto dto)
{
var response = await mediator.Send(new UpdateCounterpartyCommand()
{
Id=uuid,
Model = dto
});
return !response.IsError ? Ok(response.Value) : StatusCode(response.ErrorCode!.Value, response.ErrorText);
}
[HttpDelete("{uuid:guid}")]
public async Task<IActionResult> DeleteRecord([FromRoute] Guid uuid)
{
var response = await mediator.Send(new DeleteCounterpartyCommand()
{
Id = uuid,
});
return !response.IsError ? Ok() : StatusCode(response.ErrorCode!.Value, response.ErrorText);
}
}
}
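The controller above is a thin CRUD surface over MediatR. A minimal client-side sketch for exercising it (illustration only — the http://localhost:5062 base address is taken from launchSettings.json further down, and the DTO shape is inferred from CounterpartyBaseDto/CounterpartyViewDto):

using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Net.Http.Json;

// Hypothetical console client for the Counterparties API (not part of the original code).
var client = new HttpClient { BaseAddress = new Uri("http://localhost:5062") };

// Create a counterparty (CounterpartyBaseDto only carries Name).
var created = await client.PostAsJsonAsync("api/Counterparties", new { Name = "Acme Ltd" });
var view = await created.Content.ReadFromJsonAsync<CounterpartyView>();
Console.WriteLine($"Created {view!.Id}");

// Page through existing records.
var page = await client.GetFromJsonAsync<List<CounterpartyView>>("api/Counterparties?page=0&pageSize=10");
Console.WriteLine($"Fetched {page!.Count} record(s)");

// Local mirror of CounterpartyViewDto, used only for deserialization in this sketch.
internal record CounterpartyView(Guid Id, string Name);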

View File

@@ -0,0 +1,26 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<UserSecretsId>55894bef-8317-4e30-a5f0-4dcd5c3f861e</UserSecretsId>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="MediatR" Version="12.4.0" />
<PackageReference Include="Microsoft.AspNetCore.Mvc.NewtonsoftJson" Version="8.0.8" />
<PackageReference Include="Microsoft.EntityFrameworkCore" Version="8.0.8" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="8.0.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.VisualStudio.Azure.Containers.Tools.Targets" Version="1.20.1" />
<PackageReference Include="NLog.Extensions.Logging" Version="5.3.12" />
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="8.0.4" />
<PackageReference Include="PIHelperSh.Configuration" Version="1.0.1" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.4.0" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,14 @@
using DSaC.Database.Models;
using Microsoft.EntityFrameworkCore;
namespace DSaC.Database
{
public class DsacContext: DbContext
{
public DsacContext(DbContextOptions options) : base(options)
{
}
public DbSet<Counterparty> Counterparties { get; set; }
}
}

View File

@@ -0,0 +1,24 @@
using AutoMapper;
using DSaC.Models.DTOs;
using DSaC.Models.Internal.Сommands;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace DSaC.Database.Models
{
[Table("counterparty")]
[AutoMap(typeof(CounterpartyBaseDto))]
[AutoMap(typeof(CounterpartyViewDto))]
public class Counterparty
{
[Key]
[Column("id")]
public Guid Id { get; set; }
[Required]
[MaxLength(255)]
[Column("name")]
public string Name { get; set; }
}
}

View File

@@ -0,0 +1,24 @@
#See https://aka.ms/customizecontainer to learn how to customize your debug container and how Visual Studio uses this Dockerfile to build your images for faster debugging.
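# Note: the COPY instruction below pulls DSaC.csproj from the root of the build context, which
# suggests the image is built with the DSaC project folder (not the solution root) as the context.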
FROM mcr.microsoft.com/dotnet/aspnet:8.0 AS base
WORKDIR /app
EXPOSE 8080
EXPOSE 8081
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
COPY ["DSaC.csproj", "DSaC/"]
RUN dotnet restore "DSaC/DSaC.csproj"
WORKDIR "/src/DSaC"
COPY . .
RUN dotnet build "DSaC.csproj" -c $BUILD_CONFIGURATION -o /app/build
FROM build AS publish
ARG BUILD_CONFIGURATION=Release
RUN dotnet publish "DSaC.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false
FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
ENTRYPOINT ["dotnet", "DSaC.dll"]

View File

@@ -0,0 +1,51 @@
using AutoMapper;
using DSaC.Database;
using DSaC.Database.Models;
using DSaC.Logic.Handlers.Queries;
using DSaC.Models.DTOs;
using DSaC.Models.Internal;
using DSaC.Models.Internal.Сommands;
using MediatR;
namespace DSaC.Logic.Handlers.Commands
{
public class CreateCounterpartyCommandHandler : IRequestHandler<CreateCounterpartyCommand, ResponseModel<CounterpartyViewDto>>
{
private readonly ILogger _logger;
private readonly DsacContext _context;
private readonly IMapper _mapper;
public CreateCounterpartyCommandHandler(ILogger<CreateCounterpartyCommandHandler> logger, DsacContext context, IMapper mapper)
{
_logger = logger;
_context = context;
_mapper = mapper;
}
public async Task<ResponseModel<CounterpartyViewDto>> Handle(CreateCounterpartyCommand request, CancellationToken cancellationToken)
{
try
{
var model = _mapper.Map<Counterparty>(request.Model);
var res = await _context.AddAsync(model);
await _context.SaveChangesAsync();
return new()
{
Value = _mapper.Map<CounterpartyViewDto>(res.Entity)
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Error on creating counterparty");
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = "Cannot create counterparty"
};
}
}
}
}

View File

@@ -0,0 +1,50 @@
using AutoMapper;
using DSaC.Database;
using DSaC.Database.Models;
using DSaC.Models.DTOs;
using DSaC.Models.Internal;
using DSaC.Models.Internal.Сommands;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace DSaC.Logic.Handlers.Commands
{
public class DeleteCounterpartyCommandHandler: IRequestHandler<DeleteCounterpartyCommand, ResponseModel>
{
private readonly ILogger _logger;
private readonly DsacContext _context;
private readonly IMapper _mapper;
public DeleteCounterpartyCommandHandler(ILogger<DeleteCounterpartyCommandHandler> logger, DsacContext context, IMapper mapper)
{
_logger = logger;
_context = context;
_mapper = mapper;
}
public async Task<ResponseModel> Handle(DeleteCounterpartyCommand request, CancellationToken cancellationToken)
{
try
{
var res = await _context.Counterparties.Where(x=>x.Id == request.Id).ExecuteDeleteAsync();
if (res == 1) return new();
return new()
{
ErrorText = "Cannot find object to delete",
StatusCode = System.Net.HttpStatusCode.NotFound
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Error on deleteing counterparty");
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = "Cannot delete counterparty"
};
}
}
}
}

View File

@@ -0,0 +1,50 @@
using AutoMapper;
using DSaC.Database;
using DSaC.Database.Models;
using DSaC.Models.DTOs;
using DSaC.Models.Internal;
using DSaC.Models.Internal.Сommands;
using MediatR;
namespace DSaC.Logic.Handlers.Commands
{
public class UpdateCounterpartyCommandHandler: IRequestHandler<UpdateCounterpartyCommand, ResponseModel<CounterpartyViewDto>>
{
private readonly ILogger _logger;
private readonly DsacContext _context;
private readonly IMapper _mapper;
public UpdateCounterpartyCommandHandler(ILogger<UpdateCounterpartyCommandHandler> logger, DsacContext context, IMapper mapper)
{
_logger = logger;
_context = context;
_mapper = mapper;
}
public async Task<ResponseModel<CounterpartyViewDto>> Handle(UpdateCounterpartyCommand request, CancellationToken cancellationToken)
{
try
{
var model = _mapper.Map<Counterparty>(request.Model);
var res = _context.Update(model);
await _context.SaveChangesAsync();
return new()
{
Value = _mapper.Map<CounterpartyViewDto>(res.Entity)
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Error on updating counterparty");
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = "Cannot update counterparty"
};
}
}
}
}

View File

@@ -0,0 +1,50 @@
using AutoMapper;
using DSaC.Database;
using DSaC.Models.DTOs;
using DSaC.Models.Internal;
using DSaC.Models.Internal.Queries;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace DSaC.Logic.Handlers.Queries
{
public class GetCounterpartiesQueryHandler : IRequestHandler<GetCounterpartiesQuery, ResponseModel<List<CounterpartyViewDto>>>
{
private readonly ILogger _logger;
private readonly DsacContext _context;
private readonly IMapper _mapper;
public GetCounterpartiesQueryHandler(ILogger<GetCounterpartiesQueryHandler> logger, DsacContext context, IMapper mapper)
{
_logger = logger;
_context = context;
_mapper = mapper;
}
public async Task<ResponseModel<List<CounterpartyViewDto>>> Handle(GetCounterpartiesQuery request, CancellationToken cancellationToken)
{
try
{
var res = await _context.Counterparties
.Where(x=>request.Ids == null || request.Ids.Contains(x.Id))
.Skip(request.Page * request.PageSize).Take(request.PageSize)
.ToListAsync();
return new()
{
Value = res.Select(_mapper.Map<CounterpartyViewDto>).ToList(),
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Error on getting records");
return new()
{
StatusCode = System.Net.HttpStatusCode.BadRequest,
ErrorText = "Cannot get Counterparties by this request",
};
}
}
}
}

View File

@@ -0,0 +1,54 @@
using AutoMapper;
using DSaC.Database;
using DSaC.Models.DTOs;
using DSaC.Models.Internal;
using DSaC.Models.Internal.Queries;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace DSaC.Logic.Handlers.Queries
{
public class GetCounterpartyQueryHandler: IRequestHandler<GetCounterpartyQuery, ResponseModel<CounterpartyViewDto>>
{
private readonly ILogger _logger;
private readonly DsacContext _context;
private readonly IMapper _mapper;
public GetCounterpartyQueryHandler(ILogger<GetCounterpartyQueryHandler> logger, DsacContext context, IMapper mapper)
{
_logger = logger;
_context = context;
_mapper = mapper;
}
public async Task<ResponseModel<CounterpartyViewDto>> Handle(GetCounterpartyQuery request, CancellationToken cancellationToken)
{
try
{
var res = await _context.Counterparties.FirstOrDefaultAsync(x=>x.Id == request.Id);
if (res == null)
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = "Counterparty with this ID does not exsist",
};
return new()
{
Value = _mapper.Map<CounterpartyViewDto>(res),
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Error on getting record");
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = "Cannot get Counterparty by id",
};
}
}
}
}

View File

@@ -0,0 +1,60 @@
// <auto-generated />
using System;
using DSaC.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DSaC.Migrations
{
[DbContext(typeof(DsacContext))]
[Migration("20240907133944_InitMigraton")]
partial class InitMigraton
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "8.0.8")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DSaC.Database.Models.Counterparty", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<string>("ContractNumber")
.IsRequired()
.HasMaxLength(50)
.HasColumnType("character varying(50)")
.HasColumnName("contract_number");
b.Property<string>("ManagerName")
.IsRequired()
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("manager_name");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("name");
b.HasKey("Id");
b.ToTable("counterparty");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -0,0 +1,36 @@
using System;
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DSaC.Migrations
{
/// <inheritdoc />
public partial class InitMigraton : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.CreateTable(
name: "counterparty",
columns: table => new
{
id = table.Column<Guid>(type: "uuid", nullable: false),
name = table.Column<string>(type: "character varying(255)", maxLength: 255, nullable: false),
contract_number = table.Column<string>(type: "character varying(50)", maxLength: 50, nullable: false),
manager_name = table.Column<string>(type: "character varying(255)", maxLength: 255, nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_counterparty", x => x.id);
});
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "counterparty");
}
}
}

View File

@@ -0,0 +1,64 @@
// <auto-generated />
using System;
using DSaC.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DSaC.Migrations
{
[DbContext(typeof(DsacContext))]
[Migration("20240907140843_ChangingContractInfo")]
partial class ChangingContractInfo
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "8.0.8")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DSaC.Database.Models.Counterparty", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<string>("ContractNumber")
.IsRequired()
.HasMaxLength(50)
.HasColumnType("character varying(50)")
.HasColumnName("contract_number");
b.Property<bool>("IsContractClosed")
.HasColumnType("boolean")
.HasColumnName("is_contract_closed");
b.Property<string>("ManagerName")
.IsRequired()
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("manager_name");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("name");
b.HasKey("Id");
b.ToTable("counterparty");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -0,0 +1,29 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DSaC.Migrations
{
/// <inheritdoc />
public partial class ChangingContractInfo : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<bool>(
name: "is_contract_closed",
table: "counterparty",
type: "boolean",
nullable: false,
defaultValue: false);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "is_contract_closed",
table: "counterparty");
}
}
}

View File

@@ -0,0 +1,63 @@
// <auto-generated />
using System;
using DSaC.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DSaC.Migrations
{
[DbContext(typeof(DsacContext))]
[Migration("20240907141343_SmallHotfix")]
partial class SmallHotfix
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "8.0.8")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DSaC.Database.Models.Counterparty", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<string>("ContractNumber")
.IsRequired()
.HasMaxLength(50)
.HasColumnType("character varying(50)")
.HasColumnName("contract_number");
b.Property<bool>("IsContractClosed")
.HasColumnType("boolean")
.HasColumnName("is_contract_closed");
b.Property<string>("ManagerName")
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("manager_name");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("name");
b.HasKey("Id");
b.ToTable("counterparty");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -0,0 +1,40 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DSaC.Migrations
{
/// <inheritdoc />
public partial class SmallHotfix : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AlterColumn<string>(
name: "manager_name",
table: "counterparty",
type: "character varying(255)",
maxLength: 255,
nullable: true,
oldClrType: typeof(string),
oldType: "character varying(255)",
oldMaxLength: 255);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.AlterColumn<string>(
name: "manager_name",
table: "counterparty",
type: "character varying(255)",
maxLength: 255,
nullable: false,
defaultValue: "",
oldClrType: typeof(string),
oldType: "character varying(255)",
oldMaxLength: 255,
oldNullable: true);
}
}
}

View File

@@ -0,0 +1,48 @@
// <auto-generated />
using System;
using DSaC.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DSaC.Migrations
{
[DbContext(typeof(DsacContext))]
[Migration("20240914065929_after-reconstruction")]
partial class afterreconstruction
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "8.0.8")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DSaC.Database.Models.Counterparty", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("name");
b.HasKey("Id");
b.ToTable("counterparty");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -0,0 +1,52 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DSaC.Migrations
{
/// <inheritdoc />
public partial class afterreconstruction : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "contract_number",
table: "counterparty");
migrationBuilder.DropColumn(
name: "is_contract_closed",
table: "counterparty");
migrationBuilder.DropColumn(
name: "manager_name",
table: "counterparty");
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<string>(
name: "contract_number",
table: "counterparty",
type: "character varying(50)",
maxLength: 50,
nullable: false,
defaultValue: "");
migrationBuilder.AddColumn<bool>(
name: "is_contract_closed",
table: "counterparty",
type: "boolean",
nullable: false,
defaultValue: false);
migrationBuilder.AddColumn<string>(
name: "manager_name",
table: "counterparty",
type: "character varying(255)",
maxLength: 255,
nullable: true);
}
}
}

View File

@@ -0,0 +1,45 @@
// <auto-generated />
using System;
using DSaC.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DSaC.Migrations
{
[DbContext(typeof(DsacContext))]
partial class DsacContextModelSnapshot : ModelSnapshot
{
protected override void BuildModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "8.0.8")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DSaC.Database.Models.Counterparty", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("name");
b.HasKey("Id");
b.ToTable("counterparty");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -0,0 +1,17 @@
using System.ComponentModel.DataAnnotations;
namespace DSaC.Models.DTOs
{
public class ContractViewDto
{
[Required(ErrorMessage = "Contract number must be present")]
public string ContractNumber { get; set; } = null!;
[Required(ErrorMessage = "Manager name must be present")]
public string ManagerName { get; set; } = null!;
public DateTime CreatinTime { get; set; }
public bool IsClosed { get; set; }
}
}

View File

@@ -0,0 +1,13 @@

using AutoMapper;
using DSaC.Database.Models;
using System.ComponentModel.DataAnnotations;
namespace DSaC.Models.DTOs
{
public class CounterpartyBaseDto
{
[Required(ErrorMessage = "Cpty name must be present")]
public string Name { get; set; } = null!;
}
}

View File

@@ -0,0 +1,11 @@
using AutoMapper;
using DSaC.Database.Models;
namespace DSaC.Models.DTOs
{
[AutoMap(typeof(Counterparty))]
public class CounterpartyViewDto: CounterpartyBaseDto
{
public Guid Id { get; set; }
}
}

View File

@@ -0,0 +1,14 @@
using DSaC.Models.DTOs;
using MediatR;
namespace DSaC.Models.Internal.Queries
{
public class GetCounterpartiesQuery: IRequest<ResponseModel<List<CounterpartyViewDto>>>
{
public List<Guid>? Ids { get; set; }
public int Page { get; set; }
public int PageSize { get; set; }
}
}

View File

@@ -0,0 +1,10 @@
using DSaC.Models.DTOs;
using MediatR;
namespace DSaC.Models.Internal.Queries
{
public class GetCounterpartyQuery: IRequest<ResponseModel<CounterpartyViewDto>>
{
public Guid Id { get; set; }
}
}

View File

@@ -0,0 +1,30 @@
using System.Net;
namespace DSaC.Models.Internal
{
public class ResponseModel
{
public string? ErrorText;
public bool IsError => !string.IsNullOrEmpty(ErrorText);
private int? _errorCode = 200;
public int? ErrorCode
{
get => _errorCode;
set => _errorCode = value;
}
public HttpStatusCode? StatusCode
{
get => (HttpStatusCode?)_errorCode;
set => _errorCode = (int?)value;
}
}
public class ResponseModel<T> : ResponseModel
{
public T? Value { get; set; }
}
}
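ResponseModel keeps a single backing field for both ErrorCode and StatusCode, so a handler can set an HttpStatusCode and the controllers can feed the numeric ErrorCode straight into StatusCode(...). A small usage sketch (assumed, mirroring the handlers above; it relies on the ResponseModel class defined here):

using System;
using System.Net;

// Setting StatusCode updates the shared backing field that ErrorCode exposes as an int.
var response = new ResponseModel<string>
{
    StatusCode = HttpStatusCode.NotFound,  // stored as 404
    ErrorText = "Cannot find object"       // makes IsError true
};

Console.WriteLine(response.ErrorCode); // 404
Console.WriteLine(response.IsError);   // True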

View File

@@ -0,0 +1,10 @@
using DSaC.Models.DTOs;
using MediatR;
namespace DSaC.Models.Internal.Сommands
{
public class CreateCounterpartyCommand : IRequest<ResponseModel<CounterpartyViewDto>>
{
public CounterpartyBaseDto Model { get; set; } = null!;
}
}

View File

@@ -0,0 +1,9 @@
using MediatR;
namespace DSaC.Models.Internal.Сommands
{
public class DeleteCounterpartyCommand: IRequest<ResponseModel>
{
public Guid Id { get; set; }
}
}

View File

@@ -0,0 +1,14 @@
using DSaC.Models.DTOs;
using MediatR;
using System.ComponentModel.DataAnnotations;
namespace DSaC.Models.Internal.Сommands
{
public class UpdateCounterpartyCommand: IRequest<ResponseModel<CounterpartyViewDto>>
{
public Guid Id { get; set; }
[Required(ErrorMessage = "UpdateMessageMust be present")]
public CounterpartyViewDto Model { get; set; } = null!;
}
}

View File

@@ -0,0 +1,128 @@
using DSaC.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.OpenApi.Models;
using NLog.Config;
using NLog.Extensions.Logging;
using NLog.Targets;
using PIHelperSh.Configuration;
using PIHelperSh.Configuration.Attributes;
using System.Reflection;
using LogLevel = NLog.LogLevel;
[TrackedType]
public class Program
{
private static WebApplication? app;
[Constant(BlockName = "Database")]
private static string ConnectionString;
[Constant(BlockName = "GatewaySettings")]
private static string AppPrefix;
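// ConnectionString and AppPrefix appear to be populated from the "Database" and "GatewaySettings"
// blocks of appsettings.json by PIHelperSh.Configuration's [TrackedType]/[Constant] attributes,
// wired up via AddConfigurations/AddConstants in ConfigureServices below.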
public static void Main(string[] args)
{
var builder = WebApplication.CreateBuilder(args);
ConfigureLogger(builder);
ConfigureServices(builder);
ConfigureDatabase(builder);
app = builder.Build();
var t = MigrateDatabase();
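// The migration task is started here and awaited via t.Wait() just before app.Run(),
// so the service only begins handling requests once the schema is up to date.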
app.UseSwagger(c =>
{
if (!string.IsNullOrEmpty(AppPrefix))
{
//c.RouteTemplate = AppPrefix + "/swagger/{documentName}/swagger.json";
c.PreSerializeFilters.Add((swaggerDoc, httpReq) =>
{
swaggerDoc.Servers = new List<OpenApiServer> { new OpenApiServer { Url = $"{httpReq.Scheme}://{httpReq.Host.Value}/{AppPrefix}" } };
});
}
});
app.UseSwaggerUI(c =>
{
//if (!string.IsNullOrEmpty(AppPrefix))
//{
// c.SwaggerEndpoint($"/{AppPrefix}/swagger/v1/swagger.json", $"APP API");
// c.RoutePrefix = $"{AppPrefix}/swagger";
//}
});
app.UseHttpsRedirection();
app.UseAuthorization();
app.MapControllers();
t.Wait();
app.Run();
}
private static void ConfigureLogger(WebApplicationBuilder builder)
{
var nLogConfig = new LoggingConfiguration();
var logConsole = new ConsoleTarget();
var blackhole = new NullTarget();
var logFile = new FileTarget()
{
FileName = "${basedir}/logs/${shortdate}_logs.log"
};
nLogConfig.AddRule(LogLevel.Trace, LogLevel.Trace, blackhole, "Microsoft.AspNetCore.*", true);
nLogConfig.AddRule(LogLevel.Info, LogLevel.Warn, logFile, "Microsoft.EntityFrameworkCore.*", true);
nLogConfig.AddRule(LogLevel.Info, LogLevel.Warn, logFile, "Microsoft.AspNetCore.*", true);
nLogConfig.AddRule(LogLevel.Info, LogLevel.Warn, logFile, "System.Net.Http.HttpClient.Refit.*", true);
nLogConfig.AddRule(LogLevel.Info, LogLevel.Error, logConsole);
nLogConfig.AddRule(LogLevel.Debug, LogLevel.Error, logFile);
builder.Logging.ClearProviders();
builder.Services.AddLogging(m => m.AddNLog(nLogConfig));
}
private static void ConfigureServices(WebApplicationBuilder builder)
{
builder.Services.AddConfigurations(builder.Configuration);
builder.Configuration.AddConstants();
builder.Services.AddAutoMapper(AppDomain.CurrentDomain.GetAssemblies());
builder.Services.AddMediatR(cfg => cfg.RegisterServicesFromAssemblyContaining<Program>());
builder.Services.AddControllers().AddNewtonsoftJson();
builder.Services.AddEndpointsApiExplorer();
builder.Services.AddSwaggerGen(c =>
{
c.SwaggerDoc("v1", new OpenApiInfo
{
Title = "Distributed computing and applications",
Version = "v1",
Description = ""
});
//c.EnableAnnotations();
});
}
private static void ConfigureDatabase(WebApplicationBuilder builder)
{
builder.Services.AddDbContext<DsacContext>(options =>
{
options.UseNpgsql(ConnectionString);
});
}
private static async Task MigrateDatabase()
{
var context = app?.Services.CreateScope().ServiceProvider.GetService<DsacContext>();
if(context != null)
await context.Database.MigrateAsync();
}
}

View File

@@ -0,0 +1,52 @@
{
"profiles": {
"http": {
"commandName": "Project",
"launchBrowser": true,
"launchUrl": "swagger",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
},
"dotnetRunMessages": true,
"applicationUrl": "http://localhost:5062"
},
"https": {
"commandName": "Project",
"launchBrowser": true,
"launchUrl": "swagger",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
},
"dotnetRunMessages": true,
"applicationUrl": "https://localhost:7219;http://localhost:5062"
},
"IIS Express": {
"commandName": "IISExpress",
"launchBrowser": true,
"launchUrl": "swagger",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
},
"Container (Dockerfile)": {
"commandName": "Docker",
"launchBrowser": true,
"launchUrl": "{Scheme}://{ServiceHost}:{ServicePort}/swagger",
"environmentVariables": {
"ASPNETCORE_HTTPS_PORTS": "8081",
"ASPNETCORE_HTTP_PORTS": "8080"
},
"publishAllPorts": true,
"useSSL": true
}
},
"$schema": "http://json.schemastore.org/launchsettings.json",
"iisSettings": {
"windowsAuthentication": false,
"anonymousAuthentication": true,
"iisExpress": {
"applicationUrl": "http://localhost:9154",
"sslPort": 44381
}
}
}

View File

@@ -0,0 +1,14 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"Database": {
"ConnectionString": "Host=db.dev-moio.online;Port=31153;Database=dsac_maxim;Username=postgres;Password=postgres_password"
},
"GatewaySettings": {
"AppPrefix": ""
}
}

View File

@@ -0,0 +1,9 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"AllowedHosts": "*"
}

View File

@@ -0,0 +1,88 @@
using DSaC_second.Models.DTOs;
using DSaC_second.Models.Internal.Queries;
using DSaC_second.Models.Internal.Сommands;
using MediatR;
using Microsoft.AspNetCore.Components.Forms;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
namespace DSaC_second.Controllers
{
[Route("api/[controller]")]
[ApiController]
public class ContractController : ControllerBase
{
private readonly IMediator _mediator;
public ContractController(IMediator mediator)
{
_mediator = mediator;
}
[HttpGet("")]
public async Task<IActionResult> GetContracts(
[FromQuery] int page = 0,
[FromQuery] int pageSize = 10,
[FromQuery] List<Guid>? ids = null
)
{
var request = new GetContractsQuery
{
Page = page,
PageSize = pageSize,
Ids = ids
};
var response = await _mediator.Send(request);
return !response.IsError ? Ok(response.Value) : StatusCode(response.ErrorCode!.Value, response.ErrorText);
}
[HttpGet("{uuid:guid}")]
public async Task<IActionResult> GetFullContract([FromRoute] Guid uuid)
{
var request = new GetContractQuery
{
Id = uuid
};
var response = await _mediator.Send(request);
return !response.IsError ? Ok(response.Value) : StatusCode(response.ErrorCode!.Value, response.ErrorText);
}
[HttpPost("")]
public async Task<IActionResult> CreateContract([FromBody] ContractBaseDto dto)
{
var response = await _mediator.Send(new CreateContractCommand()
{
Model = dto
});
return !response.IsError ? Ok(response.Value) : StatusCode(response.ErrorCode!.Value, response.ErrorText);
}
[HttpPut("{uuid:guid}")]
public async Task<IActionResult> UpdateContract([FromRoute] Guid uuid, [FromBody] ContractViewDto dto)
{
var response = await _mediator.Send(new UpdateContractCommand()
{
Id = uuid,
Model = dto
});
return !response.IsError ? Ok(response.Value) : StatusCode(response.ErrorCode!.Value, response.ErrorText);
}
[HttpDelete("{uuid:guid}")]
public async Task<IActionResult> DeleteContract([FromRoute] Guid uuid)
{
var response = await _mediator.Send(new DeleteContractCommand()
{
Id = uuid,
});
return !response.IsError ? Ok() : StatusCode(response.ErrorCode!.Value, response.ErrorText);
}
}
}

View File

@@ -0,0 +1,27 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<UserSecretsId>39d29416-63e9-4884-9c5f-9d6ff461995f</UserSecretsId>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="MediatR" Version="12.4.1" />
<PackageReference Include="Microsoft.AspNetCore.Mvc.NewtonsoftJson" Version="8.0.8" />
<PackageReference Include="Microsoft.EntityFrameworkCore" Version="8.0.8" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="8.0.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.VisualStudio.Azure.Containers.Tools.Targets" Version="1.20.1" />
<PackageReference Include="NLog.Extensions.Logging" Version="5.3.13" />
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="8.0.4" />
<PackageReference Include="PIHelperSh.Configuration" Version="1.0.1" />
<PackageReference Include="RestSharp" Version="112.0.0" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.4.0" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,15 @@
using DSaC_second.Database.Models;
using Microsoft.EntityFrameworkCore;
namespace DSaC_second.Database
{
public class DsacContext : DbContext
{
public DsacContext(DbContextOptions options) : base(options)
{
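// Npgsql 6+ expects UTC DateTime values for 'timestamp with time zone' columns; enabling the
// legacy switch keeps the pre-6.0 behaviour so DateTime.Now-based CreatinTime values still persist.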
AppContext.SetSwitch("Npgsql.EnableLegacyTimestampBehavior", true);
}
public DbSet<Contract> Contracts { get; set; }
}
}

View File

@@ -0,0 +1,38 @@
using AutoMapper;
using DSaC_second.Models.DTOs;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace DSaC_second.Database.Models
{
[Table("contract")]
[AutoMap(typeof(ContractBaseDto))]
[AutoMap(typeof(ContractViewDto))]
public class Contract
{
[Key]
[Column("id")]
public Guid Id { get; set; }
[Column("contract_number")]
[Required]
public string ContractNumber { get; set; }
[Column("manager_name")]
[Required]
public string ManagerName { get; set; }
[Column("counterparty_id")]
[Required]
public Guid CounterpartyId { get; set; }
[Column("creation_time")]
[Required]
public DateTime CreatinTime { get; set; } = DateTime.Now;
[Column("is_closed")]
[DefaultValue(false)]
public bool IsClosed { get; set; }
}
}

View File

@@ -0,0 +1,24 @@
#See https://aka.ms/customizecontainer to learn how to customize your debug container and how Visual Studio uses this Dockerfile to build your images for faster debugging.
FROM mcr.microsoft.com/dotnet/aspnet:8.0 AS base
WORKDIR /app
EXPOSE 8080
EXPOSE 8081
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
COPY ["DSaC_second.csproj", "DSaC_second/"]
RUN dotnet restore "DSaC_second/DSaC_second.csproj"
WORKDIR "/src/DSaC_second"
COPY . .
RUN dotnet build "DSaC_second.csproj" -c $BUILD_CONFIGURATION -o /app/build
FROM build AS publish
ARG BUILD_CONFIGURATION=Release
RUN dotnet publish "DSaC_second.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false
FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
ENTRYPOINT ["dotnet", "DSaC_second.dll"]

View File

@@ -0,0 +1,76 @@
using AutoMapper;
using DSaC_second.Database;
using DSaC_second.Database.Models;
using DSaC_second.Logic.Handlers.Queries;
using DSaC_second.Models.DTOs;
using DSaC_second.Models.Internal;
using DSaC_second.Models.Internal.Queries;
using DSaC_second.Models.Internal.Сommands;
using MediatR;
namespace DSaC_second.Logic.Handlers.Commands
{
public class CreateContractCommandHandler : IRequestHandler<CreateContractCommand, ResponseModel<ContractFullDto>>
{
private readonly ILogger _logger;
private readonly DsacContext _context;
private readonly IMapper _mapper;
private readonly IMediator _mediator;
public CreateContractCommandHandler(ILogger<CreateContractCommandHandler> logger, DsacContext context, IMapper mapper, IMediator mediator)
{
_logger = logger;
_context = context;
_mapper = mapper;
_mediator = mediator;
}
public async Task<ResponseModel<ContractFullDto>> Handle(CreateContractCommand request, CancellationToken cancellationToken)
{
try
{
var counterparty = await _mediator.Send(new GetConunterpartyQuery()
{
Id = request.Model.CounterpartyId,
}, cancellationToken: cancellationToken);
if (counterparty.IsError)
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = counterparty.ErrorText,
};
if (counterparty.Value == null)
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = "Cpty with this id not found",
};
var model = _mapper.Map<Contract>(request.Model);
var outModel = await _context.Contracts.AddAsync(model, cancellationToken: cancellationToken);
await _context.SaveChangesAsync(cancellationToken);
var res = _mapper.Map<ContractFullDto>(model);
res.Counterparty = counterparty.Value!;
return new()
{
Value = res,
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Error on creating record");
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = "Cannot create contract",
};
}
}
}
}

View File

@@ -0,0 +1,43 @@
using AutoMapper;
using DSaC_second.Database;
using DSaC_second.Models.Internal;
using DSaC_second.Models.Internal.Сommands;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace DSaC_second.Logic.Handlers.Commands
{
public class DeleteContractCommandHandler: IRequestHandler<DeleteContractCommand, ResponseModel>
{
private readonly ILogger _logger;
private readonly DsacContext _context;
private readonly IMapper _mapper;
public DeleteContractCommandHandler(ILogger<DeleteContractCommandHandler> logger, DsacContext context, IMapper mapper)
{
_logger = logger;
_context = context;
_mapper = mapper;
}
public async Task<ResponseModel> Handle(DeleteContractCommand request, CancellationToken cancellationToken)
{
try
{
await _context.Contracts.Where(x => x.Id == request.Id).ExecuteDeleteAsync(cancellationToken);
return new();
}
catch (Exception ex)
{
_logger.LogError(ex, "Error on deleting record");
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = "Cannot delete contract by id",
};
}
}
}
}

View File

@@ -0,0 +1,78 @@
using AutoMapper;
using DSaC_second.Database;
using DSaC_second.Database.Models;
using DSaC_second.Models.DTOs;
using DSaC_second.Models.Internal;
using DSaC_second.Models.Internal.Queries;
using DSaC_second.Models.Internal.Сommands;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace DSaC_second.Logic.Handlers.Commands
{
public class UpdateContractCommandHandler : IRequestHandler<UpdateContractCommand, ResponseModel<ContractFullDto>>
{
private readonly ILogger _logger;
private readonly DsacContext _context;
private readonly IMapper _mapper;
private readonly IMediator _mediator;
public UpdateContractCommandHandler(ILogger<UpdateContractCommandHandler> logger, DsacContext context, IMapper mapper, IMediator mediator)
{
_logger = logger;
_context = context;
_mapper = mapper;
_mediator = mediator;
}
public async Task<ResponseModel<ContractFullDto>> Handle(UpdateContractCommand request, CancellationToken cancellationToken)
{
try
{
var counterparty = await _mediator.Send(new GetConunterpartyQuery()
{
Id = request.Model.CounterpartyId,
}, cancellationToken: cancellationToken);
if (counterparty.IsError)
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = counterparty.ErrorText,
};
if(counterparty.Value == null)
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = "Cpty with this id not found",
};
var model = _mapper.Map<Contract>(request.Model);
_context.Contracts.Update(model);
await _context.SaveChangesAsync(cancellationToken: cancellationToken);
var res = _mapper.Map<ContractFullDto>(model);
res.Counterparty = counterparty.Value!;
return new()
{
Value = res
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Error on updating record");
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = "Cannot update contract by id",
};
}
}
}
}

View File

@@ -0,0 +1,71 @@
using AutoMapper;
using DSaC_second.Database;
using DSaC_second.Models.DTOs;
using DSaC_second.Models.Internal;
using DSaC_second.Models.Internal.Queries;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace DSaC_second.Logic.Handlers.Queries
{
public class GetContractQueryHandler : IRequestHandler<GetContractQuery, ResponseModel<ContractFullDto>>
{
private readonly ILogger _logger;
private readonly DsacContext _context;
private readonly IMapper _mapper;
private readonly IMediator _mediator;
public GetContractQueryHandler(ILogger<GetContractQueryHandler> logger, DsacContext context, IMapper mapper, IMediator mediator)
{
_logger = logger;
_context = context;
_mapper = mapper;
_mediator = mediator;
}
public async Task<ResponseModel<ContractFullDto>> Handle(GetContractQuery request, CancellationToken cancellationToken)
{
try
{
var res = await _context.Contracts.FirstOrDefaultAsync(x => x.Id == request.Id, cancellationToken: cancellationToken);
if (res == null)
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = "Contract with whit Id is not exist"
};
var counterparty = await _mediator.Send(new GetConunterpartyQuery()
{
Id = res.CounterpartyId
}, cancellationToken: cancellationToken);
if (counterparty.IsError)
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = counterparty.ErrorText
};
var model = _mapper.Map<ContractFullDto>(res);
model.Counterparty = counterparty.Value!;
return new()
{
Value = model,
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Error on getting record");
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = "Cannot get contract by id",
};
}
}
}
}

View File

@@ -0,0 +1,50 @@
using AutoMapper;
using DSaC_second.Database;
using DSaC_second.Models.DTOs;
using DSaC_second.Models.Internal;
using DSaC_second.Models.Internal.Queries;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace DSaC_second.Logic.Handlers.Queries
{
public class GetContractsQueryHandler : IRequestHandler<GetContractsQuery, ResponseModel<List<ContractViewDto>>>
{
private readonly ILogger _logger;
private readonly DsacContext _context;
private readonly IMapper _mapper;
public GetContractsQueryHandler(ILogger<GetContractsQueryHandler> logger, DsacContext context, IMapper mapper)
{
_logger = logger;
_context = context;
_mapper = mapper;
}
public async Task<ResponseModel<List<ContractViewDto>>> Handle(GetContractsQuery request, CancellationToken cancellationToken)
{
try
{
var res = await _context.Contracts
.Where(x=>request.Ids == null || request.Ids.Contains(x.Id))
.Skip(request.Page * request.PageSize).Take(request.PageSize)
.ToListAsync(cancellationToken: cancellationToken);
return new()
{
Value = res.Select(_mapper.Map<ContractViewDto>).ToList(),
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Error on getting records");
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = "Cannot get contracts",
};
}
}
}
}

View File

@@ -0,0 +1,72 @@
using AutoMapper;
using DSaC_second.Database;
using DSaC_second.Logic.Handlers.Commands;
using DSaC_second.Models.DTOs;
using DSaC_second.Models.Internal;
using DSaC_second.Models.Internal.Queries;
using MediatR;
using PIHelperSh.Configuration.Attributes;
using RestSharp;
namespace DSaC_second.Logic.Handlers.Queries
{
[TrackedType]
public class GetConunterpartyQueryHandler : IRequestHandler<GetConunterpartyQuery, ResponseModel<CounterpartyViewDto>>
{
private readonly ILogger _logger;
private readonly DsacContext _context;
private readonly IMapper _mapper;
private readonly RestClient _restClient;
[Constant(BlockName = "FirstService")]
private static string BaseUrl;
[Constant(BlockName = "FirstService")]
private static string GetCounterpartyPostfix;
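// BaseUrl and GetCounterpartyPostfix are presumably bound from a "FirstService" configuration block
// of the second service (analogous to the [Constant] fields in Program.cs): the first service's base
// address and its api/Counterparties route, consumed by the RestClient below.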
public GetConunterpartyQueryHandler(ILogger<GetConunterpartyQueryHandler> logger, DsacContext context, IMapper mapper)
{
_logger = logger;
_context = context;
_mapper = mapper;
_restClient = new RestClient(BaseUrl);
}
~GetConunterpartyQueryHandler()
{
_restClient.Dispose();
}
public async Task<ResponseModel<CounterpartyViewDto>> Handle(GetConunterpartyQuery request, CancellationToken cancellationToken)
{
try
{
var rq = new RestRequest($"{GetCounterpartyPostfix}/{request.Id.ToString()}");
var res = await _restClient.ExecuteAsync<CounterpartyViewDto>(rq, cancellationToken:cancellationToken);
if (res.IsSuccessful)
return new()
{
Value = res.Data
};
return new()
{
StatusCode = res.StatusCode,
ErrorText = res.ErrorMessage,
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Error on getting record from first service");
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = "Cannot get cpty from first service",
};
}
}
}
}

View File

@@ -0,0 +1,64 @@
// <auto-generated />
using System;
using DSaC_second.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DSaC_second.Migrations
{
[DbContext(typeof(DsacContext))]
[Migration("20240914152154_initial")]
partial class initial
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "8.0.8")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DSaC_second.Database.Models.Contract", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<string>("ContractNumber")
.IsRequired()
.HasColumnType("text")
.HasColumnName("contract_number");
b.Property<Guid>("CounterpartyId")
.HasColumnType("uuid")
.HasColumnName("counterparty_id");
b.Property<DateTime>("CreatinTime")
.HasColumnType("timestamp with time zone")
.HasColumnName("creation_time");
b.Property<bool>("IsClosed")
.HasColumnType("boolean")
.HasColumnName("is_closed");
b.Property<string>("ManagerName")
.IsRequired()
.HasColumnType("text")
.HasColumnName("manager_name");
b.HasKey("Id");
b.ToTable("contract");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -0,0 +1,38 @@
using System;
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DSaC_second.Migrations
{
/// <inheritdoc />
public partial class initial : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.CreateTable(
name: "contract",
columns: table => new
{
id = table.Column<Guid>(type: "uuid", nullable: false),
contract_number = table.Column<string>(type: "text", nullable: false),
manager_name = table.Column<string>(type: "text", nullable: false),
counterparty_id = table.Column<Guid>(type: "uuid", nullable: false),
creation_time = table.Column<DateTime>(type: "timestamp with time zone", nullable: false),
is_closed = table.Column<bool>(type: "boolean", nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_contract", x => x.id);
});
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "contract");
}
}
}

View File

@@ -0,0 +1,61 @@
// <auto-generated />
using System;
using DSaC_second.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DSaC_second.Migrations
{
[DbContext(typeof(DsacContext))]
partial class DsacContextModelSnapshot : ModelSnapshot
{
protected override void BuildModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "8.0.8")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DSaC_second.Database.Models.Contract", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<string>("ContractNumber")
.IsRequired()
.HasColumnType("text")
.HasColumnName("contract_number");
b.Property<Guid>("CounterpartyId")
.HasColumnType("uuid")
.HasColumnName("counterparty_id");
b.Property<DateTime>("CreatinTime")
.HasColumnType("timestamp with time zone")
.HasColumnName("creation_time");
b.Property<bool>("IsClosed")
.HasColumnType("boolean")
.HasColumnName("is_closed");
b.Property<string>("ManagerName")
.IsRequired()
.HasColumnType("text")
.HasColumnName("manager_name");
b.HasKey("Id");
b.ToTable("contract");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -0,0 +1,17 @@
using AutoMapper;
using System.ComponentModel.DataAnnotations;
namespace DSaC_second.Models.DTOs
{
public class ContractBaseDto
{
[Required(ErrorMessage = "Contract number must be present")]
public string ContractNumber { get; set; } = null!;
[Required(ErrorMessage = "Manager name must be present")]
public string ManagerName { get; set; } = null!;
[Required(ErrorMessage = "Counterparty id must be present")]
public Guid CounterpartyId { get; set; }
}
}

View File

@@ -0,0 +1,11 @@
using AutoMapper;
using DSaC_second.Database.Models;
namespace DSaC_second.Models.DTOs
{
[AutoMap(typeof(Contract))]
public class ContractFullDto: ContractViewDto
{
public CounterpartyViewDto Counterparty { get; set; } = new();
}
}

View File

@@ -0,0 +1,15 @@
using AutoMapper;
using DSaC_second.Database.Models;
namespace DSaC_second.Models.DTOs
{
[AutoMap(typeof(Contract))]
public class ContractViewDto: ContractBaseDto
{
public Guid Id { get; set; }
public DateTime CreatinTime { get; set; }
public bool IsClosed { get; set; }
}
}

View File

@@ -0,0 +1,12 @@
using System.ComponentModel.DataAnnotations;
namespace DSaC_second.Models.DTOs
{
public class CounterpartyViewDto
{
public Guid Id { get; set; }
[Required(ErrorMessage = "Cpty name must be present")]
public string Name { get; set; } = null!;
}
}

View File

@@ -0,0 +1,10 @@
using DSaC_second.Models.DTOs;
using MediatR;
namespace DSaC_second.Models.Internal.Queries
{
public class GetContractQuery : IRequest<ResponseModel<ContractFullDto>>
{
public Guid Id { get; set; }
}
}

View File

@@ -0,0 +1,14 @@
using DSaC_second.Models.DTOs;
using MediatR;
namespace DSaC_second.Models.Internal.Queries
{
public class GetContractsQuery : IRequest<ResponseModel<List<ContractViewDto>>>
{
public List<Guid>? Ids { get; set; }
public int Page { get; set; }
public int PageSize { get; set; }
}
}

View File

@@ -0,0 +1,10 @@
using DSaC_second.Models.DTOs;
using MediatR;
namespace DSaC_second.Models.Internal.Queries
{
public class GetConunterpartyQuery: IRequest<ResponseModel<CounterpartyViewDto>>
{
public Guid Id { get; set; }
}
}

View File

@@ -0,0 +1,30 @@
using System.Net;
namespace DSaC_second.Models.Internal
{
public class ResponseModel
{
public string? ErrorText;
public bool IsError => !string.IsNullOrEmpty(ErrorText);
private int? _errorCode = 200;
public int? ErrorCode
{
get => _errorCode;
set => _errorCode = value;
}
public HttpStatusCode? StatusCode
{
get => (HttpStatusCode?)_errorCode;
set => _errorCode = (int?)value;
}
}
public class ResponseModel<T> : ResponseModel
{
public T? Value { get; set; }
}
}

View File

@@ -0,0 +1,10 @@
using DSaC_second.Models.DTOs;
using MediatR;
namespace DSaC_second.Models.Internal.Сommands
{
public class CreateContractCommand : IRequest<ResponseModel<ContractFullDto>>
{
public ContractBaseDto Model { get; set; } = null!;
}
}

View File

@@ -0,0 +1,9 @@
using MediatR;
namespace DSaC_second.Models.Internal.Сommands
{
public class DeleteContractCommand : IRequest<ResponseModel>
{
public Guid Id { get; set; }
}
}

View File

@@ -0,0 +1,14 @@
using DSaC_second.Models.DTOs;
using MediatR;
using System.ComponentModel.DataAnnotations;
namespace DSaC_second.Models.Internal.Сommands
{
public class UpdateContractCommand : IRequest<ResponseModel<ContractFullDto>>
{
public Guid Id { get; set; }
[Required(ErrorMessage = "Update model msust be present")]
public ContractViewDto Model { get; set; } = null!;
}
}
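
A handler for UpdateContractCommand is likewise not reproduced here. Under the same assumptions as above (DbSet named Contracts, Contract fields as in the model snapshot), a rough load → copy → save sketch could look like this; counterparty enrichment via the first service is deliberately omitted:

// Illustrative handler sketch, not the one from this commit.
using AutoMapper;
using DSaC_second.Database;
using DSaC_second.Models.DTOs;
using DSaC_second.Models.Internal;
using DSaC_second.Models.Internal.Сommands;
using MediatR;
using Microsoft.EntityFrameworkCore;

public class UpdateContractCommandHandlerSketch
    : IRequestHandler<UpdateContractCommand, ResponseModel<ContractFullDto>>
{
    private readonly DsacContext _context;
    private readonly IMapper _mapper;

    public UpdateContractCommandHandlerSketch(DsacContext context, IMapper mapper)
    {
        _context = context;
        _mapper = mapper;
    }

    public async Task<ResponseModel<ContractFullDto>> Handle(
        UpdateContractCommand request, CancellationToken cancellationToken)
    {
        var contract = await _context.Contracts
            .FirstOrDefaultAsync(c => c.Id == request.Id, cancellationToken);

        if (contract is null)
            return new ResponseModel<ContractFullDto>
            {
                ErrorText = $"Contract {request.Id} was not found",
                ErrorCode = 404
            };

        // Copy the editable fields from the DTO onto the tracked entity.
        contract.ContractNumber = request.Model.ContractNumber;
        contract.ManagerName = request.Model.ManagerName;
        contract.IsClosed = request.Model.IsClosed;
        await _context.SaveChangesAsync(cancellationToken);

        // Counterparty stays default here; the real handler would fill it in.
        return new ResponseModel<ContractFullDto> { Value = _mapper.Map<ContractFullDto>(contract) };
    }
}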

View File

@@ -0,0 +1,130 @@
using DSaC_second.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.OpenApi.Models;
using NLog.Config;
using NLog.Extensions.Logging;
using NLog.Targets;
using PIHelperSh.Configuration;
using PIHelperSh.Configuration.Attributes;
using System;
using System.Reflection;
using LogLevel = NLog.LogLevel;
[TrackedType]
public class Program
{
private static WebApplication? app;
[Constant(BlockName = "Database")]
private static string ConnectionString;
[Constant(BlockName = "GatewaySettings")]
private static string AppPrefix;
public static void Main(string[] args)
{
var builder = WebApplication.CreateBuilder(args);
ConfigureLogger(builder);
ConfigureServices(builder);
ConfigureDatabase(builder);
app = builder.Build();
var t = MigrateDatabase();
app.UseSwagger(c =>
{
if (!string.IsNullOrEmpty(AppPrefix))
{
//c.RouteTemplate = AppPrefix + "/swagger/{documentName}/swagger.json";
c.PreSerializeFilters.Add((swaggerDoc, httpReq) =>
{
swaggerDoc.Servers = new List<OpenApiServer> { new OpenApiServer { Url = $"{httpReq.Scheme}://{httpReq.Host.Value}/{AppPrefix}" } };
});
}
});
app.UseSwaggerUI(c =>
{
//if (!string.IsNullOrEmpty(AppPrefix))
//{
// c.SwaggerEndpoint($"/{AppPrefix}/swagger/v1/swagger.json", $"APP API");
// c.RoutePrefix = $"{AppPrefix}/swagger";
//}
});
app.UseHttpsRedirection();
app.UseAuthorization();
app.MapControllers();
t.Wait();
app.Run();
}
private static void ConfigureLogger(WebApplicationBuilder builder)
{
var nLogConfig = new LoggingConfiguration();
var logConsole = new ConsoleTarget();
var blackhole = new NullTarget();
var logFile = new FileTarget()
{
FileName = "${basedir}/logs/${shortdate}_logs.log"
};
nLogConfig.AddRule(LogLevel.Trace, LogLevel.Trace, blackhole, "Microsoft.AspNetCore.*", true);
nLogConfig.AddRule(LogLevel.Info, LogLevel.Warn, logFile, "Microsoft.EntityFrameworkCore.*", true);
nLogConfig.AddRule(LogLevel.Info, LogLevel.Warn, logFile, "Microsoft.AspNetCore.*", true);
nLogConfig.AddRule(LogLevel.Info, LogLevel.Warn, logFile, "System.Net.Http.HttpClient.Refit.*", true);
nLogConfig.AddRule(LogLevel.Info, LogLevel.Error, logConsole);
nLogConfig.AddRule(LogLevel.Debug, LogLevel.Error, logFile);
builder.Logging.ClearProviders();
builder.Services.AddLogging(m => m.AddNLog(nLogConfig));
}
private static void ConfigureServices(WebApplicationBuilder builder)
{
builder.Services.AddConfigurations(builder.Configuration);
builder.Configuration.AddConstants();
builder.Services.AddAutoMapper(AppDomain.CurrentDomain.GetAssemblies());
builder.Services.AddMediatR(cfg => cfg.RegisterServicesFromAssemblyContaining<Program>());
builder.Services.AddControllers().AddNewtonsoftJson();
builder.Services.AddEndpointsApiExplorer();
builder.Services.AddSwaggerGen(c =>
{
c.SwaggerDoc("v1", new OpenApiInfo
{
Title = "Distributed computing and applications",
Version = "v1",
Description = ""
});
//c.EnableAnnotations();
});
}
private static void ConfigureDatabase(WebApplicationBuilder builder)
{
builder.Services.AddDbContext<DsacContext>(options =>
{
options.UseNpgsql(ConnectionString);
});
}
private static async Task MigrateDatabase()
{
if (app is null)
return;
// Resolve the context from a dedicated scope and dispose the scope after migrating.
using var scope = app.Services.CreateScope();
var context = scope.ServiceProvider.GetService<DsacContext>();
if (context != null)
await context.Database.MigrateAsync();
}
}

View File

@@ -0,0 +1,53 @@
{
"profiles": {
"http": {
"commandName": "Project",
"launchBrowser": true,
"launchUrl": "swagger",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development",
"GATEWAY_SETTINGS_APP_PREFIX": "first"
},
"dotnetRunMessages": true,
"applicationUrl": "http://localhost:5246"
},
"https": {
"commandName": "Project",
"launchBrowser": true,
"launchUrl": "swagger",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
},
"dotnetRunMessages": true,
"applicationUrl": "https://localhost:7239;http://localhost:5246"
},
"IIS Express": {
"commandName": "IISExpress",
"launchBrowser": true,
"launchUrl": "swagger",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
},
"Container (Dockerfile)": {
"commandName": "Docker",
"launchBrowser": true,
"launchUrl": "{Scheme}://{ServiceHost}:{ServicePort}/swagger",
"environmentVariables": {
"ASPNETCORE_HTTPS_PORTS": "8081",
"ASPNETCORE_HTTP_PORTS": "8080"
},
"publishAllPorts": true,
"useSSL": true
}
},
"$schema": "http://json.schemastore.org/launchsettings.json",
"iisSettings": {
"windowsAuthentication": false,
"anonymousAuthentication": true,
"iisExpress": {
"applicationUrl": "http://localhost:56866",
"sslPort": 44308
}
}
}

View File

@@ -0,0 +1,18 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"Database": {
"ConnectionString": "Host=db.dev-moio.online;Port=31153;Database=dsac_maxim;Username=postgres;Password=postgres_password"
},
"FirstService": {
"BaseUrl": "http://localhost:5062/api",
"GetCounterpartyPostfix": "/Counterparties"
},
"GatewaySettings": {
"AppPrefix": ""
}
}
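
The FirstService block points this service at the counterparty API of the first service; the NLog rule for System.Net.Http.HttpClient.Refit.* above suggests the call goes through a Refit client. A hedged sketch of how BaseUrl and GetCounterpartyPostfix could be wired up (the interface name and registration are illustrative, not taken from this commit):

// Sketch only: a Refit client matching the FirstService settings above.
using DSaC_second.Models.DTOs;
using Refit;

public interface IFirstServiceApi
{
    // Resolves to GET {BaseUrl}/Counterparties/{id}.
    [Get("/Counterparties/{id}")]
    Task<CounterpartyViewDto> GetCounterpartyAsync(Guid id);
}

// Possible registration in ConfigureServices:
// builder.Services
//     .AddRefitClient<IFirstServiceApi>()
//     .ConfigureHttpClient(c => c.BaseAddress = new Uri("http://localhost:5062/api"));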

View File

@@ -0,0 +1,9 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"AllowedHosts": "*"
}

Some files were not shown because too many files have changed in this diff.