Merge branch 'main' into dozorova_alena_lab_3

This commit is contained in:
Zara28 2024-09-26 11:00:24 +04:00
commit 757a7819f6
118 changed files with 4903 additions and 0 deletions

38
borschevskaya_anna_lab_2/.gitignore vendored Normal file
View File

@ -0,0 +1,38 @@
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**/target/
!**/src/test/**/target/
### IntelliJ IDEA ###
.idea/modules.xml
.idea/jarRepositories.xml
.idea/compiler.xml
.idea/libraries/
*.iws
*.iml
*.ipr
### Eclipse ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/
### VS Code ###
.vscode/
### Mac OS ###
.DS_Store

View File

@ -0,0 +1,43 @@
# Отчет. Лабораторная работа 2
В рамках лабораторной работы №2 были написаны два сервиса, работающих с текстовыми файлами.
Для первого сервиса был выбран вариант задания №5:
```
Ищет в каталоге /var/data файл с самым коротким названием и перекладывает его в /var/result/data.txt.
```
А для второго - №2:
```
Ищет наименьшее число из файла /var/data/data.txt и сохраняет его третью степень в /var/result/result.txt.
```
## Описание
Сначала сервис first перемещает данные из файла с самым коротким названием, находящегося в указанной примонтированной директории, в выходную папку.
Доступ к выходной папке имеет второй сервис, который выводит наименьшее число из помещенного первым сервисом файла
в третьей степени в выходной файл.
Выходной файл расположен в примонтированной директории и доступен на машине, где запускаются сервисы.
В Dockerfile используется многоэтапная сборка с использованием нескольких базовых образов на каждом этапе.
Описание значения каждой строки есть в Dockerfile в сервисе first.
В файле docker-compose.yml приведено описание новых строк, связанных с подключением примонтированных томов.
Стоит отметить, что для "общения" сервисов используется общий том common, который монтируется в контейнер по пути /var/result. Это позволяет сохранять результаты
работы первого сервиса для использования вторым сервисом.
## Как запустить
Для того, чтобы запустить сервисы, необходимо выполнить следующие действия:
1. Установить и запустить Docker Engine или Docker Desktop
2. Через консоль перейти в папку, в которой расположен файл docker-compose.yml
3. Выполнить команду:
```
docker compose up --build
```
В случае успешного запуска всех контейнеров в консоли будет выведено следующее сообщение:
```
✔ Network borschevskaya_anna_lab_2_default Created 0.1s
✔ Container borschevskaya_anna_lab_2-first-1 Created 0.1s
✔ Container borschevskaya_anna_lab_2-second-1 Created 0.1s
Attaching to borschevskaya_anna_lab_2-first-1, borschevskaya_anna_lab_2-second-1
```
Далее, в консоль каждого сервиса будут выведены сообщения о том, как прошла обработка файлов.
В случае отсутствия заданных значений переменных окружения INPUT_PATH и OUTPUT_PATH и
в иных исключительных ситуациях будет выведена информация об этом.
## Видео-отчет
Работоспособность лабораторной работы можно оценить в следующем [видео](https://disk.yandex.ru/i/LFxdyRUFQDwXEQ).

View File

@ -0,0 +1,22 @@
services:
  first:
    build: ./first # directory containing the Dockerfile for the first service
    environment:
      INPUT_PATH: /var/data/ # directory with the input files to process
      OUTPUT_PATH: /var/result/ # directory that receives the processing result
    volumes:
      - ./volumes/input:/var/data # host folder with input data mounted into the container
      - common:/var/result # shared volume where `first` places its result for `second`
  second:
    build: ./second # directory containing the Dockerfile for the second service
    depends_on: # `second` is started after `first` is started (NOTE: not after `first` finishes)
      - first
    environment:
      INPUT_PATH: /var/result/
      OUTPUT_PATH: /var/data/
    volumes:
      - ./volumes/output:/var/data
      - common:/var/result # shared volume from which `second` reads `first`'s output and runs its own logic
volumes:
  common:

View File

@ -0,0 +1,25 @@
# Build stage: use the Maven image to compile the project
FROM maven:3.8-eclipse-temurin-21-alpine AS build
# Set the working directory
WORKDIR /app
# Copy only pom.xml first and pre-fetch dependencies:
# the dependency layer is cached, so code-only changes rebuild much faster
COPY pom.xml .
RUN mvn dependency:go-offline
# Copy the remaining sources
COPY src ./src
# Build the whole project (tests are skipped in the image build)
RUN mvn clean package -DskipTests
# Runtime stage: official JDK image runs the built jar
FROM eclipse-temurin:21-jdk-alpine
# Copy the jar produced by the build stage
COPY --from=build /app/target/*.jar /app.jar
# Use an absolute jar path so the command does not depend on the working directory
CMD ["java", "-jar", "/app.jar"]

View File

@ -0,0 +1,37 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>ru.first</groupId>
    <artifactId>first</artifactId>
    <version>1.0.0-SNAPSHOT</version>
    <properties>
        <!-- Compile for Java 21, matching the JDK used in the Dockerfile -->
        <maven.compiler.source>21</maven.compiler.source>
        <maven.compiler.target>21</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>
    <build>
        <plugins>
            <plugin>
                <!-- Build an executable JAR -->
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-jar-plugin</artifactId>
                <version>3.1.0</version>
                <configuration>
                    <archive>
                        <manifest>
                            <addClasspath>true</addClasspath>
                            <classpathPrefix>lib/</classpathPrefix>
                            <!-- Entry point recorded in MANIFEST.MF so `java -jar` works -->
                            <mainClass>ru.first.Main</mainClass>
                        </manifest>
                    </archive>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>

View File

@ -0,0 +1,50 @@
package ru.first;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Comparator;
import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
import static java.util.Objects.isNull;
public class Main {
    // Directory scanned for input files (mounted volume), taken from the environment.
    public static final String INPUT_PATH = System.getenv("INPUT_PATH");
    // Directory that receives the result file (shared volume), taken from the environment.
    public static final String OUTPUT_PATH = System.getenv("OUTPUT_PATH");
    // Fixed name of the produced file, consumed by the second service.
    public static final String RESULT_FILE_NAME = "data.txt";

    /**
     * Finds the file with the shortest name in INPUT_PATH and moves it to
     * OUTPUT_PATH/data.txt, replacing any existing file there.
     *
     * @throws IOException if a directory cannot be created or the move fails
     */
    public static void main(String[] args) throws IOException {
        if (isNull(INPUT_PATH) || INPUT_PATH.isEmpty() || isNull(OUTPUT_PATH) || OUTPUT_PATH.isEmpty()) {
            System.out.printf("Отсутствуют переменные окружения INPUT_PATH = '%s' или OUTPUT_PATH = '%s'%n",
                    INPUT_PATH, OUTPUT_PATH);
            return;
        }
        var inputPathDir = Path.of(INPUT_PATH);
        // createDirectories is a no-op when the directory already exists and also
        // creates missing parents (createDirectory would throw in both cases).
        Files.createDirectories(inputPathDir);
        var allDirFiles = inputPathDir.toFile().listFiles();
        if (isNull(allDirFiles) || allDirFiles.length == 0) {
            System.out.println("Директория пуста");
            return;
        }
        // Only regular files qualify; subdirectories are ignored.
        var dirFiles = Arrays.stream(allDirFiles).filter(File::isFile).toList();
        if (dirFiles.isEmpty()) {
            System.out.println("В указанной директории нет подходящих для обработки файлов");
            return;
        }
        // get() is safe: dirFiles was just checked to be non-empty.
        var shortestName = dirFiles.stream()
                .min(Comparator.comparingInt(file -> file.getName().length()))
                .get();
        var outputPathDir = Path.of(OUTPUT_PATH);
        Files.createDirectories(outputPathDir);
        // resolve() builds paths portably instead of manual string concatenation.
        var resultFilePath = outputPathDir.resolve(RESULT_FILE_NAME);
        Files.move(inputPathDir.resolve(shortestName.getName()), resultFilePath, REPLACE_EXISTING);
    }
}

View File

@ -0,0 +1,16 @@
# Build stage: the Maven image compiles the project
FROM maven:3.8-eclipse-temurin-21-alpine AS build
WORKDIR /app
# Cache the dependency layer separately from the source code
COPY pom.xml .
RUN mvn dependency:go-offline
COPY src ./src
RUN mvn clean package -DskipTests
# Runtime stage: slim JDK image runs the built jar
FROM eclipse-temurin:21-jdk-alpine
COPY --from=build /app/target/*.jar /app.jar
# Absolute jar path: the command no longer depends on the working directory
CMD ["java", "-jar", "/app.jar"]

View File

@ -0,0 +1,36 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>ru.second</groupId>
    <artifactId>second</artifactId>
    <version>1.0.0-SNAPSHOT</version>
    <properties>
        <!-- Compile for Java 21, matching the JDK used in the Dockerfile -->
        <maven.compiler.source>21</maven.compiler.source>
        <maven.compiler.target>21</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>
    <build>
        <plugins>
            <plugin>
                <!-- Build an executable JAR -->
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-jar-plugin</artifactId>
                <version>3.1.0</version>
                <configuration>
                    <archive>
                        <manifest>
                            <addClasspath>true</addClasspath>
                            <classpathPrefix>lib/</classpathPrefix>
                            <!-- Entry point recorded in MANIFEST.MF so `java -jar` works -->
                            <mainClass>ru.second.Main</mainClass>
                        </manifest>
                    </archive>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>

View File

@ -0,0 +1,51 @@
package ru.second;
import java.io.File;
import java.io.FileWriter;
import java.nio.file.Files;
import static java.util.Objects.isNull;
public class Main {
    // Directory of the shared volume holding the first service's output.
    public static final String INPUT_PATH = System.getenv("INPUT_PATH");
    // Name of the file produced by the first service.
    public static final String INPUT_FILE_NAME = "data.txt";
    // Directory for this service's result (mounted to the host).
    public static final String OUTPUT_PATH = System.getenv("OUTPUT_PATH");
    public static final String RESULT_FILE_NAME = "result.txt";

    /**
     * Reads integers (one per line) from INPUT_PATH/data.txt, finds the
     * minimum and writes its cube to OUTPUT_PATH/result.txt.
     */
    public static void main(String[] args) {
        if (isNull(INPUT_PATH) || INPUT_PATH.isEmpty() || isNull(OUTPUT_PATH) || OUTPUT_PATH.isEmpty()) {
            System.out.printf("Отсутствуют переменные окружения INPUT_PATH = '%s' или OUTPUT_PATH = '%s'%n",
                    INPUT_PATH, OUTPUT_PATH);
            return;
        }
        var inputFile = new File(INPUT_PATH + File.separator + INPUT_FILE_NAME);
        if (!inputFile.exists()) {
            System.out.println("Входной файл не существует");
            return;
        }
        try (var stream = Files.lines(inputFile.toPath());
             var writer = new FileWriter(OUTPUT_PATH + File.separator + RESULT_FILE_NAME);
        ) {
            // Blank lines (e.g. a trailing newline) are skipped so they do not
            // abort the whole run with a NumberFormatException.
            var min = stream.map(String::trim)
                    .filter(line -> !line.isEmpty())
                    .map(Main::parseInt)
                    .reduce(Integer::min);
            if (min.isEmpty()) {
                System.out.println("Не найдено минимальное значение среди строк файла");
                return;
            }
            var minValue = Math.pow(min.get(), 3);
            System.out.printf("Get min value = '%d'%n", min.get());
            writer.append(Double.toString(minValue));
            System.out.printf("To file %s was written value %f%n", RESULT_FILE_NAME, minValue);
        } catch (Exception ex) {
            System.out.println(ex.getMessage());
        }
    }

    private static Integer parseInt(String line) {
        // trim() strips surrounding whitespace including newlines; the previous
        // replace("\\n", "") removed only a literal backslash-n character pair,
        // not an actual line terminator.
        return Integer.parseInt(line.trim());
    }
}

4
dolgov_dmitriy_lab_1/.gitignore vendored Normal file
View File

@ -0,0 +1,4 @@
data/
log/
wordpress/
custom/

View File

@ -0,0 +1,34 @@
# Лабораторная работа №1
## Выполнил: Долгов Дмитрий, группа ПИбд-42
### Были развёрнуты следующие сервисы:
* mediawiki (движок вики)
* wordpress (популярная система управления контентом)
* gitea (сервис для хранения репозиториев git)
* mariaDB
### Были использованы следующие технологии:
* git
* docker
* docker-compose
### Для запуска лабораторной работы необходимо ввести в консоль следующую команду:
```
docker compose up -d
```
## Результат запуска:
```
[+] Running 4/4
✔ Container dolgov_dmitriy_lab_1-wordpress-1 Running 0.0s
✔ Container dolgov_dmitriy_lab_1-database-1 Running 0.0s
✔ Container dolgov_dmitriy_lab_1-mediawiki-1 Running 0.0s
✔ Container gitea Running
```
## Видео с результатом запуска:
Видео можно посмотреть по данной [ссылке](https://drive.google.com/file/d/1hC6HhNvYBRuYVClobXyDMReA4ngwxhwc/view?usp=drive_link).

View File

@ -0,0 +1,73 @@
# Services for the assignment
services:
  # MediaWiki service
  mediawiki:
    # MediaWiki image
    image: mediawiki
    # Restart automatically on failure
    restart: always
    # Map container port 80 to host port 8080
    ports:
      - "8080:80"
    # Link to the database service
    links:
      - database
    # Named volume for persisting uploaded images
    volumes:
      - images:/var/www/html/images
  # WordPress service
  wordpress:
    # WordPress image
    image: wordpress:latest
    # Restart automatically on failure
    ports:
      - "8082:80"
    restart: always
    volumes:
      - ./wordpress:/var/www/html
  # Gitea service
  server:
    image: gitea/gitea:latest
    container_name: gitea
    restart: always
    environment:
      - USER_UID=1000
      - USER_GID=1000
    volumes:
      - ./data:/data
      - ./custom:/app/gitea/custom
      - ./log:/app/gitea/log
    ports:
      - "8081:3000"
    links:
      - database
    depends_on:
      - database
  # MariaDB service
  database:
    # MariaDB image
    image: mariadb
    # Restart automatically on failure
    restart: always
    # Environment variables configuring the database
    environment:
      # Database name
      MYSQL_DATABASE: my_wiki
      # User name
      MYSQL_USER: user
      # User password
      MYSQL_PASSWORD: user
      # Generate a random password for the root user
      MYSQL_RANDOM_ROOT_PASSWORD: 'yes'
    # Named volume for persisting database data
    volumes:
      - db:/var/lib/mysql
# Named volumes for persistent data
volumes:
  images:
  db:

Binary file not shown.

After

Width:  |  Height:  |  Size: 275 KiB

View File

@ -0,0 +1,93 @@
# Кашин Максим ПИбд-42
## Описание
Этот проект разворачивает среду с базой данных MySQL, системами управления проектами Redmine и платформой для создания сайтов WordPress с использованием Docker Compose. Ниже приведены шаги для запуска и настройки.
## 0. Предварительные действия
Перед запуском Docker Compose выполнил следующие шаги:
1. Установил [Docker Desktop](https://www.docker.com/products/docker-desktop) для моей операционной системы.
2. Настроил брандмауэр Windows и разрешил доступ Docker.
3. Открыл PowerShell с правами администратора и выполнил следующие команды:
```bash
net start vmcompute
wsl --set-default-version 2
```
## 1. Структура и запуск сервиса
Файл `docker-compose.yml` содержит описание трех сервисов:
- **db (MySQL)** — база данных;
- **redmine (Redmine)** — система управления проектами;
- **wordpress (WordPress)** — платформа для создания сайтов.
Запуск всех сервисов происходит через команду:
```bash
docker-compose up -d
```
## 2. Сервис MySQL
Сервис `db` представляет собой контейнер с MySQL версии 8.0.
- **Образ**: `mysql:8.0`
- **Переменные окружения**:
- `MYSQL_ROOT_PASSWORD` — пароль для пользователя root.
- `MYSQL_DATABASE` — имя создаваемой базы данных (exampledb).
- `MYSQL_USER` — имя пользователя базы данных (exampleuser).
- `MYSQL_PASSWORD` — пароль пользователя базы данных (examplepass).
- **Тома (volumes)**:
- `db-data:/var/lib/mysql` — том для хранения данных базы данных.
Сервис MySQL необходим для работы как Redmine, так и WordPress.
## 3. Сервис Redmine
Сервис `redmine` представляет собой контейнер с системой управления проектами Redmine.
- **Образ**: `redmine`
- **Порты**:
- `8080:3000` — Redmine будет доступен по адресу http://localhost:8080.
- **Переменные окружения**:
- `REDMINE_DB_DATABASE` — имя базы данных (exampledb).
- `REDMINE_DB_MYSQL` — имя хоста базы данных (db).
- `REDMINE_DB_PASSWORD` — пароль для подключения к базе данных (example).
- `REDMINE_SECRET_KEY_BASE` — секретный ключ для работы с сессиями.
- **Тома (volumes)**:
- `redmine:/usr/src/redmine/files` — том для хранения файлов Redmine.
## 4. Сервис WordPress
Сервис `wordpress` представляет собой контейнер с платформой WordPress.
- **Образ**: `wordpress`
- **Порты**:
- `8081:80` — WordPress будет доступен по адресу http://localhost:8081.
- **Переменные окружения**:
- `WORDPRESS_DB_HOST` — хост базы данных (db).
- `WORDPRESS_DB_USER` — имя пользователя базы данных (exampleuser).
- `WORDPRESS_DB_PASSWORD` — пароль для подключения к базе данных (examplepass).
- `WORDPRESS_DB_NAME` — имя базы данных (exampledb).
- **Тома (volumes)**:
- `wordpress:/var/www/html` — том для хранения файлов WordPress.
## 5. Тома данных
Для хранения данных и постоянства контейнеров используются три тома:
- `wordpress` — для данных WordPress.
- `db-data` — для данных MySQL.
- `redmine` — для файлов Redmine.
## 6. Остановка сервисов
Для остановки и удаления всех контейнеров необходимо выполнить команду:
```bash
docker-compose down
```
## 7. Ссылка на видео
[Видео-отчёт Кашин Максим ПИбд-42](https://disk.yandex.ru/i/O8L1qmk4PIbCvA)

View File

@ -0,0 +1,45 @@
version: '3.1' # Compose file format version (legacy field, ignored by modern Compose)
services: # Services started by this file
  db: # MySQL database service
    image: mysql:8.0 # Image
    restart: always # Restart the container automatically on failure
    environment: # Environment variables configuring the database
      MYSQL_ROOT_PASSWORD: example
      MYSQL_DATABASE: exampledb
      MYSQL_USER: exampleuser
      MYSQL_PASSWORD: examplepass
    volumes: # Volume persisting the database data
      - db-data:/var/lib/mysql
  redmine: # Redmine — project management / issue tracker
    image: redmine # Image
    restart: always # Restart the container automatically on failure
    ports:
      - 8080:3000 # Forward host port 8080 to container port 3000
    environment: # Redmine settings (database connection)
      REDMINE_DB_DATABASE: exampledb
      REDMINE_DB_MYSQL: db
      REDMINE_DB_PASSWORD: example
      REDMINE_SECRET_KEY_BASE: supersecretkey
    volumes: # Volume persisting Redmine files
      - redmine:/usr/src/redmine/files
  wordpress: # WordPress — popular content management system
    image: wordpress # Image
    restart: always # Restart the container automatically on failure
    ports:
      - 8081:80 # Forward host port 8081 to container port 80
    environment: # WordPress settings (database connection)
      WORDPRESS_DB_HOST: db
      WORDPRESS_DB_USER: exampleuser
      WORDPRESS_DB_PASSWORD: examplepass
      WORDPRESS_DB_NAME: exampledb
    volumes: # Volume persisting WordPress files
      - wordpress:/var/www/html
volumes: # Named volumes for persistent data
  wordpress:
  db-data:
  redmine:

View File

@ -0,0 +1,30 @@
**/.classpath
**/.dockerignore
**/.env
**/.git
**/.gitignore
**/.project
**/.settings
**/.toolstarget
**/.vs
**/.vscode
**/*.*proj.user
**/*.dbmdl
**/*.jfm
**/azds.yaml
**/bin
**/charts
**/docker-compose*
**/Dockerfile*
**/node_modules
**/npm-debug.log
**/obj
**/secrets.dev.yaml
**/values.dev.yaml
LICENSE
README.md
!**/.gitignore
!.git/HEAD
!.git/config
!.git/packed-refs
!.git/refs/heads/**

63
kuzarin_maxim_lab_3/.gitattributes vendored Normal file
View File

@ -0,0 +1,63 @@
###############################################################################
# Set default behavior to automatically normalize line endings.
###############################################################################
* text=auto
###############################################################################
# Set default behavior for command prompt diff.
#
# This is need for earlier builds of msysgit that does not have it on by
# default for csharp files.
# Note: This is only used by command line
###############################################################################
#*.cs diff=csharp
###############################################################################
# Set the merge driver for project and solution files
#
# Merging from the command prompt will add diff markers to the files if there
# are conflicts (Merging from VS is not affected by the settings below, in VS
# the diff markers are never inserted). Diff markers may cause the following
# file extensions to fail to load in VS. An alternative would be to treat
# these files as binary and thus will always conflict and require user
# intervention with every merge. To do so, just uncomment the entries below
###############################################################################
#*.sln merge=binary
#*.csproj merge=binary
#*.vbproj merge=binary
#*.vcxproj merge=binary
#*.vcproj merge=binary
#*.dbproj merge=binary
#*.fsproj merge=binary
#*.lsproj merge=binary
#*.wixproj merge=binary
#*.modelproj merge=binary
#*.sqlproj merge=binary
#*.wwaproj merge=binary
###############################################################################
# behavior for image files
#
# image files are treated as binary by default.
###############################################################################
#*.jpg binary
#*.png binary
#*.gif binary
###############################################################################
# diff behavior for common document formats
#
# Convert binary document formats to text before diffing them. This feature
# is only available from the command line. Turn it on by uncommenting the
# entries below.
###############################################################################
#*.doc diff=astextplain
#*.DOC diff=astextplain
#*.docx diff=astextplain
#*.DOCX diff=astextplain
#*.dot diff=astextplain
#*.DOT diff=astextplain
#*.pdf diff=astextplain
#*.PDF diff=astextplain
#*.rtf diff=astextplain
#*.RTF diff=astextplain

363
kuzarin_maxim_lab_3/.gitignore vendored Normal file
View File

@ -0,0 +1,363 @@
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Oo]ut/
[Ll]og/
[Ll]ogs/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd

View File

@ -0,0 +1,31 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.10.34916.146
MinimumVisualStudioVersion = 10.0.40219.1
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "DSaC", "DSaC\DSaC.csproj", "{C1051C12-D7D0-4C77-AFBC-4F5FFD8EE367}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DSaC_second", "DSaC_second\DSaC_second.csproj", "{64F78585-2BBC-4656-BC50-41FBB8917719}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{C1051C12-D7D0-4C77-AFBC-4F5FFD8EE367}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C1051C12-D7D0-4C77-AFBC-4F5FFD8EE367}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C1051C12-D7D0-4C77-AFBC-4F5FFD8EE367}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C1051C12-D7D0-4C77-AFBC-4F5FFD8EE367}.Release|Any CPU.Build.0 = Release|Any CPU
{64F78585-2BBC-4656-BC50-41FBB8917719}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{64F78585-2BBC-4656-BC50-41FBB8917719}.Debug|Any CPU.Build.0 = Debug|Any CPU
{64F78585-2BBC-4656-BC50-41FBB8917719}.Release|Any CPU.ActiveCfg = Release|Any CPU
{64F78585-2BBC-4656-BC50-41FBB8917719}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {30D05708-634E-4FDE-9BCA-5A1B7A5EFF59}
EndGlobalSection
EndGlobal

View File

@ -0,0 +1,88 @@
using DSaC.Models.DTOs;
using DSaC.Models.Internal.Queries;
using DSaC.Models.Internal.Сommands;
using MediatR;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.RazorPages;
namespace DSaC.Controllers
{
[Route("api/[controller]")]
[ApiController]
public class CounterpartiesController : ControllerBase
{
    // Single MediatR entry point: every action only builds a message and dispatches it.
    private readonly IMediator _mediator;

    public CounterpartiesController(IMediator mediator)
    {
        _mediator = mediator;
    }

    /// <summary>Returns a page of counterparties, optionally filtered by ids.</summary>
    [HttpGet("")]
    public async Task<IActionResult> GetCounterparties(
        [FromQuery] int page = 0,
        [FromQuery] int pageSize = 10,
        [FromQuery] List<Guid>? ids = null
    )
    {
        var query = new GetCounterpartiesQuery
        {
            Page = page,
            PageSize = pageSize,
            Ids = ids
        };
        var response = await _mediator.Send(query);
        if (response.IsError)
        {
            return StatusCode(response.ErrorCode!.Value, response.ErrorText);
        }
        return Ok(response.Value);
    }

    /// <summary>Returns a single counterparty by its id.</summary>
    [HttpGet("{uuid:guid}")]
    public async Task<IActionResult> GetFullCounterparty([FromRoute] Guid uuid)
    {
        var query = new GetCounterpartyQuery
        {
            Id = uuid
        };
        var response = await _mediator.Send(query);
        if (response.IsError)
        {
            return StatusCode(response.ErrorCode!.Value, response.ErrorText);
        }
        return Ok(response.Value);
    }

    /// <summary>Creates a new counterparty from the posted DTO.</summary>
    [HttpPost("")]
    public async Task<IActionResult> CreateCounterparty([FromBody] CounterpartyBaseDto dto)
    {
        var command = new CreateCounterpartyCommand()
        {
            Model = dto
        };
        var response = await _mediator.Send(command);
        if (response.IsError)
        {
            return StatusCode(response.ErrorCode!.Value, response.ErrorText);
        }
        return Ok(response.Value);
    }

    /// <summary>Updates an existing counterparty identified by uuid.</summary>
    [HttpPut("{uuid:guid}")]
    public async Task<IActionResult> UpdateRecord([FromRoute] Guid uuid, [FromBody] CounterpartyViewDto dto)
    {
        var command = new UpdateCounterpartyCommand()
        {
            Id = uuid,
            Model = dto
        };
        var response = await _mediator.Send(command);
        if (response.IsError)
        {
            return StatusCode(response.ErrorCode!.Value, response.ErrorText);
        }
        return Ok(response.Value);
    }

    /// <summary>Deletes the counterparty identified by uuid.</summary>
    [HttpDelete("{uuid:guid}")]
    public async Task<IActionResult> DeleteRecord([FromRoute] Guid uuid)
    {
        var command = new DeleteCounterpartyCommand()
        {
            Id = uuid,
        };
        var response = await _mediator.Send(command);
        if (response.IsError)
        {
            return StatusCode(response.ErrorCode!.Value, response.ErrorText);
        }
        return Ok();
    }
}
}

View File

@ -0,0 +1,26 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<UserSecretsId>55894bef-8317-4e30-a5f0-4dcd5c3f861e</UserSecretsId>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="MediatR" Version="12.4.0" />
<PackageReference Include="Microsoft.AspNetCore.Mvc.NewtonsoftJson" Version="8.0.8" />
<PackageReference Include="Microsoft.EntityFrameworkCore" Version="8.0.8" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="8.0.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.VisualStudio.Azure.Containers.Tools.Targets" Version="1.20.1" />
<PackageReference Include="NLog.Extensions.Logging" Version="5.3.12" />
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="8.0.4" />
<PackageReference Include="PIHelperSh.Configuration" Version="1.0.1" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.4.0" />
</ItemGroup>
</Project>

View File

@ -0,0 +1,14 @@
using DSaC.Database.Models;
using Microsoft.EntityFrameworkCore;
namespace DSaC.Database
{
/// <summary>
/// EF Core database context for the service; exposes the counterparty entity set.
/// </summary>
public class DsacContext: DbContext
{
    // Provider and connection string are supplied via the injected options object.
    public DsacContext(DbContextOptions options) : base(options)
    {
    }

    // Entity set mapped to the "counterparty" table (see the Counterparty entity's attributes).
    public DbSet<Counterparty> Counterparties { get; set; }
}
}

View File

@ -0,0 +1,24 @@
using AutoMapper;
using DSaC.Models.DTOs;
using DSaC.Models.Internal.Сommands;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace DSaC.Database.Models
{
[Table("counterparty")]
[AutoMap(typeof(CounterpartyBaseDto))]
[AutoMap(typeof(CounterpartyViewDto))]
public class Counterparty
{
    // Primary key; stored as a Postgres uuid ("id" column).
    [Key]
    [Column("id")]
    public Guid Id { get; set; }

    // Display name, required, max 255 chars ("name" column).
    // Fix: non-nullable string without an initialiser raises CS8618 under
    // <Nullable>enable</Nullable>; `= null!` matches the DTOs' convention
    // (EF/AutoMapper always populate it).
    [Required]
    [MaxLength(255)]
    [Column("name")]
    public string Name { get; set; } = null!;
}
}

View File

@ -0,0 +1,24 @@
#See https://aka.ms/customizecontainer to learn how to customize your debug container and how Visual Studio uses this Dockerfile to build your images for faster debugging.

# Stage 1 ("base"): runtime-only ASP.NET Core 8 image used by the final stage.
FROM mcr.microsoft.com/dotnet/aspnet:8.0 AS base
WORKDIR /app
# Default HTTP/HTTPS ports of the .NET 8 container images.
EXPOSE 8080
EXPOSE 8081

# Stage 2 ("build"): full SDK image that restores packages and compiles.
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
# Copy only the project file first so the restore layer stays cached until
# package references change.
COPY ["DSaC.csproj", "DSaC/"]
RUN dotnet restore "DSaC/DSaC.csproj"
WORKDIR "/src/DSaC"
# Copy the remaining sources into the project directory and build.
COPY . .
RUN dotnet build "DSaC.csproj" -c $BUILD_CONFIGURATION -o /app/build

# Stage 3 ("publish"): framework-dependent publish output (no native host,
# see /p:UseAppHost=false) — the runtime image supplies `dotnet`.
FROM build AS publish
ARG BUILD_CONFIGURATION=Release
RUN dotnet publish "DSaC.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false

# Stage 4 ("final"): copy the published app onto the lean runtime image.
FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
ENTRYPOINT ["dotnet", "DSaC.dll"]

View File

@ -0,0 +1,51 @@
using AutoMapper;
using DSaC.Database;
using DSaC.Database.Models;
using DSaC.Logic.Handlers.Queries;
using DSaC.Models.DTOs;
using DSaC.Models.Internal;
using DSaC.Models.Internal.Сommands;
using MediatR;
namespace DSaC.Logic.Handlers.Commands
{
/// <summary>
/// Handles <see cref="CreateCounterpartyCommand"/>: maps the incoming DTO to a
/// <see cref="Counterparty"/> entity, inserts it and returns the stored row as
/// a view DTO.
/// </summary>
public class CreateCounterpartyCommandHandler : IRequestHandler<CreateCounterpartyCommand, ResponseModel<CounterpartyViewDto>>
{
    private readonly ILogger _logger;
    private readonly DsacContext _context;
    private readonly IMapper _mapper;

    public CreateCounterpartyCommandHandler(ILogger<CreateCounterpartyCommandHandler> logger, DsacContext context, IMapper mapper)
    {
        _logger = logger;
        _context = context;
        _mapper = mapper;
    }

    /// <summary>
    /// Inserts the counterparty described by <paramref name="request"/>.
    /// On failure logs the exception and returns an error response.
    /// </summary>
    public async Task<ResponseModel<CounterpartyViewDto>> Handle(CreateCounterpartyCommand request, CancellationToken cancellationToken)
    {
        try
        {
            var model = _mapper.Map<Counterparty>(request.Model);
            // Fix: honour the caller's cancellation token instead of ignoring it.
            var res = await _context.AddAsync(model, cancellationToken);
            await _context.SaveChangesAsync(cancellationToken);
            return new()
            {
                Value = _mapper.Map<CounterpartyViewDto>(res.Entity)
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error on creating counterparty");
            // NOTE(review): 404 is an odd status for a failed create
            // (BadRequest/500 would be more conventional), but it matches the
            // other handlers in this service, so it is kept for consistency.
            return new()
            {
                StatusCode = System.Net.HttpStatusCode.NotFound,
                ErrorText = "Cannot create counterparty"
            };
        }
    }
}
}

View File

@ -0,0 +1,50 @@
using AutoMapper;
using DSaC.Database;
using DSaC.Database.Models;
using DSaC.Models.DTOs;
using DSaC.Models.Internal;
using DSaC.Models.Internal.Сommands;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace DSaC.Logic.Handlers.Commands
{
/// <summary>
/// Handles <see cref="DeleteCounterpartyCommand"/>: bulk-deletes the row with
/// the requested id and reports 404 when nothing matched.
/// </summary>
public class DeleteCounterpartyCommandHandler : IRequestHandler<DeleteCounterpartyCommand, ResponseModel>
{
    private readonly ILogger _logger;
    private readonly DsacContext _context;
    // Unused by this handler, but kept so the DI constructor signature matches
    // the sibling handlers in this service.
    private readonly IMapper _mapper;

    public DeleteCounterpartyCommandHandler(ILogger<DeleteCounterpartyCommandHandler> logger, DsacContext context, IMapper mapper)
    {
        _logger = logger;
        _context = context;
        _mapper = mapper;
    }

    public async Task<ResponseModel> Handle(DeleteCounterpartyCommand request, CancellationToken cancellationToken)
    {
        try
        {
            // Fix: pass the cancellation token through to EF; check "any rows
            // deleted" (> 0) rather than exactly one — Id is the PK, so at most
            // one row can match, but > 0 states the intent directly.
            var res = await _context.Counterparties.Where(x => x.Id == request.Id).ExecuteDeleteAsync(cancellationToken);
            if (res > 0) return new();
            return new()
            {
                ErrorText = "Cannot find object to delete",
                StatusCode = System.Net.HttpStatusCode.NotFound
            };
        }
        catch (Exception ex)
        {
            // Fix: log-message typo ("deleteing" -> "deleting").
            _logger.LogError(ex, "Error on deleting counterparty");
            return new()
            {
                StatusCode = System.Net.HttpStatusCode.NotFound,
                ErrorText = "Cannot delete counterparty"
            };
        }
    }
}
}

View File

@ -0,0 +1,50 @@
using AutoMapper;
using DSaC.Database;
using DSaC.Database.Models;
using DSaC.Models.DTOs;
using DSaC.Models.Internal;
using DSaC.Models.Internal.Сommands;
using MediatR;
namespace DSaC.Logic.Handlers.Commands
{
/// <summary>
/// Handles <see cref="UpdateCounterpartyCommand"/>: maps the DTO onto an entity
/// keyed by the route id and saves it, returning the updated view DTO.
/// </summary>
public class UpdateCounterpartyCommandHandler : IRequestHandler<UpdateCounterpartyCommand, ResponseModel<CounterpartyViewDto>>
{
    private readonly ILogger _logger;
    private readonly DsacContext _context;
    private readonly IMapper _mapper;

    public UpdateCounterpartyCommandHandler(ILogger<UpdateCounterpartyCommandHandler> logger, DsacContext context, IMapper mapper)
    {
        _logger = logger;
        _context = context;
        _mapper = mapper;
    }

    public async Task<ResponseModel<CounterpartyViewDto>> Handle(UpdateCounterpartyCommand request, CancellationToken cancellationToken)
    {
        try
        {
            var model = _mapper.Map<Counterparty>(request.Model);
            // Fix: the route id (request.Id) was silently ignored — the Id
            // embedded in the body DTO decided which row got updated. Make the
            // route parameter authoritative, as the controller's PUT contract
            // implies.
            model.Id = request.Id;
            var res = _context.Update(model);
            // Fix: honour the caller's cancellation token.
            await _context.SaveChangesAsync(cancellationToken);
            return new()
            {
                Value = _mapper.Map<CounterpartyViewDto>(res.Entity)
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error on updating counterparty");
            return new()
            {
                StatusCode = System.Net.HttpStatusCode.NotFound,
                ErrorText = "Cannot update counterparty"
            };
        }
    }
}
}

View File

@ -0,0 +1,50 @@
using AutoMapper;
using DSaC.Database;
using DSaC.Models.DTOs;
using DSaC.Models.Internal;
using DSaC.Models.Internal.Queries;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace DSaC.Logic.Handlers.Queries
{
/// <summary>
/// Handles <see cref="GetCounterpartiesQuery"/>: returns one page of
/// counterparties, optionally filtered to a set of ids.
/// </summary>
public class GetCounterpartiesQueryHandler : IRequestHandler<GetCounterpartiesQuery, ResponseModel<List<CounterpartyViewDto>>>
{
    private readonly ILogger _logger;
    private readonly DsacContext _context;
    private readonly IMapper _mapper;

    public GetCounterpartiesQueryHandler(ILogger<GetCounterpartiesQueryHandler> logger, DsacContext context, IMapper mapper)
    {
        _logger = logger;
        _context = context;
        _mapper = mapper;
    }

    public async Task<ResponseModel<List<CounterpartyViewDto>>> Handle(GetCounterpartiesQuery request, CancellationToken cancellationToken)
    {
        try
        {
            var res = await _context.Counterparties
                .Where(x => request.Ids == null || request.Ids.Contains(x.Id))
                // Fix: Skip/Take without an ORDER BY yields nondeterministic
                // pages (row order is unspecified in SQL); order by the PK.
                .OrderBy(x => x.Id)
                .Skip(request.Page * request.PageSize).Take(request.PageSize)
                // Fix: pass the cancellation token through to the query.
                .ToListAsync(cancellationToken);
            return new()
            {
                Value = res.Select(_mapper.Map<CounterpartyViewDto>).ToList(),
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error on getting records");
            return new()
            {
                StatusCode = System.Net.HttpStatusCode.BadRequest,
                ErrorText = "Cannot get Counterparties by this request",
            };
        }
    }
}
}

View File

@ -0,0 +1,54 @@
using AutoMapper;
using DSaC.Database;
using DSaC.Models.DTOs;
using DSaC.Models.Internal;
using DSaC.Models.Internal.Queries;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace DSaC.Logic.Handlers.Queries
{
/// <summary>
/// Handles <see cref="GetCounterpartyQuery"/>: fetches a single counterparty by
/// primary key, returning 404 when it does not exist.
/// </summary>
public class GetCounterpartyQueryHandler : IRequestHandler<GetCounterpartyQuery, ResponseModel<CounterpartyViewDto>>
{
    private readonly ILogger _logger;
    private readonly DsacContext _context;
    private readonly IMapper _mapper;

    public GetCounterpartyQueryHandler(ILogger<GetCounterpartyQueryHandler> logger, DsacContext context, IMapper mapper)
    {
        _logger = logger;
        _context = context;
        _mapper = mapper;
    }

    public async Task<ResponseModel<CounterpartyViewDto>> Handle(GetCounterpartyQuery request, CancellationToken cancellationToken)
    {
        try
        {
            // Fix: pass the cancellation token through to the query.
            var res = await _context.Counterparties.FirstOrDefaultAsync(x => x.Id == request.Id, cancellationToken);
            if (res == null)
                return new()
                {
                    StatusCode = System.Net.HttpStatusCode.NotFound,
                    // Fix: user-facing typo "exsist" -> "exist".
                    ErrorText = "Counterparty with this ID does not exist",
                };
            return new()
            {
                Value = _mapper.Map<CounterpartyViewDto>(res),
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error on getting record");
            return new()
            {
                StatusCode = System.Net.HttpStatusCode.NotFound,
                ErrorText = "Cannot get Counterparty by id",
            };
        }
    }
}
}

View File

@ -0,0 +1,60 @@
// <auto-generated />
using System;
using DSaC.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DSaC.Migrations
{
[DbContext(typeof(DsacContext))]
[Migration("20240907133944_InitMigraton")]
partial class InitMigraton
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "8.0.8")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DSaC.Database.Models.Counterparty", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<string>("ContractNumber")
.IsRequired()
.HasMaxLength(50)
.HasColumnType("character varying(50)")
.HasColumnName("contract_number");
b.Property<string>("ManagerName")
.IsRequired()
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("manager_name");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("name");
b.HasKey("Id");
b.ToTable("counterparty");
});
#pragma warning restore 612, 618
}
}
}

View File

@ -0,0 +1,36 @@
using System;
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DSaC.Migrations
{
/// <inheritdoc />
/// <remarks>
/// Creates the initial "counterparty" table: uuid PK plus name,
/// contract_number and manager_name columns.
/// NOTE(review): the class name misspells "Migration"; it is tied to the
/// stored migration id "20240907133944_InitMigraton", so renaming it would
/// require re-scaffolding the migration — left as-is.
/// </remarks>
public partial class InitMigraton : Migration
{
    /// <inheritdoc />
    protected override void Up(MigrationBuilder migrationBuilder)
    {
        migrationBuilder.CreateTable(
            name: "counterparty",
            columns: table => new
            {
                id = table.Column<Guid>(type: "uuid", nullable: false),
                name = table.Column<string>(type: "character varying(255)", maxLength: 255, nullable: false),
                contract_number = table.Column<string>(type: "character varying(50)", maxLength: 50, nullable: false),
                manager_name = table.Column<string>(type: "character varying(255)", maxLength: 255, nullable: false)
            },
            constraints: table =>
            {
                table.PrimaryKey("PK_counterparty", x => x.id);
            });
    }

    /// <inheritdoc />
    protected override void Down(MigrationBuilder migrationBuilder)
    {
        migrationBuilder.DropTable(
            name: "counterparty");
    }
}
}

View File

@ -0,0 +1,64 @@
// <auto-generated />
using System;
using DSaC.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DSaC.Migrations
{
[DbContext(typeof(DsacContext))]
[Migration("20240907140843_ChangingContractInfo")]
partial class ChangingContractInfo
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "8.0.8")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DSaC.Database.Models.Counterparty", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<string>("ContractNumber")
.IsRequired()
.HasMaxLength(50)
.HasColumnType("character varying(50)")
.HasColumnName("contract_number");
b.Property<bool>("IsContractClosed")
.HasColumnType("boolean")
.HasColumnName("is_contract_closed");
b.Property<string>("ManagerName")
.IsRequired()
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("manager_name");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("name");
b.HasKey("Id");
b.ToTable("counterparty");
});
#pragma warning restore 612, 618
}
}
}

View File

@ -0,0 +1,29 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DSaC.Migrations
{
/// <inheritdoc />
/// <remarks>
/// Adds the non-nullable "is_contract_closed" flag (default false) to the
/// "counterparty" table.
/// </remarks>
public partial class ChangingContractInfo : Migration
{
    /// <inheritdoc />
    protected override void Up(MigrationBuilder migrationBuilder)
    {
        migrationBuilder.AddColumn<bool>(
            name: "is_contract_closed",
            table: "counterparty",
            type: "boolean",
            nullable: false,
            defaultValue: false);
    }

    /// <inheritdoc />
    protected override void Down(MigrationBuilder migrationBuilder)
    {
        migrationBuilder.DropColumn(
            name: "is_contract_closed",
            table: "counterparty");
    }
}
}

View File

@ -0,0 +1,63 @@
// <auto-generated />
using System;
using DSaC.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DSaC.Migrations
{
[DbContext(typeof(DsacContext))]
[Migration("20240907141343_SmallHotfix")]
partial class SmallHotfix
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "8.0.8")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DSaC.Database.Models.Counterparty", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<string>("ContractNumber")
.IsRequired()
.HasMaxLength(50)
.HasColumnType("character varying(50)")
.HasColumnName("contract_number");
b.Property<bool>("IsContractClosed")
.HasColumnType("boolean")
.HasColumnName("is_contract_closed");
b.Property<string>("ManagerName")
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("manager_name");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("name");
b.HasKey("Id");
b.ToTable("counterparty");
});
#pragma warning restore 612, 618
}
}
}

View File

@ -0,0 +1,40 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DSaC.Migrations
{
/// <inheritdoc />
/// <remarks>
/// Relaxes "manager_name" on the "counterparty" table from NOT NULL to
/// nullable; Down restores NOT NULL with an empty-string default so existing
/// NULLs do not block the rollback.
/// </remarks>
public partial class SmallHotfix : Migration
{
    /// <inheritdoc />
    protected override void Up(MigrationBuilder migrationBuilder)
    {
        migrationBuilder.AlterColumn<string>(
            name: "manager_name",
            table: "counterparty",
            type: "character varying(255)",
            maxLength: 255,
            nullable: true,
            oldClrType: typeof(string),
            oldType: "character varying(255)",
            oldMaxLength: 255);
    }

    /// <inheritdoc />
    protected override void Down(MigrationBuilder migrationBuilder)
    {
        migrationBuilder.AlterColumn<string>(
            name: "manager_name",
            table: "counterparty",
            type: "character varying(255)",
            maxLength: 255,
            nullable: false,
            defaultValue: "",
            oldClrType: typeof(string),
            oldType: "character varying(255)",
            oldMaxLength: 255,
            oldNullable: true);
    }
}
}

View File

@ -0,0 +1,48 @@
// <auto-generated />
using System;
using DSaC.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DSaC.Migrations
{
[DbContext(typeof(DsacContext))]
[Migration("20240914065929_after-reconstruction")]
partial class afterreconstruction
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "8.0.8")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DSaC.Database.Models.Counterparty", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("name");
b.HasKey("Id");
b.ToTable("counterparty");
});
#pragma warning restore 612, 618
}
}
}

View File

@ -0,0 +1,52 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DSaC.Migrations
{
/// <inheritdoc />
/// <remarks>
/// Drops the contract-related columns (contract_number, is_contract_closed,
/// manager_name) from "counterparty" — the Counterparty entity now carries
/// only id and name. NOTE(review): the lower-case class name comes from the
/// scaffolded migration name "after-reconstruction"; renaming it would break
/// the link to migration id "20240914065929_after-reconstruction".
/// </remarks>
public partial class afterreconstruction : Migration
{
    /// <inheritdoc />
    protected override void Up(MigrationBuilder migrationBuilder)
    {
        migrationBuilder.DropColumn(
            name: "contract_number",
            table: "counterparty");

        migrationBuilder.DropColumn(
            name: "is_contract_closed",
            table: "counterparty");

        migrationBuilder.DropColumn(
            name: "manager_name",
            table: "counterparty");
    }

    /// <inheritdoc />
    protected override void Down(MigrationBuilder migrationBuilder)
    {
        migrationBuilder.AddColumn<string>(
            name: "contract_number",
            table: "counterparty",
            type: "character varying(50)",
            maxLength: 50,
            nullable: false,
            defaultValue: "");

        migrationBuilder.AddColumn<bool>(
            name: "is_contract_closed",
            table: "counterparty",
            type: "boolean",
            nullable: false,
            defaultValue: false);

        migrationBuilder.AddColumn<string>(
            name: "manager_name",
            table: "counterparty",
            type: "character varying(255)",
            maxLength: 255,
            nullable: true);
    }
}
}

View File

@ -0,0 +1,45 @@
// <auto-generated />
using System;
using DSaC.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DSaC.Migrations
{
[DbContext(typeof(DsacContext))]
partial class DsacContextModelSnapshot : ModelSnapshot
{
protected override void BuildModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "8.0.8")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DSaC.Database.Models.Counterparty", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("name");
b.HasKey("Id");
b.ToTable("counterparty");
});
#pragma warning restore 612, 618
}
}
}

View File

@ -0,0 +1,17 @@
using System.ComponentModel.DataAnnotations;
namespace DSaC.Models.DTOs
{
/// <summary>Contract payload exposed by the second service's API.</summary>
public class ContractViewDto
{
    [Required(ErrorMessage = "Contract number must be present")]
    public string ContractNumber { get; set; } = null!;

    [Required(ErrorMessage = "Manager name must be present")]
    public string ManagerName { get; set; } = null!;

    // NOTE(review): name misspells "CreationTime"; it mirrors the entity
    // property of the same (misspelled) name and is part of the JSON contract,
    // so renaming it here alone would be a breaking change — fix in a
    // coordinated pass across entity, DTOs and clients.
    public DateTime CreatinTime { get; set; }

    public bool IsClosed { get; set; }
}
}

View File

@ -0,0 +1,13 @@

using AutoMapper;
using DSaC.Database.Models;
using System.ComponentModel.DataAnnotations;
namespace DSaC.Models.DTOs
{
/// <summary>Create/update payload for a counterparty: just the display name.</summary>
public class CounterpartyBaseDto
{
    // Validated by ASP.NET model binding; the error text is returned to clients.
    [Required(ErrorMessage = "Cpty name must be present")]
    public string Name { get; set; } = null!;
}
}

View File

@ -0,0 +1,11 @@
using AutoMapper;
using DSaC.Database.Models;
namespace DSaC.Models.DTOs
{
/// <summary>
/// Read model for a counterparty: the base fields plus the primary key.
/// Mapped from the <see cref="Counterparty"/> entity via AutoMapper.
/// </summary>
[AutoMap(typeof(Counterparty))]
public class CounterpartyViewDto : CounterpartyBaseDto
{
    public Guid Id { get; set; }
}
}

View File

@ -0,0 +1,14 @@
using DSaC.Models.DTOs;
using MediatR;
namespace DSaC.Models.Internal.Queries
{
/// <summary>Query for one page of counterparties.</summary>
public class GetCounterpartiesQuery : IRequest<ResponseModel<List<CounterpartyViewDto>>>
{
    // null = no filtering; otherwise only rows with these ids are returned.
    public List<Guid>? Ids { get; set; }
    // Zero-based page index (the handler skips Page * PageSize rows).
    public int Page { get; set; }
    public int PageSize { get; set; }
}
}

View File

@ -0,0 +1,10 @@
using DSaC.Models.DTOs;
using MediatR;
namespace DSaC.Models.Internal.Queries
{
/// <summary>Query for a single counterparty by primary key.</summary>
public class GetCounterpartyQuery : IRequest<ResponseModel<CounterpartyViewDto>>
{
    public Guid Id { get; set; }
}
}

View File

@ -0,0 +1,30 @@
using System.Net;
namespace DSaC.Models.Internal
{
/// <summary>
/// Envelope returned by every MediatR handler in this service: either success
/// (no error text, code 200) or an error (message plus HTTP status code).
/// </summary>
public class ResponseModel
{
    // Fix: was a public field; made an auto-property so it behaves like every
    // other member of this project's models (and is visible to serializers
    // that only consider properties). Source-compatible for all callers.
    public string? ErrorText { get; set; }

    /// <summary>True when <see cref="ErrorText"/> carries a message.</summary>
    public bool IsError => !string.IsNullOrEmpty(ErrorText);

    // Single backing store shared by the numeric and enum views of the status.
    // Defaults to 200 so a bare `new ResponseModel()` reads as success.
    private int? _errorCode = 200;

    /// <summary>Numeric HTTP status code (default 200).</summary>
    public int? ErrorCode
    {
        get => _errorCode;
        set => _errorCode = value;
    }

    /// <summary>Enum view over the same value as <see cref="ErrorCode"/>.</summary>
    public HttpStatusCode? StatusCode
    {
        get => (HttpStatusCode?)_errorCode;
        set => _errorCode = (int?)value;
    }
}
/// <summary>Response envelope carrying a payload of type <typeparamref name="T"/> on success.</summary>
public class ResponseModel<T> : ResponseModel
{
    public T? Value { get; set; }
}
}

View File

@ -0,0 +1,10 @@
using DSaC.Models.DTOs;
using MediatR;
namespace DSaC.Models.Internal.Сommands
{
/// <summary>Command to create a counterparty from the base DTO.</summary>
/// <remarks>
/// NOTE(review): the enclosing namespace segment "Сommands" begins with a
/// Cyrillic "С"; it is used consistently across the project, but it will
/// defeat grep/IDE navigation typed with the Latin letter.
/// </remarks>
public class CreateCounterpartyCommand : IRequest<ResponseModel<CounterpartyViewDto>>
{
    public CounterpartyBaseDto Model { get; set; } = null!;
}
}

View File

@ -0,0 +1,9 @@
using MediatR;
namespace DSaC.Models.Internal.Сommands
{
/// <summary>Command to delete a counterparty by id; the response carries no payload.</summary>
public class DeleteCounterpartyCommand : IRequest<ResponseModel>
{
    public Guid Id { get; set; }
}
}

View File

@ -0,0 +1,14 @@
using DSaC.Models.DTOs;
using MediatR;
using System.ComponentModel.DataAnnotations;
namespace DSaC.Models.Internal.Сommands
{
/// <summary>Command to update the counterparty addressed by <see cref="Id"/>.</summary>
public class UpdateCounterpartyCommand : IRequest<ResponseModel<CounterpartyViewDto>>
{
    // Route id supplied by the controller's PUT endpoint.
    public Guid Id { get; set; }
    // NOTE(review): the error message is missing a space ("UpdateMessageMust
    // be present") — it is a runtime string, so it is left unchanged here.
    [Required(ErrorMessage = "UpdateMessageMust be present")]
    public CounterpartyViewDto Model { get; set; } = null!;
}
}

View File

@ -0,0 +1,128 @@
using DSaC.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.OpenApi.Models;
using NLog.Config;
using NLog.Extensions.Logging;
using NLog.Targets;
using PIHelperSh.Configuration;
using PIHelperSh.Configuration.Attributes;
using System.Reflection;
using LogLevel = NLog.LogLevel;
// PIHelperSh.Configuration marker: registers this type for the [Constant]
// injection performed by AddConfigurations/AddConstants below.
[TrackedType]
public class Program
{
    private static WebApplication? app;

    // Populated from the "Database" configuration block by PIHelperSh
    // (presumably via reflection when AddConstants() runs — TODO confirm).
    // NOTE(review): declared non-nullable but never initialised in code;
    // startup will NRE in ConfigureDatabase if the config block is missing.
    [Constant(BlockName = "Database")]
    private static string ConnectionString;

    // Populated from "GatewaySettings"; empty string means "no gateway prefix".
    [Constant(BlockName = "GatewaySettings")]
    private static string AppPrefix;

    public static void Main(string[] args)
    {
        var builder = WebApplication.CreateBuilder(args);

        ConfigureLogger(builder);
        ConfigureServices(builder);
        ConfigureDatabase(builder);

        app = builder.Build();

        // Kick off the EF migration immediately after Build(); the task is
        // awaited (t.Wait()) just before Run(), so the middleware pipeline is
        // configured while the migration runs in the background.
        var t = MigrateDatabase();

        app.UseSwagger(c =>
        {
            if (!string.IsNullOrEmpty(AppPrefix))
            {
                //c.RouteTemplate = AppPrefix + "/swagger/{documentName}/swagger.json";
                // Rewrite the advertised server URL so Swagger "Try it out"
                // works when the service sits behind a gateway path prefix.
                c.PreSerializeFilters.Add((swaggerDoc, httpReq) =>
                {
                    swaggerDoc.Servers = new List<OpenApiServer> { new OpenApiServer { Url = $"{httpReq.Scheme}://{httpReq.Host.Value}/{AppPrefix}" } };
                });
            }
        });
        app.UseSwaggerUI(c =>
        {
            //if (!string.IsNullOrEmpty(AppPrefix))
            //{
            //    c.SwaggerEndpoint($"/{AppPrefix}/swagger/v1/swagger.json", $"APP API");
            //    c.RoutePrefix = $"{AppPrefix}/swagger";
            //}
        });

        app.UseHttpsRedirection();
        app.UseAuthorization();
        app.MapControllers();

        // Block until the migration kicked off above has finished, then serve.
        t.Wait();
        app.Run();
    }

    // Replaces the default logging providers with NLog: framework noise goes
    // to a file (or, for AspNetCore Trace, a null target), app logs go to both
    // console (Info+) and a per-day file (Debug+).
    private static void ConfigureLogger(WebApplicationBuilder builder)
    {
        var nLogConfig = new LoggingConfiguration();
        var logConsole = new ConsoleTarget();
        // Swallows AspNetCore Trace output entirely (rule is final: true).
        var blackhole = new NullTarget();
        var logFile = new FileTarget()
        {
            FileName = "${basedir}/logs/${shortdate}_logs.log"
        };

        nLogConfig.AddRule(LogLevel.Trace, LogLevel.Trace, blackhole, "Microsoft.AspNetCore.*", true);
        nLogConfig.AddRule(LogLevel.Info, LogLevel.Warn, logFile, "Microsoft.EntityFrameworkCore.*", true);
        nLogConfig.AddRule(LogLevel.Info, LogLevel.Warn, logFile, "Microsoft.AspNetCore.*", true);
        nLogConfig.AddRule(LogLevel.Info, LogLevel.Warn, logFile, "System.Net.Http.HttpClient.Refit.*", true);
        nLogConfig.AddRule(LogLevel.Info, LogLevel.Error, logConsole);
        nLogConfig.AddRule(LogLevel.Debug, LogLevel.Error, logFile);

        builder.Logging.ClearProviders();
        builder.Services.AddLogging(m => m.AddNLog(nLogConfig));
    }

    // Registers configuration constants, AutoMapper profiles, MediatR handlers,
    // controllers (Newtonsoft JSON) and the Swagger document.
    private static void ConfigureServices(WebApplicationBuilder builder)
    {
        builder.Services.AddConfigurations(builder.Configuration);
        // Triggers population of the [Constant] statics above — TODO confirm.
        builder.Configuration.AddConstants();

        builder.Services.AddAutoMapper(AppDomain.CurrentDomain.GetAssemblies());
        builder.Services.AddMediatR(cfg => cfg.RegisterServicesFromAssemblyContaining<Program>());

        builder.Services.AddControllers().AddNewtonsoftJson();
        builder.Services.AddEndpointsApiExplorer();
        builder.Services.AddSwaggerGen(c =>
        {
            c.SwaggerDoc("v1", new OpenApiInfo
            {
                Title = "Distributed computing and applications",
                Version = "v1",
                Description = ""
            });
            //c.EnableAnnotations();
        });
    }

    // Wires the EF Core context to PostgreSQL using the injected connection string.
    private static void ConfigureDatabase(WebApplicationBuilder builder)
    {
        builder.Services.AddDbContext<DsacContext>(options =>
        {
            options.UseNpgsql(ConnectionString);
        });
    }

    // Applies pending EF migrations at startup; silently a no-op when the
    // context cannot be resolved (app is null before Build()).
    private static async Task MigrateDatabase()
    {
        var context = app?.Services.CreateScope().ServiceProvider.GetService<DsacContext>();
        if (context != null)
            await context.Database.MigrateAsync();
    }
}

View File

@ -0,0 +1,52 @@
{
"profiles": {
"http": {
"commandName": "Project",
"launchBrowser": true,
"launchUrl": "swagger",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
},
"dotnetRunMessages": true,
"applicationUrl": "http://localhost:5062"
},
"https": {
"commandName": "Project",
"launchBrowser": true,
"launchUrl": "swagger",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
},
"dotnetRunMessages": true,
"applicationUrl": "https://localhost:7219;http://localhost:5062"
},
"IIS Express": {
"commandName": "IISExpress",
"launchBrowser": true,
"launchUrl": "swagger",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
},
"Container (Dockerfile)": {
"commandName": "Docker",
"launchBrowser": true,
"launchUrl": "{Scheme}://{ServiceHost}:{ServicePort}/swagger",
"environmentVariables": {
"ASPNETCORE_HTTPS_PORTS": "8081",
"ASPNETCORE_HTTP_PORTS": "8080"
},
"publishAllPorts": true,
"useSSL": true
}
},
"$schema": "http://json.schemastore.org/launchsettings.json",
"iisSettings": {
"windowsAuthentication": false,
"anonymousAuthentication": true,
"iisExpress": {
"applicationUrl": "http://localhost:9154",
"sslPort": 44381
}
}
}

View File

@ -0,0 +1,14 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"Database": {
"ConnectionString": "Host=db.dev-moio.online;Port=31153;Database=dsac_maxim;Username=postgres;Password=postgres_password"
},
"GatewaySettings": {
"AppPrefix": ""
}
}

View File

@ -0,0 +1,9 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"AllowedHosts": "*"
}

View File

@ -0,0 +1,88 @@
using DSaC_second.Models.DTOs;
using DSaC_second.Models.Internal.Queries;
using DSaC_second.Models.Internal.Сommands;
using MediatR;
using Microsoft.AspNetCore.Components.Forms;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
namespace DSaC_second.Controllers
{
[Route("api/[controller]")]
[ApiController]
public class ContractController : ControllerBase
{
    // Single entry point into the CQRS pipeline; every action is a thin
    // wrapper that builds a query/command and forwards it to MediatR.
    private readonly IMediator _mediator;

    public ContractController(IMediator mediator)
    {
        _mediator = mediator;
    }

    /// <summary>Returns a page of contracts, optionally filtered by ids.</summary>
    [HttpGet("")]
    public async Task<IActionResult> GetContracts(
        [FromQuery] int page = 0,
        [FromQuery] int pageSize = 10,
        [FromQuery] List<Guid>? ids = null
    )
    {
        var response = await _mediator.Send(new GetContractsQuery
        {
            Page = page,
            PageSize = pageSize,
            Ids = ids
        });
        if (response.IsError)
            return StatusCode(response.ErrorCode!.Value, response.ErrorText);
        return Ok(response.Value);
    }

    /// <summary>Returns a single contract (with its counterparty) by id.</summary>
    [HttpGet("{uuid:guid}")]
    public async Task<IActionResult> GetFullContract([FromRoute] Guid uuid)
    {
        var response = await _mediator.Send(new GetContractQuery { Id = uuid });
        if (response.IsError)
            return StatusCode(response.ErrorCode!.Value, response.ErrorText);
        return Ok(response.Value);
    }

    /// <summary>Creates a contract from the posted payload.</summary>
    [HttpPost("")]
    public async Task<IActionResult> CreateContract([FromBody] ContractBaseDto dto)
    {
        var response = await _mediator.Send(new CreateContractCommand { Model = dto });
        if (response.IsError)
            return StatusCode(response.ErrorCode!.Value, response.ErrorText);
        return Ok(response.Value);
    }

    /// <summary>Updates the contract addressed by the route id.</summary>
    [HttpPut("{uuid:guid}")]
    public async Task<IActionResult> UpdateContract([FromRoute] Guid uuid, [FromBody] ContractViewDto dto)
    {
        var response = await _mediator.Send(new UpdateContractCommand
        {
            Id = uuid,
            Model = dto
        });
        if (response.IsError)
            return StatusCode(response.ErrorCode!.Value, response.ErrorText);
        return Ok(response.Value);
    }

    /// <summary>Deletes the contract addressed by the route id.</summary>
    [HttpDelete("{uuid:guid}")]
    public async Task<IActionResult> DeleteContract([FromRoute] Guid uuid)
    {
        var response = await _mediator.Send(new DeleteContractCommand { Id = uuid });
        if (response.IsError)
            return StatusCode(response.ErrorCode!.Value, response.ErrorText);
        return Ok();
    }
}
}

View File

@ -0,0 +1,27 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<UserSecretsId>39d29416-63e9-4884-9c5f-9d6ff461995f</UserSecretsId>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="MediatR" Version="12.4.1" />
<PackageReference Include="Microsoft.AspNetCore.Mvc.NewtonsoftJson" Version="8.0.8" />
<PackageReference Include="Microsoft.EntityFrameworkCore" Version="8.0.8" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="8.0.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.VisualStudio.Azure.Containers.Tools.Targets" Version="1.20.1" />
<PackageReference Include="NLog.Extensions.Logging" Version="5.3.13" />
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="8.0.4" />
<PackageReference Include="PIHelperSh.Configuration" Version="1.0.1" />
<PackageReference Include="RestSharp" Version="112.0.0" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.4.0" />
</ItemGroup>
</Project>

View File

@ -0,0 +1,15 @@
using DSaC_second.Database.Models;
using Microsoft.EntityFrameworkCore;
namespace DSaC_second.Database
{
/// <summary>
/// EF Core context for the contract service; exposes the <see cref="Contract"/> set.
/// </summary>
public class DsacContext : DbContext
{
    public DsacContext(DbContextOptions options) : base(options)
    {
        // NOTE(review): this is a process-wide AppContext switch, presumably
        // needed so local DateTime values can be written to timestamp columns
        // (TODO confirm). Setting it in a per-request context constructor
        // works but is repeated on every instantiation; it belongs in startup.
        AppContext.SetSwitch("Npgsql.EnableLegacyTimestampBehavior", true);
    }

    public DbSet<Contract> Contracts { get; set; }
}
}

View File

@ -0,0 +1,38 @@
using AutoMapper;
using DSaC_second.Models.DTOs;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace DSaC_second.Database.Models
{
[Table("contract")]
[AutoMap(typeof(ContractBaseDto))]
[AutoMap(typeof(ContractViewDto))]
public class Contract
{
    // Primary key ("id", uuid).
    [Key]
    [Column("id")]
    public Guid Id { get; set; }

    // Fix: non-nullable strings without an initialiser raise CS8618 under
    // <Nullable>enable</Nullable>; `= null!` matches the DTOs' convention.
    [Column("contract_number")]
    [Required]
    public string ContractNumber { get; set; } = null!;

    [Column("manager_name")]
    [Required]
    public string ManagerName { get; set; } = null!;

    // Foreign key to a counterparty owned by the first service (validated
    // over MediatR in CreateContractCommandHandler, not by a DB constraint).
    [Column("counterparty_id")]
    [Required]
    public Guid CounterpartyId { get; set; }

    // NOTE(review): name misspells "CreationTime" but the DTOs carry the same
    // misspelling, so renaming must be coordinated across both. Uses local
    // server time — consider DateTime.UtcNow; confirm with the data owners.
    [Column("creation_time")]
    [Required]
    public DateTime CreatinTime { get; set; } = DateTime.Now;

    [Column("is_closed")]
    [DefaultValue(false)]
    public bool IsClosed { get; set; }
}
}

View File

@ -0,0 +1,24 @@
#See https://aka.ms/customizecontainer to learn how to customize your debug container and how Visual Studio uses this Dockerfile to build your images for faster debugging.

# Stage 1 ("base"): runtime-only ASP.NET Core 8 image used by the final stage.
FROM mcr.microsoft.com/dotnet/aspnet:8.0 AS base
WORKDIR /app
# Default HTTP/HTTPS ports of the .NET 8 container images.
EXPOSE 8080
EXPOSE 8081

# Stage 2 ("build"): full SDK image that restores packages and compiles.
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
# Copy only the project file first so the restore layer stays cached until
# package references change.
COPY ["DSaC_second.csproj", "DSaC_second/"]
RUN dotnet restore "DSaC_second/DSaC_second.csproj"
WORKDIR "/src/DSaC_second"
# Copy the remaining sources into the project directory and build.
COPY . .
RUN dotnet build "DSaC_second.csproj" -c $BUILD_CONFIGURATION -o /app/build

# Stage 3 ("publish"): framework-dependent publish output (no native host,
# see /p:UseAppHost=false) — the runtime image supplies `dotnet`.
FROM build AS publish
ARG BUILD_CONFIGURATION=Release
RUN dotnet publish "DSaC_second.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false

# Stage 4 ("final"): copy the published app onto the lean runtime image.
FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
ENTRYPOINT ["dotnet", "DSaC_second.dll"]

View File

@ -0,0 +1,76 @@
using AutoMapper;
using DSaC_second.Database;
using DSaC_second.Database.Models;
using DSaC_second.Logic.Handlers.Queries;
using DSaC_second.Models.DTOs;
using DSaC_second.Models.Internal;
using DSaC_second.Models.Internal.Queries;
using DSaC_second.Models.Internal.Сommands;
using MediatR;
namespace DSaC_second.Logic.Handlers.Commands
{
/// <summary>
/// Handles <see cref="CreateContractCommand"/>: verifies that the referenced
/// counterparty exists (queried over MediatR), then inserts the contract and
/// returns it with the counterparty embedded.
/// </summary>
public class CreateContractCommandHandler : IRequestHandler<CreateContractCommand, ResponseModel<ContractFullDto>>
{
    private readonly ILogger _logger;
    private readonly DsacContext _context;
    private readonly IMapper _mapper;
    private readonly IMediator _mediator;

    public CreateContractCommandHandler(ILogger<CreateContractCommandHandler> logger, DsacContext context, IMapper mapper, IMediator mediator)
    {
        _logger = logger;
        _context = context;
        _mapper = mapper;
        _mediator = mediator;
    }

    public async Task<ResponseModel<ContractFullDto>> Handle(CreateContractCommand request, CancellationToken cancellationToken)
    {
        try
        {
            // NOTE(review): the query type misspells "Counterparty"; it is
            // declared elsewhere, so it cannot be renamed from this handler.
            var counterparty = await _mediator.Send(new GetConunterpartyQuery()
            {
                Id = request.Model.CounterpartyId,
            }, cancellationToken: cancellationToken);
            if (counterparty.IsError)
                return new()
                {
                    StatusCode = System.Net.HttpStatusCode.NotFound,
                    ErrorText = counterparty.ErrorText,
                };
            if (counterparty.Value == null)
                return new()
                {
                    StatusCode = System.Net.HttpStatusCode.NotFound,
                    ErrorText = "Cpty with this id not found",
                };

            var model = _mapper.Map<Contract>(request.Model);
            // Fix: the tracked entry returned by AddAsync was stored in an
            // unused local ("outModel"); "model" is the same tracked instance,
            // so the entry can simply be discarded.
            await _context.Contracts.AddAsync(model, cancellationToken: cancellationToken);
            await _context.SaveChangesAsync(cancellationToken);

            var res = _mapper.Map<ContractFullDto>(model);
            res.Counterparty = counterparty.Value!;
            return new()
            {
                Value = res,
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error on creating record");
            return new()
            {
                StatusCode = System.Net.HttpStatusCode.NotFound,
                ErrorText = "Cannot create contract",
            };
        }
    }
}
}

View File

@ -0,0 +1,43 @@
using AutoMapper;
using DSaC_second.Database;
using DSaC_second.Models.Internal;
using DSaC_second.Models.Internal.Сommands;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace DSaC_second.Logic.Handlers.Commands
{
/// <summary>
/// Handles <see cref="DeleteContractCommand"/> by deleting the contract row
/// directly in the database.
/// </summary>
public class DeleteContractCommandHandler: IRequestHandler<DeleteContractCommand, ResponseModel>
{
    private readonly ILogger _logger;
    private readonly DsacContext _context;
    // NOTE(review): _mapper is injected but never used by this handler.
    private readonly IMapper _mapper;

    public DeleteContractCommandHandler(ILogger<DeleteContractCommandHandler> logger, DsacContext context, IMapper mapper)
    {
        _logger = logger;
        _context = context;
        _mapper = mapper;
    }

    /// <summary>
    /// Deletes the contract with <c>request.Id</c>. Returns a success response even
    /// when no row matched; on failure logs and returns an error response.
    /// </summary>
    public async Task<ResponseModel> Handle(DeleteContractCommand request, CancellationToken cancellationToken)
    {
        try
        {
            // Set-based delete executed on the database side; no entity is loaded or
            // tracked. A non-existent id simply affects zero rows.
            await _context.Contracts.Where(x => x.Id == request.Id).ExecuteDeleteAsync(cancellationToken);
            return new();
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error on deleting record");
            return new()
            {
                StatusCode = System.Net.HttpStatusCode.NotFound,
                ErrorText = "Cannot delete contract by id",
            };
        }
    }
}
}

View File

@ -0,0 +1,78 @@
using AutoMapper;
using DSaC_second.Database;
using DSaC_second.Database.Models;
using DSaC_second.Models.DTOs;
using DSaC_second.Models.Internal;
using DSaC_second.Models.Internal.Queries;
using DSaC_second.Models.Internal.Сommands;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace DSaC_second.Logic.Handlers.Commands
{
/// <summary>
/// Handles <see cref="UpdateContractCommand"/>: verifies the referenced counterparty
/// exists, then writes the updated contract state.
/// </summary>
public class UpdateContractCommandHandler : IRequestHandler<UpdateContractCommand, ResponseModel<ContractFullDto>>
{
    private readonly ILogger _logger;
    private readonly DsacContext _context;
    private readonly IMapper _mapper;
    private readonly IMediator _mediator;

    public UpdateContractCommandHandler(ILogger<UpdateContractCommandHandler> logger, DsacContext context, IMapper mapper, IMediator mediator)
    {
        _logger = logger;
        _context = context;
        _mapper = mapper;
        _mediator = mediator;
    }

    /// <summary>
    /// Updates the contract identified by <c>request.Id</c> with the state from
    /// <c>request.Model</c>. Returns NotFound when the counterparty lookup fails;
    /// unexpected errors are logged and reported as an error response.
    /// </summary>
    public async Task<ResponseModel<ContractFullDto>> Handle(UpdateContractCommand request, CancellationToken cancellationToken)
    {
        try
        {
            var counterparty = await _mediator.Send(new GetConunterpartyQuery()
            {
                Id = request.Model.CounterpartyId,
            }, cancellationToken: cancellationToken);
            if (counterparty.IsError)
                return new()
                {
                    StatusCode = System.Net.HttpStatusCode.NotFound,
                    ErrorText = counterparty.ErrorText,
                };
            if(counterparty.Value == null)
                return new()
                {
                    StatusCode = System.Net.HttpStatusCode.NotFound,
                    ErrorText = "Cpty with this id not found",
                };

            var model = _mapper.Map<Contract>(request.Model);
            // Fix: the row to update is identified by request.Id (the route id).
            // Previously request.Id was ignored and the id mapped from the body DTO was
            // used, so a body carrying a mismatched id silently updated a different row.
            model.Id = request.Id;
            _context.Contracts.Update(model);
            await _context.SaveChangesAsync(cancellationToken: cancellationToken);

            var res = _mapper.Map<ContractFullDto>(model);
            res.Counterparty = counterparty.Value!;
            return new()
            {
                Value = res
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error on updating record");
            return new()
            {
                StatusCode = System.Net.HttpStatusCode.NotFound,
                ErrorText = "Cannot update contract by id",
            };
        }
    }
}
}

View File

@ -0,0 +1,71 @@
using AutoMapper;
using DSaC_second.Database;
using DSaC_second.Models.DTOs;
using DSaC_second.Models.Internal;
using DSaC_second.Models.Internal.Queries;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace DSaC_second.Logic.Handlers.Queries
{
/// <summary>
/// Handles <see cref="GetContractQuery"/>: loads a contract by id and enriches it
/// with the counterparty resolved from the first service.
/// </summary>
public class GetContractQueryHandler : IRequestHandler<GetContractQuery, ResponseModel<ContractFullDto>>
{
    private readonly ILogger _logger;
    private readonly DsacContext _context;
    private readonly IMapper _mapper;
    private readonly IMediator _mediator;

    public GetContractQueryHandler(ILogger<GetContractQueryHandler> logger, DsacContext context, IMapper mapper, IMediator mediator)
    {
        _logger = logger;
        _context = context;
        _mapper = mapper;
        _mediator = mediator;
    }

    /// <summary>
    /// Returns the contract with <c>request.Id</c> (NotFound when missing or when the
    /// counterparty lookup fails); unexpected errors are logged and reported as an
    /// error response.
    /// </summary>
    public async Task<ResponseModel<ContractFullDto>> Handle(GetContractQuery request, CancellationToken cancellationToken)
    {
        try
        {
            var res = await _context.Contracts.FirstOrDefaultAsync(x => x.Id == request.Id, cancellationToken: cancellationToken);
            if (res == null)
                return new()
                {
                    StatusCode = System.Net.HttpStatusCode.NotFound,
                    // Fixed garbled error message ("Contract with whit Id is not exist").
                    ErrorText = "Contract with this Id does not exist"
                };

            var counterparty = await _mediator.Send(new GetConunterpartyQuery()
            {
                Id = res.CounterpartyId
            }, cancellationToken: cancellationToken);
            if (counterparty.IsError)
                return new()
                {
                    StatusCode = System.Net.HttpStatusCode.NotFound,
                    ErrorText = counterparty.ErrorText
                };

            var model = _mapper.Map<ContractFullDto>(res);
            model.Counterparty = counterparty.Value!;
            return new()
            {
                Value = model,
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error on getting record");
            return new()
            {
                StatusCode = System.Net.HttpStatusCode.NotFound,
                ErrorText = "Cannot get contract by id",
            };
        }
    }
}
}

View File

@ -0,0 +1,50 @@
using AutoMapper;
using DSaC_second.Database;
using DSaC_second.Models.DTOs;
using DSaC_second.Models.Internal;
using DSaC_second.Models.Internal.Queries;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace DSaC_second.Logic.Handlers.Queries
{
/// <summary>
/// Handles <see cref="GetContractsQuery"/>: returns one page of contracts,
/// optionally restricted to a set of ids.
/// </summary>
public class GetContractsQueryHandler : IRequestHandler<GetContractsQuery, ResponseModel<List<ContractViewDto>>>
{
    private readonly ILogger _logger;
    private readonly DsacContext _context;
    private readonly IMapper _mapper;

    public GetContractsQueryHandler(ILogger<GetContractsQueryHandler> logger, DsacContext context, IMapper mapper)
    {
        _logger = logger;
        _context = context;
        _mapper = mapper;
    }

    /// <summary>
    /// Applies the optional id filter, then offset/limit paging, and maps the
    /// resulting entities to view DTOs. Errors are logged and reported in the response.
    /// </summary>
    public async Task<ResponseModel<List<ContractViewDto>>> Handle(GetContractsQuery request, CancellationToken cancellationToken)
    {
        try
        {
            // When Ids is null the predicate passes every row (no filtering).
            var filtered = _context.Contracts
                .Where(contract => request.Ids == null || request.Ids.Contains(contract.Id));

            var page = await filtered
                .Skip(request.Page * request.PageSize)
                .Take(request.PageSize)
                .ToListAsync(cancellationToken: cancellationToken);

            var dtos = page.Select(_mapper.Map<ContractViewDto>).ToList();
            return new()
            {
                Value = dtos,
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error on getting records");
            return new()
            {
                StatusCode = System.Net.HttpStatusCode.NotFound,
                ErrorText = "Cannot get contracts",
            };
        }
    }
}
}

View File

@ -0,0 +1,72 @@
using AutoMapper;
using DSaC_second.Database;
using DSaC_second.Logic.Handlers.Commands;
using DSaC_second.Models.DTOs;
using DSaC_second.Models.Internal;
using DSaC_second.Models.Internal.Queries;
using MediatR;
using PIHelperSh.Configuration.Attributes;
using RestSharp;
namespace DSaC_second.Logic.Handlers.Queries
{
/// <summary>
/// Resolves a counterparty by id by calling the first service over HTTP.
/// (Class name keeps the original "Conunterparty" spelling because the query and
/// its call sites reference it.)
/// </summary>
[TrackedType]
public class GetConunterpartyQueryHandler : IRequestHandler<GetConunterpartyQuery, ResponseModel<CounterpartyViewDto>>, IDisposable
{
    private readonly ILogger _logger;
    // NOTE(review): _context and _mapper are injected but not used by this handler.
    private readonly DsacContext _context;
    private readonly IMapper _mapper;
    private readonly RestClient _restClient;

    // Base URL of the first service, injected from the "FirstService" configuration block.
    [Constant(BlockName = "FirstService")]
    private static string BaseUrl;

    // Relative path of the counterparty endpoint on the first service.
    [Constant(BlockName = "FirstService")]
    private static string GetCounterpartyPostfix;

    public GetConunterpartyQueryHandler(ILogger<GetConunterpartyQueryHandler> logger, DsacContext context, IMapper mapper)
    {
        _logger = logger;
        _context = context;
        _mapper = mapper;
        _restClient = new RestClient(BaseUrl);
    }

    /// <summary>
    /// Deterministic cleanup of the RestClient. This replaces the previous finalizer:
    /// a finalizer must not dispose other managed objects (they may already have been
    /// finalized by the GC). The DI container disposes transient IDisposable handlers
    /// when their scope ends.
    /// </summary>
    public void Dispose()
    {
        _restClient.Dispose();
        GC.SuppressFinalize(this);
    }

    /// <summary>
    /// GETs "{GetCounterpartyPostfix}/{id}" from the first service; propagates the
    /// remote status/error on failure, or reports NotFound on transport exceptions.
    /// </summary>
    public async Task<ResponseModel<CounterpartyViewDto>> Handle(GetConunterpartyQuery request, CancellationToken cancellationToken)
    {
        try
        {
            var rq = new RestRequest($"{GetCounterpartyPostfix}/{request.Id.ToString()}");
            var res = await _restClient.ExecuteAsync<CounterpartyViewDto>(rq, cancellationToken:cancellationToken);
            if (res.IsSuccessful)
                return new()
                {
                    Value = res.Data
                };
            return new()
            {
                StatusCode = res.StatusCode,
                ErrorText = res.ErrorMessage,
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error on getting record from first service");
            return new()
            {
                StatusCode = System.Net.HttpStatusCode.NotFound,
                ErrorText = "Cannot get cpty from first service",
            };
        }
    }
}
}

View File

@ -0,0 +1,64 @@
// <auto-generated />
using System;
using DSaC_second.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DSaC_second.Migrations
{
// Auto-generated migration designer snapshot: describes the model state as of the
// "initial" migration. Regenerate with "dotnet ef migrations" instead of hand-editing.
[DbContext(typeof(DsacContext))]
[Migration("20240914152154_initial")]
partial class initial
{
    /// <inheritdoc />
    protected override void BuildTargetModel(ModelBuilder modelBuilder)
    {
#pragma warning disable 612, 618
        modelBuilder
            .HasAnnotation("ProductVersion", "8.0.8")
            .HasAnnotation("Relational:MaxIdentifierLength", 63);

        NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);

        modelBuilder.Entity("DSaC_second.Database.Models.Contract", b =>
        {
            b.Property<Guid>("Id")
                .ValueGeneratedOnAdd()
                .HasColumnType("uuid")
                .HasColumnName("id");

            b.Property<string>("ContractNumber")
                .IsRequired()
                .HasColumnType("text")
                .HasColumnName("contract_number");

            b.Property<Guid>("CounterpartyId")
                .HasColumnType("uuid")
                .HasColumnName("counterparty_id");

            // NOTE(review): "CreatinTime" looks like a typo of CreationTime in the model
            // class; the mapped column name "creation_time" is spelled correctly.
            b.Property<DateTime>("CreatinTime")
                .HasColumnType("timestamp with time zone")
                .HasColumnName("creation_time");

            b.Property<bool>("IsClosed")
                .HasColumnType("boolean")
                .HasColumnName("is_closed");

            b.Property<string>("ManagerName")
                .IsRequired()
                .HasColumnType("text")
                .HasColumnName("manager_name");

            b.HasKey("Id");

            b.ToTable("contract");
        });
#pragma warning restore 612, 618
    }
}
}

View File

@ -0,0 +1,38 @@
using System;
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DSaC_second.Migrations
{
/// <inheritdoc />
public partial class initial : Migration
{
    /// <inheritdoc />
    protected override void Up(MigrationBuilder migrationBuilder)
    {
        // Creates the "contract" table: uuid primary key plus contract attributes.
        // Note that counterparty_id has no foreign key — counterparties live in a
        // separate service/database.
        migrationBuilder.CreateTable(
            name: "contract",
            columns: table => new
            {
                id = table.Column<Guid>(type: "uuid", nullable: false),
                contract_number = table.Column<string>(type: "text", nullable: false),
                manager_name = table.Column<string>(type: "text", nullable: false),
                counterparty_id = table.Column<Guid>(type: "uuid", nullable: false),
                creation_time = table.Column<DateTime>(type: "timestamp with time zone", nullable: false),
                is_closed = table.Column<bool>(type: "boolean", nullable: false)
            },
            constraints: table =>
            {
                table.PrimaryKey("PK_contract", x => x.id);
            });
    }

    /// <inheritdoc />
    protected override void Down(MigrationBuilder migrationBuilder)
    {
        // Reverts the migration by dropping the table (all contract data is lost).
        migrationBuilder.DropTable(
            name: "contract");
    }
}
}

View File

@ -0,0 +1,61 @@
// <auto-generated />
using System;
using DSaC_second.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DSaC_second.Migrations
{
// Auto-generated snapshot of the current EF Core model; the tooling diffs against it
// when scaffolding new migrations. Regenerate instead of editing by hand.
[DbContext(typeof(DsacContext))]
partial class DsacContextModelSnapshot : ModelSnapshot
{
    protected override void BuildModel(ModelBuilder modelBuilder)
    {
#pragma warning disable 612, 618
        modelBuilder
            .HasAnnotation("ProductVersion", "8.0.8")
            .HasAnnotation("Relational:MaxIdentifierLength", 63);

        NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);

        modelBuilder.Entity("DSaC_second.Database.Models.Contract", b =>
        {
            b.Property<Guid>("Id")
                .ValueGeneratedOnAdd()
                .HasColumnType("uuid")
                .HasColumnName("id");

            b.Property<string>("ContractNumber")
                .IsRequired()
                .HasColumnType("text")
                .HasColumnName("contract_number");

            b.Property<Guid>("CounterpartyId")
                .HasColumnType("uuid")
                .HasColumnName("counterparty_id");

            // NOTE(review): "CreatinTime" looks like a typo of CreationTime in the model
            // class; the mapped column name "creation_time" is spelled correctly.
            b.Property<DateTime>("CreatinTime")
                .HasColumnType("timestamp with time zone")
                .HasColumnName("creation_time");

            b.Property<bool>("IsClosed")
                .HasColumnType("boolean")
                .HasColumnName("is_closed");

            b.Property<string>("ManagerName")
                .IsRequired()
                .HasColumnType("text")
                .HasColumnName("manager_name");

            b.HasKey("Id");

            b.ToTable("contract");
        });
#pragma warning restore 612, 618
    }
}
}

View File

@ -0,0 +1,17 @@
using AutoMapper;
using System.ComponentModel.DataAnnotations;
namespace DSaC_second.Models.DTOs
{
/// <summary>
/// Writable contract fields shared by create requests and the view/full DTOs.
/// </summary>
public class ContractBaseDto
{
    // Business identifier of the contract.
    [Required(ErrorMessage = "Contract number must be present")]
    public string ContractNumber { get; set; } = null!;

    // Full name of the responsible manager.
    [Required(ErrorMessage = "Manager name must be present")]
    public string ManagerName { get; set; } = null!;

    // Id of the counterparty owned by the first service (no local FK).
    [Required(ErrorMessage = "Counterparty id must be present")]
    public Guid CounterpartyId { get; set; }
}
}

View File

@ -0,0 +1,11 @@
using AutoMapper;
using DSaC_second.Database.Models;
namespace DSaC_second.Models.DTOs
{
/// <summary>
/// Contract view enriched with the counterparty resolved from the first service.
/// </summary>
[AutoMap(typeof(Contract))]
public class ContractFullDto: ContractViewDto
{
    // Filled in by the handler after a successful remote counterparty lookup.
    public CounterpartyViewDto Counterparty { get; set; } = new();
}
}

View File

@ -0,0 +1,15 @@
using AutoMapper;
using DSaC_second.Database.Models;
namespace DSaC_second.Models.DTOs
{
/// <summary>
/// Read model of a contract (base fields plus server-assigned state).
/// </summary>
[AutoMap(typeof(Contract))]
public class ContractViewDto: ContractBaseDto
{
    // Surrogate key of the contract.
    public Guid Id { get; set; }

    // NOTE(review): "CreatinTime" appears to be a typo of "CreationTime"; renaming it
    // would break the AutoMapper mapping and the serialized contract, so it is kept.
    public DateTime CreatinTime { get; set; }

    // True when the contract has been closed.
    public bool IsClosed { get; set; }
}
}

View File

@ -0,0 +1,12 @@
using System.ComponentModel.DataAnnotations;
namespace DSaC_second.Models.DTOs
{
/// <summary>
/// Counterparty as exposed by the first service; deserialized from its HTTP response.
/// </summary>
public class CounterpartyViewDto
{
    // Identifier of the counterparty (owned by the first service).
    public Guid Id { get; set; }

    [Required(ErrorMessage = "Cpty name must be present")]
    public string Name { get; set; } = null!;
}
}

View File

@ -0,0 +1,10 @@
using DSaC_second.Models.DTOs;
using MediatR;
namespace DSaC_second.Models.Internal.Queries
{
/// <summary>Query for a single contract (with counterparty) by its id.</summary>
public class GetContractQuery : IRequest<ResponseModel<ContractFullDto>>
{
    // Id of the contract to fetch.
    public Guid Id { get; set; }
}
}

View File

@ -0,0 +1,14 @@
using DSaC_second.Models.DTOs;
using MediatR;
namespace DSaC_second.Models.Internal.Queries
{
/// <summary>Paged query for contracts, optionally filtered by a set of ids.</summary>
public class GetContractsQuery : IRequest<ResponseModel<List<ContractViewDto>>>
{
    // When null, no id filtering is applied.
    public List<Guid>? Ids { get; set; }

    // Zero-based page index; the handler skips Page * PageSize rows.
    public int Page { get; set; }

    // Number of rows per page.
    public int PageSize { get; set; }
}
}

View File

@ -0,0 +1,10 @@
using DSaC_second.Models.DTOs;
using MediatR;
namespace DSaC_second.Models.Internal.Queries
{
/// <summary>
/// Query that resolves a counterparty from the first service by id.
/// (Name keeps the original "Conunterparty" spelling; renaming would touch every
/// handler referencing it.)
/// </summary>
public class GetConunterpartyQuery: IRequest<ResponseModel<CounterpartyViewDto>>
{
    // Id of the counterparty to resolve.
    public Guid Id { get; set; }
}
}

View File

@ -0,0 +1,30 @@
using System.Net;
namespace DSaC_second.Models.Internal
{
/// <summary>
/// Result envelope used by all handlers: carries an optional error text and an
/// HTTP-like status code (default 200).
/// </summary>
public class ResponseModel
{
    // Error description; null/empty means success (see IsError).
    public string? ErrorText;

    // True when ErrorText is set.
    public bool IsError => !string.IsNullOrEmpty(ErrorText);

    // Backing store shared by ErrorCode and StatusCode; defaults to 200 (OK).
    private int? _errorCode = 200;

    // Numeric view of the response status.
    public int? ErrorCode
    {
        get => _errorCode;
        set => _errorCode = value;
    }

    // Typed HttpStatusCode view over the same backing value as ErrorCode.
    public HttpStatusCode? StatusCode
    {
        get => (HttpStatusCode?)_errorCode;
        set => _errorCode = (int?)value;
    }
}
/// <summary>Response envelope that additionally carries a payload of type <typeparamref name="T"/>.</summary>
public class ResponseModel<T> : ResponseModel
{
    // The payload, if any; handlers leave it unset on error.
    public T? Value { get; set; }
}
}

View File

@ -0,0 +1,10 @@
using DSaC_second.Models.DTOs;
using MediatR;
namespace DSaC_second.Models.Internal.Сommands
{
/// <summary>Command to create a new contract from the supplied base fields.</summary>
public class CreateContractCommand : IRequest<ResponseModel<ContractFullDto>>
{
    // New contract data (no id — the server assigns one).
    public ContractBaseDto Model { get; set; } = null!;
}
}

View File

@ -0,0 +1,9 @@
using MediatR;
namespace DSaC_second.Models.Internal.Сommands
{
/// <summary>Command to delete the contract with the given id.</summary>
public class DeleteContractCommand : IRequest<ResponseModel>
{
    // Id of the contract to delete.
    public Guid Id { get; set; }
}
}

View File

@ -0,0 +1,14 @@
using DSaC_second.Models.DTOs;
using MediatR;
using System.ComponentModel.DataAnnotations;
namespace DSaC_second.Models.Internal.Сommands
{
/// <summary>Command to update an existing contract identified by <see cref="Id"/>.</summary>
public class UpdateContractCommand : IRequest<ResponseModel<ContractFullDto>>
{
    // Identifier of the contract to update (typically taken from the route).
    public Guid Id { get; set; }

    // New contract state. (Validation message typo "msust" fixed to "must".)
    [Required(ErrorMessage = "Update model must be present")]
    public ContractViewDto Model { get; set; } = null!;
}
}

View File

@ -0,0 +1,130 @@
using DSaC_second.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.OpenApi.Models;
using NLog.Config;
using NLog.Extensions.Logging;
using NLog.Targets;
using PIHelperSh.Configuration;
using PIHelperSh.Configuration.Attributes;
using System;
using System.Reflection;
using LogLevel = NLog.LogLevel;
[TrackedType]
public class Program
{
    // The built application; stored so MigrateDatabase can resolve services from it.
    private static WebApplication? app;

    // Connection string injected from the "Database" configuration block (PIHelperSh constants).
    [Constant(BlockName = "Database")]
    private static string ConnectionString;

    // Public path prefix under which the gateway exposes this service ("GatewaySettings" block).
    [Constant(BlockName = "GatewaySettings")]
    private static string AppPrefix;

    public static void Main(string[] args)
    {
        var builder = WebApplication.CreateBuilder(args);
        ConfigureLogger(builder);
        ConfigureServices(builder);
        ConfigureDatabase(builder);
        app = builder.Build();
        // Start applying migrations; the task is awaited (t.Wait()) before app.Run().
        var t = MigrateDatabase();
        app.UseSwagger(c =>
        {
            if (!string.IsNullOrEmpty(AppPrefix))
            {
                //c.RouteTemplate = AppPrefix + "/swagger/{documentName}/swagger.json";
                // Advertise the gateway-prefixed server URL so requests issued from the
                // Swagger UI are routed through the reverse proxy.
                c.PreSerializeFilters.Add((swaggerDoc, httpReq) =>
                {
                    swaggerDoc.Servers = new List<OpenApiServer> { new OpenApiServer { Url = $"{httpReq.Scheme}://{httpReq.Host.Value}/{AppPrefix}" } };
                });
            }
        });
        app.UseSwaggerUI(c =>
        {
            //if (!string.IsNullOrEmpty(AppPrefix))
            //{
            //    c.SwaggerEndpoint($"/{AppPrefix}/swagger/v1/swagger.json", $"APP API");
            //    c.RoutePrefix = $"{AppPrefix}/swagger";
            //}
        });
        app.UseHttpsRedirection();
        app.UseAuthorization();
        app.MapControllers();
        t.Wait();
        app.Run();
    }

    // Configures NLog: AspNetCore Trace noise is discarded, framework Info..Warn goes
    // to the file target, Info..Error to the console, Debug..Error (everything else)
    // to the daily log file.
    private static void ConfigureLogger(WebApplicationBuilder builder)
    {
        var nLogConfig = new LoggingConfiguration();
        var logConsole = new ConsoleTarget();
        var blackhole = new NullTarget();
        var logFile = new FileTarget()
        {
            FileName = "${basedir}/logs/${shortdate}_logs.log"
        };
        nLogConfig.AddRule(LogLevel.Trace, LogLevel.Trace, blackhole, "Microsoft.AspNetCore.*", true);
        nLogConfig.AddRule(LogLevel.Info, LogLevel.Warn, logFile, "Microsoft.EntityFrameworkCore.*", true);
        nLogConfig.AddRule(LogLevel.Info, LogLevel.Warn, logFile, "Microsoft.AspNetCore.*", true);
        nLogConfig.AddRule(LogLevel.Info, LogLevel.Warn, logFile, "System.Net.Http.HttpClient.Refit.*", true);
        nLogConfig.AddRule(LogLevel.Info, LogLevel.Error, logConsole);
        nLogConfig.AddRule(LogLevel.Debug, LogLevel.Error, logFile);
        builder.Logging.ClearProviders();
        builder.Services.AddLogging(m => m.AddNLog(nLogConfig));
    }

    // Registers configuration constants, AutoMapper, MediatR, controllers and Swagger.
    private static void ConfigureServices(WebApplicationBuilder builder)
    {
        builder.Services.AddConfigurations(builder.Configuration);
        builder.Configuration.AddConstants();
        builder.Services.AddAutoMapper(AppDomain.CurrentDomain.GetAssemblies());
        builder.Services.AddMediatR(cfg => cfg.RegisterServicesFromAssemblyContaining<Program>());
        builder.Services.AddControllers().AddNewtonsoftJson();
        builder.Services.AddEndpointsApiExplorer();
        builder.Services.AddSwaggerGen(c =>
        {
            c.SwaggerDoc("v1", new OpenApiInfo
            {
                Title = "Distributed computing and applications",
                Version = "v1",
                Description = ""
            });
            //c.EnableAnnotations();
        });
    }

    // Registers the Npgsql-backed DbContext using the injected connection string.
    private static void ConfigureDatabase(WebApplicationBuilder builder)
    {
        builder.Services.AddDbContext<DsacContext>(options =>
        {
            options.UseNpgsql(ConnectionString);
        });
    }

    // Applies pending EF Core migrations on startup (no-op when the context
    // cannot be resolved from the container).
    private static async Task MigrateDatabase()
    {
        var context = app?.Services.CreateScope().ServiceProvider.GetService<DsacContext>();
        if(context != null)
            await context.Database.MigrateAsync();
    }
}

View File

@ -0,0 +1,53 @@
{
"profiles": {
"http": {
"commandName": "Project",
"launchBrowser": true,
"launchUrl": "swagger",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development",
"GATEWAY_SETTINGS_APP_PREFIX": "first"
},
"dotnetRunMessages": true,
"applicationUrl": "http://localhost:5246"
},
"https": {
"commandName": "Project",
"launchBrowser": true,
"launchUrl": "swagger",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
},
"dotnetRunMessages": true,
"applicationUrl": "https://localhost:7239;http://localhost:5246"
},
"IIS Express": {
"commandName": "IISExpress",
"launchBrowser": true,
"launchUrl": "swagger",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
},
"Container (Dockerfile)": {
"commandName": "Docker",
"launchBrowser": true,
"launchUrl": "{Scheme}://{ServiceHost}:{ServicePort}/swagger",
"environmentVariables": {
"ASPNETCORE_HTTPS_PORTS": "8081",
"ASPNETCORE_HTTP_PORTS": "8080"
},
"publishAllPorts": true,
"useSSL": true
}
},
"$schema": "http://json.schemastore.org/launchsettings.json",
"iisSettings": {
"windowsAuthentication": false,
"anonymousAuthentication": true,
"iisExpress": {
"applicationUrl": "http://localhost:56866",
"sslPort": 44308
}
}
}

View File

@ -0,0 +1,18 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"Database": {
"ConnectionString": "Host=db.dev-moio.online;Port=31153;Database=dsac_maxim;Username=postgres;Password=postgres_password"
},
"FirstService": {
"BaseUrl": "http://localhost:5062/api",
"GetCounterpartyPostfix": "/Counterparties"
},
"GatewaySettings": {
"AppPrefix": ""
}
}

View File

@ -0,0 +1,9 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"AllowedHosts": "*"
}

View File

@ -0,0 +1,18 @@
# Лабораторная работа 3
Данная работа посвящена теме синхронного обмена сообщениями между сервисами, а так же шлюзам, для взаимодействия с внешним миром.
## Описание
В качестве предметной области использовался пример 18-го варианта. Имеются 2 сущности: контрагент (только имя) и договор (номер, ФИО менеджера, дата создания и статус).
Для реализации была выбрана платформа ASP.net. Информация хранится в СУБД Postgres, которая так же поднимается в контейнере(вместе с остальными сервисами)
Система позволяет проводить с сущностями базовые CRUD операции. В случае договора, имеется возможность получения его в связке с информацией по контрагенту(не просто Id, но и имя). В этот момент сервисы и взаимодействуют друг с другом. Для этого используется шлюз(напрямую сервисы друг о друге могут и не знать, общение как с внешним миром так и между собой только через nginx шлюз)
В качестве интерфейса имеется Swagger, который можно использовать для отправки запросов. Он и будет показан в видеодемонстрации.
## Запуск
Для запуска лабораторной работы необходимо иметь запущенный движок Docker на устройстве.
Необходимо перейти в папку, где располагается данный файл. Далее открыть терминал и ввести команду:
```
docker compose up -d --build
```
Важно, чтобы в этот момент на компьютере был свободен порт 80.
В результате, после сборки вся система запустится и Swagger-ы будут доступны по путям http://localhost/first/swagger и http://localhost/second/swagger
## Видеодемонстрация
Был записан видеоролик, демонстрирующий процесс запуска и работы системы, а также все её основные функции. Он расположен по [адресу](https://drive.google.com/file/d/17OwZ6kEJ-AaW0uMTIfNtolyp-oyy9OfT/view?usp=sharing)

View File

@ -0,0 +1,37 @@
services:
  postgres:
    image: postgres
    ports:
      - 5432:5432
    environment:
      POSTGRES_PASSWORD: "password"
  nginx:
    image: nginx
    depends_on:
      - second
      - first
    volumes:
      - ./nginx/nginx.conf:/etc/nginx/conf.d/default.conf
    ports:
      - 80:80
  first:
    # Fixed: forward slashes so the build context resolves on Linux/macOS hosts as
    # well as Windows (".\DSaC" is only valid on Windows).
    build: ./DSaC
    depends_on:
      - postgres
    environment:
      DATABASE_CONNECTION_STRING: "Host=postgres;Database=dsac_maxim_1;Username=postgres;Password=password"
      GATEWAY_SETTINGS_APP_PREFIX: "first" # needed so that requests issued from the Swagger UI work correctly
  second:
    build: ./DSaC_second
    depends_on:
      - first
    environment:
      DATABASE_CONNECTION_STRING: "Host=postgres;Database=dsac_maxim_2;Username=postgres;Password=password"
      FIRST_SERVICE_BASE_URL: "http://nginx/first/api"
      FIRST_SERVICE_GET_COUNTERPARTY_POSTFIX: "/Counterparties"
      GATEWAY_SETTINGS_APP_PREFIX: "second"

View File

@ -0,0 +1,22 @@
server {
    listen 80;
    listen [::]:80;
    server_name localhost;

    # Route /first/... to the first service, stripping the prefix (trailing slash
    # on proxy_pass rewrites the URI).
    location /first/ {
        proxy_pass http://first:8080/;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-Proto $scheme;
        # Fixed: the forwarded prefix must match this public location (/first);
        # the previous "/test" value was a leftover from a copied template.
        proxy_set_header X-Forwarded-Prefix /first;
    }

    # Route /second/... to the second service, stripping the prefix.
    location /second/ {
        proxy_pass http://second:8080/;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-Proto $scheme;
        # Fixed: was "/admin", which matches neither location in this file.
        proxy_set_header X-Forwarded-Prefix /second;
    }
}

View File

@ -0,0 +1,64 @@
## Лабораторная работа №1: Знакомство с docker и docker-compose
---
**Цель:** изучение современных технологий контейнеризации.
**Задачи:**
1. Установить средство контейнеризации docker.
2. Изучить применение и принципы docker.
3. Изучить утилиту docker-compose и структуру файла docker-compose.yml.
4. Развернуть не менее 3х различных сервисов при помощи docker-compose.
5. Оформить отчёт в формате Markdown и создать Pull Request в git-репозитории.
---
**Сервисы:**
* **Gitea:** Легковесная и самодостаточная система управления версиями кода с веб-интерфейсом.
* **Postgres:** Мощная система управления реляционными базами данных с открытым исходным кодом.
* **Redis:** Высокопроизводительное хранилище данных типа "ключ-значение", используемое для кэширования, сессий и других задач.
---
**Инструкция по запуску:**
1. **Установка Docker:** Необходимо убедиться, что Docker установлен в системе; в ином случае установить его.
2. **Навигация:** Перейти в директорию с файлом `docker-compose.yml`.
3. **Запуск:** Необходимо выполнить следующую команду в терминале:
```bash
docker-compose up --build --remove-orphans
```
Данная команда создаст и запустит все необходимые контейнеры, описанные в `docker-compose.yml`.
**Проверка:**
Для проверки успешного запуска нужно выполнить команду:
```bash
docker ps -a
```
В случае успеха мы можем увидеть список запущенных контейнеров (gitea, postgres и redis):
```bash
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
6ea2e28126a8 gitea/gitea:1.22.2 "/usr/bin/entrypoint…" About a minute ago Up 9 seconds 0.0.0.0:3000->3000/tcp, 0.0.0.0:222->22/tcp minhasapov_ruslan_lab_1-gitea-1
2490f332bc90 postgres:14 "docker-entrypoint.s…" About a minute ago Up 9 seconds 0.0.0.0:5432->5432/tcp minhasapov_ruslan_lab_1-postgres-1
c5580d8fe1d7 redis:latest "docker-entrypoint.s…" About a minute ago Up 9 seconds 0.0.0.0:6379->6379/tcp minhasapov_ruslan_lab_1-redis-1
```
**Примечание:**
*Файл `docker-compose.yml` содержит подробную информацию о конфигурации каждого сервиса, включая порты, тома и переменные окружения.*
**Важно:**
После первого запуска **Gitea**, необходимо зайти на сервис *(по адресу `http://localhost:3000`)*, выполнить первоначальную настройку и создать суперпользователя.
---
**Видеодемонстрация работы:**
Видео доступно по <u>*[ссылке](https://disk.yandex.ru/i/6Efipp3Gxs9-Jw)*</u>
---

View File

@ -0,0 +1,45 @@
services: # начало объявления сервисов, которые будут запущены
postgres: # указываем название сервиса
image: postgres:14 # указываем название и/или версии образа Docker для Postgres
restart: always # автоматический перезапуск контейнера, если он упал
ports:
- "5432:5432" # пробрасываем порт 5432 из контейнера на хост-машину
# (слева порт хостовой машины, справа порт контейнера)
volumes: # указываются монтированные тома, папки, файлы
- postgres_data:/var/lib/postgresql/data # монтирует том postgres_data к /var/lib/postgresql/data,
# чтобы данные Postgres сохранялись после перезапуска контейнера
environment: # задаем переменные окружения для Postgres, такие как:
POSTGRES_USER: postgres_user # имя пользователя
POSTGRES_PASSWORD: postgres_password # пароль пользователя
POSTGRES_DB: postgres_db # имя БД
gitea:
image: gitea/gitea:1.22.2
restart: always
ports:
- "3000:3000" # пробрасывает порты 3000 (HTTP, веб-интерфейс)
- "222:22" # и 22 (SSH) из контейнера на хост-машину
volumes:
- gitea_data:/data
environment:
GITEA__database__DB_TYPE: postgres
GITEA__database__HOST: postgres:5432 # хост бд, название сервиса выступает в роли доменного имени и внутри сети разрешается в ip нужного контейнера
GITEA__database__NAME: postgres_db
GITEA__database__USER: postgres_user
GITEA__database__PASSWD: postgres_password
depends_on: # указывается зависимость запуска контейнера от другого сервиса
- postgres # Gitea зависит от Postgres, поэтому Postgres будет запущен первым
- redis
redis:
image: redis:latest
restart: always
ports:
- "6379:6379"
volumes:
- redis_data:/data
volumes: # определяем тома, которые будут использоваться сервисами
postgres_data:
gitea_data:
redis_data:

View File

@ -0,0 +1,29 @@
services:
  rabbitmq:
    image: rabbitmq:3.12.8-management # "-management" tag includes the web admin UI
    environment:
      RABBITMQ_DEFAULT_USER: admin
      RABBITMQ_DEFAULT_PASS: admin
    ports:
      - 15672:15672 # management UI only; the AMQP port (5672) is not published
    volumes:
      - rabbitmq-data:/var/lib/rabbitmq # persist broker state across restarts
  mediawiki:
    image: mediawiki
    ports:
      - 8081:80
    volumes:
      # NOTE(review): MediaWiki keeps its files under /var/www/html — confirm this
      # mount path actually persists anything useful.
      - mediawiki-data:/var/files/mediawiki
  wordpress:
    image: wordpress
    ports:
      - 8082:80
    volumes:
      # NOTE(review): WordPress keeps its files under /var/www/html — same concern
      # as for the mediawiki volume above.
      - wordpress-data:/var/files/wordpress
# Named volumes used by the services above.
volumes:
  rabbitmq-data:
  mediawiki-data:
  wordpress-data:

View File

@ -0,0 +1,26 @@
# Docker Compose: RabbitMQ, Mediawiki, Wordpress
## Описание проекта
Этот проект разворачивает три сервиса с помощью Docker Compose:
1. **RabbitMQ** — брокер сообщений.
2. **Mediawiki** — движок вики.
3. **Wordpress** — популярная система управления контентом.
## Команды для запуска
Из директории с файлом docker-compose.yml запустить сервисы docker-compose up --build
## Сервисы и порты
1. **RabbitMQ:**
Доступ по адресу http://localhost:15672/ (логин: admin, пароль: admin).
2. **Mediawiki:**
Доступ по адресу http://localhost:8081/.
3. **Wordpress:**
Доступ по адресу http://localhost:8082/.
## Видео https://drive.google.com/file/d/1NvsMFoMU2ecsQ17EouqB_ZaLBskonHv0/view?usp=sharing

View File

@ -0,0 +1,50 @@
# Отчет по лабораторной работе 1
## Описание
Docker Compose используется для одновременного управления несколькими контейнерами, входящими в состав приложения. Он позволяет хранить все настройки проекта в одном файле, что даёт возможность беспроблемно использовать те же настройки на другом сервере. docker-compose.yml позволяет настраивать и документировать все зависимости служб приложения. В нём указываются образ, порты (которые можно пробросить), переменные окружения, тома и зависимости (если нужно).
## Описание docker-compose.yml
Были выбраны следующие сервисы:
1. Postgres
2. Redmine
3. Gitea
В файле у каждого сервиса пробрасывается порт. Каждый сервис содержит том, который используется для хранения данных. Запуск Redmine и Gitea зависит от Postgres, чтобы они имели возможность накатить миграции и заполнить таблицы после создания базы. У каждого сервиса заполнен параметр environment, где указываются данные для БД.
## Инструкция по запуску
1. Запустить приложение Docker Hub
2. Открыть консоль с помощью команды cd перейти к папке, где находится файл docker-compose.yml
3. Ввести команду для поднятия контейнеров
```
docker compose up -d
```
Дожидаемся запуска сервисов:
```
[+] Running 3/3
✔ Container yakovleva_julia_lab_1-postgres-1 Started 0.5s
✔ Container yakovleva_julia_lab_1-gitea-1 Started 1.2s
✔ Container yakovleva_julia_lab_1-redmine-1 Started 1.1s
```
4. Убедиться, что сервисы успешно запущены можно по команде:
```
docker compose ps
```
Отобразится таблица с запущенными сервисами:
```
NAME IMAGE COMMAND SERVICE CREATED STATUS PORTS
yakovleva_julia_lab_1-gitea-1 gitea/gitea:1.22.2 "/usr/bin/entrypoint…" gitea 19 minutes ago Up 6 seconds 22/tcp, 0.0.0.0:3002->3000/tcp
yakovleva_julia_lab_1-postgres-1 postgres:16.4 "docker-entrypoint.s…" postgres 19 minutes ago Up 6 seconds 0.0.0.0:5433->5432/tcp
yakovleva_julia_lab_1-redmine-1 redmine:latest "/docker-entrypoint.…" redmine 19 minutes ago Up 6 seconds 0.0.0.0:3003->3000/tcp
```
Также можно посмотреть логи каждого сервиса при вводе команды:
```
docker compose logs -ft (название сервиса)
```
-ft необходим для того, чтобы читать логи сервиса в реальном времени
## Видео
[Видео](https://disk.yandex.ru/i/Ds2f4xO8vQ8XPA) работоспособности.

View File

@ -0,0 +1,45 @@
services:
  postgres:
    image: postgres:16.4
    ports:
      - "5433:5432" # host 5433 -> container 5432
    environment:
      POSTGRES_USER: redmine
      POSTGRES_PASSWORD: redminepass
      POSTGRES_DB: redmine
    volumes:
      - ./postgres_data:/var/lib/postgresql/data # volume persisting PostgreSQL data
    restart: always
  redmine:
    image: redmine:latest
    ports:
      - "3003:3000"
    volumes:
      - ./redmine_data:/usr/src/redmine/files # volume persisting Redmine files
    environment:
      REDMINE_DB_POSTGRES: postgres # database service name (resolved on the compose network)
      REDMINE_DB_USERNAME: redmine
      REDMINE_DB_PASSWORD: redminepass
      REDMINE_DB_DATABASE: redmine
    depends_on:
      - postgres # start ordering only; does not wait for PostgreSQL readiness
    restart: always
  gitea:
    image: gitea/gitea:1.22.2
    ports:
      - "3002:3000" # port mapping for accessing Gitea
    volumes:
      - ./gitea_data:/data # volume persisting Gitea data
    environment:
      - USER_UID=1000
      - USER_GID=1000
      - GITEA__database__DB_TYPE=postgres
      - GITEA__database__HOST=postgres:5432
      # NOTE(review): these credentials/database differ from what the postgres service
      # creates above (user "redmine", db "redmine") — verify the "gitea" database and
      # user actually exist, otherwise Gitea cannot connect.
      - GITEA__database__NAME=gitea
      - GITEA__database__USER=gitea
      - GITEA__database__PASSWD=gitea_password
    depends_on:
      - postgres
    restart: always

View File

@ -0,0 +1,30 @@
**/.classpath
**/.dockerignore
**/.env
**/.git
**/.gitignore
**/.project
**/.settings
**/.toolstarget
**/.vs
**/.vscode
**/*.*proj.user
**/*.dbmdl
**/*.jfm
**/azds.yaml
**/bin
**/charts
**/docker-compose*
**/Dockerfile*
**/node_modules
**/npm-debug.log
**/obj
**/secrets.dev.yaml
**/values.dev.yaml
LICENSE
README.md
!**/.gitignore
!.git/HEAD
!.git/config
!.git/packed-refs
!.git/refs/heads/**

484
yakovleva_yulia_lab_2/.gitignore vendored Normal file
View File

@ -0,0 +1,484 @@
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from `dotnet new gitignore`
# dotenv files
.env
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
[Ll]ogs/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET
project.lock.json
project.fragment.lock.json
artifacts/
# Tye
.tye/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.tlog
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio 6 auto-generated project file (contains which files were open etc.)
*.vbp
# Visual Studio 6 workspace and project file (working project files containing files to include in project)
*.dsw
*.dsp
# Visual Studio 6 technical files
*.ncb
*.aps
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# Visual Studio History (VSHistory) files
.vshistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd
# VS Code files for those working on multiple tools
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
*.code-workspace
# Local History for Visual Studio Code
.history/
# Windows Installer files from build outputs
*.cab
*.msi
*.msix
*.msm
*.msp
# JetBrains Rider
*.sln.iml
.idea
##
## Visual studio for Mac
##
# globs
Makefile.in
*.userprefs
*.usertasks
config.make
config.status
aclocal.m4
install-sh
autom4te.cache/
*.tar.gz
tarballs/
test-results/
# Mac bundle stuff
*.dmg
*.app
# content below from: https://github.com/github/gitignore/blob/master/Global/macOS.gitignore
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
# content below from: https://github.com/github/gitignore/blob/master/Global/Windows.gitignore
# Windows thumbnail cache files
Thumbs.db
ehthumbs.db
ehthumbs_vista.db
# Dump file
*.stackdump
# Folder config file
[Dd]esktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp
# Windows shortcuts
*.lnk
# Vim temporary swap files
*.swp

View File

@ -0,0 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.VisualStudio.Azure.Containers.Tools.Targets" Version="1.19.6" />
</ItemGroup>
<!-- <ItemGroup>
<ProjectReference Include="..\FileGeneration\FileGeneration.csproj" />
</ItemGroup> -->
</Project>

View File

@ -0,0 +1,20 @@
# Этап base: лёгкий образ только со средой выполнения .NET 8 — в нём будет работать готовое приложение.
FROM mcr.microsoft.com/dotnet/runtime:8.0 AS base
WORKDIR /app

# Этап build: полный SDK для восстановления зависимостей и компиляции.
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
# Сначала копируется только csproj и выполняется restore —
# этот слой кэшируется, пока не меняются зависимости проекта.
COPY ["App1.csproj", "App1/"]
RUN dotnet restore "App1/App1.csproj"
WORKDIR "/src/App1"
# Затем копируются исходники и собирается проект.
COPY . .
RUN dotnet build "App1.csproj" -c $BUILD_CONFIGURATION -o /app/build

# Этап publish: подготовка артефактов к запуску (restore уже выполнен выше).
FROM build AS publish
ARG BUILD_CONFIGURATION=Release
RUN dotnet publish "App1.csproj" -c $BUILD_CONFIGURATION -o /app/publish --self-contained false --no-restore

# Финальный этап: в runtime-образ попадают только опубликованные файлы, без SDK и исходников.
FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
ENTRYPOINT ["dotnet", "App1.dll"]

View File

@ -0,0 +1,31 @@
using System;
using System.IO;
/// <summary>
/// App1: для каждого файла из входного каталога подсчитывает число строк
/// и записывает результаты построчно в файл data.txt выходного каталога.
/// </summary>
class Program
{
    static void Main()
    {
        // Каталоги задаются переменными окружения; значения по умолчанию — для локального запуска.
        string sourceDir = Environment.GetEnvironmentVariable("DATA_PATH") ?? ".\\data";
        string targetDir = Environment.GetEnvironmentVariable("RESULT_PATH") ?? ".\\result";
        try
        {
            string targetFile = Path.Combine(targetDir, "data.txt");
            using (StreamWriter output = new StreamWriter(targetFile))
            {
                // Одна строка результата на каждый входной файл: количество его строк.
                foreach (string sourceFile in Directory.EnumerateFiles(sourceDir))
                {
                    output.WriteLine(File.ReadAllLines(sourceFile).Length);
                }
            }
            Console.WriteLine("Файл data.txt успешно создан");
        }
        catch (Exception ex)
        {
            // Любая ошибка (нет каталога, нет прав и т.п.) выводится в консоль, процесс завершается штатно.
            Console.WriteLine("Произошла ошибка: " + ex.Message);
        }
    }
}

View File

@ -0,0 +1,10 @@
{
"profiles": {
"App1": {
"commandName": "Project"
},
"Container (Dockerfile)": {
"commandName": "Docker"
}
}
}

View File

@ -0,0 +1,15 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.VisualStudio.Azure.Containers.Tools.Targets" Version="1.19.6" />
</ItemGroup>
</Project>

View File

@ -0,0 +1,20 @@
# Этап base: лёгкий образ только со средой выполнения .NET 8 — в нём будет работать готовое приложение.
FROM mcr.microsoft.com/dotnet/runtime:8.0 AS base
WORKDIR /app

# Этап build: полный SDK для восстановления зависимостей и компиляции.
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
# Сначала копируется только csproj и выполняется restore —
# этот слой кэшируется, пока не меняются зависимости проекта.
COPY ["App2.csproj", "App2/"]
RUN dotnet restore "App2/App2.csproj"
WORKDIR "/src/App2"
# Затем копируются исходники и собирается проект.
COPY . .
RUN dotnet build "App2.csproj" -c $BUILD_CONFIGURATION -o /app/build

# Этап publish: подготовка артефактов к запуску (restore уже выполнен выше).
FROM build AS publish
ARG BUILD_CONFIGURATION=Release
RUN dotnet publish "App2.csproj" -c $BUILD_CONFIGURATION -o /app/publish --self-contained false --no-restore

# Финальный этап: в runtime-образ попадают только опубликованные файлы, без SDK и исходников.
FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
ENTRYPOINT ["dotnet", "App2.dll"]

View File

@ -0,0 +1,35 @@
using System;
using System.IO;
using System.Linq;
/// <summary>
/// App2: читает числа (по одному на строку) из файла data.txt входного каталога,
/// находит наибольшее число, подсчитывает количество его вхождений
/// и записывает это количество в файл result.txt выходного каталога.
/// </summary>
class Program
{
    static void Main()
    {
        // Каталоги задаются переменными окружения; значения по умолчанию — для локального запуска.
        string inputPath = Environment.GetEnvironmentVariable("DATA_PATH") ?? ".\\data";
        string outputPath = Environment.GetEnvironmentVariable("RESULT_PATH") ?? ".\\result";
        try
        {
            string dataFilePath = Path.Combine(inputPath, "data.txt");
            // Чтение всех строк из файла. Пустые и пробельные строки пропускаются:
            // раньше они приводили к FormatException в int.Parse, и result.txt не создавался.
            int[] numbers = File.ReadAllLines(dataFilePath)
                .Where(line => !string.IsNullOrWhiteSpace(line))
                .Select(int.Parse)
                .ToArray();
            if (numbers.Length == 0)
            {
                // Явная диагностика вместо InvalidOperationException из Max() на пустой выборке.
                Console.WriteLine("Произошла ошибка: файл data.txt не содержит чисел");
                return;
            }
            // Поиск наибольшего числа
            int maxNumber = numbers.Max();
            // Подсчет количества таких чисел
            int maxCount = numbers.Count(n => n == maxNumber);
            File.WriteAllText(Path.Combine(outputPath, "result.txt"), maxCount.ToString());
            Console.WriteLine($"Количество наибольших чисел: {maxCount}");
        }
        catch (Exception ex)
        {
            // Любая ошибка (нет файла, нечисловые данные и т.п.) выводится в консоль, процесс завершается штатно.
            Console.WriteLine("Произошла ошибка: " + ex.Message);
        }
    }
}

View File

@ -0,0 +1,10 @@
{
"profiles": {
"App2": {
"commandName": "Project"
},
"Container (Dockerfile)": {
"commandName": "Docker"
}
}
}

View File

@ -0,0 +1,37 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.9.34728.123
MinimumVisualStudioVersion = 10.0.40219.1
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "App2", "App2\App2.csproj", "{D03073B1-0CB3-4CA9-A9D8-F1FB804F92D1}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "App1", "App1\App1.csproj", "{729837A1-27A7-4D48-BBB2-21F68E5F7766}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "FileGeneration", "FileGeneration\FileGeneration.csproj", "{E7AD8A8C-C957-41FF-B92A-924465D73DDB}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{D03073B1-0CB3-4CA9-A9D8-F1FB804F92D1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D03073B1-0CB3-4CA9-A9D8-F1FB804F92D1}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D03073B1-0CB3-4CA9-A9D8-F1FB804F92D1}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D03073B1-0CB3-4CA9-A9D8-F1FB804F92D1}.Release|Any CPU.Build.0 = Release|Any CPU
{729837A1-27A7-4D48-BBB2-21F68E5F7766}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{729837A1-27A7-4D48-BBB2-21F68E5F7766}.Debug|Any CPU.Build.0 = Debug|Any CPU
{729837A1-27A7-4D48-BBB2-21F68E5F7766}.Release|Any CPU.ActiveCfg = Release|Any CPU
{729837A1-27A7-4D48-BBB2-21F68E5F7766}.Release|Any CPU.Build.0 = Release|Any CPU
{E7AD8A8C-C957-41FF-B92A-924465D73DDB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{E7AD8A8C-C957-41FF-B92A-924465D73DDB}.Debug|Any CPU.Build.0 = Debug|Any CPU
{E7AD8A8C-C957-41FF-B92A-924465D73DDB}.Release|Any CPU.ActiveCfg = Release|Any CPU
{E7AD8A8C-C957-41FF-B92A-924465D73DDB}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {36392E9A-B3A7-4CFF-B577-B2A3146B3007}
EndGlobalSection
EndGlobal

Some files were not shown because too many files have changed in this diff Show More