Compare commits

..

72 Commits

Author SHA1 Message Date
757a7819f6 Merge branch 'main' into dozorova_alena_lab_3 2024-09-26 11:00:24 +04:00
37996c249a Merge pull request 'dolgov_dmitriy_lab_1' (#29) from dolgov_dmitriy_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#29
2024-09-26 10:25:37 +04:00
9456d4fe01 Merge pull request 'borschevskaya_anna_lab_2 is ready' (#25) from borschevskaya_anna_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#25
2024-09-26 10:20:55 +04:00
c14e105db5 Merge pull request 'presnyakova_victoria_lab_1' (#24) from presnyakova_victoria_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#24
2024-09-26 09:59:12 +04:00
4d1e900721 Merge pull request 'yakovleva_yulia_lab_2' (#20) from yakovleva_yulia_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#20
Reviewed-by: Alexey <a.zhelepov@mail.ru>
2024-09-26 08:45:08 +04:00
JulYakJul
21cdd4971d fix link 2024-09-24 14:53:05 +04:00
6b55b7b0fc Merge pull request 'minhasapov_ruslan_lab_1' (#23) from minhasapov_ruslan_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#23
2024-09-24 13:43:10 +04:00
47193155d9 Merge pull request 'kashin_maxim_lab_1' (#22) from kashin_maxim_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#22
2024-09-24 13:21:02 +04:00
bc8c4c887e Merge pull request 'zhimolostnova_anna_lab_2' (#21) from zhimolostnova_anna_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#21
2024-09-24 13:17:26 +04:00
4a2adcc35a Merge pull request 'yakovleva_yulia_lab_1' (#19) from yakovleva_yulia_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#19
2024-09-24 11:59:06 +04:00
d7cb666a0d Merge pull request 'kuzarin_maxim_lab_3' (#17) from kuzarin_maxim_lab_3 into main
Reviewed-on: Alexey/DAS_2024_1#17
2024-09-24 11:58:22 +04:00
6c642384c1 Merge pull request 'zhimolostnova_anna_lab_1' (#16) from zhimolostnova_anna_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#16
2024-09-24 11:52:56 +04:00
bdb5cc07ed Update dolgov_dmitriy_lab_1/README.md 2024-09-24 01:30:02 +04:00
e761e33201 Update dolgov_dmitriy_lab_1/README.md 2024-09-24 01:28:51 +04:00
Аришина)
ceee500b95 Lab 1 is ready 2024-09-24 01:20:27 +04:00
2be2c71b69 move 2024-09-23 20:19:10 +04:00
JulYakJul
aa8180ba49 Merge branch 'main' into yakovleva_yulia_lab_2 2024-09-23 17:34:56 +04:00
19522cd6ab cleanup 2024-09-23 17:12:01 +04:00
2f88911e17 put the ignore file back in place 2024-09-23 17:00:46 +04:00
c509e74465 Merge pull request 'balakhonov_danila_lab_1' (#15) from balakhonov_danila_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#15
2024-09-23 16:55:14 +04:00
314751f25c Merge pull request 'tukaeva_alfiya_lab_1 is ready' (#14) from tukaeva_alfiya_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#14
2024-09-23 16:54:53 +04:00
48f7f3a215 Merge pull request 'polevoy_sergey_lab_1' (#13) from polevoy_sergey_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#13
2024-09-23 16:54:09 +04:00
f112d2a44b Merge pull request 'mochalov_danila_lab_1' (#12) from mochalov_danila_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#12
2024-09-23 16:53:36 +04:00
477afb824d Merge pull request 'dozorova_alena_lab_2' (#11) from dozorova_alena_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#11
2024-09-23 16:53:14 +04:00
e7b9938278 Merge pull request 'emelyanov_artem_lab_2' (#10) from emelyanov_artem_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#10
2024-09-23 13:45:07 +04:00
JulYakJul
ba7480cb4f fix 2024-09-23 13:10:28 +04:00
520337f92d borschevskaya_anna_lab_2 is ready 2024-09-23 08:40:17 +04:00
754ea99981 dozorova_alena_lab_3 2024-09-22 18:34:12 +04:00
06d1d8cdd4 lab1 2024-09-22 18:06:51 +04:00
4c76a9dea6 minhasapov_ruslan_lab_1 is ready 2024-09-21 22:14:08 +04:00
e5d0aa0b3d Done 2024-09-21 16:19:03 +04:00
d326e64f24 fix readme again 2024-09-21 16:15:48 +04:00
1a118ae71f fix readme 2024-09-21 16:13:24 +04:00
e9b06b1f27 complete lab 2 2024-09-21 16:11:07 +04:00
JulYakJul
1adaac9281 yakovleva_yulia_lab_2 is ready 2024-09-20 18:36:39 +04:00
JulYakJul
5e9e2600f3 yakovleva_yulia_lab_1 is ready 2024-09-19 16:14:05 +04:00
b6e311755e add branch + readme 2024-09-19 15:54:13 +04:00
JulYakJul
0c3e973307 Revert "Merge pull request 'yakovleva_julia_lab_1' (#9) from yakovleva_julia_lab_1 into main"
This reverts commit c474c13c4a, reversing
changes made to 829a04a913.
2024-09-19 15:50:52 +04:00
c474c13c4a Merge pull request 'yakovleva_julia_lab_1' (#9) from yakovleva_julia_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#9
2024-09-19 15:42:48 +04:00
8eedde24a1 Lab 3 is ready. A couple of points still need checking, but overall everything should be fine 2024-09-19 10:53:49 +03:00
829a04a913 Merge pull request 'Kuzarin_maxim_lab_2' (#8) from kuzarin_maxim_lab_2 into main
Reviewed-on: Alexey/DAS_2024_1#8
2024-09-19 11:19:58 +04:00
57970b3333 fix readme 2024-09-19 02:08:16 +04:00
1c77ba3272 fix readme 2024-09-19 02:05:34 +04:00
ce9527b1c9 fix comments 2024-09-19 02:02:41 +04:00
a1419f21ec changes readme 2024-09-19 02:00:03 +04:00
aac01e9f48 complete lab 1 2024-09-19 01:56:40 +04:00
221f3e248b Laboratory work No. 1 completed 2024-09-18 23:53:53 +04:00
3d98388a13 tukaeva_alfiya_lab_1 is ready 2024-09-18 23:09:14 +04:00
4922e9075e polevoy_sergey_lab_1_completed 2024-09-18 19:01:17 +04:00
891eae4211 mochalov_danila_lab_1 is ready 2024-09-18 17:02:04 +04:00
121e4bbcd2 dozorova_alena_lab_2 2024-09-17 22:46:46 +04:00
0590f7b532 feature: add README.md 2024-09-17 22:26:19 +04:00
0eec58a347 feature: completed lab 2 2024-09-17 22:07:57 +04:00
JulYakJul
c8dbd5fb37 yakovleva_julia_lab_1 is ready 2024-09-17 17:43:15 +04:00
253ad80e31 The work is ready. The Readme still needs checking, but it looks fine 2024-09-17 14:13:30 +03:00
f980a74f5e Merge pull request 'emelyanov_artem_lab_1' (#7) from emelyanov_artem_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#7
2024-09-17 14:55:50 +04:00
e10ae36577 Merge pull request 'borschevskaya_anna_lab_1' (#6) from borschevskaya_anna_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#6
2024-09-17 14:54:05 +04:00
46b8ecfc54 Merge pull request 'vaksman_valerya_lab_1' (#5) from vaksman_valerya_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#5
2024-09-17 14:52:42 +04:00
262193a301 Merge pull request 'tsukanova_irina_lab_1' (#3) from tsukanova_irina_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#3
2024-09-17 14:36:47 +04:00
48711e14e3 Merge pull request 'kuzarin_maxim_lab_1' (#2) from kuzarin_maxim_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#2
2024-09-17 14:11:50 +04:00
39664ac3a1 Merge pull request 'dozorova_alena_lab_1 is ready' (#1) from dozorova_alena_lab_1 into main
Reviewed-on: Alexey/DAS_2024_1#1
2024-09-17 14:10:20 +04:00
7af877c37a feature: completed lab 1 2024-09-17 13:08:06 +04:00
7d2ae7430d is super duper ready 2024-09-15 21:59:03 +04:00
ec21e89033 borschevskaya_anna_lab_1 is ready 2024-09-15 21:51:58 +04:00
afddfcf91f is super ready 2024-09-15 21:36:13 +04:00
9b0cb3582d ready 2024-09-15 21:34:49 +04:00
37080832d5 is ready 2024-09-15 21:23:41 +04:00
39fdc511ee Init commit. 2024-09-15 19:23:41 +04:00
2714d4e718 tsukanova_irina_lab_1 is ready 2024-09-15 16:18:03 +04:00
b8a59b4932 Update kuzarin_maxim_lab_1/README.md
Removed unnecessary characters
2024-09-14 11:04:19 +04:00
4af4abcb7f dozorova_alena_lab_1 is ready 2024-09-13 23:02:10 +04:00
e6dcbeb800 First lab completed. Added the report and a link to the video 2024-09-12 21:27:13 +03:00
180 changed files with 5601 additions and 1444 deletions

.idea/.gitignore (generated, vendored, new file, 6 lines)
View File

@@ -0,0 +1,6 @@
# Default ignored files
/shelf/
/workspace.xml
/DAS_2024_1.iml
/modules.xml
/vcs.xml

View File

@@ -1,11 +1,18 @@
HELP.md
.gradle
build/
!gradle/wrapper/gradle-wrapper.jar
!**/src/main/**/build/
!**/src/test/**/build/
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**/target/
!**/src/test/**/target/
### STS ###
### IntelliJ IDEA ###
.idea/modules.xml
.idea/jarRepositories.xml
.idea/compiler.xml
.idea/libraries/
*.iws
*.iml
*.ipr
### Eclipse ###
.apt_generated
.classpath
.factorypath
@@ -13,18 +20,6 @@ build/
.settings
.springBeans
.sts4-cache
bin/
!**/src/main/**/bin/
!**/src/test/**/bin/
### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
out/
!**/src/main/**/out/
!**/src/test/**/out/
### NetBeans ###
/nbproject/private/
@@ -32,6 +27,12 @@ out/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/
### VS Code ###
.vscode/
### Mac OS ###
.DS_Store

View File

@@ -0,0 +1,43 @@
# Report. Laboratory work 2
As part of laboratory work No. 2, two services that work with text files were written.
For the first service, task variant No. 5 was chosen:
```
Finds the file with the shortest name in the /var/data directory and moves it to /var/result/data.txt.
```
And for the second one, variant No. 2:
```
Finds the smallest number in the file /var/data/data.txt and saves its third power to /var/result/result.txt.
```
## Description
First, the first service moves the data of the file with the shortest name, located in the mounted input directory, to the output folder.
The output folder is accessible to the second service, which writes the smallest number from the file placed there by the first service,
raised to the third power, to the output file.
The output file is located in a mounted directory and is available on the machine where the services are run.
The Dockerfile uses a multi-stage build with a separate base image at each stage.
The meaning of each line is documented in the Dockerfile of the first service.
The docker-compose.yml file contains comments on the new lines related to mounting volumes.
Note that the services "communicate" through the shared volume common, which is mounted into the containers at /var/result. This allows the results
of the first service to be kept for use by the second service.
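Purely for orientation, the sketch below condenses the two steps just described into one plain-Java program; the class name PipelineSketch, the hard-coded paths (taken from the task statements above) and the single-process structure are illustrative assumptions, while the real implementations, with environment variables and error handling, are the ru.first.Main and ru.second.Main classes shown further down in this diff.

```java
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.Arrays;
import java.util.Comparator;

public class PipelineSketch {
    public static void main(String[] args) throws IOException {
        // Step 1 ("first" service): pick the file with the shortest name and move it to the shared volume.
        // Assumes /var/data exists and contains at least one regular file.
        File[] inputFiles = new File("/var/data").listFiles(File::isFile);
        File shortest = Arrays.stream(inputFiles)
                .min(Comparator.comparingInt((File f) -> f.getName().length()))
                .orElseThrow();
        Files.move(shortest.toPath(), Path.of("/var/result/data.txt"), StandardCopyOption.REPLACE_EXISTING);

        // Step 2 ("second" service): find the smallest number (one number per line) and write its third power.
        int min;
        try (var lines = Files.lines(Path.of("/var/result/data.txt"))) {
            min = lines.mapToInt(line -> Integer.parseInt(line.trim()))
                    .min()
                    .orElseThrow();
        }
        Files.writeString(Path.of("/var/result/result.txt"), Double.toString(Math.pow(min, 3)));
    }
}
```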
## How to run
To start the services, do the following:
1. Install and start Docker Engine or Docker Desktop
2. In a terminal, go to the folder containing the docker-compose.yml file
3. Run the command:
```
docker compose up --build
```
If all containers start successfully, the following message appears in the console:
```
 ✔ Network borschevskaya_anna_lab_2_default Created 0.1s
 ✔ Container borschevskaya_anna_lab_2-first-1 Created 0.1s
 ✔ Container borschevskaya_anna_lab_2-second-1 Created 0.1s
Attaching to borschevskaya_anna_lab_2-first-1, borschevskaya_anna_lab_2-second-1
```
Then each service prints messages to its console describing how the file processing went.
If the INPUT_PATH and OUTPUT_PATH environment variables are not set, or in other exceptional situations, a message about it is printed as well.
## Video report
The working laboratory work can be seen in the following [video](https://disk.yandex.ru/i/LFxdyRUFQDwXEQ).

View File

@@ -0,0 +1,22 @@
services:
  first:
    build: ./first # directory in which to look for the Dockerfile that builds the first service
    environment:
      INPUT_PATH: /var/data/ # directory with the input data for file processing
      OUTPUT_PATH: /var/result/ # directory for the processing output
    volumes:
      - ./volumes/input:/var/data # the local folder with input data is mounted into a folder inside the container
      - common:/var/result # the volume shared by the two services is mounted; first puts its processing results (per its task variant) there
  second:
    build: ./second # directory in which to look for the Dockerfile that builds the second service
    depends_on: # the second service depends on the first one and is started after it
      - first
    environment:
      INPUT_PATH: /var/result/
      OUTPUT_PATH: /var/data/
    volumes:
      - ./volumes/output:/var/data
      - common:/var/result # the shared volume from which second takes the results of first and runs its own logic
volumes:
  common:

View File

@@ -0,0 +1,25 @@
# Use a Maven image for the build stage
FROM maven:3.8-eclipse-temurin-21-alpine AS build
# Set the working directory
WORKDIR /app
# Copy only pom.xml and download the dependencies
# This way Docker caches the dependency layers: when only the code changes, the cached layers are reused and the build is faster
COPY pom.xml .
RUN mvn dependency:go-offline
# Copy the rest of the sources
COPY src ./src
# Build the whole project
RUN mvn clean package -DskipTests
# Use the official JDK image to run the built jar file
FROM eclipse-temurin:21-jdk-alpine
# Copy the jar file from the previous stage
COPY --from=build /app/target/*.jar /app.jar
# Specify the command that starts the application
CMD ["java", "-jar", "app.jar"]

View File

@@ -0,0 +1,37 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>ru.first</groupId>
<artifactId>first</artifactId>
<version>1.0.0-SNAPSHOT</version>
<properties>
<maven.compiler.source>21</maven.compiler.source>
<maven.compiler.target>21</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<build>
<plugins>
<plugin>
<!-- Build an executable JAR -->
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>3.1.0</version>
<configuration>
<archive>
<manifest>
<addClasspath>true</addClasspath>
<classpathPrefix>lib/</classpathPrefix>
<mainClass>ru.first.Main</mainClass>
</manifest>
</archive>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,50 @@
package ru.first;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Comparator;
import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
import static java.util.Objects.isNull;
public class Main {
public static final String INPUT_PATH = System.getenv("INPUT_PATH");
public static final String OUTPUT_PATH = System.getenv("OUTPUT_PATH");
public static final String RESULT_FILE_NAME = "data.txt";
public static void main(String[] args) throws IOException {
if (isNull(INPUT_PATH) || INPUT_PATH.isEmpty() || isNull(OUTPUT_PATH) || OUTPUT_PATH.isEmpty()) {
System.out.printf("Отсутствуют переменные окружения INPUT_PATH = '%s' или OUTPUT_PATH = '%s'%n",
INPUT_PATH, OUTPUT_PATH);
return;
}
var inputPathDir = Path.of(INPUT_PATH);
if (!Files.exists(inputPathDir)) {
Files.createDirectory(inputPathDir);
}
var inputDirectory = new File(INPUT_PATH);
var allDirFiles = inputDirectory.listFiles();
if (isNull(allDirFiles) || allDirFiles.length == 0) {
System.out.println("Директория пуста");
return;
}
var dirFiles = Arrays.stream(allDirFiles).filter(File::isFile).toList();
if (dirFiles.isEmpty()) {
System.out.println("В указанной директории нет подходящих для обработки файлов");
return;
}
var shortestName = dirFiles.stream().min(Comparator.comparing(file -> file.getName().length())).get();
var outputPathDir = Path.of(OUTPUT_PATH);
if (!Files.exists(outputPathDir)) {
Files.createDirectory(outputPathDir);
}
var resultFilePath = Path.of(OUTPUT_PATH + File.separator + RESULT_FILE_NAME);
Files.move(Path.of(INPUT_PATH + File.separator + shortestName.getName()), resultFilePath, REPLACE_EXISTING);
}
}

View File

@@ -0,0 +1,16 @@
FROM maven:3.8-eclipse-temurin-21-alpine AS build
WORKDIR /app
COPY pom.xml .
RUN mvn dependency:go-offline
COPY src ./src
RUN mvn clean package -DskipTests
FROM eclipse-temurin:21-jdk-alpine
COPY --from=build /app/target/*.jar /app.jar
CMD ["java", "-jar", "app.jar"]

View File

@@ -0,0 +1,36 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>ru.second</groupId>
<artifactId>second</artifactId>
<version>1.0.0-SNAPSHOT</version>
<properties>
<maven.compiler.source>21</maven.compiler.source>
<maven.compiler.target>21</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>3.1.0</version>
<configuration>
<archive>
<manifest>
<addClasspath>true</addClasspath>
<classpathPrefix>lib/</classpathPrefix>
<mainClass>ru.second.Main</mainClass>
</manifest>
</archive>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@@ -0,0 +1,51 @@
package ru.second;
import java.io.File;
import java.io.FileWriter;
import java.nio.file.Files;
import static java.util.Objects.isNull;
public class Main {
public static final String INPUT_PATH = System.getenv("INPUT_PATH");
public static final String INPUT_FILE_NAME = "data.txt";
public static final String OUTPUT_PATH = System.getenv("OUTPUT_PATH");
public static final String RESULT_FILE_NAME = "result.txt";
public static void main(String[] args) {
if (isNull(INPUT_PATH) || INPUT_PATH.isEmpty() || isNull(OUTPUT_PATH) || OUTPUT_PATH.isEmpty()) {
System.out.printf("Отсутствуют переменные окружения INPUT_PATH = '%s' или OUTPUT_PATH = '%s'%n",
INPUT_PATH, OUTPUT_PATH);
return;
}
var inputFile = new File(INPUT_PATH + File.separator + INPUT_FILE_NAME);
if (!inputFile.exists()) {
System.out.println("Входной файл не существует");
return;
}
try (var stream = Files.lines(inputFile.toPath());
var writer = new FileWriter(OUTPUT_PATH + File.separator + RESULT_FILE_NAME);
) {
var min = stream.map(Main::parseInt).reduce(Integer::min);
if (min.isEmpty()) {
System.out.println("Не найдено минимальное значение среди строк файла");
return;
}
var minValue = Math.pow(min.get(), 3);
System.out.printf("Get min value = '%d'%n", min.get());
writer.append(Double.toString(minValue));
System.out.printf("To file %s was written value %f%n", RESULT_FILE_NAME, minValue);
} catch (Exception ex) {
System.out.println(ex.getMessage());
}
}
private static Integer parseInt(String line) {
line = line.replace("\\n", "");
return Integer.parseInt(line);
}
}

dolgov_dmitriy_lab_1/.gitignore (vendored, new file, 4 lines)
View File

@@ -0,0 +1,4 @@
data/
log/
wordpress/
custom/

View File

@@ -0,0 +1,34 @@
# Laboratory work No. 1
## Completed by: Долгов Дмитрий, group ПИбд-42
### The following services were deployed:
* mediawiki (wiki engine)
* wordpress (a popular content management system)
* gitea (a service for hosting git repositories)
* mariaDB
### The following technologies were used:
* git
* docker
* docker-compose
### To start the laboratory work, enter the following command in the console:
```
docker compose up -d
```
## Startup result:
```
[+] Running 4/4
 ✔ Container dolgov_dmitriy_lab_1-wordpress-1 Running 0.0s
 ✔ Container dolgov_dmitriy_lab_1-database-1 Running 0.0s
 ✔ Container dolgov_dmitriy_lab_1-mediawiki-1 Running 0.0s
 ✔ Container gitea Running
```
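A quick, optional way to check that the deployed containers answer on their published ports is sketched below; the host ports 8080 (mediawiki), 8081 (gitea) and 8082 (wordpress) are taken from the docker-compose.yml that follows, while the Java client and the class name SmokeCheck are illustrative assumptions, not part of the lab.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.Map;

public class SmokeCheck {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        // Host ports as mapped in docker-compose.yml: mediawiki 8080, gitea 8081, wordpress 8082.
        Map<String, String> services = Map.of(
                "mediawiki", "http://localhost:8080/",
                "gitea", "http://localhost:8081/",
                "wordpress", "http://localhost:8082/");
        for (var entry : services.entrySet()) {
            HttpRequest request = HttpRequest.newBuilder(URI.create(entry.getValue())).GET().build();
            HttpResponse<Void> response = client.send(request, HttpResponse.BodyHandlers.discarding());
            System.out.println(entry.getKey() + " responded with HTTP " + response.statusCode());
        }
    }
}
```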
## Video with the startup result:
The video can be viewed at this [link](https://drive.google.com/file/d/1hC6HhNvYBRuYVClobXyDMReA4ngwxhwc/view?usp=drive_link).

View File

@@ -0,0 +1,73 @@
# Services required by the assignment
services:
  # MediaWiki service
  mediawiki:
    # MediaWiki image
    image: mediawiki
    # Automatic restart on failure
    restart: always
    # expose container port 80 on host port 8080
    ports:
      - "8080:80"
    # link to the database service
    links:
      - database
    # mount a volume for data storage
    volumes:
      - images:/var/www/html/images
  # WordPress service
  wordpress:
    # WordPress image
    image: wordpress:latest
    # Automatic restart on failure
    ports:
      - "8082:80"
    restart: always
    volumes:
      - ./wordpress:/var/www/html
  # Gitea service
  server:
    image: gitea/gitea:latest
    container_name: gitea
    restart: always
    environment:
      - USER_UID=1000
      - USER_GID=1000
    volumes:
      - ./data:/data
      - ./custom:/app/gitea/custom
      - ./log:/app/gitea/log
    ports:
      - "8081:3000"
    links:
      - database
    depends_on:
      - database
  # MariaDB service
  database:
    # MariaDB image
    image: mariadb
    # Automatic restart on failure
    restart: always
    # environment variables for configuring the database
    environment:
      # database name
      MYSQL_DATABASE: my_wiki
      # user name
      MYSQL_USER: user
      # user password
      MYSQL_PASSWORD: user
      # random password for the root user
      MYSQL_RANDOM_ROOT_PASSWORD: 'yes'
    # mount a volume for data storage
    volumes:
      - db:/var/lib/mysql
# volumes for data storage
volumes:
  images:
  db:

Binary image file added (275 KiB); content not shown.

dozorova_alena_lab_3/.gitignore (vendored, new file, 7 lines)
View File

@@ -0,0 +1,7 @@
/dozorova_alena_lab_3/PostService/.vs
/dozorova_alena_lab_3/PostService/bin
/dozorova_alena_lab_3/PostService/obj
/dozorova_alena_lab_3/WorkerService/.vs
/dozorova_alena_lab_3/WorkerService/bin
/dozorova_alena_lab_3/WorkerService/obj

View File

@@ -0,0 +1,30 @@
**/.classpath
**/.dockerignore
**/.env
**/.git
**/.gitignore
**/.project
**/.settings
**/.toolstarget
**/.vs
**/.vscode
**/*.*proj.user
**/*.dbmdl
**/*.jfm
**/azds.yaml
**/bin
**/charts
**/docker-compose*
**/Dockerfile*
**/node_modules
**/npm-debug.log
**/obj
**/secrets.dev.yaml
**/values.dev.yaml
LICENSE
README.md
!**/.gitignore
!.git/HEAD
!.git/config
!.git/packed-refs
!.git/refs/heads/**

View File

@@ -0,0 +1,94 @@
using Microsoft.AspNetCore.Mvc;
using Newtonsoft.Json;
namespace PostService.Controllers
{
[ApiController]
[Route("[controller]")]
public class HomeController : ControllerBase
{
private readonly ILogger<HomeController> _logger;
public static List<Post> list = new List<Post>()
{
new Post()
{
Id = Guid.NewGuid(),
Name = "Default"
}
};
public HomeController(ILogger<HomeController> logger)
{
_logger = logger;
}
[HttpGet("get")]
public IActionResult Get()
{
return list == null || list.Count == 0 ? NotFound() : Ok(list);
}
[HttpGet("get/{Id}")]
public IActionResult Get([FromRoute] Guid Id)
{
var obj = list.Where(l => l.Id == Id).FirstOrDefault();
return obj == null ? NotFound() : Ok(obj);
}
[HttpPost("create")]
public IActionResult Create([FromBody] CreateUpdatePost data)
{
try
{
var model = new Post()
{
Id = Guid.NewGuid(),
Name = data.Name,
};
list.Add(model);
var modelForResult = new CreateUpdatePost
{
Name = model.Name
};
return Ok(modelForResult);
}
catch (Exception ex)
{
return BadRequest(ex.Message);
}
}
[HttpPut("update/{Id}")]
public IActionResult Update([FromRoute] Guid Id,
[FromBody] CreateUpdatePost data)
{
var oldModel = list.Where(l => l.Id == Id).Select(l => list.IndexOf(l)).FirstOrDefault();
if (oldModel != null)
{
list[oldModel].Name = data.Name;
var modelForResult = new CreateUpdatePost {
Name = data.Name,
};
return Ok(modelForResult);
}
else return NotFound();
}
[HttpDelete("delete/{Id}")]
public IActionResult Delete([FromRoute] Guid Id)
{
var model = list.Where(l => l.Id == Id).FirstOrDefault();
if (model != null) {
list.Remove(model);
return Ok();
}
else return NotFound();
}
}
}

View File

@@ -0,0 +1,24 @@
#See https://aka.ms/customizecontainer to learn how to customize your debug container and how Visual Studio uses this Dockerfile to build your images for faster debugging.
FROM mcr.microsoft.com/dotnet/aspnet:6.0 AS base
WORKDIR /app
EXPOSE 80
EXPOSE 443
FROM mcr.microsoft.com/dotnet/sdk:6.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
COPY ["PostService.csproj", "."]
RUN dotnet restore "./PostService.csproj"
COPY . .
WORKDIR "/src/."
RUN dotnet build "./PostService.csproj" -c $BUILD_CONFIGURATION -o /app/build
FROM build AS publish
ARG BUILD_CONFIGURATION=Release
RUN dotnet publish "./PostService.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false
FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
ENTRYPOINT ["dotnet", "PostService.dll"]

View File

@@ -0,0 +1,16 @@
namespace PostService
{
public class Post
{
public Guid Id { get; set; }
public string Name { get; set; }
}
public class PostDTO : Post { }
public class CreateUpdatePost
{
public string Name { get; set; }
}
}

View File

@@ -0,0 +1,18 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<UserSecretsId>1a8ae13d-1a3e-4308-955e-4fbf68b758c9</UserSecretsId>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
<DockerfileContext>.</DockerfileContext>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.VisualStudio.Azure.Containers.Tools.Targets" Version="1.20.1" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.5.0" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="Current" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup>
<ActiveDebugProfile>PostService</ActiveDebugProfile>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
<DebuggerFlavor>ProjectDebugger</DebuggerFlavor>
</PropertyGroup>
</Project>

View File

@@ -0,0 +1,25 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.10.35004.147
MinimumVisualStudioVersion = 10.0.40219.1
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "PostService", "PostService.csproj", "{EB304D56-A05F-41BC-9523-B5FEB658BCEE}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{EB304D56-A05F-41BC-9523-B5FEB658BCEE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{EB304D56-A05F-41BC-9523-B5FEB658BCEE}.Debug|Any CPU.Build.0 = Debug|Any CPU
{EB304D56-A05F-41BC-9523-B5FEB658BCEE}.Release|Any CPU.ActiveCfg = Release|Any CPU
{EB304D56-A05F-41BC-9523-B5FEB658BCEE}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {6625EB51-D403-4C57-A7C2-AA01C90AB472}
EndGlobalSection
EndGlobal

View File

@@ -0,0 +1,31 @@
using Microsoft.OpenApi.Models;
var builder = WebApplication.CreateBuilder(args);
// Add services to the container.
builder.Services.AddControllers();
// Learn more about configuring Swagger/OpenAPI at https://aka.ms/aspnetcore/swashbuckle
builder.Services.AddEndpointsApiExplorer();
builder.Services.AddSwaggerGen();
var app = builder.Build();
app.UseSwagger(c =>
{
c.PreSerializeFilters.Add((swaggerDoc, httpReq) =>
{
swaggerDoc.Servers = new List<OpenApiServer> { new OpenApiServer { Url = $"{httpReq.Scheme}://{httpReq.Host.Value}/postservice" } };
});
});
app.UseSwaggerUI();
app.UseHttpsRedirection();
app.UseAuthorization();
app.MapControllers();
app.Run();

View File

@@ -0,0 +1,41 @@
{
"profiles": {
"PostService": {
"commandName": "Project",
"launchBrowser": true,
"launchUrl": "swagger",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
},
"dotnetRunMessages": true,
"applicationUrl": "https://localhost:7293;http://localhost:5139"
},
"IIS Express": {
"commandName": "IISExpress",
"launchBrowser": true,
"launchUrl": "swagger",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
},
"Container (Dockerfile)": {
"commandName": "Docker",
"launchBrowser": true,
"launchUrl": "{Scheme}://{ServiceHost}:{ServicePort}/swagger",
"environmentVariables": {
"ASPNETCORE_URLS": "https://+:443;http://+:80"
},
"publishAllPorts": true,
"useSSL": true
}
},
"$schema": "https://json.schemastore.org/launchsettings.json",
"iisSettings": {
"windowsAuthentication": false,
"anonymousAuthentication": true,
"iisExpress": {
"applicationUrl": "http://localhost:36974",
"sslPort": 44370
}
}
}

View File

@@ -0,0 +1,8 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
}
}

View File

@@ -0,0 +1,9 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"AllowedHosts": "*"
}

View File

@@ -0,0 +1,30 @@
**/.classpath
**/.dockerignore
**/.env
**/.git
**/.gitignore
**/.project
**/.settings
**/.toolstarget
**/.vs
**/.vscode
**/*.*proj.user
**/*.dbmdl
**/*.jfm
**/azds.yaml
**/bin
**/charts
**/docker-compose*
**/Dockerfile*
**/node_modules
**/npm-debug.log
**/obj
**/secrets.dev.yaml
**/values.dev.yaml
LICENSE
README.md
!**/.gitignore
!.git/HEAD
!.git/config
!.git/packed-refs
!.git/refs/heads/**

View File

@@ -0,0 +1,112 @@
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Hosting;
using RestSharp;
namespace WorkerService.Controllers
{
[ApiController]
[Route("[controller]")]
public class HomeController : ControllerBase
{
private readonly ILogger<HomeController> _logger;
private readonly RestClient _restClient;
public static List<WorkerEntity> list = new List<WorkerEntity>();
public HomeController(ILogger<HomeController> logger)
{
_logger = logger;
_restClient = new RestClient("http://nginx/postservice/Home");
}
[HttpGet("get")]
public IActionResult Get()
{
if (list == null || list.Count == 0)
{
return NotFound();
}
var result = list.Select(l => new GetEntity
{
Id = l.Id,
FIO = l.FIO,
PostId = l.PostId,
});
return Ok(result);
}
[HttpGet("get/{Id}")]
public IActionResult Get([FromRoute] Guid Id)
{
var obj = list.Where(l => l.Id == Id).FirstOrDefault();
return obj == null ? NotFound() : Ok(obj);
}
[HttpPost("create")]
public async Task<IActionResult> CreateAsync([FromBody] CreateUpdateWorker data)
{
try
{
var model = new WorkerEntity()
{
Id = Guid.NewGuid(),
FIO = data.FIO,
PostId = data.PostId,
};
var restRequest = new RestRequest($"/get/{data.PostId}", Method.Get);
var responce = await _restClient.ExecuteAsync<PostEntity>(restRequest);
if (!responce.IsSuccessful)
{
return BadRequest(responce);
}
model.Post = responce.Data;
list.Add(model);
return Ok(model);
}
catch (Exception ex)
{
return BadRequest(ex.Message);
}
}
[HttpPut("update/{Id}")]
public IActionResult Update([FromRoute] Guid Id,
[FromBody] CreateUpdateWorker data)
{
var oldModel = list.Where(l => l.Id == Id).Select(l => list.IndexOf(l)).FirstOrDefault();
if (oldModel != null)
{
list[oldModel].FIO = data.FIO;
list[oldModel].PostId = data.PostId;
var modelForResult = new CreateUpdateWorker
{
FIO = data.FIO,
PostId = data.PostId,
};
return Ok(modelForResult);
}
else return NotFound();
}
[HttpDelete("delete/{Id}")]
public IActionResult Delete([FromRoute] Guid Id)
{
var model = list.Where(l => l.Id == Id).FirstOrDefault();
if (model != null)
{
list.Remove(model);
return Ok();
}
else return NotFound();
}
}
}

View File

@@ -0,0 +1,24 @@
#See https://aka.ms/customizecontainer to learn how to customize your debug container and how Visual Studio uses this Dockerfile to build your images for faster debugging.
FROM mcr.microsoft.com/dotnet/aspnet:6.0 AS base
WORKDIR /app
EXPOSE 80
EXPOSE 443
FROM mcr.microsoft.com/dotnet/sdk:6.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
COPY ["WorkerService.csproj", "."]
RUN dotnet restore "./WorkerService.csproj"
COPY . .
WORKDIR "/src/."
RUN dotnet build "./WorkerService.csproj" -c $BUILD_CONFIGURATION -o /app/build
FROM build AS publish
ARG BUILD_CONFIGURATION=Release
RUN dotnet publish "./WorkerService.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false
FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
ENTRYPOINT ["dotnet", "WorkerService.dll"]

View File

@@ -0,0 +1,32 @@
namespace WorkerService
{
public class WorkerEntity
{
public Guid Id { get; set; }
public string FIO { get; set; }
public Guid PostId { get; set; }
public PostEntity Post { get; set; }
}
public class PostEntity
{
public Guid id { get; set; }
public string name { get; set; }
}
public class CreateUpdateWorker
{
public string FIO { get; set; }
public Guid PostId { get; set; }
}
public class GetEntity
{
public Guid Id { get; set; }
public string FIO { get; set; }
public Guid PostId { get; set; }
}
}

View File

@@ -0,0 +1,32 @@
using Microsoft.OpenApi.Models;
var builder = WebApplication.CreateBuilder(args);
// Add services to the container.
builder.Services.AddControllers();
// Learn more about configuring Swagger/OpenAPI at https://aka.ms/aspnetcore/swashbuckle
builder.Services.AddEndpointsApiExplorer();
builder.Services.AddSwaggerGen();
var app = builder.Build();
app.UseSwagger(c =>
{
c.PreSerializeFilters.Add((swaggerDoc, httpReq) =>
{
swaggerDoc.Servers = new List<OpenApiServer> { new OpenApiServer { Url = $"{httpReq.Scheme}://{httpReq.Host.Value}/workerservice" } };
});
});
app.UseSwaggerUI();
app.UseHttpsRedirection();
app.UseAuthorization();
app.MapControllers();
app.Run();

View File

@@ -0,0 +1,41 @@
{
"profiles": {
"WorkerService": {
"commandName": "Project",
"launchBrowser": true,
"launchUrl": "swagger",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
},
"dotnetRunMessages": true,
"applicationUrl": "https://localhost:7144;http://localhost:5220"
},
"IIS Express": {
"commandName": "IISExpress",
"launchBrowser": true,
"launchUrl": "swagger",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
},
"Container (Dockerfile)": {
"commandName": "Docker",
"launchBrowser": true,
"launchUrl": "{Scheme}://{ServiceHost}:{ServicePort}/swagger",
"environmentVariables": {
"ASPNETCORE_URLS": "https://+:443;http://+:80"
},
"publishAllPorts": true,
"useSSL": true
}
},
"$schema": "https://json.schemastore.org/launchsettings.json",
"iisSettings": {
"windowsAuthentication": false,
"anonymousAuthentication": true,
"iisExpress": {
"applicationUrl": "http://localhost:55850",
"sslPort": 44303
}
}
}

View File

@@ -0,0 +1,18 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<UserSecretsId>3228803c-add9-46fb-b949-9e2a2e89056a</UserSecretsId>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
<DockerfileContext>.</DockerfileContext>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.VisualStudio.Azure.Containers.Tools.Targets" Version="1.20.1" />
<PackageReference Include="RestSharp" Version="112.0.0" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.5.0" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="Current" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup>
<ActiveDebugProfile>WorkerService</ActiveDebugProfile>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
<DebuggerFlavor>ProjectDebugger</DebuggerFlavor>
</PropertyGroup>
</Project>

View File

@@ -0,0 +1,25 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.10.35004.147
MinimumVisualStudioVersion = 10.0.40219.1
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "WorkerService", "WorkerService.csproj", "{A9DF2332-E571-49AD-805A-D61B23C40D5F}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{A9DF2332-E571-49AD-805A-D61B23C40D5F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{A9DF2332-E571-49AD-805A-D61B23C40D5F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{A9DF2332-E571-49AD-805A-D61B23C40D5F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{A9DF2332-E571-49AD-805A-D61B23C40D5F}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {B515427C-329A-4B73-B82C-45D929A6EA75}
EndGlobalSection
EndGlobal

View File

@@ -0,0 +1,8 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
}
}

View File

@@ -0,0 +1,9 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"AllowedHosts": "*"
}

View File

@@ -0,0 +1,21 @@
services:
nginx:
image: nginx
depends_on:
- postservice
- workerservice
volumes:
- ./nginx/nginx.conf:/etc/nginx/conf.d/default.conf
ports:
- 80:80
postservice:
build: .\PostService
ports:
- 81:8080
workerservice:
build: .\WorkerService
ports:
- 82:8080
depends_on:
- postservice

View File

@@ -0,0 +1,21 @@
server {
listen 80;
listen [::]:80;
server_name localhost;
location /postservice/ {
proxy_pass http://postservice:80/;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Prefix /admin;
}
location /workerservice/ {
proxy_pass http://workerservice:80/;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Prefix /admin;
}
}

View File

@@ -0,0 +1,16 @@
# Laboratory work 3
This work presents an example of synchronous message exchange between services and of their interaction with the outside world.
## Description
Two entities were taken: worker and post (job position). The first service manages the list of posts, the second manages the list of workers, each of whom can have one post. One post can have many workers.
The system was implemented as a C# web API with the data stored in memory.
Swagger is used as the interface and can be used to send requests.
## Running
To run the laboratory work, Docker must be installed and running.
Go to the folder containing this file, then open a terminal and enter the command:
```
docker compose up -d --build
```
It is important that port 80 on the machine is free at that moment.
As a result, after the build the whole system starts, and the Swagger UIs are available at http://localhost/postservice/swagger and http://localhost/workerservice/swagger
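Assuming the stack is running and port 80 is free, the synchronous interaction described above can be exercised through the nginx gateway with a client sketch like the one below; it is written in Java purely for illustration (the services themselves are C#), the routes follow the controllers and nginx.conf shown earlier in this diff, and the class name GatewaySmokeTest and the JSON payload value are assumptions.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class GatewaySmokeTest {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();

        // Create a post through the gateway (location /postservice/ proxies to PostService).
        HttpRequest createPost = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost/postservice/Home/create"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString("{\"name\":\"Engineer\"}"))
                .build();
        System.out.println(client.send(createPost, HttpResponse.BodyHandlers.ofString()).body());

        // List posts and workers through their gateway prefixes.
        for (String path : new String[]{"/postservice/Home/get", "/workerservice/Home/get"}) {
            HttpRequest get = HttpRequest.newBuilder()
                    .uri(URI.create("http://localhost" + path))
                    .GET()
                    .build();
            HttpResponse<String> response = client.send(get, HttpResponse.BodyHandlers.ofString());
            System.out.println(path + " -> HTTP " + response.statusCode() + ": " + response.body());
        }
    }
}
```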
## Video demonstration
A video demonstration of the laboratory work result is available at this [link](https://drive.google.com/file/d/1rg2xnXM-jPDfFJWxNIitq0I8kXj9Pr3-/view?usp=sharing)

View File

@@ -1,114 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="AutoImportSettings">
<option name="autoReloadType" value="SELECTIVE" />
</component>
<component name="ChangeListManager">
<list default="true" id="c0e70306-e650-4c5f-8796-30690eb2be47" name="Changes" comment="">
<change afterPath="$PROJECT_DIR$/docker-compose.yml" afterDir="false" />
<change afterPath="$PROJECT_DIR$/nginx.conf" afterDir="false" />
<change afterPath="$PROJECT_DIR$/price_history_module/Dockerfile" afterDir="false" />
<change afterPath="$PROJECT_DIR$/price_history_module/src/main/java/ru/ulstu/price_history_module/config/AppConfig.java" afterDir="false" />
<change afterPath="$PROJECT_DIR$/price_history_module/src/main/java/ru/ulstu/price_history_module/controller/PriceHistoryController.java" afterDir="false" />
<change afterPath="$PROJECT_DIR$/price_history_module/src/main/java/ru/ulstu/price_history_module/model/PriceHistory.java" afterDir="false" />
<change afterPath="$PROJECT_DIR$/price_history_module/src/main/java/ru/ulstu/price_history_module/service/PriceHistoryService.java" afterDir="false" />
<change afterPath="$PROJECT_DIR$/price_history_module/src/main/java/ru/ulstu/price_history_module/service/dto/CreatePriceHistoryDto.java" afterDir="false" />
<change afterPath="$PROJECT_DIR$/product_module/Dockerfile" afterDir="false" />
<change afterPath="$PROJECT_DIR$/product_module/src/main/java/ru/ulstu/product_module/controller/ProductController.java" afterDir="false" />
<change afterPath="$PROJECT_DIR$/product_module/src/main/java/ru/ulstu/product_module/model/Product.java" afterDir="false" />
<change afterPath="$PROJECT_DIR$/product_module/src/main/java/ru/ulstu/product_module/service/ProductService.java" afterDir="false" />
<change afterPath="$PROJECT_DIR$/product_module/src/main/java/ru/ulstu/product_module/service/dto/CreateProductDto.java" afterDir="false" />
</list>
<option name="SHOW_DIALOG" value="false" />
<option name="HIGHLIGHT_CONFLICTS" value="true" />
<option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
<option name="LAST_RESOLUTION" value="IGNORE" />
</component>
<component name="Git.Settings">
<option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$/.." />
</component>
<component name="KubernetesApiPersistence">{}</component>
<component name="KubernetesApiProvider">{
&quot;isMigrated&quot;: true
}</component>
<component name="ProjectColorInfo">{
&quot;associatedIndex&quot;: 3
}</component>
<component name="ProjectId" id="2nKW7SdJYEDJy5g4bOXYcSQkE08" />
<component name="ProjectViewState">
<option name="hideEmptyMiddlePackages" value="true" />
<option name="showLibraryContents" value="true" />
</component>
<component name="PropertiesComponent">{
&quot;keyToString&quot;: {
&quot;Docker.docker-compose.yml.price: Compose Deployment.executor&quot;: &quot;Run&quot;,
&quot;Docker.docker-compose.yml: Compose Deployment.executor&quot;: &quot;Run&quot;,
&quot;RunOnceActivity.ShowReadmeOnStart&quot;: &quot;true&quot;,
&quot;git-widget-placeholder&quot;: &quot;emelaynov__artem__lab__3&quot;,
&quot;kotlin-language-version-configured&quot;: &quot;true&quot;,
&quot;last_opened_file_path&quot;: &quot;/home/forever/УлГТУ/Распределенные вычисления и приложения/DAS_2024_1/emelaynov_artem_lab_3&quot;,
&quot;node.js.detected.package.eslint&quot;: &quot;true&quot;,
&quot;node.js.detected.package.tslint&quot;: &quot;true&quot;,
&quot;node.js.selected.package.eslint&quot;: &quot;(autodetect)&quot;,
&quot;node.js.selected.package.tslint&quot;: &quot;(autodetect)&quot;,
&quot;nodejs_package_manager_path&quot;: &quot;npm&quot;,
&quot;vue.rearranger.settings.migration&quot;: &quot;true&quot;
}
}</component>
<component name="RunManager" selected="Docker.docker-compose.yml: Compose Deployment">
<configuration default="true" type="docker-deploy" factoryName="docker-compose.yml" temporary="true">
<deployment type="docker-compose.yml" />
<method v="2" />
</configuration>
<configuration name="docker-compose.yml: Compose Deployment" type="docker-deploy" factoryName="docker-compose.yml" temporary="true" server-name="Docker">
<deployment type="docker-compose.yml">
<settings>
<option name="sourceFilePath" value="docker-compose.yml" />
</settings>
</deployment>
<method v="2" />
</configuration>
<configuration name="docker-compose.yml.price: Compose Deployment" type="docker-deploy" factoryName="docker-compose.yml" temporary="true" server-name="Docker">
<deployment type="docker-compose.yml">
<settings>
<option name="services">
<list>
<option value="price" />
</list>
</option>
<option name="sourceFilePath" value="docker-compose.yml" />
</settings>
</deployment>
<method v="2" />
</configuration>
<recent_temporary>
<list>
<item itemvalue="Docker.docker-compose.yml: Compose Deployment" />
<item itemvalue="Docker.docker-compose.yml.price: Compose Deployment" />
</list>
</recent_temporary>
</component>
<component name="SharedIndexes">
<attachedChunks>
<set>
<option value="bundled-jdk-9823dce3aa75-28b599e66164-intellij.indexing.shared.core-IU-242.22855.74" />
<option value="bundled-js-predefined-d6986cc7102b-5c90d61e3bab-JavaScript-IU-242.22855.74" />
</set>
</attachedChunks>
</component>
<component name="SpellCheckerSettings" RuntimeDictionaries="0" Folders="0" CustomDictionaries="0" DefaultDictionary="application-level" UseSingleDictionary="true" transferred="true" />
<component name="TaskManager">
<task active="true" id="Default" summary="Default task">
<changelist id="c0e70306-e650-4c5f-8796-30690eb2be47" name="Changes" comment="" />
<created>1728722079988</created>
<option name="number" value="Default" />
<option name="presentableId" value="Default" />
<updated>1728722079988</updated>
<workItem from="1728722081004" duration="8714000" />
</task>
<servers />
</component>
<component name="TypeScriptGeneratedFilesManager">
<option name="version" value="3" />
</component>
</project>

View File

@@ -1,30 +0,0 @@
## Assignment
1. Create 2 microservices implementing CRUD on related entities.
2. Implement a mechanism for synchronous message exchange between the microservices.
3. Implement a gateway based on a transparent nginx proxy server.
Variant: products and their price history
## Implementation
Two services were written in Java with the Spring framework:
* The product_module service, which stores product data and exposes CRUD operations on it via HTTP requests.
* The price_history_module service, which stores product price history data and exposes CRUD operations on it via HTTP requests.
The services communicate synchronously: when the price history service creates a price record, it sends a request to the product service to link them.
Dockerfile files describing the container builds are provided for the services:
* Both containers expose the ports the applications run on: 8080 for products and 8081 for price history.
* The working directory /app is selected and the application files are copied there.
* A command then starts the applications themselves.
The shared deployment yaml file is configured as follows:
* a services block lists the services to be deployed.
* each service has a build section declaring its folder, the Dockerfile used to build it, and its dependencies.
* the nginx service has a port exposed to the outside.
## Result
The work is demonstrated in the video.
## Video link
https://drive.google.com/file/d/1tH7FwSu_VWJ5SKJBXm3zPKbxTAKVUFxb/view?usp=sharing

View File

@@ -1,37 +0,0 @@
version: '3'
services:
nginx:
image: nginx:latest
container_name: nginx
ports:
- "80:80"
volumes:
- ./nginx.conf:/etc/nginx/nginx.conf:ro
depends_on:
- price
- product
networks:
- app-network
product:
build:
context: ./product_module
dockerfile: Dockerfile
ports:
- "8080:8080"
networks:
- app-network
price:
build:
context: ./price_history_module
dockerfile: Dockerfile
ports:
- "8081:8081"
networks:
- app-network
networks:
app-network:
driver: bridge

View File

@@ -1,21 +0,0 @@
http {
server {
listen 80;
listen [::]:80;
server_name localhost;
# Proxy for ProductService
location /product/ {
proxy_pass http://product:8080;
}
# Proxy for PriceHistoryService
location /price-history/ {
proxy_pass http://price:8081;
}
}
}
events {
worker_connections 1024;
}

View File

@@ -1,7 +0,0 @@
FROM openjdk:17-jdk-slim
WORKDIR /app
COPY build/libs/price_history_module-0.0.1-SNAPSHOT.jar /app/price.jar
ENTRYPOINT ["java", "-jar", "price.jar"]

View File

@@ -1,38 +0,0 @@
plugins {
id 'java'
id 'org.springframework.boot' version '3.3.4'
id 'io.spring.dependency-management' version '1.1.6'
}
group = 'ru.ulstu'
version = '0.0.1-SNAPSHOT'
java {
toolchain {
languageVersion = JavaLanguageVersion.of(17)
}
}
configurations {
compileOnly {
extendsFrom annotationProcessor
}
}
repositories {
mavenCentral()
}
dependencies {
implementation 'org.springframework.boot:spring-boot-starter-web'
compileOnly 'org.projectlombok:lombok'
annotationProcessor 'org.projectlombok:lombok'
testImplementation 'org.springframework.boot:spring-boot-starter-test'
testRuntimeOnly 'org.junit.platform:junit-platform-launcher'
implementation 'org.springdoc:springdoc-openapi-starter-webmvc-ui:2.5.0'
implementation 'org.springframework.boot:spring-boot-starter-web'
}
tasks.named('test') {
useJUnitPlatform()
}

View File

@@ -1,7 +0,0 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

View File

@@ -1,252 +0,0 @@
#!/bin/sh
#
# Copyright © 2015-2021 the original authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#
##############################################################################
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;;
esac
done
# This is normally unused
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s
' "$PWD" ) || exit
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
warn () {
echo "$*"
} >&2
die () {
echo
echo "$*"
echo
exit 1
} >&2
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD=$JAVA_HOME/jre/sh/java
else
JAVACMD=$JAVA_HOME/bin/java
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD=java
if ! command -v java >/dev/null 2>&1
then
die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
fi
# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC2039,SC3045
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC2039,SC3045
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
fi
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
JAVACMD=$( cygpath --unix "$JAVACMD" )
# Now convert the arguments - kludge to limit ourselves to /bin/sh
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
done
fi
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Collect all arguments for the java command:
# * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
# and any embedded shellness will be escaped.
# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be
# treated as '${Hostname}' itself on the command line.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Stop when "xargs" is not available.
if ! command -v xargs >/dev/null 2>&1
then
die "xargs is not available"
fi
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
exec "$JAVACMD" "$@"

View File

@@ -1,94 +0,0 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@rem SPDX-License-Identifier: Apache-2.0
@rem
@if "%DEBUG%"=="" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%"=="" set DIRNAME=.
@rem This is normally unused
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if %ERRORLEVEL% equ 0 goto execute
echo. 1>&2
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2
echo. 1>&2
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
echo location of your Java installation. 1>&2
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo. 1>&2
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2
echo. 1>&2
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
echo location of your Java installation. 1>&2
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if %ERRORLEVEL% equ 0 goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
set EXIT_CODE=%ERRORLEVEL%
if %EXIT_CODE% equ 0 set EXIT_CODE=1
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
exit /b %EXIT_CODE%
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@@ -1 +0,0 @@
rootProject.name = 'price_history_module'

View File

@@ -1,13 +0,0 @@
package ru.ulstu.price_history_module;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class PriceHistoryModuleApplication {
public static void main(String[] args) {
SpringApplication.run(PriceHistoryModuleApplication.class, args);
}
}

View File

@@ -1,13 +0,0 @@
package ru.ulstu.price_history_module.config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.client.RestTemplate;
@Configuration
public class AppConfig {
@Bean
public RestTemplate restTemplate() {
return new RestTemplate();
}
}

View File

@@ -1,42 +0,0 @@
package ru.ulstu.price_history_module.controller;
import lombok.AllArgsConstructor;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import ru.ulstu.price_history_module.service.PriceHistoryService;
import ru.ulstu.price_history_module.service.dto.CreatePriceHistoryDto;
@AllArgsConstructor
@RequestMapping("/price-history")
@RestController
public class PriceHistoryController {
private final PriceHistoryService priceHistoryService;
@GetMapping
public ResponseEntity<?> findAll() {
return ResponseEntity.ok(priceHistoryService.getPriceHistory());
}
@GetMapping("/{id}")
public ResponseEntity<?> findById(@PathVariable Long id) {
return ResponseEntity.ok(priceHistoryService.getProductPrice(id));
}
@PostMapping
public ResponseEntity<?> createPriceHistory(@RequestBody CreatePriceHistoryDto createProductDto) {
return ResponseEntity.ok(priceHistoryService.addProductPrice(createProductDto));
}
@PutMapping("/{id}")
public ResponseEntity<?> updatePriceHistory(
@PathVariable Long id,
@RequestBody CreatePriceHistoryDto createProductDto
) {
return ResponseEntity.ok(priceHistoryService.updatePriceHistory(id, createProductDto));
}
@DeleteMapping("/{id}")
public void deletePriceHistory(@PathVariable Long id) {
priceHistoryService.deletePriceHistory(id);
}
}

View File

@@ -1,19 +0,0 @@
package ru.ulstu.price_history_module.model;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Date;
@AllArgsConstructor
@NoArgsConstructor
@Data
@Builder
public class PriceHistory {
private Long id;
private Date date;
private Double price;
private Long productId;
}

View File

@@ -1,80 +0,0 @@
package ru.ulstu.price_history_module.service;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Service;
import org.springframework.web.client.RestClientException;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.server.ResponseStatusException;
import ru.ulstu.price_history_module.model.PriceHistory;
import ru.ulstu.price_history_module.service.dto.CreatePriceHistoryDto;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Service
public class PriceHistoryService {
private final Map<Long, PriceHistory> products = new HashMap<>();
private Long countId = 1L;
private final RestTemplate restTemplate;
public PriceHistoryService(RestTemplate restTemplate) {
this.restTemplate = restTemplate;
}
public List<PriceHistory> getPriceHistory() {
return new ArrayList<>(products.values());
}
public PriceHistory getProductPrice(Long id) {
if (!products.containsKey(id)) {
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Product price not found");
}
return products.get(id);
}
public Long addProductPrice(CreatePriceHistoryDto createPriceHistoryDto) {
final PriceHistory priceHistory = PriceHistory.builder()
.id(countId)
.date(createPriceHistoryDto.getDate())
.price(createPriceHistoryDto.getPrice())
.productId(createPriceHistoryDto.getProductId())
.build();
countId++;
products.put(priceHistory.getId(), priceHistory);
try {
String baseUrl = "http://nginx/product/addPriceHistory";
restTemplate.postForObject(
baseUrl + "/" + createPriceHistoryDto.getProductId(),
priceHistory,
Void.class
);
} catch (RestClientException e) {
throw new RuntimeException("Failed to add price history to product: " + e.getMessage(), e);
}
return priceHistory.getId();
}
public CreatePriceHistoryDto updatePriceHistory(Long id, CreatePriceHistoryDto createPriceHistoryDto) {
// guard against a missing id so the API returns 404 instead of failing with a NullPointerException
if (!products.containsKey(id)) {
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Product price not found");
}
final PriceHistory priceHistory = products.get(id);
priceHistory.setDate(createPriceHistoryDto.getDate());
priceHistory.setPrice(createPriceHistoryDto.getPrice());
priceHistory.setProductId(createPriceHistoryDto.getProductId());
return CreatePriceHistoryDto.builder()
.price(priceHistory.getPrice())
.date(priceHistory.getDate())
.productId(priceHistory.getProductId())
.build();
}
public void deletePriceHistory(Long id) {
if (!products.containsKey(id)) {
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Product price not found");
}
products.remove(id);
}
}

View File

@@ -1,18 +0,0 @@
package ru.ulstu.price_history_module.service.dto;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Date;
@AllArgsConstructor
@NoArgsConstructor
@Data
@Builder
public class CreatePriceHistoryDto {
private Date date;
private Double price;
private Long productId;
}

View File

@@ -1,2 +0,0 @@
spring.application.name=price_history_module
server.port=8081

View File

@@ -1,13 +0,0 @@
package ru.ulstu.price_history_module;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
@SpringBootTest
class PriceHistoryModuleApplicationTests {
@Test
void contextLoads() {
}
}

View File

@@ -1,37 +0,0 @@
HELP.md
.gradle
build/
!gradle/wrapper/gradle-wrapper.jar
!**/src/main/**/build/
!**/src/test/**/build/
### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache
bin/
!**/src/main/**/bin/
!**/src/test/**/bin/
### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
out/
!**/src/main/**/out/
!**/src/test/**/out/
### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
### VS Code ###
.vscode/

View File

@@ -1,4 +0,0 @@
FROM openjdk:23-ea-17-jdk-bullseye
WORKDIR /app
COPY build/libs/product_module-0.0.1-SNAPSHOT.jar /app/product.jar
ENTRYPOINT ["java", "-jar", "product.jar"]

View File

@@ -1,38 +0,0 @@
plugins {
id 'java'
id 'org.springframework.boot' version '3.3.4'
id 'io.spring.dependency-management' version '1.1.6'
}
group = 'ru.ulstu'
version = '0.0.1-SNAPSHOT'
java {
toolchain {
languageVersion = JavaLanguageVersion.of(17)
}
}
configurations {
compileOnly {
extendsFrom annotationProcessor
}
}
repositories {
mavenCentral()
}
dependencies {
implementation 'org.springframework.boot:spring-boot-starter'
compileOnly 'org.projectlombok:lombok'
annotationProcessor 'org.projectlombok:lombok'
testImplementation 'org.springframework.boot:spring-boot-starter-test'
testRuntimeOnly 'org.junit.platform:junit-platform-launcher'
implementation 'org.springdoc:springdoc-openapi-starter-webmvc-ui:2.5.0'
implementation 'org.springframework.boot:spring-boot-starter-web'
}
tasks.named('test') {
useJUnitPlatform()
}

View File

@@ -1,7 +0,0 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

View File

@@ -1,252 +0,0 @@
#!/bin/sh
#
# Copyright © 2015-2021 the original authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#
##############################################################################
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;;
esac
done
# This is normally unused
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s
' "$PWD" ) || exit
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
warn () {
echo "$*"
} >&2
die () {
echo
echo "$*"
echo
exit 1
} >&2
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD=$JAVA_HOME/jre/sh/java
else
JAVACMD=$JAVA_HOME/bin/java
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD=java
if ! command -v java >/dev/null 2>&1
then
die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
fi
# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC2039,SC3045
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC2039,SC3045
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
fi
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
JAVACMD=$( cygpath --unix "$JAVACMD" )
# Now convert the arguments - kludge to limit ourselves to /bin/sh
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
done
fi
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Collect all arguments for the java command:
# * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
# and any embedded shellness will be escaped.
# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be
# treated as '${Hostname}' itself on the command line.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Stop when "xargs" is not available.
if ! command -v xargs >/dev/null 2>&1
then
die "xargs is not available"
fi
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
exec "$JAVACMD" "$@"

View File

@@ -1,94 +0,0 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@rem SPDX-License-Identifier: Apache-2.0
@rem
@if "%DEBUG%"=="" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%"=="" set DIRNAME=.
@rem This is normally unused
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if %ERRORLEVEL% equ 0 goto execute
echo. 1>&2
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2
echo. 1>&2
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
echo location of your Java installation. 1>&2
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo. 1>&2
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2
echo. 1>&2
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
echo location of your Java installation. 1>&2
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if %ERRORLEVEL% equ 0 goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
set EXIT_CODE=%ERRORLEVEL%
if %EXIT_CODE% equ 0 set EXIT_CODE=1
if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
exit /b %EXIT_CODE%
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@@ -1 +0,0 @@
rootProject.name = 'product_module'

View File

@@ -1,13 +0,0 @@
package ru.ulstu.product_module;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class ProductModuleApplication {
public static void main(String[] args) {
SpringApplication.run(ProductModuleApplication.class, args);
}
}

View File

@@ -1,52 +0,0 @@
package ru.ulstu.product_module.controller;
import lombok.AllArgsConstructor;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import ru.ulstu.product_module.model.PriceHistory;
import ru.ulstu.product_module.service.ProductService;
import ru.ulstu.product_module.service.dto.CreateProductDto;
@AllArgsConstructor
@RequestMapping("/product")
@RestController
public class ProductController {
private final ProductService productService;
@GetMapping
public ResponseEntity<?> findAll() {
return ResponseEntity.ok(productService.getProduct());
}
@GetMapping("/{id}")
public ResponseEntity<?> findById(@PathVariable Long id) {
return ResponseEntity.ok(productService.getProduct(id));
}
@PostMapping
public ResponseEntity<?> createProduct(@RequestBody CreateProductDto createProductDto) {
return ResponseEntity.ok(productService.addProduct(createProductDto));
}
@PutMapping("/{id}")
public ResponseEntity<?> updateProduct(
@PathVariable Long id,
@RequestBody CreateProductDto createProductDto
) {
return ResponseEntity.ok(productService.updateProduct(id, createProductDto));
}
@DeleteMapping("/{id}")
public void deleteProduct(@PathVariable Long id) {
productService.deleteProduct(id);
}
@PostMapping("/addPriceHistory/{id}")
public void addPriceHistory(
@PathVariable Long id,
@RequestBody PriceHistory priceHistory
){
productService.addPriceHistory(id, priceHistory);
}
}

View File

@@ -1,15 +0,0 @@
package ru.ulstu.product_module.model;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Date;
@Data
@AllArgsConstructor
@NoArgsConstructor
public class PriceHistory {
private Date date;
private Double price;
}

View File

@@ -1,18 +0,0 @@
package ru.ulstu.product_module.model;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
@Data
@AllArgsConstructor
@NoArgsConstructor
@Builder
public class Product {
private Long id;
private String name;
private List<PriceHistory> priceHistory;
}

View File

@@ -1,64 +0,0 @@
package ru.ulstu.product_module.service;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Service;
import org.springframework.web.server.ResponseStatusException;
import ru.ulstu.product_module.model.PriceHistory;
import ru.ulstu.product_module.model.Product;
import ru.ulstu.product_module.service.dto.CreateProductDto;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Service
public class ProductService {
private final Map<Long, Product> products = new HashMap<>();
private Long countId = 1L;
public List<Product> getProduct() {
return new ArrayList<>(products.values());
}
public Product getProduct(Long id) {
if (!products.containsKey(id)) {
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Product not found");
}
return products.get(id);
}
public Long addProduct(CreateProductDto createProductDto) {
final Product product = Product.builder()
.id(countId)
.name(createProductDto.getName())
.priceHistory(new ArrayList<>())
.build();
countId++;
products.put(product.getId(), product);
return product.getId();
}
public CreateProductDto updateProduct(Long id, CreateProductDto createProductDto) {
// guard against a missing id so the API returns 404 instead of failing with a NullPointerException
if (!products.containsKey(id)) {
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Product not found");
}
final Product product = products.get(id);
product.setName(createProductDto.getName());
return CreateProductDto.builder()
.name(product.getName())
.build();
}
public void deleteProduct(Long id) {
if (!products.containsKey(id)) {
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Product not found");
}
products.remove(id);
}
public void addPriceHistory(Long id, PriceHistory priceHistory) {
if (!products.containsKey(id)) {
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Product not found");
}
final Product product = products.get(id);
product.getPriceHistory().add(priceHistory);
}
}

View File

@@ -1,14 +0,0 @@
package ru.ulstu.product_module.service.dto;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
@Builder
public class CreateProductDto {
private String name;
}

View File

@@ -1,2 +0,0 @@
spring.application.name=product_module
server.port=8080

View File

@@ -1,13 +0,0 @@
package ru.ulstu.product_module;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
@SpringBootTest
class ProductModuleApplicationTests {
@Test
void contextLoads() {
}
}

View File

@@ -0,0 +1,93 @@
# Кашин Максим ПИбд-42
## Description
This project uses Docker Compose to deploy an environment with a MySQL database, the Redmine project management system, and the WordPress website platform. The steps for launching and configuring it are described below.
## 0. Preliminary steps
Before running Docker Compose, I performed the following steps:
1. Installed [Docker Desktop](https://www.docker.com/products/docker-desktop) for my operating system.
2. Configured the Windows firewall to allow Docker access.
3. Opened PowerShell as administrator and ran the following commands:
```bash
net start vmcompute
wsl --set-default-version 2
```
## 1. Service structure and startup
The `docker-compose.yml` file describes three services:
- **db (MySQL)**: the database;
- **redmine (Redmine)**: the project management system;
- **wordpress (WordPress)**: the website building platform.
All services are started with:
```bash
docker-compose up -d
```
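After start-up, a quick way to check that all three containers are actually running is shown below (a minimal sketch; the service names `db`, `redmine`, and `wordpress` are the ones defined in `docker-compose.yml`):
```bash
# list the containers of this compose project with their state and published ports
docker-compose ps

# follow the logs of a single service, for example the database
docker-compose logs -f db
```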
## 2. MySQL service
The `db` service is a container running MySQL 8.0.
- **Image**: `mysql:8.0`
- **Environment variables**:
  - `MYSQL_ROOT_PASSWORD`: password for the root user.
  - `MYSQL_DATABASE`: name of the database to create (exampledb).
  - `MYSQL_USER`: database user name (exampleuser).
  - `MYSQL_PASSWORD`: database user password (examplepass).
- **Volumes**:
  - `db-data:/var/lib/mysql`: volume that persists the database files.
Both Redmine and WordPress depend on the MySQL service.
## 3. Redmine service
The `redmine` service is a container with the Redmine project management system.
- **Image**: `redmine`
- **Ports**:
  - `8080:3000`: Redmine is available at http://localhost:8080.
- **Environment variables**:
  - `REDMINE_DB_DATABASE`: database name (exampledb).
  - `REDMINE_DB_MYSQL`: database host name (db).
  - `REDMINE_DB_PASSWORD`: password for the database connection (example).
  - `REDMINE_SECRET_KEY_BASE`: secret key used for sessions.
- **Volumes**:
  - `redmine:/usr/src/redmine/files`: volume for Redmine files.
## 4. WordPress service
The `wordpress` service is a container with the WordPress platform.
- **Image**: `wordpress`
- **Ports**:
  - `8081:80`: WordPress is available at http://localhost:8081.
- **Environment variables**:
  - `WORDPRESS_DB_HOST`: database host (db).
  - `WORDPRESS_DB_USER`: database user name (exampleuser).
  - `WORDPRESS_DB_PASSWORD`: password for the database connection (examplepass).
  - `WORDPRESS_DB_NAME`: database name (exampledb).
- **Volumes**:
  - `wordpress:/var/www/html`: volume for WordPress files.
## 5. Data volumes
Three named volumes keep the container data persistent (a quick way to inspect them is shown after this list):
- `wordpress`: WordPress data.
- `db-data`: MySQL data.
- `redmine`: Redmine files.
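As referenced above, here is a sketch of how these volumes can be inspected on the host. Docker Compose prefixes volume names with the project name (by default, the directory containing `docker-compose.yml`), so the prefixed name below is a placeholder, not the exact name:
```bash
# list all volumes; the ones created by this stack carry the project-name prefix
docker volume ls

# show the mountpoint of the MySQL data volume (replace <project> with the real prefix)
docker volume inspect <project>_db-data
```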
## 6. Stopping the services
To stop and remove all containers, run:
```bash
docker-compose down
```
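Note that `docker-compose down` by itself keeps the named volumes, so the MySQL, Redmine, and WordPress data survive a restart. If a completely clean state is needed, the volumes can be removed together with the containers (this permanently deletes the stored data):
```bash
# stop and remove the containers AND the named volumes declared in docker-compose.yml
docker-compose down -v
```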
## 7. Video link
[Video report: Кашин Максим ПИбд-42](https://disk.yandex.ru/i/O8L1qmk4PIbCvA)

View File

@@ -0,0 +1,45 @@
version: '3.1' # Docker Compose file format version
services: # Services to be started
  db: # MySQL database service
    image: mysql:8.0 # Image
    restart: always # Automatically restart the container
    environment: # Environment variables for configuring the database
      MYSQL_ROOT_PASSWORD: example
      MYSQL_DATABASE: exampledb
      MYSQL_USER: exampleuser
      MYSQL_PASSWORD: examplepass
    volumes: # Volumes for persisting the database data
      - db-data:/var/lib/mysql
  redmine: # redmine - an issue tracker / project management system
    image: redmine # Image
    restart: always # Automatically restart the container
    ports:
      - 8080:3000 # Forward local port 8080 to port 3000 inside the container
    environment: # Environment variables for configuring Redmine (database connection)
      REDMINE_DB_DATABASE: exampledb
      REDMINE_DB_MYSQL: db
      REDMINE_DB_PASSWORD: example
      REDMINE_SECRET_KEY_BASE: supersecretkey
    volumes: # Volume for Redmine data
      - redmine:/usr/src/redmine/files
  wordpress: # wordpress - a popular content management system
    image: wordpress # Image
    restart: always # Automatically restart the container
    ports:
      - 8081:80 # Forward local port 8081 to port 80 inside the container
    environment: # Environment variables for configuring WordPress (database connection)
      WORDPRESS_DB_HOST: db
      WORDPRESS_DB_USER: exampleuser
      WORDPRESS_DB_PASSWORD: examplepass
      WORDPRESS_DB_NAME: exampledb
    volumes: # Volume for WordPress data
      - wordpress:/var/www/html
volumes: # Named volumes for persistent data
  wordpress:
  db-data:
  redmine:

View File

@@ -0,0 +1,30 @@
**/.classpath
**/.dockerignore
**/.env
**/.git
**/.gitignore
**/.project
**/.settings
**/.toolstarget
**/.vs
**/.vscode
**/*.*proj.user
**/*.dbmdl
**/*.jfm
**/azds.yaml
**/bin
**/charts
**/docker-compose*
**/Dockerfile*
**/node_modules
**/npm-debug.log
**/obj
**/secrets.dev.yaml
**/values.dev.yaml
LICENSE
README.md
!**/.gitignore
!.git/HEAD
!.git/config
!.git/packed-refs
!.git/refs/heads/**

kuzarin_maxim_lab_3/.gitattributes
View File

@@ -0,0 +1,63 @@
###############################################################################
# Set default behavior to automatically normalize line endings.
###############################################################################
* text=auto
###############################################################################
# Set default behavior for command prompt diff.
#
# This is needed for earlier builds of msysgit that do not have it on by
# default for csharp files.
# Note: This is only used by command line
###############################################################################
#*.cs diff=csharp
###############################################################################
# Set the merge driver for project and solution files
#
# Merging from the command prompt will add diff markers to the files if there
# are conflicts (Merging from VS is not affected by the settings below, in VS
# the diff markers are never inserted). Diff markers may cause the following
# file extensions to fail to load in VS. An alternative would be to treat
# these files as binary and thus will always conflict and require user
# intervention with every merge. To do so, just uncomment the entries below
###############################################################################
#*.sln merge=binary
#*.csproj merge=binary
#*.vbproj merge=binary
#*.vcxproj merge=binary
#*.vcproj merge=binary
#*.dbproj merge=binary
#*.fsproj merge=binary
#*.lsproj merge=binary
#*.wixproj merge=binary
#*.modelproj merge=binary
#*.sqlproj merge=binary
#*.wwaproj merge=binary
###############################################################################
# behavior for image files
#
# image files are treated as binary by default.
###############################################################################
#*.jpg binary
#*.png binary
#*.gif binary
###############################################################################
# diff behavior for common document formats
#
# Convert binary document formats to text before diffing them. This feature
# is only available from the command line. Turn it on by uncommenting the
# entries below.
###############################################################################
#*.doc diff=astextplain
#*.DOC diff=astextplain
#*.docx diff=astextplain
#*.DOCX diff=astextplain
#*.dot diff=astextplain
#*.DOT diff=astextplain
#*.pdf diff=astextplain
#*.PDF diff=astextplain
#*.rtf diff=astextplain
#*.RTF diff=astextplain

kuzarin_maxim_lab_3/.gitignore
View File

@@ -0,0 +1,363 @@
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Mono auto generated files
mono_crash.*
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Oo]ut/
[Ll]og/
[Ll]ogs/
# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
Generated\ Files/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# Benchmark Results
BenchmarkDotNet.Artifacts/
# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/
# ASP.NET Scaffolding
ScaffoldingReadMe.txt
# StyleCop
StyleCopReport.xml
# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# Visual Studio Trace Files
*.e2e
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info
# Visual Studio code coverage results
*.coverage
*.coveragexml
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak
# SQL Server files
*.mdf
*.ldf
*.ndf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# CodeRush personal settings
.cr/personal
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
*.tss
# Telerik's JustMock configuration file
*.jmconfig
# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs
# OpenCover UI analysis results
OpenCover/
# Azure Stream Analytics local run output
ASALocalRun/
# MSBuild Binary and Structured Log
*.binlog
# NVidia Nsight GPU debugger configuration file
*.nvuser
# MFractors (Xamarin productivity tool) working folder
.mfractor/
# Local History for Visual Studio
.localhistory/
# BeatPulse healthcheck temp database
healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/
# Ionide (cross platform F# VS Code tools) working folder
.ionide/
# Fody - auto-generated XML schema
FodyWeavers.xsd

View File

@@ -0,0 +1,31 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.10.34916.146
MinimumVisualStudioVersion = 10.0.40219.1
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "DSaC", "DSaC\DSaC.csproj", "{C1051C12-D7D0-4C77-AFBC-4F5FFD8EE367}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DSaC_second", "DSaC_second\DSaC_second.csproj", "{64F78585-2BBC-4656-BC50-41FBB8917719}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{C1051C12-D7D0-4C77-AFBC-4F5FFD8EE367}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C1051C12-D7D0-4C77-AFBC-4F5FFD8EE367}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C1051C12-D7D0-4C77-AFBC-4F5FFD8EE367}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C1051C12-D7D0-4C77-AFBC-4F5FFD8EE367}.Release|Any CPU.Build.0 = Release|Any CPU
{64F78585-2BBC-4656-BC50-41FBB8917719}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{64F78585-2BBC-4656-BC50-41FBB8917719}.Debug|Any CPU.Build.0 = Debug|Any CPU
{64F78585-2BBC-4656-BC50-41FBB8917719}.Release|Any CPU.ActiveCfg = Release|Any CPU
{64F78585-2BBC-4656-BC50-41FBB8917719}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {30D05708-634E-4FDE-9BCA-5A1B7A5EFF59}
EndGlobalSection
EndGlobal

View File

@@ -0,0 +1,88 @@
using DSaC.Models.DTOs;
using DSaC.Models.Internal.Queries;
using DSaC.Models.Internal.Сommands;
using MediatR;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.RazorPages;
namespace DSaC.Controllers
{
[Route("api/[controller]")]
[ApiController]
public class CounterpartiesController : ControllerBase
{
private readonly IMediator mediator;
public CounterpartiesController(IMediator mediator)
{
this.mediator = mediator;
}
[HttpGet("")]
public async Task<IActionResult> GetCounterparties(
[FromQuery] int page = 0,
[FromQuery] int pageSize = 10,
[FromQuery] List<Guid>? ids = null
)
{
var request = new GetCounterpartiesQuery
{
Page = page,
PageSize = pageSize,
Ids = ids
};
var response = await mediator.Send(request);
return !response.IsError ? Ok(response.Value) : StatusCode(response.ErrorCode!.Value, response.ErrorText);
}
[HttpGet("{uuid:guid}")]
public async Task<IActionResult> GetFullCounterparty([FromRoute] Guid uuid)
{
var request = new GetCounterpartyQuery
{
Id = uuid
};
var response = await mediator.Send(request);
return !response.IsError ? Ok(response.Value) : StatusCode(response.ErrorCode!.Value, response.ErrorText);
}
[HttpPost("")]
public async Task<IActionResult> CreateCounterparty([FromBody] CounterpartyBaseDto dto)
{
var response = await mediator.Send(new CreateCounterpartyCommand()
{
Model = dto
});
return !response.IsError ? Ok(response.Value) : StatusCode(response.ErrorCode!.Value, response.ErrorText);
}
[HttpPut("{uuid:guid}")]
public async Task<IActionResult> UpdateRecord([FromRoute] Guid uuid, [FromBody] CounterpartyViewDto dto)
{
var response = await mediator.Send(new UpdateCounterpartyCommand()
{
Id = uuid,
Model = dto
});
return !response.IsError ? Ok(response.Value) : StatusCode(response.ErrorCode!.Value, response.ErrorText);
}
[HttpDelete("{uuid:guid}")]
public async Task<IActionResult> DeleteRecord([FromRoute] Guid uuid)
{
var response = await mediator.Send(new DeleteCounterpartyCommand()
{
Id = uuid,
});
return !response.IsError ? Ok() : StatusCode(response.ErrorCode!.Value, response.ErrorText);
}
}
}

View File

@@ -0,0 +1,26 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<UserSecretsId>55894bef-8317-4e30-a5f0-4dcd5c3f861e</UserSecretsId>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="MediatR" Version="12.4.0" />
<PackageReference Include="Microsoft.AspNetCore.Mvc.NewtonsoftJson" Version="8.0.8" />
<PackageReference Include="Microsoft.EntityFrameworkCore" Version="8.0.8" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Tools" Version="8.0.8">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.VisualStudio.Azure.Containers.Tools.Targets" Version="1.20.1" />
<PackageReference Include="NLog.Extensions.Logging" Version="5.3.12" />
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="8.0.4" />
<PackageReference Include="PIHelperSh.Configuration" Version="1.0.1" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.4.0" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,14 @@
using DSaC.Database.Models;
using Microsoft.EntityFrameworkCore;
namespace DSaC.Database
{
public class DsacContext: DbContext
{
public DsacContext(DbContextOptions options) : base(options)
{
}
public DbSet<Counterparty> Counterparties { get; set; }
}
}

View File

@@ -0,0 +1,24 @@
using AutoMapper;
using DSaC.Models.DTOs;
using DSaC.Models.Internal.Сommands;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace DSaC.Database.Models
{
[Table("counterparty")]
[AutoMap(typeof(CounterpartyBaseDto))]
[AutoMap(typeof(CounterpartyViewDto))]
public class Counterparty
{
[Key]
[Column("id")]
public Guid Id { get; set; }
[Required]
[MaxLength(255)]
[Column("name")]
public string Name { get; set; }
}
}

View File

@@ -0,0 +1,24 @@
#See https://aka.ms/customizecontainer to learn how to customize your debug container and how Visual Studio uses this Dockerfile to build your images for faster debugging.
FROM mcr.microsoft.com/dotnet/aspnet:8.0 AS base
WORKDIR /app
EXPOSE 8080
EXPOSE 8081
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
COPY ["DSaC.csproj", "DSaC/"]
RUN dotnet restore "DSaC/DSaC.csproj"
WORKDIR "/src/DSaC"
COPY . .
RUN dotnet build "DSaC.csproj" -c $BUILD_CONFIGURATION -o /app/build
FROM build AS publish
ARG BUILD_CONFIGURATION=Release
RUN dotnet publish "DSaC.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false
FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
ENTRYPOINT ["dotnet", "DSaC.dll"]

View File

@@ -0,0 +1,51 @@
using AutoMapper;
using DSaC.Database;
using DSaC.Database.Models;
using DSaC.Logic.Handlers.Queries;
using DSaC.Models.DTOs;
using DSaC.Models.Internal;
using DSaC.Models.Internal.Сommands;
using MediatR;
namespace DSaC.Logic.Handlers.Commands
{
public class CreateCounterpartyCommandHandler : IRequestHandler<CreateCounterpartyCommand, ResponseModel<CounterpartyViewDto>>
{
private readonly ILogger _logger;
private readonly DsacContext _context;
private readonly IMapper _mapper;
public CreateCounterpartyCommandHandler(ILogger<CreateCounterpartyCommandHandler> logger, DsacContext context, IMapper mapper)
{
_logger = logger;
_context = context;
_mapper = mapper;
}
public async Task<ResponseModel<CounterpartyViewDto>> Handle(CreateCounterpartyCommand request, CancellationToken cancellationToken)
{
try
{
var model = _mapper.Map<Counterparty>(request.Model);
var res = await _context.AddAsync(model);
await _context.SaveChangesAsync();
return new()
{
Value = _mapper.Map<CounterpartyViewDto>(res.Entity)
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Error on creating counterparty");
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = "Cannot create counterparty"
};
}
}
}
}

View File

@@ -0,0 +1,50 @@
using AutoMapper;
using DSaC.Database;
using DSaC.Database.Models;
using DSaC.Models.DTOs;
using DSaC.Models.Internal;
using DSaC.Models.Internal.Сommands;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace DSaC.Logic.Handlers.Commands
{
public class DeleteCounterpartyCommandHandler: IRequestHandler<DeleteCounterpartyCommand, ResponseModel>
{
private readonly ILogger _logger;
private readonly DsacContext _context;
private readonly IMapper _mapper;
public DeleteCounterpartyCommandHandler(ILogger<DeleteCounterpartyCommandHandler> logger, DsacContext context, IMapper mapper)
{
_logger = logger;
_context = context;
_mapper = mapper;
}
public async Task<ResponseModel> Handle(DeleteCounterpartyCommand request, CancellationToken cancellationToken)
{
try
{
var res = await _context.Counterparties.Where(x=>x.Id == request.Id).ExecuteDeleteAsync();
if (res == 1) return new();
return new()
{
ErrorText = "Cannot find object to delete",
StatusCode = System.Net.HttpStatusCode.NotFound
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Error on deleteing counterparty");
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = "Cannot delete counterparty"
};
}
}
}
}

View File

@@ -0,0 +1,50 @@
using AutoMapper;
using DSaC.Database;
using DSaC.Database.Models;
using DSaC.Models.DTOs;
using DSaC.Models.Internal;
using DSaC.Models.Internal.Сommands;
using MediatR;
namespace DSaC.Logic.Handlers.Commands
{
public class UpdateCounterpartyCommandHandler: IRequestHandler<UpdateCounterpartyCommand, ResponseModel<CounterpartyViewDto>>
{
private readonly ILogger _logger;
private readonly DsacContext _context;
private readonly IMapper _mapper;
public UpdateCounterpartyCommandHandler(ILogger<UpdateCounterpartyCommandHandler> logger, DsacContext context, IMapper mapper)
{
_logger = logger;
_context = context;
_mapper = mapper;
}
public async Task<ResponseModel<CounterpartyViewDto>> Handle(UpdateCounterpartyCommand request, CancellationToken cancellationToken)
{
try
{
var model = _mapper.Map<Counterparty>(request.Model);
var res = _context.Update(model);
await _context.SaveChangesAsync();
return new()
{
Value = _mapper.Map<CounterpartyViewDto>(res.Entity)
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Error on updating counterparty");
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = "Cannot update counterparty"
};
}
}
}
}

View File

@@ -0,0 +1,50 @@
using AutoMapper;
using DSaC.Database;
using DSaC.Models.DTOs;
using DSaC.Models.Internal;
using DSaC.Models.Internal.Queries;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace DSaC.Logic.Handlers.Queries
{
public class GetCounterpartiesQueryHandler : IRequestHandler<GetCounterpartiesQuery, ResponseModel<List<CounterpartyViewDto>>>
{
private readonly ILogger _logger;
private readonly DsacContext _context;
private readonly IMapper _mapper;
public GetCounterpartiesQueryHandler(ILogger<GetCounterpartiesQueryHandler> logger, DsacContext context, IMapper mapper)
{
_logger = logger;
_context = context;
_mapper = mapper;
}
public async Task<ResponseModel<List<CounterpartyViewDto>>> Handle(GetCounterpartiesQuery request, CancellationToken cancellationToken)
{
try
{
var res = await _context.Counterparties
.Where(x=>request.Ids == null || request.Ids.Contains(x.Id))
.Skip(request.Page * request.PageSize).Take(request.PageSize)
.ToListAsync();
return new()
{
Value = res.Select(_mapper.Map<CounterpartyViewDto>).ToList(),
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Error on getting records");
return new()
{
StatusCode = System.Net.HttpStatusCode.BadRequest,
ErrorText = "Cannot get Counterparties by this request",
};
}
}
}
}

View File

@@ -0,0 +1,54 @@
using AutoMapper;
using DSaC.Database;
using DSaC.Models.DTOs;
using DSaC.Models.Internal;
using DSaC.Models.Internal.Queries;
using MediatR;
using Microsoft.EntityFrameworkCore;
namespace DSaC.Logic.Handlers.Queries
{
public class GetCounterpartyQueryHandler: IRequestHandler<GetCounterpartyQuery, ResponseModel<CounterpartyViewDto>>
{
private readonly ILogger _logger;
private readonly DsacContext _context;
private readonly IMapper _mapper;
public GetCounterpartyQueryHandler(ILogger<GetCounterpartyQueryHandler> logger, DsacContext context, IMapper mapper)
{
_logger = logger;
_context = context;
_mapper = mapper;
}
public async Task<ResponseModel<CounterpartyViewDto>> Handle(GetCounterpartyQuery request, CancellationToken cancellationToken)
{
try
{
var res = await _context.Counterparties.FirstOrDefaultAsync(x=>x.Id == request.Id);
if (res == null)
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = "Counterparty with this ID does not exsist",
};
return new()
{
Value = _mapper.Map<CounterpartyViewDto>(res),
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Error on getting record");
return new()
{
StatusCode = System.Net.HttpStatusCode.NotFound,
ErrorText = "Cannot get Counterparty by id",
};
}
}
}
}

View File

@@ -0,0 +1,60 @@
// <auto-generated />
using System;
using DSaC.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DSaC.Migrations
{
[DbContext(typeof(DsacContext))]
[Migration("20240907133944_InitMigraton")]
partial class InitMigraton
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "8.0.8")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DSaC.Database.Models.Counterparty", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<string>("ContractNumber")
.IsRequired()
.HasMaxLength(50)
.HasColumnType("character varying(50)")
.HasColumnName("contract_number");
b.Property<string>("ManagerName")
.IsRequired()
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("manager_name");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("name");
b.HasKey("Id");
b.ToTable("counterparty");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -0,0 +1,36 @@
using System;
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DSaC.Migrations
{
/// <inheritdoc />
public partial class InitMigraton : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.CreateTable(
name: "counterparty",
columns: table => new
{
id = table.Column<Guid>(type: "uuid", nullable: false),
name = table.Column<string>(type: "character varying(255)", maxLength: 255, nullable: false),
contract_number = table.Column<string>(type: "character varying(50)", maxLength: 50, nullable: false),
manager_name = table.Column<string>(type: "character varying(255)", maxLength: 255, nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_counterparty", x => x.id);
});
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "counterparty");
}
}
}

View File

@@ -0,0 +1,64 @@
// <auto-generated />
using System;
using DSaC.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DSaC.Migrations
{
[DbContext(typeof(DsacContext))]
[Migration("20240907140843_ChangingContractInfo")]
partial class ChangingContractInfo
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "8.0.8")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DSaC.Database.Models.Counterparty", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<string>("ContractNumber")
.IsRequired()
.HasMaxLength(50)
.HasColumnType("character varying(50)")
.HasColumnName("contract_number");
b.Property<bool>("IsContractClosed")
.HasColumnType("boolean")
.HasColumnName("is_contract_closed");
b.Property<string>("ManagerName")
.IsRequired()
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("manager_name");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("name");
b.HasKey("Id");
b.ToTable("counterparty");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -0,0 +1,29 @@
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace DSaC.Migrations
{
/// <inheritdoc />
public partial class ChangingContractInfo : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<bool>(
name: "is_contract_closed",
table: "counterparty",
type: "boolean",
nullable: false,
defaultValue: false);
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "is_contract_closed",
table: "counterparty");
}
}
}

View File

@@ -0,0 +1,63 @@
// <auto-generated />
using System;
using DSaC.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;
#nullable disable
namespace DSaC.Migrations
{
[DbContext(typeof(DsacContext))]
[Migration("20240907141343_SmallHotfix")]
partial class SmallHotfix
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "8.0.8")
.HasAnnotation("Relational:MaxIdentifierLength", 63);
NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);
modelBuilder.Entity("DSaC.Database.Models.Counterparty", b =>
{
b.Property<Guid>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("uuid")
.HasColumnName("id");
b.Property<string>("ContractNumber")
.IsRequired()
.HasMaxLength(50)
.HasColumnType("character varying(50)")
.HasColumnName("contract_number");
b.Property<bool>("IsContractClosed")
.HasColumnType("boolean")
.HasColumnName("is_contract_closed");
b.Property<string>("ManagerName")
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("manager_name");
b.Property<string>("Name")
.IsRequired()
.HasMaxLength(255)
.HasColumnType("character varying(255)")
.HasColumnName("name");
b.HasKey("Id");
b.ToTable("counterparty");
});
#pragma warning restore 612, 618
}
}
}

View File

@@ -0,0 +1,40 @@
using Microsoft.EntityFrameworkCore.Migrations;

#nullable disable

namespace DSaC.Migrations
{
    /// <inheritdoc />
    public partial class SmallHotfix : Migration
    {
        /// <inheritdoc />
        protected override void Up(MigrationBuilder migrationBuilder)
        {
            // Relaxes counterparty.manager_name from NOT NULL to nullable;
            // the column type and maximum length stay character varying(255).
            migrationBuilder.AlterColumn<string>(
                name: "manager_name",
                table: "counterparty",
                type: "character varying(255)",
                maxLength: 255,
                nullable: true,
                oldClrType: typeof(string),
                oldType: "character varying(255)",
                oldMaxLength: 255);
        }

        /// <inheritdoc />
        protected override void Down(MigrationBuilder migrationBuilder)
        {
            // Rollback: restores NOT NULL; the empty-string default lets rows
            // that currently hold NULL satisfy the reinstated constraint.
            migrationBuilder.AlterColumn<string>(
                name: "manager_name",
                table: "counterparty",
                type: "character varying(255)",
                maxLength: 255,
                nullable: false,
                defaultValue: "",
                oldClrType: typeof(string),
                oldType: "character varying(255)",
                oldMaxLength: 255,
                oldNullable: true);
        }
    }
}
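
On the model side this hotfix presumably corresponds to making ManagerName a nullable property. The real entity class is not shown in the diff, so the following is only a sketch of the assumed change:

public class Counterparty
{
    public Guid Id { get; set; }
    public string Name { get; set; }
    public string ContractNumber { get; set; }
    public string? ManagerName { get; set; }   // nullable after SmallHotfix; required before
    public bool IsContractClosed { get; set; }
}

A migration like this is typically produced by editing the model and running dotnet ef migrations add SmallHotfix, which also regenerates the Designer snapshot shown above.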


@@ -0,0 +1,48 @@
// <auto-generated />
using System;
using DSaC.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;

#nullable disable

namespace DSaC.Migrations
{
    [DbContext(typeof(DsacContext))]
    [Migration("20240914065929_after-reconstruction")]
    partial class afterreconstruction
    {
        /// <inheritdoc />
        protected override void BuildTargetModel(ModelBuilder modelBuilder)
        {
#pragma warning disable 612, 618
            modelBuilder
                .HasAnnotation("ProductVersion", "8.0.8")
                .HasAnnotation("Relational:MaxIdentifierLength", 63);

            NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);

            modelBuilder.Entity("DSaC.Database.Models.Counterparty", b =>
                {
                    b.Property<Guid>("Id")
                        .ValueGeneratedOnAdd()
                        .HasColumnType("uuid")
                        .HasColumnName("id");

                    b.Property<string>("Name")
                        .IsRequired()
                        .HasMaxLength(255)
                        .HasColumnType("character varying(255)")
                        .HasColumnName("name");

                    b.HasKey("Id");

                    b.ToTable("counterparty");
                });
#pragma warning restore 612, 618
        }
    }
}


@@ -0,0 +1,52 @@
using Microsoft.EntityFrameworkCore.Migrations;

#nullable disable

namespace DSaC.Migrations
{
    /// <inheritdoc />
    public partial class afterreconstruction : Migration
    {
        /// <inheritdoc />
        protected override void Up(MigrationBuilder migrationBuilder)
        {
            // Removes the contract-related columns; counterparty keeps only id and name.
            migrationBuilder.DropColumn(
                name: "contract_number",
                table: "counterparty");

            migrationBuilder.DropColumn(
                name: "is_contract_closed",
                table: "counterparty");

            migrationBuilder.DropColumn(
                name: "manager_name",
                table: "counterparty");
        }

        /// <inheritdoc />
        protected override void Down(MigrationBuilder migrationBuilder)
        {
            // Rollback: recreates the three columns with their previous types and constraints.
            migrationBuilder.AddColumn<string>(
                name: "contract_number",
                table: "counterparty",
                type: "character varying(50)",
                maxLength: 50,
                nullable: false,
                defaultValue: "");

            migrationBuilder.AddColumn<bool>(
                name: "is_contract_closed",
                table: "counterparty",
                type: "boolean",
                nullable: false,
                defaultValue: false);

            migrationBuilder.AddColumn<string>(
                name: "manager_name",
                table: "counterparty",
                type: "character varying(255)",
                maxLength: 255,
                nullable: true);
        }
    }
}
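
After this migration only the id and name columns remain, so the entity presumably shrinks to something like the hypothetical sketch below (again, the real class is not part of this diff):

public class Counterparty
{
    public Guid Id { get; set; }     // uuid primary key
    public string Name { get; set; } // required, max length 255
}

Note that Down cannot restore the data held in the dropped columns; it only recreates their schema, which is the usual limitation of a column-dropping migration.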


@@ -0,0 +1,45 @@
// <auto-generated />
using System;
using DSaC.Database;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
using Npgsql.EntityFrameworkCore.PostgreSQL.Metadata;

#nullable disable

namespace DSaC.Migrations
{
    [DbContext(typeof(DsacContext))]
    partial class DsacContextModelSnapshot : ModelSnapshot
    {
        protected override void BuildModel(ModelBuilder modelBuilder)
        {
#pragma warning disable 612, 618
            modelBuilder
                .HasAnnotation("ProductVersion", "8.0.8")
                .HasAnnotation("Relational:MaxIdentifierLength", 63);

            NpgsqlModelBuilderExtensions.UseIdentityByDefaultColumns(modelBuilder);

            modelBuilder.Entity("DSaC.Database.Models.Counterparty", b =>
                {
                    b.Property<Guid>("Id")
                        .ValueGeneratedOnAdd()
                        .HasColumnType("uuid")
                        .HasColumnName("id");

                    b.Property<string>("Name")
                        .IsRequired()
                        .HasMaxLength(255)
                        .HasColumnType("character varying(255)")
                        .HasColumnName("name");

                    b.HasKey("Id");

                    b.ToTable("counterparty");
                });
#pragma warning restore 612, 618
        }
    }
}
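
The snapshot above records the model EF Core expects DsacContext to build once all migrations are applied. DsacContext itself is not included in this diff, so the following is only a minimal sketch of a configuration that would produce the same model; the table and column names come from the snapshot, everything else (constructor, DbSet name) is assumed.

public class DsacContext : DbContext
{
    public DsacContext(DbContextOptions<DsacContext> options) : base(options) { }

    // Assumed DbSet name; only the entity type is dictated by the snapshot.
    public DbSet<Counterparty> Counterparties => Set<Counterparty>();

    protected override void OnModelCreating(ModelBuilder modelBuilder)
    {
        modelBuilder.Entity<Counterparty>(entity =>
        {
            entity.ToTable("counterparty");
            entity.HasKey(e => e.Id);
            entity.Property(e => e.Id).HasColumnName("id");
            entity.Property(e => e.Name)
                .HasColumnName("name")
                .HasMaxLength(255)
                .IsRequired();
        });
    }
}

With a context along these lines registered against a PostgreSQL provider, dotnet ef database update would bring the database to the state described by the snapshot.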

Some files were not shown because too many files have changed in this diff.