# Compare commits

20 commits (SHA1):

`2e34bcd8c1`, `5f4dcf84f6`, `75bf3f157d`, `9d3c1e7af5`, `821a1eb473`, `9bc9c5553b`, `8cbe236fcf`, `f394139d35`, `7eea3cea40`, `d986305ee1`, `3002be30dd`, `f66cceeadd`, `7791dbfbaa`, `a233bbf903`, `de0e04fb3a`, `774aacef19`, `9bde6c2ecf`, `ab32a0f7f5`, `e52066f65d`, `49e327005e`
## Dockerfile

```diff
@@ -1,4 +1,4 @@
-FROM python:3.12-slim
+FROM python:3.12
 
 WORKDIR /app
 
```
## README.md (+21)

````diff
@@ -68,3 +68,24 @@ alembic revision --autogenerate
 ```
 alembic upgrade head
 ```
+
+# Database initialization
+
+## 1. Start docker-compose
+```
+docker-compose up --build
+```
+## 2. Open a ClickHouse client
+```
+docker exec -it clickhouse-db clickhouse-client -u UserMyHouse --password NotWarningWord2 --host localhost
+```
+## 3. Create the database
+```
+CREATE DATABASE SuperService;
+```
+## 4. Initialize the database
+Open the FastAPI service and call:
+```
+http://localhost:8000/init_db_data
+```
+PostgreSQL and ClickHouse will be populated with data.
````
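For reference, `/init_db_data` is a plain GET endpoint, so it can be scripted as well as opened in a browser; a minimal sketch (assumes the `requests` package is installed):

```python
# Trigger DB initialization from Python instead of the browser.
import requests

resp = requests.get("http://localhost:8000/init_db_data")
print(resp.json())  # expected: {"status": "success", "message": "..."}
```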
## db/__init__.py (new file, 0 lines)

Empty package marker.
## db/clickhouse_db_connection.py (new file, +7)

```python
from clickhouse_sqlalchemy import make_session, get_declarative_base, engines
from sqlalchemy import create_engine
from settings import settings

engine_clickhouse = create_engine(settings.clickhouse_url)
BaseClickhouse = get_declarative_base()
session_clickhouse = make_session(engine_clickhouse)
```
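The compare does not include `settings.py`; only the attribute names `clickhouse_url` and `db_url_asyncpg_docker` are referenced by the diff. A hypothetical sketch of a matching settings object, assuming pydantic-settings and the credentials shown in the README:

```python
# Hypothetical settings.py sketch; only the two attribute names are taken from the diff.
from pydantic_settings import BaseSettings

class Settings(BaseSettings):
    # clickhouse-sqlalchemy URL; host, port, and credentials are assumptions
    # based on the compose service name and the README's client command
    clickhouse_url: str = "clickhouse+http://UserMyHouse:NotWarningWord2@clickhouse:8123/SuperService"
    # asyncpg URL for the "db" compose service; user/password are placeholders
    db_url_asyncpg_docker: str = "postgresql+asyncpg://postgres:postgres@db:5432/postgres"

settings = Settings()
```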
## db/crud.py (new file, +69)

```python
from typing import Type, TypeVar, Sequence, Optional, Dict, Any

from sqlalchemy import update as update_, delete as delete_
from sqlalchemy.future import select

from db.models import ExperimentCategory, ExperimentData, ExperimentParameters, LoadParameters, RecyclingParameters
from db.postgres_db_connection import async_session_postgres

T = TypeVar("T", ExperimentCategory, ExperimentData, ExperimentParameters, LoadParameters, RecyclingParameters)


async def get_all(model_class: Type[T]) -> Sequence[T]:
    async with async_session_postgres() as session:
        result = await session.execute(select(model_class))
        return result.scalars().all()


async def get_by_id(model_class: Type[T], id: int) -> Optional[T]:
    async with async_session_postgres() as session:
        result = await session.execute(select(model_class).where(model_class.id == id))
        return result.scalar_one_or_none()


async def create(model_class: Type[T], **kwargs) -> T:
    async with async_session_postgres() as session:
        new_instance = model_class(**kwargs)
        session.add(new_instance)
        await session.commit()
        await session.refresh(new_instance)
        return new_instance


async def update(model_class: Type[T], id: int, updated_data: Dict[str, Any]) -> Optional[T]:
    async with async_session_postgres() as session:
        stmt = (
            update_(model_class)
            .where(model_class.id == id)
            .values(**updated_data)
            .execution_options(synchronize_session="fetch")
        )
        await session.execute(stmt)
        await session.commit()
        return await get_by_id(model_class, id)


# TODO: rewrite/reconsider update
async def update_exp(model_class: Type[T], id: int, updated_data: Dict[str, Any]) -> Optional[T]:
    async with async_session_postgres() as session:
        async with session.begin():  # explicit transaction
            stmt = (
                update_(model_class)
                .where(model_class.id == id)
                .values(**updated_data)
                .returning(model_class)  # return the updated row
            )
            result = await session.execute(stmt)
            updated_instance = result.scalars().first()

            if not updated_instance:
                return None

            return updated_instance  # return the freshly updated object


async def delete(model_class: Type[T], id: int) -> bool:
    async with async_session_postgres() as session:
        stmt = delete_(model_class).where(model_class.id == id)
        result = await session.execute(stmt)
        await session.commit()
        return result.rowcount > 0
```
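Since `T` is constrained to the five model classes, one set of helpers covers every table; a minimal usage sketch (field values illustrative):

```python
# Usage sketch for the generic CRUD helpers; values are illustrative only.
import asyncio

from db.crud import create, get_by_id, update, delete
from db.models import LoadParameters

async def demo():
    row = await create(LoadParameters, load=315,
                       primary_air_consumption=1.0,
                       secondary_air_consumption=2.0,
                       gas_inlet_consumption=0.5)
    print(await get_by_id(LoadParameters, row.id))
    await update(LoadParameters, row.id, {"load": 320})
    await delete(LoadParameters, row.id)

asyncio.run(demo())
```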
## db/csv_to_db.py (106 lines changed)

```diff
@@ -1,8 +1,10 @@
 import asyncio
 
 import pandas as pd
-from sqlalchemy import insert
-from db.db_connection import async_session, engine
+from sqlalchemy import insert, text
+from sqlalchemy.exc import OperationalError, IntegrityError
+
+from db.clickhouse_db_connection import BaseClickhouse, engine_clickhouse, session_clickhouse
+from db.postgres_db_connection import async_session_postgres, engine_postgres
 from db.models.base import Base
 from db.models.ch_experimentdb_experiment_data_model import ChExperimentDBExperimentData
 from db.models.experiment_data_model import ExperimentData
@@ -10,95 +12,49 @@ from db.models.experiment_parameters_model import ExperimentParameters
 from db.models.load_parameters_model import LoadParameters
 from db.models.recycling_parameters_model import RecyclingParameters
 
 
-def add_ids_in_csv(file: str):
-    try:
-        df = pd.read_csv(file)
-        df.insert(0, 'id', pd.Series(range(1, len(df) + 1)))
-        df.to_csv(file, index=False)
-    except Exception as e:
-        print(f'Exception!! {e}')
-
-
-def print_headers_and_types(file: str):
-    df = pd.read_csv(file)
-    headers = df.columns.tolist()
-    print(headers)
-
-    for header in headers:
-        column_type = df[header].dtype
-        print(column_type)
-
-
 async def create_all_tables():
-    print('create_all_tables')
-    async with engine.begin() as conn:
+    async with engine_postgres.begin() as conn:
         await conn.run_sync(Base.metadata.create_all)
 
-
-async def drop_all_tables():
-    print('drop_all_tables')
-    async with engine.begin() as conn:
-        await conn.run_sync(Base.metadata.drop_all)
+    BaseClickhouse.metadata.create_all(engine_clickhouse)
 
 
-async def load_data_to_db(file: str, model_class):
-    print('load_data_to_db')
-    async with async_session() as session:
-        df = pd.read_csv(file).dropna()
-        # Convert the DataFrame into records suitable for SQLAlchemy
-        data_records = df.to_dict(orient='records')
-
-        # Bulk-insert all records at once
-        stmt = insert(model_class).values(data_records)
-        await session.execute(stmt)
-        await session.commit()
-
-
-async def insert_batch(session, model_class, batch_data):
-    stmt = insert(model_class).values(batch_data)
-    await session.execute(stmt)
-    await session.commit()
-
-
-async def test(file: str, model_class):
-    print('test')
-    df = pd.read_csv(file)
-    data_records = df.to_dict(orient='records')
-
-    # async with async_session() as session:
-    #     batch_size = 3000
-    #     tasks = []
-    #
-    #     for i in range(0, len(data_records), batch_size):
-    #         batch_data = data_records[i:i + batch_size]
-    #         tasks.append(insert_batch(session, model_class, batch_data))
-    #
-    #     await asyncio.gather(*tasks)
-
-    # takes about two minutes to insert
-    async with async_session() as session:
-        batch_size = 3000
-        print(batch_size)
-        for i in range(0, len(data_records), batch_size):
-            batch_data = data_records[i:i + batch_size]
-            stmt = insert(model_class).values(batch_data)
-            await session.execute(stmt)
-        await session.commit()
+async def load_data_to_postgres(file: str, model_class, chunk_size=100):
+    df = pd.read_csv(file).dropna()
+
+    async with async_session_postgres() as session:
+        for start in range(0, len(df), chunk_size):
+            end = start + chunk_size
+            chunk = df.iloc[start:end].to_dict(orient='records')
+            stmt = insert(model_class).values(chunk)
+            try:
+                await session.execute(stmt)
+                await session.commit()
+            except IntegrityError as e:
+                await session.rollback()
+
+
+def load_data_to_clickhouse(file: str, model_class, chunk_size=100):
+    df = pd.read_csv(file).dropna()
+
+    with session_clickhouse as session:
+        for start in range(0, len(df), chunk_size):
+            end = start + chunk_size
+            chunk = df.iloc[start:end].to_dict(orient='records')
+            stmt = insert(model_class).values(chunk)
+            session.execute(stmt)
+            session.commit()
 
 
 async def csv_to_db():
-    await drop_all_tables()
     await create_all_tables()
-    await load_data_to_db('./db/files/ch_experimentdb_experiment_data.csv', ChExperimentDBExperimentData)
-    await test('./db/files/experiment_data.csv', ExperimentData)
-    await load_data_to_db('./db/files/load_parameters.csv', LoadParameters)
-    await load_data_to_db('./db/files/recycling_parameters.csv', RecyclingParameters)
-    await load_data_to_db('./db/files/experiment_parameters.csv', ExperimentParameters)
-
-# asyncio.run(csv_to_db())
+    await load_data_to_postgres('./db/files/load_parameters.csv', LoadParameters)
+    await load_data_to_postgres('./db/files/recycling_parameters.csv', RecyclingParameters)
+    await load_data_to_postgres('./db/files/experiment_parameters.csv', ExperimentParameters)
+    await load_data_to_postgres('./db/files/experiment_data.csv', ExperimentData)
+
+    load_data_to_clickhouse('./db/files/ch_experimentdb_experiment_data.csv', ChExperimentDBExperimentData)
+
+    print('csv_to_db finished')
```
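The module-level `asyncio.run(csv_to_db())` call was removed; the loader is now triggered from the API (see `main.py` below). For ad-hoc runs it can still be invoked directly; a minimal sketch:

```python
# One-off script: run the CSV loader outside the API.
import asyncio

from db.csv_to_db import csv_to_db

if __name__ == "__main__":
    asyncio.run(csv_to_db())
```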
## db/db_connection.py (deleted)

```diff
@@ -1,7 +0,0 @@
-from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker
-
-from settings import settings
-
-engine = create_async_engine(url=settings.db_url_asyncpg_docker, echo=True)
-
-async_session = async_sessionmaker(engine)
```
## db/files/experiment_data.csv (404,603 lines changed)

File diff suppressed because it is too large.
## db/models/__init__.py (new file, +7)

```python
from .base import *
from .experiment_parameters_model import *
from .experiment_data_model import *
from .ch_experimentdb_experiment_data_model import *
from .load_parameters_model import *
from .recycling_parameters_model import *
from .experiment_category import *
```
## db/models/ch_experimentdb_experiment_data_model.py

```diff
@@ -1,23 +1,26 @@
-from sqlalchemy import Identity
+from clickhouse_sqlalchemy import engines
+from clickhouse_sqlalchemy.types import Float64, String
+from sqlalchemy import Integer
 from sqlalchemy.orm import Mapped, mapped_column
 
-from db.models.base import Base
+from db.clickhouse_db_connection import BaseClickhouse
 
 
-class ChExperimentDBExperimentData(Base):
-    __tablename__ = 'ch_experimentdb_experiment_data'
-
-    id: Mapped[int] = mapped_column(Identity(start=11, cycle=True),
-                                    primary_key=True)
-    volume: Mapped[float]
-    nitrogen_oxide_emission: Mapped[float]
-    temperature: Mapped[float]
-    co_fraction: Mapped[float]
-    co2_fraction: Mapped[float]
-    x: Mapped[float]
-    y: Mapped[float]
-    z: Mapped[float]
-    file_id: Mapped[str]
+class ChExperimentDBExperimentData(BaseClickhouse):
+    __tablename__ = 'experiment_data'
+    __table_args__ = (
+        engines.MergeTree(order_by='id'),
+    )
+
+    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
+    volume: Mapped[float] = mapped_column(Float64)
+    nitrogen_oxide_emission: Mapped[float] = mapped_column(Float64)
+    temperature: Mapped[float] = mapped_column(Float64)
+    co_fraction: Mapped[float] = mapped_column(Float64)
+    co2_fraction: Mapped[float] = mapped_column(Float64)
+    x: Mapped[float] = mapped_column(Float64)
+    y: Mapped[float] = mapped_column(Float64)
+    z: Mapped[float] = mapped_column(Float64)
+    file_id: Mapped[str] = mapped_column(String)
 
     def __repr__(self):
-        return f"<ChExperimentDBExperimentData>"
+        return f"<ChExperimentDBExperimentData id={self.id}, volume={self.volume}>"
```
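The model now lives on the synchronous ClickHouse session rather than the async Postgres one; a minimal sketch of querying it through the session from `db/clickhouse_db_connection.py` (filter value illustrative):

```python
# Sketch: read the ClickHouse-backed table with the sync session.
from db.clickhouse_db_connection import session_clickhouse
from db.models.ch_experimentdb_experiment_data_model import ChExperimentDBExperimentData

rows = (session_clickhouse.query(ChExperimentDBExperimentData)
        .filter(ChExperimentDBExperimentData.temperature > 2000.0)  # illustrative
        .limit(10)
        .all())
for r in rows:
    print(r)  # new __repr__: <ChExperimentDBExperimentData id=..., volume=...>
```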
## db/models/experiment_category.py (new file, +17)

```python
from typing import Optional

from sqlalchemy import Identity, ForeignKey
from sqlalchemy.orm import Mapped, mapped_column

from db.models.base import Base


class ExperimentCategory(Base):
    __tablename__ = 'experiment_category'

    id: Mapped[int] = mapped_column(Identity(start=1, cycle=True),
                                    primary_key=True)
    name: Mapped[str]

    def __repr__(self):
        return f"<ExperimentCategory>"
```
## db/models/experiment_data_model.py

```diff
@@ -1,6 +1,6 @@
 from typing import Optional
 
-from sqlalchemy import Identity
+from sqlalchemy import Identity, ForeignKey
 from sqlalchemy.orm import Mapped, mapped_column
 
 from db.models.base import Base
@@ -16,7 +16,7 @@ class ExperimentData(Base):
     nox: Mapped[float]
     co2: Mapped[float]
     co: Mapped[float]
-    file_id: Mapped[Optional[str]]
+    file_id: Mapped[Optional[str]] = mapped_column(ForeignKey('experiment_parameters.experiment_hash', ondelete='SET NULL'))
 
     def __repr__(self):
         return f"<ExperimentData>"
```
## db/models/experiment_parameters_model.py

```diff
@@ -9,7 +9,7 @@ from db.models.base import Base
 class ExperimentParameters(Base):
     __tablename__ = 'experiment_parameters'
 
-    id: Mapped[int] = mapped_column(Identity(start=11, cycle=True),
+    id: Mapped[int] = mapped_column(Identity(start=1100, cycle=True),
                                     primary_key=True)
     outer_blades_count: Mapped[int]
     outer_blades_length: Mapped[float]
@@ -17,7 +17,9 @@ class ExperimentParameters(Base):
     middle_blades_count: Mapped[int]
     load_id: Mapped[Optional[int]] = mapped_column(ForeignKey('load_parameters.id', ondelete='SET NULL'))
     recycling_id: Mapped[Optional[int]] = mapped_column(ForeignKey('recycling_parameters.id', ondelete='SET NULL'))
-    experiment_hash: Mapped[str]
+    experiment_hash: Mapped[str] = mapped_column(unique=True)
+    experiment_category_id: Mapped[Optional[int]] = mapped_column(ForeignKey('experiment_category.id', ondelete='SET NULL'), nullable=True)
+    oxidizer_temp: Mapped[float] = mapped_column(nullable=True)
 
     def __repr__(self):
         return f"<ExperimentParameters>"
```
## db/models/load_parameters_model.py

```diff
@@ -7,7 +7,7 @@ from db.models.base import Base
 class LoadParameters(Base):
     __tablename__ = 'load_parameters'
 
-    id: Mapped[int] = mapped_column(Identity(start=6, cycle=True),
+    id: Mapped[int] = mapped_column(Identity(start=1000, cycle=True),
                                     primary_key=True)
     load: Mapped[int]
     primary_air_consumption: Mapped[float]
```
## db/models/recycling_parameters_model.py

```diff
@@ -9,7 +9,7 @@ from db.models.base import Base
 class RecyclingParameters(Base):
     __tablename__ = 'recycling_parameters'
 
-    id: Mapped[int] = mapped_column(Identity(start=6, cycle=True),
+    id: Mapped[int] = mapped_column(Identity(start=1000, cycle=True),
                                     primary_key=True)
 
     load_id: Mapped[Optional[int]] = mapped_column(ForeignKey('load_parameters.id', ondelete='SET NULL'))
```
## db/postgres_db_connection.py (new file, +11)

```python
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker

from settings import settings

engine_postgres = create_async_engine(url=settings.db_url_asyncpg_docker, echo=True)
async_session_postgres = async_sessionmaker(engine_postgres, expire_on_commit=False)


# Dependency that yields a fresh session per request
async def get_async_session():
    async with async_session_postgres() as session:
        yield session
```
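`get_async_session` is a generator dependency; no route in this compare uses it yet, so here is a hypothetical sketch of how it would be injected:

```python
# Hypothetical route showing the intended use of get_async_session.
from fastapi import APIRouter, Depends
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

from db.postgres_db_connection import get_async_session

router = APIRouter()

@router.get("/health/db")
async def db_health(session: AsyncSession = Depends(get_async_session)):
    await session.execute(text("SELECT 1"))
    return {"db": "ok"}
```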
## db/repositories/__init__.py (new file, +6)

```python
from .experiment_data_repos import *
from .experiment_parameters_repos import *
from .load_parameters_repos import *
from .recycling_parameters_repos import *
from .ch_experimentdb_experiment_data_repos import *
from .experiment_category_repos import *
```
## db/repositories/ch_experimentdb_experiment_data_repos.py

```diff
@@ -1,63 +1,43 @@
-from typing import Optional
-from sqlalchemy import select
-from db.db_connection import async_session
-from db.models.ch_experimentdb_experiment_data_model import ChExperimentDBExperimentData
-from network.schemas import ChExperimentDBExperimentDataBody
+from typing import List
+
+from db.models.ch_experimentdb_experiment_data_model import ChExperimentDBExperimentData
+from sqlalchemy import select, func
 
 
 class ChExperimentDBExperimentDataRepository:
-    @staticmethod
-    async def get_all() -> Optional[list[ChExperimentDBExperimentData]]:
-        async with async_session() as session:
-            result = await session.execute(select(ChExperimentDBExperimentData))
-            return result.scalars().all()
-
-    @staticmethod
-    async def get_by_id(id_: int) -> Optional[ChExperimentDBExperimentData]:
-        async with async_session() as session:
-            result = session.get(ChExperimentDBExperimentData, id_)
-            return result
-
-    @staticmethod
-    async def create(volume: float,
-                     nitrogen_oxide_emission: float,
-                     temperature: float,
-                     co_fraction: float,
-                     co2_fraction: float,
-                     x: float,
-                     y: float,
-                     z: float,
-                     file_id: str):
-        new_data = ChExperimentDBExperimentData(
-            volume=volume,
-            nitrogen_oxide_emission=nitrogen_oxide_emission,
-            temperature=temperature,
-            co_fraction=co_fraction,
-            co2_fraction=co2_fraction,
-            x=x,
-            y=y,
-            z=z,
-            file_id=file_id
-        )
-        async with async_session() as session:
-            session.add(new_data)
-            await session.commit()
-
-    @staticmethod
-    async def create_from_pydantic(body: ChExperimentDBExperimentDataBody):
-        new_data = ChExperimentDBExperimentData(
-            volume=body.volume,
-            nitrogen_oxide_emission=body.nitrogen_oxide_emission,
-            temperature=body.temperature,
-            co_fraction=body.co_fraction,
-            co2_fraction=body.co2_fraction,
-            x=body.x,
-            y=body.y,
-            z=body.z,
-            file_id=body.file_id
-        )
-        async with async_session() as session:
-            session.add(new_data)
-            await session.commit()
+    def __init__(self, session):
+        self.session = session
+
+    def get_all(self):
+        return self.session.query(ChExperimentDBExperimentData).all()
+
+    def get_by_id(self, id: int) -> ChExperimentDBExperimentData:
+        return self.session.query(ChExperimentDBExperimentData).filter(ChExperimentDBExperimentData.id == id).one_or_none()
+
+    def get_by_file_id(self, file_id: str) -> List[ChExperimentDBExperimentData]:
+        return self.session.query(ChExperimentDBExperimentData).filter(ChExperimentDBExperimentData.file_id == file_id).all()
+
+    def create(self, ch_experiment_data: ChExperimentDBExperimentData) -> ChExperimentDBExperimentData:
+        max_id_query = select(func.max(ChExperimentDBExperimentData.id))
+        max_id_result = self.session.execute(max_id_query).scalar()
+
+        max_id = max_id_result or 0
+        ch_experiment_data.id = max_id + 1
+
+        self.session.add(ch_experiment_data)
+        self.session.commit()
+
+        return ch_experiment_data
+
+    def update(self, id: int, updated_data: dict) -> ChExperimentDBExperimentData:
+        self.session.query(ChExperimentDBExperimentData).filter(ChExperimentDBExperimentData.id == id).update(updated_data)
+        self.session.commit()
+        return self.get_by_id(id)
+
+    def delete(self, id: int) -> bool:
+        item = self.get_by_id(id)
+        if item:
+            self.session.delete(item)
+            self.session.commit()
+            return True
+        return False
```
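The repository is now synchronous and takes its session in the constructor; a minimal usage sketch with the module-level ClickHouse session (field values illustrative):

```python
from db.clickhouse_db_connection import session_clickhouse
from db.models import ChExperimentDBExperimentData
from db.repositories.ch_experimentdb_experiment_data_repos import ChExperimentDBExperimentDataRepository

repo = ChExperimentDBExperimentDataRepository(session_clickhouse)
created = repo.create(ChExperimentDBExperimentData(
    volume=1e-7, nitrogen_oxide_emission=3e-10, temperature=1600.0,
    co_fraction=0.1, co2_fraction=0.1, x=0.5, y=5.0, z=5.0, file_id="demo",
))  # create() assigns id = max(id) + 1 before committing
print(repo.get_by_file_id("demo"))
```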
## db/repositories/experiment_category_repos.py (new file, +41)

```python
from typing import Sequence

from sqlalchemy import update, delete
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from typing_extensions import deprecated
from db.models import ExperimentCategory


@deprecated(
    "a parameterized CRUD now exists; entity-specific methods live in each entity's own repository"
)
class ExperimentCategoryRepository:
    def __init__(self, session: AsyncSession):
        self.session = session

    async def get_all(self) -> Sequence[ExperimentCategory]:
        result = await self.session.execute(select(ExperimentCategory))
        return result.scalars().all()

    async def get_by_id(self, id: int) -> ExperimentCategory:
        result = await self.session.execute(select(ExperimentCategory).where(ExperimentCategory.id == id))
        return result.scalar_one_or_none()

    async def create(self, new_data: ExperimentCategory):
        self.session.add(new_data)
        await self.session.commit()

    async def update(self, id: int, updated_data: dict):
        stmt = (
            update(ExperimentCategory).
            where(ExperimentCategory.id == id).
            values(**updated_data)
        )
        await self.session.execute(stmt)
        await self.session.commit()

    async def delete(self, id: int):
        stmt = delete(ExperimentCategory).where(ExperimentCategory.id == id)
        await self.session.execute(stmt)
        await self.session.commit()
```
## db/repositories/experiment_data_repos.py

```diff
@@ -1,53 +1,24 @@
-from typing import Optional
-from sqlalchemy import select
-from db.db_connection import async_session
+from typing import Sequence
+
+from sqlalchemy.future import select
+
 from db.models.experiment_data_model import ExperimentData
-from network.schemas import ExperimentDataBody
-
-
-class ExperimentDataRepository:
-    @staticmethod
-    async def get_all() -> Optional[list[ExperimentData]]:
-        async with async_session() as session:
-            result = await session.execute(select(ExperimentData))
-            return result.scalars().all()
-
-    @staticmethod
-    async def get_by_id(id_: int) -> Optional[ExperimentData]:
-        async with async_session() as session:
-            result = session.get(ExperimentData, id_)
-            return result
-
-    @staticmethod
-    async def create(direction: float,
-                     temperature: float,
-                     nox: float,
-                     co2: float,
-                     co: float,
-                     file_id: str):
-        new_data = ExperimentData(
-            direction=direction,
-            temperature=temperature,
-            nox=nox,
-            co2=co2,
-            co=co,
-            file_id=file_id
-        )
-        async with async_session() as session:
-            session.add(new_data)
-            await session.commit()
-
-    @staticmethod
-    async def create_from_pydantic(body: ExperimentDataBody):
-        new_data = ExperimentData(
-            direction=body.direction,
-            temperature=body.temperature,
-            nox=body.nox,
-            co2=body.co2,
-            co=body.co,
-            file_id=body.file_id
-        )
-        async with async_session() as session:
-            session.add(new_data)
-            await session.commit()
+from db.postgres_db_connection import async_session_postgres
+
+
+async def get_exp_data_by_file_id(file_id: str) -> Sequence[ExperimentData]:
+    async with async_session_postgres() as session:
+        result = await session.execute(
+            select(ExperimentData).where(ExperimentData.file_id == file_id)
+        )
+        return result.scalars().all()
+
+
+async def get_all_exp_data(page: int, page_size: int) -> Sequence[ExperimentData]:
+    async with async_session_postgres() as session:
+        offset = (page - 1) * page_size
+
+        result = await session.execute(
+            select(ExperimentData).offset(offset).limit(page_size)
+        )
+        return result.scalars().all()
```
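The pagination arithmetic is the usual `offset = (page - 1) * page_size`: page 1 starts at row 0, while page 3 with `page_size=15` skips 30 rows and returns rows 31 through 45. A quick check:

```python
def page_offset(page: int, page_size: int) -> int:
    # Mirrors get_all_exp_data: number of rows skipped before the page.
    return (page - 1) * page_size

assert page_offset(1, 15) == 0
assert page_offset(3, 15) == 30
```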
## db/repositories/experiment_parameters_repos.py

```diff
@@ -1,55 +1,129 @@
-from typing import Optional
-from sqlalchemy import select
-from db.db_connection import async_session
-from db.models.experiment_parameters_model import ExperimentParameters
-from network.schemas import ExperimentParametersBody
-
-
-class ExperimentParametersRepository:
-    @staticmethod
-    async def get_all() -> Optional[list[ExperimentParameters]]:
-        async with async_session() as session:
-            result = await session.execute(select(ExperimentParameters))
-            return result.scalars().all()
-
-    @staticmethod
-    async def get_by_id(id_: int) -> Optional[ExperimentParameters]:
-        async with async_session() as session:
-            result = session.get(ExperimentParameters, id_)
-            return result
-
-    @staticmethod
-    async def create(outer_blades_count: int,
-                     outer_blades_length: float,
-                     outer_blades_angle: float,
-                     middle_blades_count: int,
-                     load_id: int,
-                     recycling_id: int,
-                     experiment_hash: str):
-        new_data = ExperimentParameters(
-            outer_blades_count=outer_blades_count,
-            outer_blades_length=outer_blades_length,
-            outer_blades_angle=outer_blades_angle,
-            middle_blades_count=middle_blades_count,
-            load_id=load_id,
-            recycling_id=recycling_id,
-            experiment_hash=experiment_hash
-        )
-        async with async_session() as session:
-            session.add(new_data)
-            await session.commit()
-
-    @staticmethod
-    async def create_from_pydantic(body: ExperimentParametersBody):
-        new_data = ExperimentParameters(
-            outer_blades_count=body.outer_blades_count,
-            outer_blades_length=body.outer_blades_length,
-            outer_blades_angle=body.outer_blades_angle,
-            middle_blades_count=body.middle_blades_count,
-            load_id=body.load_id,
-            recycling_id=body.recycling_id,
-            experiment_hash=body.experiment_hash
-        )
-        async with async_session() as session:
-            session.add(new_data)
-            await session.commit()
+import hashlib
+from typing import Sequence
+
+import pandas as pd
+import yaml
+from fastapi import HTTPException
+from sqlalchemy.future import select
+
+from db.crud import create, update, get_by_id, update_exp
+from db.models import LoadParameters, RecyclingParameters
+from db.models.experiment_parameters_model import ExperimentParameters
+from db.postgres_db_connection import async_session_postgres
+from macros_generator import load_calculation, recycling_calculation
+
+
+async def get_exp_parameters_by_category(category_id: int) -> Sequence[ExperimentParameters]:
+    async with async_session_postgres() as session:
+        result = await session.execute(
+            select(ExperimentParameters).where(ExperimentParameters.experiment_category_id == category_id)
+        )
+        return result.scalars().all()
+
+
+async def get_exp_parameters_by_exp_hash(exp_hash: str) -> Sequence[ExperimentParameters]:
+    async with async_session_postgres() as session:
+        result = await session.execute(
+            select(ExperimentParameters).where(ExperimentParameters.experiment_hash == exp_hash)
+        )
+        return result.scalars().all()
+
+
+def generate_experiment_hash(data: dict) -> str:
+    """Generate a unique hash from the experiment data"""
+    hash_input = f"{data['outer_blades_count']}_{data['outer_blades_length']}_{data['outer_blades_angle']}_{data['middle_blades_count']}_{data['load']}_{data['recycling_level']}"
+    return hashlib.sha256(hash_input.encode()).hexdigest()
+
+
+async def save_experiment_to_db(df: pd.DataFrame, experiment_category_id: int):
+    for _, row in df.iterrows():
+        try:
+            # Convert load and recycling_level to the corresponding values
+            load = int(row['load'])
+            recycling = int(row['recycling_level'])
+
+            # Generate the experiment_hash
+            experiment_hash = generate_experiment_hash(row)
+
+            exp = await create(
+                ExperimentParameters,
+                outer_blades_count=int(row['outer_blades_count']),
+                outer_blades_length=float(row['outer_blades_length']),
+                outer_blades_angle=float(row['outer_blades_angle']),
+                middle_blades_count=int(row['middle_blades_count']),
+                load_id=None,
+                recycling_id=None,
+                experiment_hash=experiment_hash,
+                oxidizer_temp=float(row['oxidizer_temp']),
+                experiment_category_id=experiment_category_id
+            )
+
+            await process_and_save_experiment_data(exp.id, load, recycling)
+        except Exception as e:
+            print(f"Error while saving data: {e}")
+            raise HTTPException(status_code=500, detail=f"Error while saving data: {e}")
+
+
+async def process_and_save_experiment_data(id: int, load: float, recycling_level: float) -> dict:
+    try:
+        experiment = await get_by_id(ExperimentParameters, id)
+        if experiment is None:
+            raise HTTPException(status_code=404, detail=f"ExperimentParameters with id {id} not found.")
+
+        yaml_file_path = "config.yaml"
+
+        with open(yaml_file_path, "r", encoding="utf-8") as file:
+            data = yaml.safe_load(file)
+
+        diameters = data["parameters"]["diameters"]
+
+        dict_load = load_calculation(load, diameters, None)
+
+        primary_air_consumption = dict_load["primary_air_consumption"]
+        secondary_air_consumption = dict_load["secondary_air_consumption"]
+        gas_inlet_consumption = dict_load["gas_inlet_consumption"]
+        alpha = dict_load["alpha"]
+        gas_consumption = dict_load["gas_consumption"]
+        air_consumption = dict_load["air_consumption"]
+
+        dict_recycling = recycling_calculation(alpha, gas_consumption, air_consumption, recycling_level)
+
+        co2 = dict_recycling["CO2"]
+        n2 = dict_recycling["N2"]
+        h2o = dict_recycling["H2O"]
+        o2 = dict_recycling["O2"]
+
+        load_params = await create(
+            LoadParameters,
+            load=int(load),
+            primary_air_consumption=primary_air_consumption,
+            secondary_air_consumption=secondary_air_consumption,
+            gas_inlet_consumption=gas_inlet_consumption
+        )
+
+        recycling_params = await create(
+            RecyclingParameters,
+            load_id=load_params.id,
+            recycling_level=int(recycling_level),
+            co2=co2,
+            n2=n2,
+            h2o=h2o,
+            o2=o2
+        )
+
+        await update_exp(
+            ExperimentParameters,
+            id=experiment.id,
+            updated_data={
+                "load_id": load_params.id,
+                "recycling_id": recycling_params.id
+            }
+        )
+
+        return {
+            "message": "Data processed and saved successfully.",
+            "load_parameters": load_params,
+            "recycling_parameters": recycling_params
+        }
+
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
```
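Note that `generate_experiment_hash` keys the hash on six design parameters only (not `oxidizer_temp`), so two rows differing only in oxidizer temperature collide on the unique `experiment_hash` column. A quick sketch of what it produces (row values illustrative):

```python
import hashlib

row = {"outer_blades_count": 24, "outer_blades_length": 70.0,
       "outer_blades_angle": 45.0, "middle_blades_count": 18,
       "load": 380, "recycling_level": 10}
hash_input = "_".join(str(row[k]) for k in (
    "outer_blades_count", "outer_blades_length", "outer_blades_angle",
    "middle_blades_count", "load", "recycling_level"))
print(hashlib.sha256(hash_input.encode()).hexdigest())  # 64 hex chars -> experiment_hash
```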
## db/repositories/load_parameters_repos.py

```diff
@@ -1,49 +1,76 @@
-from typing import Optional
-from sqlalchemy.future import select
-from db.db_connection import async_session
-from db.models.load_parameters_model import LoadParameters
-from network.schemas import LoadParametersBody
-
-
-class LoadParametersRepository:
-    @staticmethod
-    async def get_all() -> Optional[list[LoadParameters]]:
-        async with async_session() as session:
-            result = await session.execute(select(LoadParameters))
-            return result.scalars().all()
-
-    @staticmethod
-    async def get_by_id(id_: int) -> Optional[LoadParameters]:
-        async with async_session() as session:
-            result = session.get(LoadParameters, id_)
-            return result
-
-    @staticmethod
-    async def create(load: int,
-                     primary_air_consumption: float,
-                     secondary_air_consumption: float,
-                     gas_inlet_consumption: float) -> None:
-        new_data = LoadParameters(
-            load=load,
-            primary_air_consumption=primary_air_consumption,
-            secondary_air_consumption=secondary_air_consumption,
-            gas_inlet_consumption=gas_inlet_consumption
-        )
-        async with async_session() as session:
-            session.add(new_data)
-            await session.commit()
-
-    @staticmethod
-    async def create_from_pydantic(body: LoadParametersBody):
-        new_data = LoadParameters(
-            load=body.load,
-            primary_air_consumption=body.primary_air_consumption,
-            secondary_air_consumption=body.secondary_air_consumption,
-            gas_inlet_consumption=body.gas_inlet_consumption
-        )
-        async with async_session() as session:
-            session.add(new_data)
-            await session.commit()
+from typing import Optional, Sequence
+
+from sqlalchemy import delete as delete_
+from sqlalchemy import update as update_
+from sqlalchemy.future import select
+
+from db.models.load_parameters_model import LoadParameters
+from db.postgres_db_connection import async_session_postgres
+
+
+# async def get_all() -> Sequence[LoadParameters]:
+#     async with async_session_postgres() as session:
+#         result = await session.execute(select(LoadParameters))
+#         return result.scalars().all()
+#
+#
+# async def get_by_id(id: int) -> LoadParameters:
+#     async with async_session_postgres() as session:
+#         result = await session.execute(select(LoadParameters).where(LoadParameters.id == id))
+#         return result.scalar_one_or_none()
+#
+#
+# async def create(new_data: LoadParameters):
+#     async with async_session_postgres() as session:
+#         session.add(new_data)
+#         await session.commit()
+#
+#
+# async def update(id: int, updated_data: dict):
+#     async with async_session_postgres() as session:
+#         stmt = (
+#             update_(LoadParameters).
+#             where(LoadParameters.id == id).
+#             values(**updated_data)
+#         )
+#         await session.execute(stmt)
+#         await session.commit()
+#
+#
+# async def delete(id: int) -> bool:
+#     async with async_session_postgres() as session:
+#         stmt = delete_(LoadParameters).where(LoadParameters.id == id)
+#         result = await session.execute(stmt)
+#         await session.commit()
+#         return result.rowcount > 0
+
+
+# class LoadParametersRepository:
+#     def __init__(self, session: AsyncSession):
+#         self.session = session
+#
+#     async def get_all(self) -> Sequence[LoadParameters]:
+#         result = await self.session.execute(select(LoadParameters))
+#         return result.scalars().all()
+#
+#     async def get_by_id(self, id: int) -> LoadParameters:
+#         result = await self.session.execute(select(LoadParameters).where(LoadParameters.id == id))
+#         return result.scalar_one_or_none()
+#
+#     async def create(self, new_data: LoadParameters):
+#         self.session.add(new_data)
+#         await self.session.commit()
+#
+#     async def update(self, id: int, updated_data: dict):
+#         stmt = (
+#             update_(LoadParameters).
+#             where(LoadParameters.id == id).
+#             values(**updated_data)
+#         )
+#         await self.session.execute(stmt)
+#         await self.session.commit()
+#
+#     async def delete(self, id: int):
+#         stmt = delete_(LoadParameters).where(LoadParameters.id == id)
+#         await self.session.execute(stmt)
+#         await self.session.commit()
```
## db/repositories/recycling_parameters_repos.py

```diff
@@ -1,52 +1,41 @@
-from typing import Optional
-from sqlalchemy import select
-from db.db_connection import async_session
-from db.models.recycling_parameters_model import RecyclingParameters
-from network.schemas import RecyclingParametersBody
-
-
-class RecyclingParametersRepository:
-    @staticmethod
-    async def get_all() -> Optional[list[RecyclingParameters]]:
-        async with async_session() as session:
-            result = await session.execute(select(RecyclingParameters))
-            return result.scalars.all()
-
-    @staticmethod
-    async def get_by_id(id_: int) -> Optional[RecyclingParameters]:
-        async with async_session() as session:
-            result = await session.execute(select(RecyclingParameters).where(RecyclingParameters.id == id_))
-            return result.scalars().first()
-
-    @staticmethod
-    async def create(load_id: int,
-                     recycling_level: int,
-                     co2: float,
-                     n2: float,
-                     h2o: float,
-                     o2: float):
-        new_data = RecyclingParameters(
-            load_id=load_id,
-            recycling_level=recycling_level,
-            co2=co2,
-            n2=n2,
-            h2o=h2o,
-            o2=o2
-        )
-        async with async_session() as session:
-            session.add(new_data)
-            await session.commit()
-
-    @staticmethod
-    async def create_from_pydantic(body: RecyclingParametersBody):
-        new_data = RecyclingParameters(
-            load_id=body.load_id,
-            recycling_level=body.recycling_level,
-            co2=body.co2,
-            n2=body.n2,
-            h2o=body.h2o,
-            o2=body.o2
-        )
-        async with async_session() as session:
-            session.add(new_data)
-            await session.commit()
+from typing import Sequence
+from typing_extensions import deprecated
+
+from sqlalchemy import select
+from sqlalchemy import update, delete
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from db.models.recycling_parameters_model import RecyclingParameters
+
+
+@deprecated(
+    "a parameterized CRUD now exists; entity-specific methods live in each entity's own repository"
+)
+class RecyclingParametersRepository:
+    def __init__(self, session: AsyncSession):
+        self.session = session
+
+    async def get_all(self) -> Sequence[RecyclingParameters]:
+        result = await self.session.execute(select(RecyclingParameters))
+        return result.scalars().all()
+
+    async def get_by_id(self, id: int) -> RecyclingParameters:
+        result = await self.session.execute(select(RecyclingParameters).where(RecyclingParameters.id == id))
+        return result.scalar_one_or_none()
+
+    async def create(self, new_data: RecyclingParameters):
+        self.session.add(new_data)
+        await self.session.commit()
+
+    async def update(self, id: int, updated_data: dict):
+        stmt = (
+            update(RecyclingParameters).
+            where(RecyclingParameters.id == id).
+            values(**updated_data)
+        )
+        await self.session.execute(stmt)
+        await self.session.commit()
+
+    async def delete(self, id: int):
+        stmt = delete(RecyclingParameters).where(RecyclingParameters.id == id)
+        await self.session.execute(stmt)
+        await self.session.commit()
```
## docker-compose.yml

```diff
@@ -1,4 +1,3 @@
-
 services:
   db:
     image: postgres
@@ -8,7 +7,7 @@ services:
       POSTGRES_USER: ${POSTGRES_USER}
       POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
     ports:
-      - "5432:5432"
+      - ${DB_PORT}
     volumes:
       - postgres_data:/var/lib/postgresql/data
 
@@ -33,7 +32,7 @@ services:
       - db
       - clickhouse
     volumes:
-      - .:/app  # Bind the current directory into the container for code access
+      - .:/app
 
 volumes:
   postgres_data:
```
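A note on the `ports` change: `${DB_PORT}` is resolved from the Compose `.env` file. With the short single-value syntax (e.g. `DB_PORT=5432`) Compose publishes the container port on an ephemeral host port, while a `HOST:CONTAINER` value such as `DB_PORT=5432:5432` reproduces the old fixed mapping.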
## main.py (128 lines changed)

```diff
@@ -1,16 +1,40 @@
-import asyncio
-from fastapi import FastAPI, HTTPException
+from random import uniform, random
+from traceback import format_exc
+
+import pandas as pd
+from fastapi import FastAPI, HTTPException, BackgroundTasks
+from fastapi.middleware.cors import CORSMiddleware
+from pyDOE3 import pbdesign, lhs
 
 from db.csv_to_db import csv_to_db
-from network.routes import ch_experimentdb_experiment_data_router, experiment_data_router, experiment_parameters_router
+from db.models import ChExperimentDBExperimentData
+from db.repositories import save_experiment_to_db
+from network.routes import (ch_experimentdb_experiment_data_router, experiment_data_router,
+                            experiment_parameters_router, experiment_category_router)
 from network.routes import load_parameters_router, recycling_parameters_router
+from network.routes.ch_experimentdb_experiment_data_router import create_ch_experimentdb_experiment_data
+from network.routes.experiment_data_router import create_experiment_data
+from network.routes.experiment_parameters_router import get_all_experiment_parameters
 from network.schemas import *
-from new_experiment_planner import run_experiment  # import the function from your script
+from new_experiment_planner import run_experiment
+from new_experiment_planner_pyDOE3 import scale_design, scale_design_lhs, round_by_index
 
 app = FastAPI()
 
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=['*'],
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
 app.include_router(ch_experimentdb_experiment_data_router.router,
                    prefix="/ch_experimentdb_experiment_data",
                    tags=["ch_experimentdb_experiment_data"])
+app.include_router(experiment_category_router.router,
+                   prefix="/experiment_category",
+                   tags=["experiment_category"])
 app.include_router(experiment_data_router.router,
                    prefix="/experiment_data",
                    tags=["experiment_data"])
@@ -45,9 +69,99 @@ def run_experiment_api(params: ExperimentParameters):
 
 # endpoint that initializes the DB from the csv files
 @app.get('/init_db_data')
-def init_db_data():
+async def init_db_data(background_tasks: BackgroundTasks):
     try:
-        asyncio.run(csv_to_db())
-        return {"status": "success", "message": "Database initialized. Data from the csv files was added successfully."}
+        background_tasks.add_task(csv_to_db)
+        return {"status": "success", "message": "DB initialization started in the background"}
     except Exception as e:
+        print(str(e.with_traceback()))
         raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
+
+
+@app.get('/init_data_plot')
+async def init_data_plot():
+    try:
+        parameters = await get_all_experiment_parameters()
+
+        async def generate_data_for_file_id(file_id):
+            for _ in range(100):
+                await create_ch_experimentdb_experiment_data(
+                    ChExperimentDBExperimentData(
+                        volume=uniform(1e-8, 6.3e-6),
+                        nitrogen_oxide_emission=uniform(2.7e-10, 8.92e-10),
+                        temperature=uniform(1543.0, 2432.0),
+                        co_fraction=uniform(0.0875, 0.4567),
+                        co2_fraction=uniform(0.0824, 0.5678),
+                        x=uniform(0.0002, 0.9849),
+                        y=uniform(4.25, 10.31),
+                        z=uniform(4.20, 10.26),
+                        file_id=file_id
+                    )
+                )
+
+        for param in parameters:
+            print(param)
+            file_id = param.experiment_hash
+            if file_id:
+                await generate_data_for_file_id(file_id)
+
+        return {"status": "success", "message": "Adding data to the DB completed successfully"}
+    except Exception as e:
+        print(format_exc())
+        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
+
+
+# Example request
+# {
+#     "param_ranges": {
+#         "outer_blades_count": [12, 48],
+#         "outer_blades_length": [44, 107.5],
+#         "outer_blades_angle": [30, 75],
+#         "middle_blades_count": [9, 36],
+#         "load": [315, 465],
+#         "recycling_level": [0, 20],
+#         "oxidizer_temp": [471, 493]
+#     },
+#     "count_exp": 1440,
+#     "round_rules": [0, 1, 1, 0, 0, 0, 0]
+# }
+
+@app.post("/pyDOE3_screening_design")
+async def generate_screening_design(request: ExperimentParametersPyDOE3) -> List[Dict[str, float]]:
+    param_ranges = request.param_ranges
+    category = request.category
+
+    # Build the screening design and scale it
+    num_factors = len(param_ranges)
+    screening_design = pbdesign(num_factors)
+    scaled_screening_design = scale_design(screening_design, param_ranges)
+
+    # Convert to a DataFrame
+    df_screening = pd.DataFrame(scaled_screening_design, columns=param_ranges.keys())
+
+    # Save the results to the database
+    await save_experiment_to_db(df_screening, category)
+
+    return df_screening.to_dict(orient="records")
+
+
+@app.post("/pyDOE3_lhs_design")
+async def generate_lhs_design(request: ExperimentParametersPyDOE3) -> List[Dict[str, float]]:
+    param_ranges = request.param_ranges
+    count_exp = request.count_exp
+    round_rules = request.round_rules
+    category = request.category
+
+    # Build the LHS design and scale it
+    num_factors = len(param_ranges)
+    lhs_samples = lhs(num_factors, samples=count_exp)
+    scaled_lhs_samples = scale_design_lhs(lhs_samples, param_ranges)
+
+    # Round the values
+    round_scaled_lhs_samples = round_by_index(scaled_lhs_samples, round_rules)
+
+    # Convert to a DataFrame
+    df_lhs = pd.DataFrame(round_scaled_lhs_samples, columns=param_ranges.keys())
+
+    # Save the results to the database
+    await save_experiment_to_db(df_lhs, category)
+
+    return df_lhs.to_dict(orient="records")
```
## network/routes/ch_experimentdb_experiment_data_router.py

```diff
@@ -1,28 +1,80 @@
 from fastapi import APIRouter, HTTPException
 
+from db.clickhouse_db_connection import session_clickhouse
+from db.models import ChExperimentDBExperimentData
 from db.repositories.ch_experimentdb_experiment_data_repos import ChExperimentDBExperimentDataRepository
 from network.schemas import ChExperimentDBExperimentDataBody
+from typing import List
 
 router = APIRouter()
 
+repository = ChExperimentDBExperimentDataRepository(session_clickhouse)
+
 @router.post('/create')
 async def create_ch_experimentdb_experiment_data(data: ChExperimentDBExperimentDataBody):
     try:
-        await ChExperimentDBExperimentDataRepository.create_from_pydantic(data)
-        return {"message": "New <ChExperimentDBExperimentData> record added successfully"}
+        new_record = repository.create(ChExperimentDBExperimentData(
+            volume=data.volume,
+            nitrogen_oxide_emission=data.nitrogen_oxide_emission,
+            temperature=data.temperature,
+            co_fraction=data.co_fraction,
+            co2_fraction=data.co2_fraction,
+            x=data.x,
+            y=data.y,
+            z=data.z,
+            file_id=data.file_id
+        ))
+
+        return {"message": "New record added successfully"}
     except Exception as e:
         raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
 
 
-@router.get('/all')
+@router.get('/all', response_model=List[ChExperimentDBExperimentDataBody])
 async def get_all_ch_experimentdb_experiment_data():
     try:
-        result = await ChExperimentDBExperimentDataRepository.get_all()
-        if result is not None:
+        result = repository.get_all()
+
+        if result:
             return result
         else:
-            return {"message": "No <ChExperimentDBExperimentData> records, or an unexpected error occurred"}
+            return {"message": "No records"}
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
+
+
+@router.get('/{id}')
+async def get_ch_experimentdb_experiment_data_by_id(id: int):
+    try:
+        record = repository.get_by_id(id)
+        if record:
+            return record
+        else:
+            raise HTTPException(status_code=404, detail="Record not found")
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
+
+
+@router.get('/file_id/{file_id}')
+async def get_ch_experimentdb_experiment_data_by_file_id(file_id: str):
+    try:
+        record = repository.get_by_file_id(file_id)
+        if record:
+            return record
+        else:
+            raise HTTPException(status_code=404, detail="Record not found")
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
+
+
+@router.delete('/{id}/delete')
+async def delete_ch_experimentdb_experiment_data(id: int):
+    try:
+        is_deleted = repository.delete(id)
+
+        if is_deleted:
+            return {"message": "Record deleted successfully"}
+        else:
+            raise HTTPException(status_code=404, detail="Record not found")
     except Exception as e:
         raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
```
59
network/routes/experiment_category_router.py
Normal file
@ -0,0 +1,59 @@
from fastapi import APIRouter, HTTPException

from db.crud import *
from db.models import ExperimentCategory
from network.schemas import ExperimentCategoryBody

router = APIRouter()


@router.post('/create')
async def create_experiment_category(body: ExperimentCategoryBody):
    try:
        await create(ExperimentCategory,
                     name=body.name
                     )

        return {"message": "Новая запись <ExperimentCategory> успешно добавлена"}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")


@router.get('/all')
async def get_all_experiment_category():
    try:
        result = await get_all(ExperimentCategory)

        if result is not None:
            return result
        else:
            return {"message": "Нет записей в <ExperimentCategory>"}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")


@router.get('/{id}')
async def get_by_id_experiment_category(id: int):
    try:
        result = await get_by_id(ExperimentCategory, id)

        if result is not None:
            return result
        else:
            return {"message": "Запись <ExperimentCategory> не найдена"}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")


@router.delete('/{id}/delete')
async def delete_experiment_category(id: int):
    try:
        is_deleted = await delete(ExperimentCategory, id)
        if is_deleted:
            return {"message": "Запись <ExperimentCategory> успешно удалена"}
        else:
            return {"message": "Запись <ExperimentCategory> не найдена"}

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
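For reference, a quick client-side check of the new category endpoints could look like the sketch below. The `requests` calls and the `/experiment_category` mount prefix are assumptions; the actual prefix is set wherever the router is included in the app:

```python
import requests

BASE = "http://localhost:8000/experiment_category"  # mount prefix is an assumption

# Create a category (field name comes from ExperimentCategoryBody)
r = requests.post(f"{BASE}/create", json={"name": "baseline"})
print(r.json())

# List, fetch by id, delete
print(requests.get(f"{BASE}/all").json())
print(requests.get(f"{BASE}/1").json())
print(requests.delete(f"{BASE}/1/delete").json())
```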
@ -1,14 +1,26 @@
+import math
+
-from fastapi import APIRouter, HTTPException
-from db.repositories.experiment_data_repos import ExperimentDataRepository
+from fastapi import APIRouter, HTTPException, Query
+
+from db.crud import *
+from db.models import ExperimentData
+from db.repositories import get_exp_data_by_file_id, get_all_exp_data
 from network.schemas import ExperimentDataBody
 
 
 router = APIRouter()
 
 
 @router.post('/create')
-async def create_experiment_data(data: ExperimentDataBody):
+async def create_experiment_data(body: ExperimentDataBody):
     try:
-        await ExperimentDataRepository.create_from_pydantic(data)
+        await create(ExperimentData,
+                     direction=body.direction,
+                     temperature=body.temperature,
+                     nox=body.nox,
+                     co2=body.co2,
+                     co=body.co,
+                     file_id=body.file_id
+                     )
+
         return {"message": "Новая запись <ExperimentData> успешно добавлена"}
     except Exception as e:
@ -16,14 +28,52 @@ async def create_experiment_data(data: ExperimentDataBody):
 
 
 @router.get('/all')
-async def get_all_experiment_data():
+async def get_all_experiment_data(page: int = Query(ge=1, default=1),
+                                  size: int = Query(ge=1, le=100, default=15)):
     try:
-        result = await ExperimentDataRepository.get_all()
-
-        if result is not None:
-            # return {"status": "success", "data": [LoadParametersBody.model_validate(param) for param in result]}
-            return result
-        else:
-            return {"message": "Нет записей в <ExperimentData>, либо произошла непредвиденная ошибка"}
+        result = await get_all_exp_data(page, size)
+        return {
+            "items": result,
+            "page": page,
+            "size": size,
+        }
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
+
+
+@router.get('/{id}')
+async def get_by_id_experiment_data(id: int):
+    try:
+        result = await get_by_id(ExperimentData, id)
+
+        if result is not None:
+            return result
+        else:
+            return {"message": "Запись <ExperimentData> не найдена"}
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
+
+
+@router.get('/file_id/{file_id}')
+async def get_experiment_data_by_file_id(file_id: str):
+    try:
+        result = await get_exp_data_by_file_id(file_id)
+
+        if result is not None:
+            return result
+        else:
+            return {"message": f'<ExperimentData> с file_id = {file_id} - не найдены'}
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
+
+
+@router.delete('/{id}/delete')
+async def delete_experiment_data(id: int):
+    try:
+        is_deleted = await delete(ExperimentData, id)
+        if is_deleted:
+            return {"message": "Запись <ExperimentData> успешно удалена"}
+        else:
+            return {"message": "Запись <ExperimentData> не найдена"}
     except Exception as e:
         raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
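The paginated `/all` handler above delegates to `get_all_exp_data`, which this diff imports from `db.repositories` but does not show. A plausible offset/limit sketch, assuming the same async session pattern used in `db/crud.py`:

```python
# Hypothetical sketch of the pagination helper; the real implementation
# lives in db.repositories and is not part of this diff.
from sqlalchemy.future import select

from db.models import ExperimentData
from db.postgres_db_connection import async_session_postgres


async def get_all_exp_data(page: int, size: int):
    async with async_session_postgres() as session:
        stmt = (
            select(ExperimentData)
            .offset((page - 1) * size)  # skip the previous pages
            .limit(size)                # cap the page size
        )
        result = await session.execute(stmt)
        return result.scalars().all()
```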
@ -1,25 +1,212 @@
+import yaml
 from fastapi import APIRouter, HTTPException
-from db.repositories.experiment_parameters_repos import ExperimentParametersRepository
-from network.schemas import ExperimentParametersBody
+from pydantic import BaseModel
+from typing import List, Tuple
+
+import matplotlib.pyplot as plt
+from matplotlib import cm
+from mpl_toolkits.mplot3d import Axes3D
+import io
+import base64
+
+from db.crud import *
+from db.models import LoadParameters
+from db.repositories import get_exp_parameters_by_category, get_exp_parameters_by_exp_hash
+from macros_generator import load_calculation, recycling_calculation
+from network.routes.ch_experimentdb_experiment_data_router import get_ch_experimentdb_experiment_data_by_file_id
+from network.schemas import ExperimentParametersBody, ExperimentParametersPlot
 
 
 router = APIRouter()
 
 
 @router.post('/create')
-async def create_experiment_parameters(data: ExperimentParametersBody):
+async def create_experiment_parameters(body: ExperimentParametersBody):
     try:
-        await ExperimentParametersRepository.create_from_pydantic(data)
+        await create(ExperimentParameters,
+                     outer_blades_count=body.outer_blades_count,
+                     outer_blades_length=body.outer_blades_length,
+                     outer_blades_angle=body.outer_blades_angle,
+                     middle_blades_count=body.middle_blades_count,
+                     load_id=body.load_id,
+                     recycling_id=body.recycling_id,
+                     experiment_hash=body.experiment_hash,
+                     experiment_category_id=body.experiment_category_id
+                     )
+
         return {"message": "Новая запись <ExperimentParameters> успешно добавлена"}
     except Exception as e:
         raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
+
+
+@router.post('/plot')
+async def create_plot(body: ExperimentParametersPlot):
+    try:
+        exp_data = await get_ch_experimentdb_experiment_data_by_file_id(body.file_id)
+        if not exp_data:
+            raise HTTPException(status_code=404, detail="Данные не найдены для указанного file_id")
+
+        filtered_data = [
+            record for record in exp_data
+            if body.range[0] <= getattr(record, body.additional_dimension, None) <= body.range[1]
+        ]
+
+        if not filtered_data:
+            return {"message": "Нет данных для отображения в заданном диапазоне"}
+
+        x = [record.x for record in filtered_data]
+        y = [record.y for record in filtered_data]
+        z = [record.z for record in filtered_data]
+        color_values = [getattr(record, body.additional_dimension, None) for record in filtered_data]
+
+        fig = plt.figure(figsize=(10, 8))
+        ax = fig.add_subplot(111, projection='3d')
+        scatter = ax.scatter(x, y, z, c=color_values, cmap=cm.viridis, marker='o')
+
+        cbar = fig.colorbar(scatter, ax=ax)
+        cbar.set_label(body.additional_dimension)
+
+        ax.set_xlabel('X')
+        ax.set_ylabel('Y')
+        ax.set_zlabel('Z')
+
+        plt.title(f"3D график с измерением: {body.additional_dimension}")
+
+        buf = io.BytesIO()
+        plt.savefig(buf, format='png')
+        buf.seek(0)
+        image_base64 = base64.b64encode(buf.getvalue()).decode('utf-8')
+        buf.close()
+        plt.close(fig)
+
+        return {"message": "График построен успешно", "plot": image_base64}
+
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
+
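The `/plot` endpoint returns the figure as a base64 string in JSON rather than as an image response. A small client sketch that saves it back to a PNG; the mount prefix and payload values are illustrative, and the payload fields come from the `ExperimentParametersPlot` schema:

```python
import base64
import requests

payload = {
    "file_id": "some-file-id",           # placeholder value
    "additional_dimension": "temperature",
    "range": [300.0, 900.0],
}
# Mount prefix is an assumption.
resp = requests.post("http://localhost:8000/experiment_parameters/plot", json=payload)
resp.raise_for_status()

with open("plot.png", "wb") as f:
    f.write(base64.b64decode(resp.json()["plot"]))
```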
 @router.get('/all')
 async def get_all_experiment_parameters():
     try:
-        result = await ExperimentParametersRepository.get_all()
+        result = await get_all(ExperimentParameters)

-        if result:
+        if result is not None:
             return result
         else:
-            return {"message": "Нет записей в <ExperimentParameters>, либо произошла непредвиденная ошибка"}
+            return {"message": "Нет записей в <ExperimentParameters>"}
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
+
+
+@router.get('/{id}')
+async def get_by_id_experiment_parameters(id: int):
+    try:
+        result = await get_by_id(ExperimentParameters, id)
+
+        if result is not None:
+            return result
+        else:
+            return {"message": "Запись <ExperimentParameters> не найдена"}
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
+
+
+@router.get('/by_category/{id}')
+async def get_experiment_parameters_by_exp_category(id: int):
+    try:
+        result = await get_exp_parameters_by_category(id)
+
+        if result is not None:
+            return result
+        else:
+            return {"message": f'<ExperimentParameters> с идентификатором категории - {id} - не найдены'}
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
+
+
+@router.get('/by_exp_hash/{hash}')
+async def get_experiment_parameters_by_exp_hash(hash: str):
+    try:
+        result = await get_exp_parameters_by_exp_hash(hash)
+
+        if result is not None:
+            return result
+        else:
+            return {"message": f'<ExperimentParameters> с experiment_hash = {hash} - не найдены'}
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
+
+
+@router.delete('/{id}/delete')
+async def delete_experiment_parameters(id: int):
+    try:
+        is_deleted = await delete(ExperimentParameters, id)
+
+        if is_deleted:
+            return {"message": "Запись <ExperimentParameters> успешно удалена"}
+        else:
+            return {"message": "Запись <ExperimentParameters> не найдена"}
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
+
+
+# @router.post('/process_and_save/{id}')  - was only needed for testing
+async def process_and_save_experiment_data(id: int, load: float, recycling_level: float) -> dict:
+    try:
+        experiment = await get_by_id(ExperimentParameters, id)
+        if experiment is None:
+            raise HTTPException(status_code=404, detail=f"ExperimentParameters с id {id} не найден.")
+
+        yaml_file_path = "config.yaml"
+
+        with open(yaml_file_path, "r", encoding="utf-8") as file:
+            data = yaml.safe_load(file)
+
+        diameters = data["parameters"]["diameters"]
+
+        dict_load = load_calculation(load, diameters, None)
+
+        primary_air_consumption = dict_load["primary_air_consumption"]
+        secondary_air_consumption = dict_load["secondary_air_consumption"]
+        gas_inlet_consumption = dict_load["gas_inlet_consumption"]
+        alpha = dict_load["alpha"]
+        gas_consumption = dict_load["gas_consumption"]
+        air_consumption = dict_load["air_consumption"]
+
+        dict_recycling = recycling_calculation(alpha, gas_consumption, air_consumption, recycling_level)
+
+        co2 = dict_recycling["CO2"]
+        n2 = dict_recycling["N2"]
+        h2o = dict_recycling["H2O"]
+        o2 = dict_recycling["O2"]
+
+        load_params = await create(
+            LoadParameters,
+            load=int(load),
+            primary_air_consumption=primary_air_consumption,
+            secondary_air_consumption=secondary_air_consumption,
+            gas_inlet_consumption=gas_inlet_consumption
+        )
+
+        recycling_params = await create(
+            RecyclingParameters,
+            load_id=load_params.id,
+            recycling_level=int(recycling_level),
+            co2=co2,
+            n2=n2,
+            h2o=h2o,
+            o2=o2
+        )
+
+        await update(
+            ExperimentParameters,
+            id=experiment.id,
+            updated_data={
+                "load_id": load_params.id,
+                "recycling_id": recycling_params.id
+            }
+        )
+
+        return {
+            "message": "Данные успешно обработаны и сохранены.",
+            "load_parameters": load_params,
+            "recycling_parameters": recycling_params
+        }
+
     except Exception as e:
         raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
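`process_and_save_experiment_data` above reads the diameters from `config.yaml`. The file itself is not part of this diff, so its exact contents are unknown; the minimal shape the code relies on is something like the following (values are placeholders, and whatever `load_calculation` expects inside `diameters` is defined by `macros_generator`):

```python
import yaml

# Minimal config.yaml shape assumed by process_and_save_experiment_data.
example = """
parameters:
  diameters: [0.15, 0.25, 0.40]   # placeholder values
"""
data = yaml.safe_load(example)
print(data["parameters"]["diameters"])  # -> [0.15, 0.25, 0.4]
```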
@ -1,15 +1,22 @@
 from fastapi import APIRouter, HTTPException
-from db.repositories.load_parameters_repos import LoadParametersRepository
+
+from db.crud import *
+from db.models import LoadParameters
 from network.schemas import LoadParametersBody
 
 
 router = APIRouter()
 
 
 @router.post('/create')
-async def create_load_parameters(data: LoadParametersBody):
+async def create_load_parameters(body: LoadParametersBody):
     try:
-        await LoadParametersRepository.create_from_pydantic(data)
+        await create(LoadParameters,
+                     load=body.load,
+                     primary_air_consumption=body.primary_air_consumption,
+                     secondary_air_consumption=body.secondary_air_consumption,
+                     gas_inlet_consumption=body.gas_inlet_consumption
+                     )
         return {"message": "Новая запись <LoadParameters> успешно добавлена"}
     except Exception as e:
         raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
@ -18,12 +25,36 @@ async def create_load_parameters(data: LoadParametersBody):
 @router.get('/all')
 async def get_all_load_parameters():
     try:
-        result = await LoadParametersRepository.get_all()
+        result = await get_all(LoadParameters)

         if result is not None:
-            # return {"status": "success", "data": [LoadParametersBody.model_validate(param) for param in result]}
             return result
         else:
-            return {"message": "Нет записей в <LoadParameters>, либо произошла непредвиденная ошибка"}
+            return {"message": "Нет записей в <LoadParameters>"}
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
+
+
+@router.get('/{id}')
+async def get_by_id_load_parameters(id: int):
+    try:
+        result = await get_by_id(LoadParameters, id)
+
+        if result is not None:
+            return result
+        else:
+            return {"message": "Запись <LoadParameters> не найдена"}
+
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
+
+
+@router.delete('/{id}/delete')
+async def delete_load_parameters(id: int):
+    try:
+        is_deleted = await delete(LoadParameters, id)
+        if is_deleted:
+            return {"message": "Запись <LoadParameters> успешно удалена"}
+        else:
+            return {"message": "Запись <LoadParameters> не найдена"}
     except Exception as e:
         raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
@ -1,25 +1,61 @@
 from fastapi import APIRouter, HTTPException
-from db.repositories.recycling_parameters_repos import RecyclingParametersRepository
+
+from db.crud import *
+from db.models import RecyclingParameters
 from network.schemas import RecyclingParametersBody
 
 router = APIRouter()
 
 
 @router.post('/create')
-async def create_recycling_parameters(data: RecyclingParametersBody):
+async def create_recycling_parameters(body: RecyclingParametersBody):
     try:
-        await RecyclingParametersRepository.create_from_pydantic(data)
+        await create(RecyclingParameters,
+                     load_id=body.load_id,
+                     recycling_level=body.recycling_level,
+                     co2=body.co2,
+                     n2=body.n2,
+                     h2o=body.h2o,
+                     o2=body.o2
+                     )
         return {"message": "Новая запись <RecyclingParameters> успешно добавлена"}
     except Exception as e:
         raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
 
 
 @router.get('/all')
 async def get_all_recycling_parameters():
     try:
-        result = await RecyclingParametersRepository.get_all()
+        result = await get_all(RecyclingParameters)

-        if result:
+        if result is not None:
             return result
         else:
-            return {"message": "Нет записей в <RecyclingParameters>, либо произошла непредвиденная ошибка"}
+            return {"message": "Нет записей в <RecyclingParameters>"}
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
+
+
+@router.get('/{id}')
+async def get_by_id_recycling_parameters(id: int):
+    try:
+        result = await get_by_id(RecyclingParameters, id)
+
+        if result is not None:
+            return result
+        else:
+            return {"message": "Запись <RecyclingParameters> не найдена"}
+
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
+
+
+@router.delete('/{id}/delete')
+async def delete_recycling_parameters(id: int):
+    try:
+        is_deleted = await delete(RecyclingParameters, id)
+        if is_deleted:
+            return {"message": "Запись <RecyclingParameters> успешно удалена"}
+        else:
+            return {"message": "Запись <RecyclingParameters> не найдена"}
     except Exception as e:
         raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
@ -1,9 +1,17 @@
-from typing import Optional
+from enum import Enum
+from typing import Optional, Dict, Tuple, List
+
+from psycopg.types import enum
 from pydantic import BaseModel, ConfigDict
 
 
-# Модель данных для передачи параметров
+class ExperimentParametersPyDOE3(BaseModel):
+    param_ranges: Dict[str, Tuple[float, float]]
+    category: int
+    count_exp: int
+    round_rules: List[int]
+
+
 class ExperimentParameters(BaseModel):
     outer_blades_count: str
     outer_blades_length: str
@ -12,6 +20,10 @@ class ExperimentParameters(BaseModel):
     load: str
     recycling: str
+
+
+class ExperimentParametersPlot(BaseModel):
+    file_id: str
+    additional_dimension: str
+    range: Tuple[float, float]
 
 
 class ChExperimentDBExperimentDataBody(BaseModel):
     model_config = ConfigDict(from_attributes=True)
@ -26,6 +38,11 @@ class ChExperimentDBExperimentDataBody(BaseModel):
     file_id: str
 
 
+class ExperimentCategoryBody(BaseModel):
+    model_config = ConfigDict(from_attributes=True)
+    name: str
+
+
 class ExperimentDataBody(BaseModel):
     model_config = ConfigDict(from_attributes=True)
     direction: float
@ -45,6 +62,7 @@ class ExperimentParametersBody(BaseModel):
     load_id: Optional[int]
     recycling_id: Optional[int]
     experiment_hash: str
+    experiment_category_id: int
 
 
 class LoadParametersBody(BaseModel):
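The new `ExperimentParametersPyDOE3` schema pairs with the planner helpers added later in this diff. An illustrative payload is sketched below; the parameter names and ranges are made up for the example, though the field names match the schema:

```python
from network.schemas import ExperimentParametersPyDOE3

# Example planning request; values are illustrative only.
body = ExperimentParametersPyDOE3(
    param_ranges={
        "outer_blades_count": (12, 48),
        "outer_blades_length": (70.0, 130.0),
    },
    category=1,
    count_exp=10,
    round_rules=[0, 1],  # one rounding rule per parameter, in dict order
)
print(body.model_dump())
```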
36
new_experiment_planner_pyDOE3.py
Normal file
@ -0,0 +1,36 @@
import numpy as np


# Function for scaling design values
def scale_design(design, param_ranges):
    scaled_design = []
    for row in design:
        scaled_row = []
        for i, val in enumerate(row):
            min_val, max_val = param_ranges[list(param_ranges.keys())[i]]
            scaled_val = (val + 1) / 2 * (max_val - min_val) + min_val
            scaled_row.append(scaled_val)
        scaled_design.append(scaled_row)
    return np.array(scaled_design)


def scale_design_lhs(design, param_ranges):
    scaled_design = []
    for row in design:
        scaled_row = []
        for i, val in enumerate(row):
            min_val, max_val = param_ranges[list(param_ranges.keys())[i]]
            scaled_val = val * (max_val - min_val) + min_val
            scaled_row.append(scaled_val)
        scaled_design.append(scaled_row)
    return np.array(scaled_design)


# Function for rounding values
def round_by_index(array, rules):
    rounded_array = np.zeros(array.shape)
    for i in range(array.shape[0]):
        for j in range(array.shape[1]):
            rounded_array[i, j] = round(array[i, j], rules[j])
    return rounded_array
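These helpers are written for designs produced by the pyDOE3 package: `scale_design` maps the [-1, 1] factors of designs such as `ff2n`/`ccdesign` into the real parameter ranges, while `scale_design_lhs` maps the [0, 1] output of `lhs`. A short usage sketch, assuming pyDOE3 is installed and the ranges are illustrative:

```python
from pyDOE3 import lhs

from new_experiment_planner_pyDOE3 import scale_design_lhs, round_by_index

param_ranges = {
    "outer_blades_count": (12, 48),      # placeholder ranges
    "outer_blades_length": (70.0, 130.0),
}

# lhs() samples uniformly in [0, 1], which is what scale_design_lhs expects.
design = lhs(len(param_ranges), samples=5)
scaled = scale_design_lhs(design, param_ranges)
rounded = round_by_index(scaled, [0, 1])  # one rounding rule per column
print(rounded)
```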
BIN
requirements.txt
Binary file not shown.
@ -22,7 +22,10 @@ class Settings(BaseSettings):
         # 'postgresql+asyncpg://username:password@localhost:5432/database_name'
         return f'postgresql+asyncpg://{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}@db:{self.DB_PORT}/{self.DATABASE}'
 
+    @property
+    def clickhouse_url(self):
+        return f'clickhouse://{self.CLICKHOUSE_USER}:{self.CLICKHOUSE_PASSWORD}@clickhouse:8123/{self.DATABASE}'
+
     model_config = SettingsConfigDict(env_file=".env")
 
 
 settings = Settings()
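The new `clickhouse_url` property reads `CLICKHOUSE_USER` and `CLICKHOUSE_PASSWORD` from the same `.env` file as the PostgreSQL settings; the corresponding field declarations on `Settings` are outside this hunk. Sample `.env` entries with placeholder values:

```env
# Placeholder values - use the credentials configured for the clickhouse service
CLICKHOUSE_USER=clickhouse_user
CLICKHOUSE_PASSWORD=change_me
```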