Merge pull request 'modified_db' (#2) from modified_db into main

Reviewed-on: #2
This commit is contained in:
Sosees04ka 2024-11-30 01:45:56 +04:00
commit 9bc9c5553b
7 changed files with 214 additions and 10 deletions

View File

@ -42,6 +42,24 @@ async def update(model_class: Type[T], id: int, updated_data: Dict[str, Any]) ->
await session.commit() await session.commit()
return await get_by_id(model_class, id) return await get_by_id(model_class, id)
# TODO: rewrite/reconsider update (overlaps with update_exp below)
async def update_exp(model_class: Type[T], id: int, updated_data: Dict[str, Any]) -> Optional[T]:
    """Update the row of *model_class* with the given id and return it.

    Runs UPDATE ... RETURNING inside an explicit transaction, so the
    refreshed object comes back without a second SELECT.

    Args:
        model_class: ORM model whose row is updated.
        id: primary key of the target row.
        updated_data: column -> new value mapping.

    Returns:
        The updated instance, or None when no row matched *id*.
    """
    stmt = (
        update_(model_class)
        .where(model_class.id == id)
        .values(**updated_data)
        .returning(model_class)  # hand the updated row straight back
    )
    async with async_session_postgres() as session:
        async with session.begin():  # explicit transaction; commits on clean exit
            result = await session.execute(stmt)
            row = result.scalars().first()
    # first() yields None when the UPDATE matched nothing
    return row
async def delete(model_class: Type[T], id: int) -> bool: async def delete(model_class: Type[T], id: int) -> bool:
async with async_session_postgres() as session: async with async_session_postgres() as session:

View File

@ -9,7 +9,7 @@ from db.models.base import Base
class ExperimentParameters(Base): class ExperimentParameters(Base):
__tablename__ = 'experiment_parameters' __tablename__ = 'experiment_parameters'
id: Mapped[int] = mapped_column(Identity(start=11, cycle=True), id: Mapped[int] = mapped_column(Identity(start=1100, cycle=True),
primary_key=True) primary_key=True)
outer_blades_count: Mapped[int] outer_blades_count: Mapped[int]
outer_blades_length: Mapped[float] outer_blades_length: Mapped[float]
@ -19,6 +19,7 @@ class ExperimentParameters(Base):
recycling_id: Mapped[Optional[int]] = mapped_column(ForeignKey('recycling_parameters.id', ondelete='SET NULL')) recycling_id: Mapped[Optional[int]] = mapped_column(ForeignKey('recycling_parameters.id', ondelete='SET NULL'))
experiment_hash: Mapped[str] = mapped_column(unique=True) experiment_hash: Mapped[str] = mapped_column(unique=True)
experiment_category_id: Mapped[Optional[int]] = mapped_column(ForeignKey('experiment_category.id', ondelete='SET NULL'), nullable=True) experiment_category_id: Mapped[Optional[int]] = mapped_column(ForeignKey('experiment_category.id', ondelete='SET NULL'), nullable=True)
oxidizer_temp: Mapped[float] = mapped_column(nullable=True)
def __repr__(self): def __repr__(self):
return f"<ExperimentParameters>" return f"<ExperimentParameters>"

View File

@ -7,7 +7,7 @@ from db.models.base import Base
class LoadParameters(Base): class LoadParameters(Base):
__tablename__ = 'load_parameters' __tablename__ = 'load_parameters'
id: Mapped[int] = mapped_column(Identity(start=6, cycle=True), id: Mapped[int] = mapped_column(Identity(start=1000, cycle=True),
primary_key=True) primary_key=True)
load: Mapped[int] load: Mapped[int]
primary_air_consumption: Mapped[float] primary_air_consumption: Mapped[float]

View File

@ -9,7 +9,7 @@ from db.models.base import Base
class RecyclingParameters(Base): class RecyclingParameters(Base):
__tablename__ = 'recycling_parameters' __tablename__ = 'recycling_parameters'
id: Mapped[int] = mapped_column(Identity(start=6, cycle=True), id: Mapped[int] = mapped_column(Identity(start=1000, cycle=True),
primary_key=True) primary_key=True)
load_id: Mapped[Optional[int]] = mapped_column(ForeignKey('load_parameters.id', ondelete='SET NULL')) load_id: Mapped[Optional[int]] = mapped_column(ForeignKey('load_parameters.id', ondelete='SET NULL'))

View File

@ -1,9 +1,16 @@
import hashlib
from typing import Sequence from typing import Sequence
import pandas as pd
import yaml
from fastapi import HTTPException
from sqlalchemy.future import select from sqlalchemy.future import select
from db.crud import create, update, get_by_id, update_exp
from db.models import LoadParameters, RecyclingParameters
from db.models.experiment_parameters_model import ExperimentParameters from db.models.experiment_parameters_model import ExperimentParameters
from db.postgres_db_connection import async_session_postgres from db.postgres_db_connection import async_session_postgres
from macros_generator import load_calculation, recycling_calculation
async def get_exp_parameters_by_category(category_id: int) -> Sequence[ExperimentParameters]: async def get_exp_parameters_by_category(category_id: int) -> Sequence[ExperimentParameters]:
@ -18,4 +25,103 @@ async def get_exp_parameters_by_exp_hash(exp_hash: str) -> Sequence[ExperimentPa
result = await session.execute( result = await session.execute(
select(ExperimentParameters).where(ExperimentParameters.experiment_hash == exp_hash) select(ExperimentParameters).where(ExperimentParameters.experiment_hash == exp_hash)
) )
return result.scalars().all() return result.scalars().all()
def generate_experiment_hash(data: dict) -> str:
    """Build a deterministic SHA-256 hex digest identifying an experiment.

    The digest is derived from the six defining parameters joined with
    underscores, so identical parameter sets always map to the same hash
    (used for the UNIQUE experiment_hash column).
    """
    key_fields = (
        "outer_blades_count",
        "outer_blades_length",
        "outer_blades_angle",
        "middle_blades_count",
        "load",
        "recycling_level",
    )
    hash_input = "_".join(str(data[field]) for field in key_fields)
    return hashlib.sha256(hash_input.encode()).hexdigest()
async def save_experiment_to_db(df: pd.DataFrame):
    """Persist each row of *df* as an ExperimentParameters record.

    For every row the experiment record is created first with NULL
    load_id/recycling_id, then process_and_save_experiment_data() computes
    the derived load/recycling parameters and links them to the experiment.

    Raises:
        HTTPException: propagated unchanged from downstream processing,
            or a 500 wrapping any other failure on the first bad row.
    """
    for _, row in df.iterrows():
        try:
            # Raw load / recycling values; resolved into linked parameter
            # tables by process_and_save_experiment_data() afterwards.
            load = int(row['load'])
            recycling = int(row['recycling_level'])

            # Deterministic hash for the UNIQUE experiment_hash column.
            experiment_hash = generate_experiment_hash(row)

            exp = await create(
                ExperimentParameters,
                outer_blades_count=int(row['outer_blades_count']),
                outer_blades_length=float(row['outer_blades_length']),
                outer_blades_angle=float(row['outer_blades_angle']),
                middle_blades_count=int(row['middle_blades_count']),
                load_id=None,       # linked later
                recycling_id=None,  # linked later
                experiment_hash=experiment_hash,
                oxidizer_temp=float(row['oxidizer_temp'])
            )
            await process_and_save_experiment_data(exp.id, load, recycling)
        except HTTPException:
            # Bug fix: don't re-wrap deliberate HTTP errors raised
            # downstream (e.g. a 404) into a generic 500.
            raise
        except Exception as e:
            print(f"Ошибка при сохранении данных: {e}")
            raise HTTPException(status_code=500, detail=f"Ошибка при сохранении данных: {e}") from e
async def process_and_save_experiment_data(id: int, load: float, recycling_level: float) -> dict:
    """Compute load/recycling parameters for an experiment and link them to it.

    Reads pipe diameters from config.yaml, runs the macro calculations,
    stores LoadParameters and RecyclingParameters rows, then points the
    experiment's load_id/recycling_id at the new rows.

    Raises:
        HTTPException: 404 when the experiment does not exist,
            500 for any other processing/storage failure.
    """
    try:
        experiment = await get_by_id(ExperimentParameters, id)
        if experiment is None:
            raise HTTPException(status_code=404, detail=f"ExperimentParameters с id {id} не найден.")

        yaml_file_path = "config.yaml"
        with open(yaml_file_path, "r", encoding="utf-8") as file:
            data = yaml.safe_load(file)
        diameters = data["parameters"]["diameters"]

        dict_load = load_calculation(load, diameters, None)
        primary_air_consumption = dict_load["primary_air_consumption"]
        secondary_air_consumption = dict_load["secondary_air_consumption"]
        gas_inlet_consumption = dict_load["gas_inlet_consumption"]
        alpha = dict_load["alpha"]
        gas_consumption = dict_load["gas_consumption"]
        air_consumption = dict_load["air_consumption"]

        dict_recycling = recycling_calculation(alpha, gas_consumption, air_consumption, recycling_level)
        co2 = dict_recycling["CO2"]
        n2 = dict_recycling["N2"]
        h2o = dict_recycling["H2O"]
        o2 = dict_recycling["O2"]

        load_params = await create(
            LoadParameters,
            load=int(load),
            primary_air_consumption=primary_air_consumption,
            secondary_air_consumption=secondary_air_consumption,
            gas_inlet_consumption=gas_inlet_consumption
        )

        recycling_params = await create(
            RecyclingParameters,
            load_id=load_params.id,
            recycling_level=int(recycling_level),
            co2=co2,
            n2=n2,
            h2o=h2o,
            o2=o2
        )

        await update_exp(
            ExperimentParameters,
            id=experiment.id,
            updated_data={
                "load_id": load_params.id,
                "recycling_id": recycling_params.id
            }
        )

        return {
            "message": "Данные успешно обработаны и сохранены.",
            "load_parameters": load_params,
            "recycling_parameters": recycling_params
        }
    except HTTPException:
        # Bug fix: preserve the deliberate 404 above instead of letting the
        # blanket handler convert it into a generic 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}") from e

19
main.py
View File

@ -3,6 +3,7 @@ from fastapi import FastAPI, HTTPException, BackgroundTasks
from pyDOE3 import pbdesign, lhs from pyDOE3 import pbdesign, lhs
from db.csv_to_db import csv_to_db from db.csv_to_db import csv_to_db
from db.repositories import save_experiment_to_db
from network.routes import (ch_experimentdb_experiment_data_router, experiment_data_router, from network.routes import (ch_experimentdb_experiment_data_router, experiment_data_router,
experiment_parameters_router, experiment_category_router) experiment_parameters_router, experiment_category_router)
from network.routes import load_parameters_router, recycling_parameters_router from network.routes import load_parameters_router, recycling_parameters_router
@ -72,11 +73,11 @@ async def init_db_data(background_tasks: BackgroundTasks):
# "oxidizer_temp": [471, 493] # "oxidizer_temp": [471, 493]
# }, # },
# "count_exp": 1440, # "count_exp": 1440,
# "round_rules": [0, 1, 1, 0, 1, 1, 1] # "round_rules": [0, 1, 1, 0, 0, 0, 0]
# } # }
@app.post("/pyDOE3_screening_design") @app.post("/pyDOE3_screening_design")
def generate_screening_design(request: ExperimentParametersPyDOE3) -> List[Dict[str, float]]: async def generate_screening_design(request: ExperimentParametersPyDOE3) -> List[Dict[str, float]]:
param_ranges = request.param_ranges param_ranges = request.param_ranges
# Создаем screening design и масштабируем его # Создаем screening design и масштабируем его
@ -84,13 +85,17 @@ def generate_screening_design(request: ExperimentParametersPyDOE3) -> List[Dict[
screening_design = pbdesign(num_factors) screening_design = pbdesign(num_factors)
scaled_screening_design = scale_design(screening_design, param_ranges) scaled_screening_design = scale_design(screening_design, param_ranges)
# Преобразуем в DataFrame и возвращаем результат # Преобразуем в DataFrame
df_screening = pd.DataFrame(scaled_screening_design, columns=param_ranges.keys()) df_screening = pd.DataFrame(scaled_screening_design, columns=param_ranges.keys())
# Сохраняем результаты в базу данных
await save_experiment_to_db(df_screening)
return df_screening.to_dict(orient="records") return df_screening.to_dict(orient="records")
@app.post("/pyDOE3_lhs_design") @app.post("/pyDOE3_lhs_design")
def generate_lhs_design(request: ExperimentParametersPyDOE3) -> List[Dict[str, float]]: async def generate_lhs_design(request: ExperimentParametersPyDOE3) -> List[Dict[str, float]]:
param_ranges = request.param_ranges param_ranges = request.param_ranges
count_exp = request.count_exp count_exp = request.count_exp
round_rules = request.round_rules round_rules = request.round_rules
@ -103,6 +108,10 @@ def generate_lhs_design(request: ExperimentParametersPyDOE3) -> List[Dict[str, f
# Округляем значения # Округляем значения
round_scaled_lhs_samples = round_by_index(scaled_lhs_samples, round_rules) round_scaled_lhs_samples = round_by_index(scaled_lhs_samples, round_rules)
# Преобразуем в DataFrame и возвращаем результат # Преобразуем в DataFrame
df_lhs = pd.DataFrame(round_scaled_lhs_samples, columns=param_ranges.keys()) df_lhs = pd.DataFrame(round_scaled_lhs_samples, columns=param_ranges.keys())
# Сохраняем результаты в базу данных
await save_experiment_to_db(df_lhs)
return df_lhs.to_dict(orient="records") return df_lhs.to_dict(orient="records")

View File

@ -1,8 +1,11 @@
import yaml
from fastapi import APIRouter, HTTPException from fastapi import APIRouter, HTTPException
from scipy.stats import alpha
from db.crud import * from db.crud import *
from db.models import LoadParameters from db.models import LoadParameters
from db.repositories import get_exp_parameters_by_category, get_exp_parameters_by_exp_hash from db.repositories import get_exp_parameters_by_category, get_exp_parameters_by_exp_hash
from macros_generator import load_calculation, recycling_calculation
from network.schemas import ExperimentParametersBody from network.schemas import ExperimentParametersBody
router = APIRouter() router = APIRouter()
@ -81,10 +84,77 @@ async def get_experiment_parameters_by_exp_category(hash: str):
@router.delete('/{id}/delete') @router.delete('/{id}/delete')
async def delete_experiment_parameters(id: int): async def delete_experiment_parameters(id: int):
try: try:
is_deleted = await delete(LoadParameters, id) is_deleted = await delete(ExperimentParameters, id)
if is_deleted: if is_deleted:
return {"message": "Запись <ExperimentParameters> успешно удалена"} return {"message": "Запись <ExperimentParameters> успешно удалена"}
else: else:
return {"message": "Запись <ExperimentParameters> не найдена"} return {"message": "Запись <ExperimentParameters> не найдена"}
except Exception as e: except Exception as e:
raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}") raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}")
# @router.post('/process_and_save/{id}')  # was only needed for manual testing
async def process_and_save_experiment_data(id: int, load: float, recycling_level: float) -> dict:
    """Compute load/recycling parameters for an experiment and link them to it.

    NOTE(review): this appears to duplicate the same-named function in
    db.repositories — consider removing one copy.

    Reads pipe diameters from config.yaml, runs the macro calculations,
    stores LoadParameters and RecyclingParameters rows, then points the
    experiment's load_id/recycling_id at the new rows.

    Raises:
        HTTPException: 404 when the experiment does not exist,
            500 for any other processing/storage failure.
    """
    try:
        experiment = await get_by_id(ExperimentParameters, id)
        if experiment is None:
            raise HTTPException(status_code=404, detail=f"ExperimentParameters с id {id} не найден.")

        yaml_file_path = "config.yaml"
        with open(yaml_file_path, "r", encoding="utf-8") as file:
            data = yaml.safe_load(file)
        diameters = data["parameters"]["diameters"]

        dict_load = load_calculation(load, diameters, None)
        primary_air_consumption = dict_load["primary_air_consumption"]
        secondary_air_consumption = dict_load["secondary_air_consumption"]
        gas_inlet_consumption = dict_load["gas_inlet_consumption"]
        alpha = dict_load["alpha"]
        gas_consumption = dict_load["gas_consumption"]
        air_consumption = dict_load["air_consumption"]

        dict_recycling = recycling_calculation(alpha, gas_consumption, air_consumption, recycling_level)
        co2 = dict_recycling["CO2"]
        n2 = dict_recycling["N2"]
        h2o = dict_recycling["H2O"]
        o2 = dict_recycling["O2"]

        load_params = await create(
            LoadParameters,
            load=int(load),
            primary_air_consumption=primary_air_consumption,
            secondary_air_consumption=secondary_air_consumption,
            gas_inlet_consumption=gas_inlet_consumption
        )

        recycling_params = await create(
            RecyclingParameters,
            load_id=load_params.id,
            recycling_level=int(recycling_level),
            co2=co2,
            n2=n2,
            h2o=h2o,
            o2=o2
        )

        await update_exp(
            ExperimentParameters,
            id=experiment.id,
            updated_data={
                "load_id": load_params.id,
                "recycling_id": recycling_params.id
            }
        )

        return {
            "message": "Данные успешно обработаны и сохранены.",
            "load_parameters": load_params,
            "recycling_parameters": recycling_params
        }
    except HTTPException:
        # Bug fix: preserve the deliberate 404 above instead of letting the
        # blanket handler convert it into a generic 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"An error occurred: {str(e)}") from e