Update dev release

This commit is contained in:
Bazunov Andrew Igorevich 2024-12-11 13:41:58 +04:00
parent 9bc9c5553b
commit 9d3c1e7af5
7 changed files with 28 additions and 404612 deletions

File diff suppressed because it is too large.

View File

@ -20,6 +20,7 @@ async def get_exp_parameters_by_category(category_id: int) -> Sequence[Experimen
) )
return result.scalars().all() return result.scalars().all()
async def get_exp_parameters_by_exp_hash(exp_hash: str) -> Sequence[ExperimentParameters]: async def get_exp_parameters_by_exp_hash(exp_hash: str) -> Sequence[ExperimentParameters]:
async with async_session_postgres() as session: async with async_session_postgres() as session:
result = await session.execute( result = await session.execute(
@ -34,7 +35,7 @@ def generate_experiment_hash(data: dict) -> str:
return hashlib.sha256(hash_input.encode()).hexdigest() return hashlib.sha256(hash_input.encode()).hexdigest()
async def save_experiment_to_db(df: pd.DataFrame): async def save_experiment_to_db(df: pd.DataFrame, experiment_category_id: int):
for _, row in df.iterrows(): for _, row in df.iterrows():
try: try:
# Преобразуем load и recycling_level в соответствующие id # Преобразуем load и recycling_level в соответствующие id
@ -42,7 +43,7 @@ async def save_experiment_to_db(df: pd.DataFrame):
recycling = int(row['recycling_level']) recycling = int(row['recycling_level'])
# Генерация хеша для experiment_hash # Генерация хеша для experiment_hash
experiment_hash = generate_experiment_hash(row) experiment_hash = generate_experiment_hash(row.to_dict())
exp = await create( exp = await create(
ExperimentParameters, ExperimentParameters,
@ -50,10 +51,11 @@ async def save_experiment_to_db(df: pd.DataFrame):
outer_blades_length=float(row['outer_blades_length']), outer_blades_length=float(row['outer_blades_length']),
outer_blades_angle=float(row['outer_blades_angle']), outer_blades_angle=float(row['outer_blades_angle']),
middle_blades_count=int(row['middle_blades_count']), middle_blades_count=int(row['middle_blades_count']),
load_id= None, load_id=None,
recycling_id=None, recycling_id=None,
experiment_hash=experiment_hash, experiment_hash=experiment_hash,
oxidizer_temp=float(row['oxidizer_temp']) oxidizer_temp=float(row['oxidizer_temp']),
experiment_category_id=experiment_category_id
) )
await process_and_save_experiment_data(exp.id, load, recycling) await process_and_save_experiment_data(exp.id, load, recycling)
@ -61,6 +63,7 @@ async def save_experiment_to_db(df: pd.DataFrame):
print(f"Ошибка при сохранении данных: {e}") print(f"Ошибка при сохранении данных: {e}")
raise HTTPException(status_code=500, detail=f"Ошибка при сохранении данных: {e}") raise HTTPException(status_code=500, detail=f"Ошибка при сохранении данных: {e}")
async def process_and_save_experiment_data(id: int, load: float, recycling_level: float) -> dict: async def process_and_save_experiment_data(id: int, load: float, recycling_level: float) -> dict:
try: try:
experiment = await get_by_id(ExperimentParameters, id) experiment = await get_by_id(ExperimentParameters, id)

View File

@ -1,4 +1,3 @@
services: services:
db: db:
image: postgres image: postgres
@ -8,7 +7,7 @@ services:
POSTGRES_USER: ${POSTGRES_USER} POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
ports: ports:
- "5432:5432" - ${DB_PORT}
volumes: volumes:
- postgres_data:/var/lib/postgresql/data - postgres_data:/var/lib/postgresql/data

15
main.py
View File

@ -1,5 +1,6 @@
import pandas as pd import pandas as pd
from fastapi import FastAPI, HTTPException, BackgroundTasks from fastapi import FastAPI, HTTPException, BackgroundTasks
from fastapi.middleware.cors import CORSMiddleware
from pyDOE3 import pbdesign, lhs from pyDOE3 import pbdesign, lhs
from db.csv_to_db import csv_to_db from db.csv_to_db import csv_to_db
@ -13,6 +14,14 @@ from new_experiment_planner_pyDOE3 import scale_design, scale_design_lhs, round_
app = FastAPI() app = FastAPI()
app.add_middleware(
CORSMiddleware,
allow_origins=['*'],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
app.include_router(ch_experimentdb_experiment_data_router.router, app.include_router(ch_experimentdb_experiment_data_router.router,
prefix="/ch_experimentdb_experiment_data", prefix="/ch_experimentdb_experiment_data",
tags=["ch_experimentdb_experiment_data"]) tags=["ch_experimentdb_experiment_data"])
@ -79,6 +88,7 @@ async def init_db_data(background_tasks: BackgroundTasks):
@app.post("/pyDOE3_screening_design") @app.post("/pyDOE3_screening_design")
async def generate_screening_design(request: ExperimentParametersPyDOE3) -> List[Dict[str, float]]: async def generate_screening_design(request: ExperimentParametersPyDOE3) -> List[Dict[str, float]]:
param_ranges = request.param_ranges param_ranges = request.param_ranges
category = request.category
# Создаем screening design и масштабируем его # Создаем screening design и масштабируем его
num_factors = len(param_ranges) num_factors = len(param_ranges)
@ -89,7 +99,7 @@ async def generate_screening_design(request: ExperimentParametersPyDOE3) -> List
df_screening = pd.DataFrame(scaled_screening_design, columns=param_ranges.keys()) df_screening = pd.DataFrame(scaled_screening_design, columns=param_ranges.keys())
# Сохраняем результаты в базу данных # Сохраняем результаты в базу данных
await save_experiment_to_db(df_screening) await save_experiment_to_db(df_screening, category)
return df_screening.to_dict(orient="records") return df_screening.to_dict(orient="records")
@ -99,6 +109,7 @@ async def generate_lhs_design(request: ExperimentParametersPyDOE3) -> List[Dict[
param_ranges = request.param_ranges param_ranges = request.param_ranges
count_exp = request.count_exp count_exp = request.count_exp
round_rules = request.round_rules round_rules = request.round_rules
category = request.category
# Создаем lhs design и масштабируем его # Создаем lhs design и масштабируем его
num_factors = len(param_ranges) num_factors = len(param_ranges)
@ -112,6 +123,6 @@ async def generate_lhs_design(request: ExperimentParametersPyDOE3) -> List[Dict[
df_lhs = pd.DataFrame(round_scaled_lhs_samples, columns=param_ranges.keys()) df_lhs = pd.DataFrame(round_scaled_lhs_samples, columns=param_ranges.keys())
# Сохраняем результаты в базу данных # Сохраняем результаты в базу данных
await save_experiment_to_db(df_lhs) await save_experiment_to_db(df_lhs, category)
return df_lhs.to_dict(orient="records") return df_lhs.to_dict(orient="records")

View File

@ -21,7 +21,8 @@ async def create_experiment_parameters(body: ExperimentParametersBody):
middle_blades_count=body.middle_blades_count, middle_blades_count=body.middle_blades_count,
load_id=body.load_id, load_id=body.load_id,
recycling_id=body.recycling_id, recycling_id=body.recycling_id,
experiment_hash=body.experiment_hash experiment_hash=body.experiment_hash,
experiment_category_id=body.experiment_category_id
) )
return {"message": "Новая запись <ExperimentParameters> успешно добавлена"} return {"message": "Новая запись <ExperimentParameters> успешно добавлена"}

View File

@ -2,10 +2,14 @@ from typing import Optional, Dict, Tuple, List
from pydantic import BaseModel, ConfigDict from pydantic import BaseModel, ConfigDict
class ExperimentParametersPyDOE3(BaseModel): class ExperimentParametersPyDOE3(BaseModel):
param_ranges: Dict[str, Tuple[float, float]] param_ranges: Dict[str, Tuple[float, float]]
category: int
count_exp: int count_exp: int
round_rules: List[int] round_rules: List[int]
category: int
class ExperimentParameters(BaseModel): class ExperimentParameters(BaseModel):
outer_blades_count: str outer_blades_count: str
@ -53,6 +57,7 @@ class ExperimentParametersBody(BaseModel):
load_id: Optional[int] load_id: Optional[int]
recycling_id: Optional[int] recycling_id: Optional[int]
experiment_hash: str experiment_hash: str
experiment_category_id: int
class LoadParametersBody(BaseModel): class LoadParametersBody(BaseModel):

Binary file not shown.