modified_db #2

Merged
Sosees04ka merged 6 commits from modified_db into main 2024-11-30 01:45:57 +04:00
2 changed files with 49 additions and 5 deletions
Showing only changes of commit 3002be30dd

db/repositories.py

@@ -1,7 +1,11 @@
import hashlib
from typing import Sequence
import pandas as pd
from fastapi import HTTPException
from sqlalchemy.future import select
from db.crud import create
from db.models.experiment_parameters_model import ExperimentParameters
from db.postgres_db_connection import async_session_postgres
@@ -18,4 +22,35 @@ async def get_exp_parameters_by_exp_hash(exp_hash: str) -> Sequence[ExperimentParameters]:
        result = await session.execute(
            select(ExperimentParameters).where(ExperimentParameters.experiment_hash == exp_hash)
        )
        return result.scalars().all()

def generate_experiment_hash(data: dict) -> str:
    """Generate a unique hash based on the experiment data."""
    hash_input = f"{data['outer_blades_count']}_{data['outer_blades_length']}_{data['outer_blades_angle']}_{data['middle_blades_count']}_{data['load']}_{data['recycling_level']}"
    return hashlib.sha256(hash_input.encode()).hexdigest()


async def save_experiment_to_db(df: pd.DataFrame):
    for _, row in df.iterrows():
        try:
            # Convert load and recycling_level to the corresponding ids
            load_id = int(row['load'])
            recycling_id = int(row['recycling_level'])

            # Generate the hash for experiment_hash
            experiment_hash = generate_experiment_hash(row)

            await create(
                ExperimentParameters,
                outer_blades_count=int(row['outer_blades_count']),
                outer_blades_length=float(row['outer_blades_length']),
                outer_blades_angle=float(row['outer_blades_angle']),
                middle_blades_count=int(row['middle_blades_count']),
                load_id=load_id,
                recycling_id=recycling_id,
                experiment_hash=experiment_hash
            )
        except Exception as e:
            print(f"Error while saving data: {e}")
            raise HTTPException(status_code=500, detail=f"Error while saving data: {e}")
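
For context, here is a minimal sketch of driving the new save_experiment_to_db helper by hand. The import path comes from main.py in this PR and the column names are the ones read by generate_experiment_hash; the sample values and the asyncio.run driver are purely illustrative and assume a reachable database, they are not part of this change.

import asyncio

import pandas as pd

from db.repositories import save_experiment_to_db

# One illustrative design point; column names match what generate_experiment_hash reads.
df = pd.DataFrame([{
    "outer_blades_count": 8,
    "outer_blades_length": 44.0,
    "outer_blades_angle": 30.0,
    "middle_blades_count": 4,
    "load": 40,
    "recycling_level": 20,
}])

# save_experiment_to_db is a coroutine, so it needs an event loop to run.
asyncio.run(save_experiment_to_db(df))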

main.py

@@ -3,6 +3,7 @@ from fastapi import FastAPI, HTTPException, BackgroundTasks
from pyDOE3 import pbdesign, lhs
from db.csv_to_db import csv_to_db
from db.repositories import save_experiment_to_db
from network.routes import (ch_experimentdb_experiment_data_router, experiment_data_router,
experiment_parameters_router, experiment_category_router)
from network.routes import load_parameters_router, recycling_parameters_router
@@ -76,7 +77,7 @@ async def init_db_data(background_tasks: BackgroundTasks):
# }
@app.post("/pyDOE3_screening_design")
def generate_screening_design(request: ExperimentParametersPyDOE3) -> List[Dict[str, float]]:
async def generate_screening_design(request: ExperimentParametersPyDOE3) -> List[Dict[str, float]]:
    param_ranges = request.param_ranges

    # Create the screening design and scale it
@@ -84,13 +85,17 @@ def generate_screening_design(request: ExperimentParametersPyDOE3) -> List[Dict[str, float]]:
    screening_design = pbdesign(num_factors)
    scaled_screening_design = scale_design(screening_design, param_ranges)

    # Convert to a DataFrame and return the result
    # Convert to a DataFrame
    df_screening = pd.DataFrame(scaled_screening_design, columns=param_ranges.keys())

    # Save the results to the database
    await save_experiment_to_db(df_screening)

    return df_screening.to_dict(orient="records")
@app.post("/pyDOE3_lhs_design")
def generate_lhs_design(request: ExperimentParametersPyDOE3) -> List[Dict[str, float]]:
async def generate_lhs_design(request: ExperimentParametersPyDOE3) -> List[Dict[str, float]]:
    param_ranges = request.param_ranges
    count_exp = request.count_exp
    round_rules = request.round_rules
@@ -103,6 +108,10 @@ def generate_lhs_design(request: ExperimentParametersPyDOE3) -> List[Dict[str, float]]:
    # Round the values
    round_scaled_lhs_samples = round_by_index(scaled_lhs_samples, round_rules)
    # Convert to a DataFrame and return the result
    # Convert to a DataFrame
    df_lhs = pd.DataFrame(round_scaled_lhs_samples, columns=param_ranges.keys())

    # Save the results to the database
    await save_experiment_to_db(df_lhs)

    return df_lhs.to_dict(orient="records")
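
Finally, a rough sketch of exercising the updated screening endpoint from a client. The field name param_ranges comes from the handler code above (count_exp and round_rules are additionally required by the LHS endpoint), but the exact shape of the values ([min, max] pairs) and the local server address on the default port are assumptions, not something this diff confirms.

import httpx

# Assumed payload shape: each parameter maps to a [min, max] range.
payload = {
    "param_ranges": {
        "outer_blades_count": [3, 12],
        "outer_blades_length": [10.0, 50.0],
        "outer_blades_angle": [15.0, 60.0],
        "middle_blades_count": [2, 8],
        "load": [20, 80],
        "recycling_level": [0, 50],
    }
}

# With this PR, the endpoint persists the generated design via save_experiment_to_db
# before returning it as a list of records.
resp = httpx.post("http://localhost:8000/pyDOE3_screening_design", json=payload, timeout=60)
resp.raise_for_status()
print(resp.json())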