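"""Run a single STAR-CCM+ experiment for a given blade geometry, load and
flue gas recirculation level.

The script downloads the geometry from a FastAPI service, generates Java
macros via `macros_generator`, drives STAR-CCM+ in batch mode and stores the
resulting CSV data in ClickHouse and PostgreSQL. Paths and solver parameters
are read from config.yaml.

Example invocation (script name and argument values are hypothetical):

    python run_experiment.py 24 0.5 30 12 40 0.1
"""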
import argparse
import subprocess
import time
from contextlib import contextmanager
from pathlib import Path

import psutil
import requests
import yaml

import macros_generator as mg
import utils
from clickhouse_tools import ClickHouseClient
from postgres_tools import PostgresClient
from settings import settings

# Load configuration from config.yaml
with open('config.yaml', 'r') as config_file:
    config = yaml.safe_load(config_file)

STARCCM_PATH = config['paths']['starccm']
CHEMKIN_PATH = config['paths']['chemkin']
MAIN_PATH = config['paths']['main']

NUMBER_PROCESSES = config['parameters']['number_processes']
MESH_BASE_SIZE = config['parameters']['mesh_base_size']
STOPPING_CRITERION = config['parameters']['stopping_criterion']
DIAMETERS = config['parameters']['diameters']
DEFAULT_VALUES = config['parameters']['default_values']

API_URL = config['api_url']
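
# For reference, the config.yaml consumed above is expected to look roughly
# like the sketch below; the keys mirror the lookups, while the values are
# hypothetical placeholders:
#
#   paths:
#     starccm: /opt/Siemens/STAR-CCM+/bin/starccm+
#     chemkin: /data/chemkin
#     main: /data/experiments
#   parameters:
#     number_processes: 16
#     mesh_base_size: 0.01
#     stopping_criterion: 1000
#     diameters: ...        # structure consumed by macros_generator.load_calculation
#     default_values: ...
#   api_url: http://localhost:8000/geometry
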
def download_file_from_fastapi(api_url, params, full_file_name):
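    """POST `params` as JSON to `api_url` and write the response body to
    `full_file_name`; request errors are reported but not re-raised."""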
    try:
        response = requests.post(api_url, json=params)
        response.raise_for_status()
        with open(full_file_name, "wb") as f:
            f.write(response.content)
        print("File downloaded successfully.")
    except requests.RequestException as e:
        print(f"Failed to download file: {e}")


def terminate_process_by_name(process_name):
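    """Terminate every running process whose name matches `process_name`;
    processes that vanish, deny access or are zombies are only reported."""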
    for proc in psutil.process_iter(['pid', 'name']):
        try:
            if proc.info['name'] == process_name:
                proc.terminate()
                print(f"Process '{process_name}' with PID {proc.pid} was terminated.")
        except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess) as e:
            print(f"Failed to terminate process '{process_name}': {e}")


def create_directory(path):
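    """Create `path` (including parent directories) if it does not exist yet."""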
    Path(path).mkdir(parents=True, exist_ok=True)


@contextmanager
def remove_file_on_exit(file_path):
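    """Context manager that deletes `file_path` on exit if it exists,
    regardless of whether the wrapped block raised."""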
    try:
        yield
    finally:
        if file_path.exists():
            file_path.unlink()


def run_macros(macros_name, model_name=None, new_model=False, is_gpgpu=False):
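    """Run a STAR-CCM+ Java macro in batch mode via bash.

    With `new_model=True` a new simulation is created; otherwise the existing
    `model_name` simulation is opened. `is_gpgpu=True` switches the solver to
    a single process with automatic GPGPU selection.
    """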
    np_value = '1 -gpgpu auto' if is_gpgpu else str(NUMBER_PROCESSES)

    if new_model:
        macros_command = f"{STARCCM_PATH} -np {np_value} -new -batch '{macros_name}'"
    else:
        if model_name is None:
            raise ValueError("model_name must be provided if new_model is False")
        macros_command = f"{STARCCM_PATH} -np {np_value} '{model_name}' -batch '{macros_name}'"

    subprocess.run(["bash", "-c", macros_command], check=True)


def run_experiment(outer_blades_count, outer_blades_length, outer_blades_angle, middle_blades_count, load, recycling):
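    """Prepare the directory tree for one experiment and run it end to end.

    The pipeline: build the initial model (init_model.sim) from the downloaded
    geometry, derive the load model (load_<load>.sim), run the recycling case
    (recycling_<recycling>.sim), then push the resulting CSV tables and
    parameters to ClickHouse and PostgreSQL.
    """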
    main_path = Path(MAIN_PATH)
    create_directory(main_path)

    geometry_path = f"{outer_blades_count}_{outer_blades_length}_{outer_blades_angle}_{middle_blades_count}"
    geometry_path_full = main_path / geometry_path / 'geometry'
    create_directory(geometry_path_full)

    geometry_file_name = geometry_path_full / 'geometry.stp'

    general_macros_path = main_path / geometry_path / 'general_macros'
    create_directory(general_macros_path)

    model_path = main_path / geometry_path / 'model'
    create_directory(model_path)
    model_file = model_path / 'init_model.sim'

    experiments_path = main_path / geometry_path / 'experiments'
    load_path = experiments_path / str(load)
    load_parameters_path = load_path / 'parameters'
    load_macros_path = load_path / 'macros'
    load_model_path = load_path / 'model'
    recycling_path = load_path / str(recycling)
    recycling_macros_path = recycling_path / 'macros'

    for directory in [experiments_path, load_path, load_parameters_path, load_macros_path, load_model_path,
                      recycling_path, recycling_macros_path]:
        create_directory(directory)

    load_parameters_file = load_parameters_path / f"load_{load}_parameters.yaml"
    load_model_file = load_model_path / f"load_{load}.sim"
    exp_file = recycling_path / f"recycling_{recycling}.sim"

    # Build the initial model if init_model.sim does not exist yet
    if not model_file.exists():
        download_file_from_fastapi(API_URL, {"N1": outer_blades_count,
                                             "L1": outer_blades_length,
                                             "a1": outer_blades_angle,
                                             "N2": middle_blades_count},
                                   geometry_file_name)
        prc_macros_file = general_macros_path / 'preprocess_macro.java'
        model_parameters = {
            'geometry_path': geometry_file_name,
            'chemkin_path': CHEMKIN_PATH,
            'init_model_folder': model_path,
            'bladeCount': middle_blades_count,
            'mesh_base_size': MESH_BASE_SIZE
        }
        mg.preprocessor_settings(prc_macros_file, model_parameters, model_file)
        run_macros(prc_macros_file, new_model=True)

    if not load_parameters_file.exists():
        fuel_parameters = mg.load_calculation(float(load), DIAMETERS)
        with open(load_parameters_file, 'w') as fuel_dict_file:
            yaml.dump(fuel_parameters, fuel_dict_file, default_flow_style=False, allow_unicode=True)
    else:
        with open(load_parameters_file, 'r') as fuel_dict_file:
            fuel_parameters = yaml.safe_load(fuel_dict_file)

    # Build the load model if load_{load}.sim does not exist yet
    if not load_model_file.exists():
        fuel_macros_file = load_macros_path / 'fuel_macro.java'
        mg.fuel_settings(fuel_macros_file, fuel_parameters, load_model_file)
        run_macros(fuel_macros_file, model_file)

    # recycling_parameters are needed for the database inserts below even when
    # the experiment file already exists, so compute them unconditionally
    recycling_parameters = mg.recycling_calculation(
        fuel_parameters['alpha'], fuel_parameters['gas_consumption'], fuel_parameters['air_consumption'],
        float(recycling))

    # Run the experiment if recycling_{recycling}.sim does not exist yet
    if not exp_file.exists():
        rec_macros_file = recycling_macros_path / 'recycle_macro.java'
        run_macros_file = recycling_macros_path / 'run_macros.java'
        solver_parameters = {
            'experiment_path': recycling_path,
            'stopping_criterion': STOPPING_CRITERION
        }
        mg.fgm_table_settings(rec_macros_file, recycling_parameters, exp_file)
        mg.setting_and_running_solver(run_macros_file, solver_parameters, exp_file)
        run_macros(rec_macros_file, load_model_file)
        run_macros(run_macros_file, exp_file, is_gpgpu=True)

    experiment_parameters = {
        'outer_blades_count': int(float(outer_blades_count)),
        'outer_blades_length': outer_blades_length,
        'outer_blades_angle': outer_blades_angle,
        'middle_blades_count': int(float(middle_blades_count)),
        'load': float(load),
        'recycling': float(recycling),
    }

    fields_to_select = ['primary_air_consumption', 'secondary_air_consumption', 'gas_inlet_consumption']

    load_parameters = {key: fuel_parameters[key] for key in fields_to_select}
    load_parameters['load'] = float(load)

    recycling_parameters['load'] = float(load)
    recycling_parameters['recycling_level'] = float(recycling)

    plot_csv = recycling_path / 'plot.csv'
    table_csv = recycling_path / 'data_table.csv'

    clickhouse_client = ClickHouseClient("localhost", 8123, settings.DATABASE, settings.CLICKHOUSE_USER,
                                         settings.CLICKHOUSE_PASSWORD)
    db = PostgresClient(
        dbname=settings.DATABASE,
        user=settings.POSTGRES_USER,
        password=settings.POSTGRES_PASSWORD,
        host="localhost",
        port="5432"
    )

    file_id = utils.calculate_hash(experiment_parameters)

    try:
        clickhouse_client.save_csv_to_clickhouse(table_csv, file_id)
        print("Clickhouse saved successfully")

        load_id = db.insert_load_parameters(load_parameters)
        recycling_id = db.insert_recycling_parameters(recycling_parameters, load_id)
        db.insert_experiment_parameters(experiment_parameters, load_id, recycling_id, file_id)
        db.connection.commit()
        db.save_csv_to_postgres(plot_csv, file_id)
        print("Postgres saved successfully")
    finally:
        db.close()

    # Remove the STAR-CCM+ backup file (*.sim~) left next to the experiment file, if any
    with remove_file_on_exit(exp_file.with_suffix(".sim~")):
        pass


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Run experiments for the ГМУ-45 power unit")
    parser.add_argument("outer_blades_count", type=str, help="Number of blades in the outer contour")
    parser.add_argument("outer_blades_length", type=str, help="Width of the outer-contour blades")
    parser.add_argument("outer_blades_angle", type=str, help="Tilt angle of the outer-contour blades")
    parser.add_argument("middle_blades_count", type=str, help="Number of blades in the middle contour")
    parser.add_argument("load", type=str, help="Steam load")
    parser.add_argument("recycling", type=str, help="Flue gas recirculation level")

    args = parser.parse_args()

    run_experiment(
        args.outer_blades_count,
        args.outer_blades_length,
        args.outer_blades_angle,
        args.middle_blades_count,
        args.load,
        args.recycling
    )