commit project
parent b08617960e
commit 3b0a993c12
164 .gitignore vendored Normal file
@@ -0,0 +1,164 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
.idea/

credentials.json
14 README.md
@@ -1,10 +1,6 @@
# PIbd-42_SSPR
# Running

An extension of the automated system for planning and running numerical simulations of fuel combustion in the burner devices of power plants; it is intended to streamline numerical experiments on a digital model of a burner device in order to find the most economical and lowest-emission operating modes.

# Participants
1. Кашин Максим
2. Клюшенкова Ксения
3. Базунов Андрей
4. Жимолостнова Анна
5. Цуканова Ирина
1. Create a venv
2. Activate the virtual environment
3. Install the dependencies from requirements.txt
4. Start the service with uvicorn app.main:app --host 127.0.0.1 --port 8000
0 app/__init__.py Normal file
0 app/api/__init__.py Normal file
67 app/api/endpoints.py Normal file
@@ -0,0 +1,67 @@
from fastapi import APIRouter, HTTPException, BackgroundTasks
from fastapi.responses import FileResponse
from pydantic import BaseModel
from app.services.geom_service import create_geometry
from app.services.hash_generator import generate_hash
from app.services.minio_service import check_file_exists_minio, upload_to_minio, get_file_from_minio
from app.db.utils import save_params_to_db
from app.core.constants import MINIO_BUCKET_NAME, DEFAULT_VALUES
from app.core.config import settings
from .utils import remove_file

import os

router = APIRouter()


class GeomParams(BaseModel):
    N1: float = DEFAULT_VALUES["N1"]
    L1: float = DEFAULT_VALUES["L1"]
    a1: float = DEFAULT_VALUES["a1"]
    N2: float = DEFAULT_VALUES["N2"]
    L2: float = DEFAULT_VALUES["L2"]
    N3: float = DEFAULT_VALUES["N3"]
    L3: float = DEFAULT_VALUES["L3"]


async def generate_geom(params: GeomParams):
    params_dict = params.model_dump()
    file_id = generate_hash(params_dict)
    file_name = file_id + ".stp"

    if check_file_exists_minio(MINIO_BUCKET_NAME, file_name):
        print(f"File {file_name} already exists in MinIO; returning it.")
        temp_path = os.path.join(settings.current_directory, "tmp/")
        if not os.path.exists(temp_path):
            os.makedirs(temp_path)
        temp_file_path = os.path.join(temp_path, file_name)
        get_file_from_minio(MINIO_BUCKET_NAME, file_name, temp_file_path)
        return temp_file_path

    file_path = create_geometry(params_dict, file_name)
    upload_to_minio(MINIO_BUCKET_NAME, file_path, file_name)
    save_params_to_db(params_dict, file_name)

    return file_path


@router.post("/generate_geom")
async def generate_geom_endpoint(params: GeomParams, background_tasks: BackgroundTasks):
    file_path = await generate_geom(params)
    background_tasks.add_task(remove_file, file_path)
    return FileResponse(file_path, media_type='application/octet-stream', filename=os.path.basename(file_path))


@router.get("/check_geom/{file_name}")
async def check_geom(file_name: str):
    if check_file_exists_minio(MINIO_BUCKET_NAME, file_name):
        return {"status": "exists"}
    return {"status": "not found"}


@router.post("/get_file_name")
async def get_file_name(params: GeomParams):
    params_dict = params.model_dump()
    file_id = generate_hash(params_dict)
    file_name = file_id + ".stp"
    return {"file_name": file_name}
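For illustration only (not part of the commit), a client could call these endpoints as in the sketch below; it assumes the service is running locally on port 8000 with the /api/v1 prefix from app/main.py, and that the requests package is installed (it is not listed in requirements.txt).

import requests

params = {"N1": 24.0, "L1": 70.0, "a1": 60.0, "N2": 18.0, "L2": 105.0, "N3": 9.0, "L3": 29.6}

# POST the blade parameters; the response body is the generated .stp file.
resp = requests.post("http://127.0.0.1:8000/api/v1/generate_geom", json=params)
resp.raise_for_status()
with open("burner_geometry.stp", "wb") as f:
    f.write(resp.content)

# Ask which object name these parameters map to, then check whether it is cached in MinIO.
file_name = requests.post("http://127.0.0.1:8000/api/v1/get_file_name", json=params).json()["file_name"]
print(requests.get(f"http://127.0.0.1:8000/api/v1/check_geom/{file_name}").json())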
6 app/api/utils.py Normal file
@@ -0,0 +1,6 @@
import os


def remove_file(file_path: str):
    if os.path.exists(file_path):
        os.remove(file_path)
0 app/core/__init__.py Normal file
17 app/core/config.py Normal file
@@ -0,0 +1,17 @@
import os
from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
    POSTGRES_USER: str
    POSTGRES_PASSWORD: str
    DATABASE_URL: str
    MINIO_ROOT_USER: str
    MINIO_ROOT_PASSWORD: str
    current_directory: str = os.getcwd()
    print(current_directory)  # printed once at import time, when the class body is evaluated

    model_config = SettingsConfigDict(env_file=".env")


settings = Settings()
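Settings reads its values from a .env file, which the .gitignore above keeps out of version control. A minimal sketch of such a file with placeholder credentials; the host port 5436 and the database name geometrydb are assumptions taken from the docker-compose.yml in this commit.

POSTGRES_USER=postgres
POSTGRES_PASSWORD=change-me
DATABASE_URL=postgresql://postgres:change-me@127.0.0.1:5436/geometrydb
MINIO_ROOT_USER=minioadmin
MINIO_ROOT_PASSWORD=change-me-too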
11 app/core/constants.py Normal file
@@ -0,0 +1,11 @@
DEFAULT_VALUES = {
    "N1": 24.0,
    "L1": 70.0,
    "a1": 60.0,
    "N2": 18.0,
    "L2": 105.0,
    "N3": 9.0,
    "L3": 29.6
}

MINIO_BUCKET_NAME = "geom-bucket"
0 app/db/__init__.py Normal file
29 app/db/connection.py Normal file
@@ -0,0 +1,29 @@
from psycopg_pool import ConnectionPool
from contextlib import contextmanager
from app.core.config import settings

connection_pool = ConnectionPool(settings.DATABASE_URL)


@contextmanager
def get_db_connection():
    conn = connection_pool.getconn()
    try:
        yield conn
    finally:
        connection_pool.putconn(conn)


@contextmanager
def get_db_cursor(commit=False):
    with get_db_connection() as conn:
        cursor = conn.cursor()
        try:
            yield cursor
            if commit:
                conn.commit()
        except Exception as e:
            conn.rollback()
            raise e
        finally:
            cursor.close()
39 app/db/utils.py Normal file
@@ -0,0 +1,39 @@
from .connection import get_db_cursor


def initialize_database():
    with get_db_cursor(commit=True) as cursor:
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS geom_params (
                id SERIAL PRIMARY KEY,
                count_blades_in_outer_contour int,
                width_blades_in_outer_contour numeric(5,2),
                angle_blades_in_outer_contour numeric(5,2),
                count_blades_in_middle_contour int,
                width_blades_in_middle_contour numeric(5,2),
                count_blades_in_fuel_contour int,
                width_blades_in_fuel_contour numeric(5,2),
                file_name varchar(105)
            );
        """)


def save_params_to_db(params, file_name):
    with get_db_cursor(commit=True) as cur:
        cur.execute("SELECT 1 FROM geom_params WHERE file_name = %s", (file_name,))
        if not cur.fetchone():
            cur.execute("""
                INSERT INTO geom_params (count_blades_in_outer_contour, width_blades_in_outer_contour,
                                         angle_blades_in_outer_contour, count_blades_in_middle_contour,
                                         width_blades_in_middle_contour, count_blades_in_fuel_contour,
                                         width_blades_in_fuel_contour, file_name)
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
            """, (
                params.get('N1'),
                params.get('L1'),
                params.get('a1'),
                params.get('N2'),
                params.get('L2'),
                params.get('N3'),
                params.get('L3'),
                file_name
            ))
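For illustration only, a hypothetical read-back helper built on the same cursor context manager; it is not part of the commit, but shows how a stored parameter row could be looked up by the file name that save_params_to_db writes.

from app.db.connection import get_db_cursor


def get_params_by_file_name(file_name: str):
    # Hypothetical helper: returns the stored blade parameters for a generated file, or None.
    with get_db_cursor() as cur:
        cur.execute(
            "SELECT count_blades_in_outer_contour, width_blades_in_outer_contour, "
            "angle_blades_in_outer_contour, count_blades_in_middle_contour, "
            "width_blades_in_middle_contour, count_blades_in_fuel_contour, "
            "width_blades_in_fuel_contour FROM geom_params WHERE file_name = %s",
            (file_name,),
        )
        return cur.fetchone()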
23 app/main.py Normal file
@@ -0,0 +1,23 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from .db.utils import initialize_database
from .api.endpoints import router as geom_router

app = FastAPI()

origins = [
    "http://localhost:8000",
    "http://localhost:3000",
]

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

initialize_database()

app.include_router(geom_router, prefix="/api/v1")
0 app/services/__init__.py Normal file
92 app/services/geom_service.py Normal file
@@ -0,0 +1,92 @@
from typing import Optional, Dict, Any
import os

from win32com.client import gencache
from win32com.client import Dispatch
import pythoncom

from app.core.config import settings


def create_geometry(params: Dict[str, Any], file_name: str):
    pythoncom.CoInitializeEx(0)

    geom_path = os.path.join(settings.current_directory, "model_3d/Поток.m3d")

    # Validate the keys in the request
    valid_keys = {'N1', 'L1', 'a1', 'N2', 'L2', 'N3', 'L3'}
    list_collms = []
    values = []
    for key in params:
        if key not in valid_keys and key not in {"plan_title"}:
            raise ValueError(f"Invalid parameter: {key}")
        if key in valid_keys:
            list_collms.append(key)
            values.append(params[key])

    # Generate a unique file name from the parameters
    # file_name = generate_file_name(params)

    def get_kompas_api7():
        module = gencache.EnsureModule("{69AC2981-37C0-4379-84FD-5DD2F3C0A520}", 0, 1, 0)
        api = module.IKompasAPIObject(
            Dispatch("Kompas.Application.7")._oleobj_.QueryInterface(module.IKompasAPIObject.CLSID,
                                                                     pythoncom.IID_IDispatch))
        const = gencache.EnsureModule("{75C9F5D0-B5B8-4526-8681-9903C567D2ED}", 0, 1, 0).constants
        return module, api, const

    module7, api7, const7 = get_kompas_api7()  # Connect to API7
    app7 = api7.Application  # Get the main application interface
    app7.Visible = True  # Show the application window to the user (if it is hidden)
    app7.HideMessage = const7.ksHideMessageNo  # Answer "No" to any prompts from the application
    print(app7.ApplicationName(FullName=True))  # Print the application name

    doc7 = app7.Documents.Open(PathName=geom_path,
                               Visible=True,
                               ReadOnly=True)

    kompas6_constants = gencache.EnsureModule("{75C9F5D0-B5B8-4526-8681-9903C567D2ED}", 0, 1, 0).constants
    kompas6_constants_3d = gencache.EnsureModule("{2CAF168C-7961-4B90-9DA2-701419BEEFE3}", 0, 1, 0).constants

    # Load the API5 interface definitions
    kompas6_api5_module = gencache.EnsureModule("{0422828C-F174-495E-AC5D-D31014DBBE87}", 0, 1, 0)
    kompas_object = kompas6_api5_module.KompasObject(
        Dispatch("Kompas.Application.5")._oleobj_.QueryInterface(kompas6_api5_module.KompasObject.CLSID,
                                                                 pythoncom.IID_IDispatch))

    # Load the API7 interface definitions
    kompas_api7_module = gencache.EnsureModule("{69AC2981-37C0-4379-84FD-5DD2F3C0A520}", 0, 1, 0)
    application = kompas_api7_module.IApplication(
        Dispatch("Kompas.Application.7")._oleobj_.QueryInterface(kompas_api7_module.IApplication.CLSID,
                                                                 pythoncom.IID_IDispatch))

    Documents = application.Documents
    # Get the active document
    kompas_document = application.ActiveDocument
    kompas_document_3d = kompas_api7_module.IKompasDocument3D(kompas_document)
    iDocument3D = kompas_object.ActiveDocument3D()

    kPart = iDocument3D.GetPart(kompas6_constants_3d.pTop_Part)

    varcoll = kPart.VariableCollection()
    varcoll.refresh()

    for i in range(len(values)):
        Variable = varcoll.GetByName(list_collms[i], True, True)
        Variable.value = values[i]

    kPart.RebuildModel()
    # Rebuild (redraw) the document
    iDocument3D.RebuildDocument()

    generated_files_path = os.path.join(settings.current_directory, "generated_files/")
    if not os.path.exists(generated_files_path):
        os.makedirs(generated_files_path)

    save_path = os.path.join(generated_files_path, file_name)

    kompas_document.SaveAs(save_path)
    kompas_document.Close(True)

    return save_path
8 app/services/hash_generator.py Normal file
@@ -0,0 +1,8 @@
import hashlib
import json


def generate_hash(params):
    params_string = json.dumps(params, sort_keys=True)
    unique_hash = hashlib.md5(params_string.encode()).hexdigest()
    return unique_hash
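A short usage sketch (not part of the commit): the MD5 of a key-sorted JSON dump of the parameters is what both /generate_geom and /get_file_name use as the MinIO object name, so identical parameter sets always map to the same .stp file.

from app.core.constants import DEFAULT_VALUES
from app.services.hash_generator import generate_hash

# Deterministic: the same parameter dict always yields the same object name.
file_name = generate_hash(DEFAULT_VALUES) + ".stp"
print(file_name)  # 32 hex characters followed by ".stp"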
50 app/services/minio_service.py Normal file
@@ -0,0 +1,50 @@
from minio import Minio
from minio.error import S3Error
import json

with open('credentials.json', 'r', encoding='utf-8') as file:
    data = json.load(file)

minio_client = Minio(
    "127.0.0.1:9000",
    access_key=data['accessKey'],
    secret_key=data['secretKey'],
    secure=False
)


def upload_to_minio(bucket_name, file_path, object_name):
    try:
        if not minio_client.bucket_exists(bucket_name):
            minio_client.make_bucket(bucket_name)
        minio_client.fput_object(bucket_name, object_name, file_path)
        print(f"'{file_path}' is successfully uploaded as '{object_name}' to bucket '{bucket_name}'.")
    except S3Error as e:
        print("S3 Error:", e)
    except Exception as e:
        print("Error:", e)


def check_file_exists_minio(bucket_name, object_name):
    try:
        minio_client.stat_object(bucket_name, object_name)
        return True
    except S3Error as e:
        print("S3 Error:", e)
        return False
    except Exception as e:
        print("Error:", e)
        return False


def get_file_from_minio(bucket_name, object_name, file_path):
    try:
        minio_client.fget_object(bucket_name, object_name, file_path)
        print(f"'{object_name}' is successfully downloaded to '{file_path}'.")
        return file_path
    except S3Error as e:
        print("S3 Error:", e)
        raise
    except Exception as e:
        print("Error:", e)
        raise
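This module expects a credentials.json in the working directory, which the .gitignore above keeps out of version control. A minimal sketch of how such a file could be produced, with placeholder values; the accessKey/secretKey pair is an assumption and would normally be an access key created in the MinIO console (exposed on port 9001 in docker-compose.yml).

import json

# Placeholder values only: substitute an access key pair created in the MinIO console.
credentials = {"accessKey": "YOUR_MINIO_ACCESS_KEY", "secretKey": "YOUR_MINIO_SECRET_KEY"}
with open("credentials.json", "w", encoding="utf-8") as f:
    json.dump(credentials, f, indent=2)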
39 docker-compose.yml Normal file
@@ -0,0 +1,39 @@
services:
  minio:
    image: quay.io/minio/minio
    container_name: minio
    ports:
      - "9000:9000"
      - "9001:9001"
    environment:
      MINIO_ROOT_USER: ${MINIO_ROOT_USER}
      MINIO_ROOT_PASSWORD: ${MINIO_ROOT_PASSWORD}
    volumes:
      - minio_data:/data
    command: server /data --console-address ":9001"

  db:
    image: postgres
    container_name: geometry_db
    ports:
      - "5436:5432"
    environment:
      POSTGRES_DB: geometrydb
      POSTGRES_USER: ${POSTGRES_USER}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
    volumes:
      - postgres_data:/var/lib/postgresql/data

volumes:
  postgres_data:
    driver: local
    driver_opts:
      type: none
      device: D:\docker_volumes\postgres_data
      o: bind
  minio_data:
    driver: local
    driver_opts:
      type: none
      device: D:\docker_volumes\minio_data
      o: bind
BIN model_3d/Объем 1.m3d Normal file
Binary file not shown.
BIN model_3d/Поток.m3d Normal file
Binary file not shown.
BIN model_3d/Поток.m3d.bak Normal file
Binary file not shown.
9 requirements.txt Normal file
@@ -0,0 +1,9 @@
fastapi
pydantic
uvicorn
pydantic-settings
minio
pywin32
psycopg[binary,pool]
python-dotenv
psycopg-pool