lab_1

This commit is contained in:
parent 4e241aaec9
commit 1e9717ac16
backend/__init__.py (new file, 52 lines)
@@ -0,0 +1,52 @@
import importlib
import os
import traceback

import matplotlib
from apiflask import APIBlueprint, APIFlask
from flask_cors import CORS

matplotlib.use("agg")

cors = CORS()
api_bp = APIBlueprint("api", __name__, url_prefix="/api/v1")
dataset_path: str | None = None


class Config:
    SECRET_KEY = "secret!"
    SEND_FILE_MAX_AGE_DEFAULT = -1


def create_app():
    global dataset_path

    # Create and configure app
    app = APIFlask(
        "MAI Service",
        title="MAI Service API",
        docs_path="/",
        version="1.0",
        static_folder="",
        template_folder="",
    )
    app.config.from_object(Config)

    dataset_path = os.path.join(app.instance_path, "dataset")
    os.makedirs(dataset_path, exist_ok=True)

    @app.errorhandler(Exception)
    def my_error_processor(error):
        traceback.print_exception(error)
        return {"message": str(error), "detail": "No details"}, 500

    # Import custom REST methods
    importlib.import_module("backend.api")

    # Enable REST API
    app.register_blueprint(api_bp)

    # Enable app extensions
    cors.init_app(app)

    return app
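Not part of the commit: a minimal sketch of how this application factory could be started locally. The run.py file name is hypothetical, and the default Flask development server and port are assumptions.

# run.py (hypothetical entry point, not included in this commit)
from backend import create_app

app = create_app()

if __name__ == "__main__":
    # Development server only; assumes the default port 5000.
    app.run(debug=True)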
backend/__pycache__/__init__.cpython-312.pyc (new binary file, not shown)
backend/__pycache__/api.cpython-312.pyc (new binary file, not shown)
backend/__pycache__/service.cpython-312.pyc (new binary file, not shown)
backend/api.py (new file, 57 lines)
@@ -0,0 +1,57 @@
from apiflask import FileSchema, Schema, fields
from flask import send_file

from backend import api_bp, dataset_path
from backend.service import Service


class FileUpload(Schema):
    file = fields.File(required=True)


class ColumnInfoDto(Schema):
    datatype = fields.String()
    items = fields.List(fields.String())


class TableColumnDto(Schema):
    name = fields.String()
    datatype = fields.String()
    items = fields.List(fields.String())


service = Service(dataset_path)


@api_bp.post("/dataset")
@api_bp.input(FileUpload, location="files")
def upload_dataset(files_data):
    uploaded_file = files_data["file"]
    return service.upload_dataset(uploaded_file)


@api_bp.get("/dataset")
def get_all_datasets():
    return service.get_all_datasets()


@api_bp.get("/dataset/<string:name>")
@api_bp.output(TableColumnDto(many=True))
def get_dataset_info(name: str):
    return service.get_dataset_info(name)


@api_bp.get("/dataset/<string:name>/<string:column>")
@api_bp.output(ColumnInfoDto)
def get_column_info(name: str, column: str):
    return service.get_column_info(name, column)


@api_bp.get("/dataset/draw/hist/<string:name>/<string:column>")
@api_bp.output(
    FileSchema(type="string", format="binary"), content_type="image/png", example=""
)
def get_dataset_hist(name: str, column: str):
    data = service.get_hist(name, column)
    data.seek(0)
    return send_file(data, download_name=f"{name}.hist.png", mimetype="image/png")
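Not part of the commit: a rough sketch of how these endpoints could be exercised from a client. It assumes the app is running at http://localhost:5000, that a local example.csv with a column named SomeColumn exists, and that the requests library is available; none of these are stated in the commit.

import requests  # assumed client-side dependency, not in requirements.txt

BASE = "http://localhost:5000/api/v1"  # assumed host and port

# Upload a dataset; the multipart field name "file" matches the FileUpload schema.
with open("example.csv", "rb") as f:
    requests.post(f"{BASE}/dataset", files={"file": f})

# List datasets, then inspect one column.
print(requests.get(f"{BASE}/dataset").json())
print(requests.get(f"{BASE}/dataset/example.csv/SomeColumn").json())

# Fetch the histogram PNG for a numeric column and save it locally.
png = requests.get(f"{BASE}/dataset/draw/hist/example.csv/SomeColumn")
with open("hist.png", "wb") as out:
    out.write(png.content)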
backend/service.py (new file, 59 lines)
@@ -0,0 +1,59 @@
import io
import os
import pathlib
from typing import BinaryIO, Dict, List

import pandas as pd
from matplotlib.figure import Figure
from werkzeug.datastructures import FileStorage
from werkzeug.utils import secure_filename


class Service:
    def __init__(self, dataset_path: str | None) -> None:
        if dataset_path is None:
            raise Exception("Dataset path is not defined")
        self.__path: str = dataset_path

    def __get_dataset(self, filename: str) -> pd.DataFrame:
        full_file_name = os.path.join(self.__path, secure_filename(filename))
        return pd.read_csv(full_file_name)

    def upload_dataset(self, file: FileStorage) -> str:
        if file.filename is None:
            raise Exception("Dataset upload error")
        file_name: str = file.filename
        full_file_name = os.path.join(self.__path, secure_filename(file_name))
        file.save(full_file_name)
        return file_name

    def get_all_datasets(self) -> List[str]:
        return [file.name for file in pathlib.Path(self.__path).glob("*.csv")]

    def get_dataset_info(self, filename) -> List[Dict]:
        dataset = self.__get_dataset(filename)
        dataset_info = []
        for column in dataset.columns:
            items = dataset[column].astype(str)
            column_info = {
                "name": column,
                "datatype": dataset.dtypes[column],
                "items": items,
            }
            dataset_info.append(column_info)
        return dataset_info

    def get_column_info(self, filename, column) -> Dict:
        dataset = self.__get_dataset(filename)
        datatype = dataset.dtypes[column]
        items = sorted(dataset[column].astype(str).unique())
        return {"datatype": datatype, "items": items}

    def get_hist(self, filename, column) -> BinaryIO:
        dataset = self.__get_dataset(filename)
        bytes = io.BytesIO()
        plot: Figure | None = dataset.plot.hist(column=[column], bins=80).get_figure()
        if plot is None:
            raise Exception("Can't create hist plot")
        plot.savefig(bytes, dpi=300, format="png")
        return bytes
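Not part of the commit: a small sketch of the Service class used directly, outside the Flask app. It assumes a local ./dataset directory that already contains example.csv with a numeric "price" column; the file and column names are placeholders.

import matplotlib

matplotlib.use("agg")  # non-interactive backend, mirroring backend/__init__.py

from backend.service import Service

# Assumes ./dataset exists and holds example.csv.
service = Service("./dataset")

print(service.get_all_datasets())                       # e.g. ["example.csv"]
print(service.get_column_info("example.csv", "price"))  # dtype plus unique values

# Render a histogram of one column and write it to disk.
buffer = service.get_hist("example.csv", "price")
buffer.seek(0)
with open("price_hist.png", "wb") as out:
    out.write(buffer.read())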
lab_1/README.md (new file, 2 lines)
@@ -0,0 +1,2 @@
# AIM-PIbd-32-Kuzin-P-S
lab_1/image.png (new binary file, 77 KiB, not shown)
lab_1/lab1.ipynb (new file, 155 lines; diff suppressed because one or more lines are too long)
lab_1/requirements.txt (new file, 32 lines)
@@ -0,0 +1,32 @@
asttokens==2.4.1
colorama==0.4.6
comm==0.2.2
debugpy==1.8.5
decorator==5.1.1
executing==2.1.0
ipykernel==6.29.5
ipython==8.27.0
jedi==0.19.1
jupyter_client==8.6.2
jupyter_core==5.7.2
matplotlib-inline==0.1.7
nest-asyncio==1.6.0
numpy==2.1.1
packaging==24.1
pandas==2.2.2
parso==0.8.4
platformdirs==4.3.3
prompt_toolkit==3.0.47
psutil==6.0.0
pure_eval==0.2.3
Pygments==2.18.0
python-dateutil==2.9.0.post0
pytz==2024.2
pywin32==306
pyzmq==26.2.0
six==1.16.0
stack-data==0.6.3
tornado==6.4.1
traitlets==5.14.3
tzdata==2024.1
wcwidth==0.2.13
static/csv/Starbucks Dataset.csv (new file, 8037 lines; diff suppressed because it is too large)