diff --git a/database.py b/database.py
index 6984def..549e819 100644
--- a/database.py
+++ b/database.py
@@ -1,6 +1,9 @@
 from typing import Optional
 from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker
 from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
+from sqlalchemy import Column, DateTime
+from sqlalchemy.sql import func
+from datetime import datetime
 
 engine = create_async_engine("sqlite+aiosqlite:///questions.db")
 new_session = async_sessionmaker(engine, expire_on_commit=False)
@@ -12,10 +15,11 @@ class QuestionOrm(Model):
     __tablename__ = "questions"
 
     id: Mapped[int] = mapped_column(primary_key=True)
-    id_user: Mapped[int]
+    email_user: Mapped[str]
     type_question: Mapped[bool]
     question: Mapped[str]
     answer: Mapped[Optional[str]]
+    question_time: Mapped[datetime] = mapped_column(DateTime, server_default=func.now())
 
 async def create_tables():
     async with engine.begin() as conn:
diff --git a/model.py b/model.py
index 09695a0..30a8fee 100644
--- a/model.py
+++ b/model.py
@@ -8,11 +8,11 @@ from keras.src.utils import pad_sequences
 model = tf.keras.models.load_model('.//neural_network/models/model/best_model_lstm_negative.keras')
 
 # Загрузка токенизатора
-with open('neural_network/models/tokenization/tokenizer_lstm_lstm_negative.pickle', 'rb') as handle:
+with open('.//neural_network/tokenization/tokenizer_negative.pickle', 'rb') as handle:
     tokenizer = pickle.load(handle)
 
 # Загрузка названий классов
-with open('neural_network/models/classification/class_names_lstm_negative.txt', 'r', encoding='utf-8') as file:
+with open('.//neural_network/classification/class_names_negative.txt', 'r', encoding='utf-8') as file:
     class_names = [line.strip() for line in file.readlines()]
 
 def preprocess_text(text: str):
diff --git a/repository.py b/repository.py
index dd8281b..f3bb697 100644
--- a/repository.py
+++ b/repository.py
@@ -1,10 +1,11 @@
-from sqlalchemy import select
+from sqlalchemy import select, delete
+from sqlalchemy.orm import joinedload
+from datetime import datetime
 from database import new_session, QuestionOrm
 from schemas import SQuestionAdd, SQuestion
 from model import predict_answer
 
-
 
 class QuestionRepository:
     @classmethod
     async def add_one(cls, data: SQuestionAdd) -> int:
@@ -15,8 +16,19 @@ class QuestionRepository:
             answer = predict_answer(question_dict["question"])
             question_dict["answer"] = answer
+            question_dict["question_time"] = datetime.now()
 
             question = QuestionOrm(**question_dict)
             session.add(question)
+
+            # Проверка количества записей для email_user
+            query = select(QuestionOrm).where(QuestionOrm.email_user == data.email_user)
+            result = await session.execute(query)
+            user_questions = result.scalars().all()
+            if len(user_questions) > 10:
+                # Удаление самой старой записи
+                oldest_question = min(user_questions, key=lambda q: q.question_time)
+                await session.delete(oldest_question)
+                await session.flush()
 
             await session.commit()
             return question.id
diff --git a/schemas.py b/schemas.py
index 7f4c7fa..db39951 100644
--- a/schemas.py
+++ b/schemas.py
@@ -1,14 +1,16 @@
 from typing import Optional
 from pydantic import BaseModel, ConfigDict
+from datetime import datetime
 
 class SQuestionAdd(BaseModel):
-    id_user: int
+    email_user: str
     type_question: bool
     question: str
 
 
 class SQuestion(SQuestionAdd):
     id: int
     answer: Optional[str] = None
+    question_time: datetime
 
     model_config = ConfigDict(from_attributes=True)