feat: CRUD ProcessSchema #16
@@ -3,94 +3,96 @@ import math

from datetime import datetime, timezone

from sqlalchemy import insert, select, func
from sqlalchemy import insert, select, func, or_, and_, asc, desc
from sqlalchemy.ext.asyncio import AsyncConnection
from enum import Enum

from api.db.tables.process import process_schema_table

from api.schemas.process.process_schema import ProcessSchema

from api.schemas.endpoints.process_schema import all_process_schema_adapter, AllProcessSchemaResponse
from api.schemas.endpoints.process_schema import all_process_schema_adapter, AllProcessSchemaResponse, ProcessSchemaFilterDTO


async def get_process_schema_page_by_creator_id(
    connection: AsyncConnection, creator_id: int, page: int, limit: int
) -> Optional[AllProcessSchemaResponse]:
async def get_process_schema_page(connection: AsyncConnection, filter_dto: ProcessSchemaFilterDTO) -> Optional[AllProcessSchemaResponse]:
"""
|
||||
Получает список схем процессов по значениям page и limit и creator_id.
|
||||
Получает список схем процессов с комплексной фильтрацией через DTO объект.
|
||||
"""
|
||||
|
||||
first_schema = page * limit - limit
|
||||
query = (
|
||||
select(
|
||||
process_schema_table.c.id,
|
||||
process_schema_table.c.title,
|
||||
process_schema_table.c.description,
|
||||
process_schema_table.c.owner_id,
|
||||
process_schema_table.c.creator_id,
|
||||
process_schema_table.c.created_at,
|
||||
process_schema_table.c.settings,
|
||||
process_schema_table.c.status,
|
||||
page = filter_dto.pagination.get('page', 1)
|
||||
limit = filter_dto.pagination.get('limit', 10)
|
||||
offset = (page - 1) * limit
|
||||
|
||||
query = select(
|
||||
process_schema_table.c.id,
|
||||
process_schema_table.c.title,
|
||||
process_schema_table.c.description,
|
||||
process_schema_table.c.owner_id,
|
||||
process_schema_table.c.creator_id,
|
||||
process_schema_table.c.created_at,
|
||||
process_schema_table.c.settings,
|
||||
process_schema_table.c.status,
|
||||
)
|
||||
|
||||
if filter_dto.search:
|
||||
search_term = f"%{filter_dto.search}%"
|
||||
query = query.where(
|
||||
or_(
|
||||
process_schema_table.c.title.ilike(search_term),
|
||||
process_schema_table.c.description.ilike(search_term)
|
||||
)
|
||||
)
|
||||
.where(process_schema_table.c.creator_id == creator_id)
|
||||
.order_by(process_schema_table.c.id)
|
||||
.offset(first_schema)
|
||||
.limit(limit)
|
||||
)
|
||||
|
||||
count_query = (
|
||||
select(func.count()).select_from(process_schema_table).where(process_schema_table.c.creator_id == creator_id)
|
||||
)
|
||||
if filter_dto.filters:
|
||||
filter_conditions = []
|
||||
for field, values in filter_dto.filters.items():
|
||||
column = getattr(process_schema_table.c, field, None)
|
||||
if column is not None and values:
|
||||
if len(values) == 1:
|
||||
filter_conditions.append(column == values[0])
|
||||
else:
|
||||
filter_conditions.append(column.in_(values))
|
||||
|
||||
result = await connection.execute(query)
|
||||
count_result = await connection.execute(count_query)
|
||||
if filter_conditions:
|
||||
query = query.where(and_(*filter_conditions))
|
||||
|
||||
events_data = result.mappings().all()
|
||||
total_count = count_result.scalar()
|
||||
total_pages = math.ceil(total_count / limit)
|
||||
if filter_dto.order:
|
||||
order_field = filter_dto.order.get('field', 'id')
|
||||
order_direction = filter_dto.order.get('direction', 'asc')
|
||||
|
||||
validated_process_schema = all_process_schema_adapter.validate_python(events_data)
|
||||
column = getattr(process_schema_table.c, order_field, None)
|
||||
if column is not None:
|
||||
if order_direction.lower() == 'desc':
|
||||
ivan.dev marked this conversation as resolved
Outdated
cyrussmeat
commented
А если какая-то дополнительная фильтрация будет над этим списком - отдельным методом будет реализовываться? Это я к тому, что я бы через DTO расширяемый объект фильтрации тут проводил, у которого по базе limit[offset,count], search и order, к примеру А если какая-то дополнительная фильтрация будет над этим списком - отдельным методом будет реализовываться?
Это я к тому, что я бы через DTO расширяемый объект фильтрации тут проводил, у которого по базе limit[offset,count], search и order, к примеру
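A minimal sketch of the extensible filter object suggested here (illustrative only; the PR's actual ProcessSchemaFilterDTO, defined further down, has the same shape, and pydantic's BaseModel stands in for the project's Base class):

from typing import Dict, List, Optional
from pydantic import BaseModel


class FilterDTO(BaseModel):
    """Extensible filter container: new criteria can be added without changing call signatures."""

    pagination: Dict[str, int]                      # {"page": 1, "limit": 10}
    search: Optional[str] = None                    # substring match on text columns
    order: Optional[Dict[str, str]] = None          # {"field": "id", "direction": "asc"}
    filters: Optional[Dict[str, List[str]]] = None  # {"status": ["active"], "owner_id": ["123"]}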
                query = query.order_by(desc(column))
            else:
                query = query.order_by(asc(column))

vlad.dev commented:
Then this probably shouldn't say "the specified" here, if I understood it correctly.

    else:
        query = query.order_by(process_schema_table.c.id)

    return AllProcessSchemaResponse(
        process_schema=validated_process_schema,
        amount_count=total_count,
        amount_pages=total_pages,
        current_page=page,
        limit=limit,
    )


async def get_process_schema_page(connection: AsyncConnection, page, limit) -> Optional[AllProcessSchemaResponse]:
    """
    Retrieves the list of process schemas by page and limit values.
    """

    first_schema = page * limit - (limit)

    query = (
        select(
            process_schema_table.c.id,
            process_schema_table.c.title,
            process_schema_table.c.description,
            process_schema_table.c.owner_id,
            process_schema_table.c.creator_id,
            process_schema_table.c.created_at,
            process_schema_table.c.settings,
            process_schema_table.c.status,
        )
        .order_by(process_schema_table.c.id)
        .offset(first_schema)
        .limit(limit)
    )
    query = query.offset(offset).limit(limit)

    count_query = select(func.count()).select_from(process_schema_table)

    if filter_dto.search:
        search_term = f"%{filter_dto.search}%"
        count_query = count_query.where(
            or_(
                process_schema_table.c.title.ilike(search_term),
                process_schema_table.c.description.ilike(search_term)
            )
        )

    if filter_dto.filters and filter_conditions:
        count_query = count_query.where(and_(*filter_conditions))

    result = await connection.execute(query)
    count_result = await connection.execute(count_query)

    events_data = result.mappings().all()
    total_count = count_result.scalar()

    if not total_count:
        return None

    total_pages = math.ceil(total_count / limit)

    validated_process_schema = all_process_schema_adapter.validate_python(events_data)
@@ -103,7 +105,6 @@ async def get_process_schema_page(connection: AsyncConnection, page, limit) -> O
        limit=limit,
    )


async def get_process_schema_by_title(connection: AsyncConnection, title: str) -> Optional[ProcessSchema]:
    """
    Retrieves a process schema by title.
@@ -20,7 +20,6 @@ from api.schemas.account.account import User
from api.schemas.base import bearer_schema
from api.schemas.endpoints.account import AllUser, AllUserResponse, UserCreate, UserUpdate
from api.services.auth import get_current_user
from api.services.update_data_validation import update_user_data_changes
from api.services.user_role_validation import db_user_role_validation

api_router = APIRouter(

@@ -24,7 +24,6 @@ from api.schemas.account.account_keyring import AccountKeyring
from api.services.auth import get_current_user

from api.services.user_role_validation import db_user_role_validation
from api.services.update_data_validation import update_key_data_changes


api_router = APIRouter(

@@ -35,7 +35,7 @@ from api.services.user_role_validation import (
    db_user_role_validation_for_listevents_and_processschema_by_listevent_id,
    db_user_role_validation_for_listevents_and_processschema,
)
from api.services.update_data_validation import update_listevents_data_changes


api_router = APIRouter(

@@ -3,8 +3,10 @@ from fastapi import (
    Depends,
    HTTPException,
    status,
    Query
)

from typing import Optional, Dict, Any, List
from sqlalchemy.ext.asyncio import AsyncConnection

from api.db.connection.session import get_connection_dep
@@ -16,7 +18,6 @@ from api.db.logic.processschema import (
    create_process_schema,
    get_process_schema_by_id,
    update_process_schema_by_id,
    get_process_schema_page_by_creator_id,
    get_process_schema_page,
)

@@ -26,7 +27,7 @@ from api.db.tables.process import ProcessStatus

from api.schemas.base import bearer_schema

from api.schemas.endpoints.process_schema import ProcessSchemaUpdate, AllProcessSchemaResponse
from api.schemas.endpoints.process_schema import ProcessSchemaUpdate, AllProcessSchemaResponse, ProcessSchemaFilterDTO

from api.services.auth import get_current_user

@@ -34,7 +35,6 @@ from api.services.user_role_validation import (
    db_user_role_validation_for_listevents_and_processschema_by_listevent_id,
    db_user_role_validation_for_listevents_and_processschema,
)
from api.services.update_data_validation import update_processschema_data_changes


api_router = APIRouter(
@@ -45,27 +45,47 @@ api_router = APIRouter(

@api_router.get("", dependencies=[Depends(bearer_schema)], response_model=AllProcessSchemaResponse)
async def get_all_process_schema(
    page: int = 1,
    limit: int = 10,
    page: int = Query(1, description="Page number", gt=0),
    limit: int = Query(10, description="Number of items per page", gt=0),
    search: Optional[str] = Query(None, description="Search term to filter by title or description"),
    order_field: Optional[str] = Query("id", description="Field to sort by"),
    order_direction: Optional[str] = Query("asc", description="Sort direction (asc/desc)"),
    status_filter: Optional[List[str]] = Query(None, description="Filter by status"),
    owner_id: Optional[List[str]] = Query(None, description="Filter by owner ID"),
    connection: AsyncConnection = Depends(get_connection_dep),
    creator_id: Optional[int] = Query(None, description="Filter by creator ID"),
    current_user=Depends(get_current_user),
):

    filters = {
        **({"status": status_filter} if status_filter else {}),
        **({"owner_id": owner_id} if owner_id else {}),
        **({"creator_id": [str(creator_id)]} if creator_id else {}),
    }

    filter_dto = ProcessSchemaFilterDTO(
        pagination={"page": page, "limit": limit},
        search=search,
        order={"field": order_field, "direction": order_direction},
        filters=filters if filters else None
    )

    authorize_user, page_flag = await db_user_role_validation_for_listevents_and_processschema(connection, current_user)

    if page_flag:
        process_schema_page = await get_process_schema_page(connection, page, limit)
    if not page_flag:
        if filter_dto.filters is None:
            filter_dto.filters = {}
        filter_dto.filters["creator_id"] = [str(authorize_user.id)]

        if process_schema_page is None:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Process schema not found")
    process_schema_page = await get_process_schema_page(connection, filter_dto)

        return process_schema_page
    else:
        process_schema_page = await get_process_schema_page_by_creator_id(connection, authorize_user.id, page, limit)
        if process_schema_page is None:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Process schema not found"
            )
    if process_schema_page is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Process schema not found")

        return process_schema_page
    return process_schema_page
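For illustration, a request such as GET <router prefix>?page=2&limit=5&search=invoice&order_field=created_at&order_direction=desc&status_filter=active&status_filter=draft (the router prefix is not visible in this hunk, so the path is assumed) would be turned by the handler above into roughly:

filter_dto = ProcessSchemaFilterDTO(
    pagination={"page": 2, "limit": 5},
    search="invoice",
    order={"field": "created_at", "direction": "desc"},
    filters={"status": ["active", "draft"]},
)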

@api_router.get("/{processschema_id}", dependencies=[Depends(bearer_schema)], response_model=ProcessSchema)

@@ -12,7 +12,6 @@ from api.db.connection.session import get_connection_dep
from api.db.logic.account import get_user_by_id, update_user_by_id, get_user_by_login
from api.schemas.base import bearer_schema
from api.services.auth import get_current_user
from api.services.update_data_validation import update_user_data_changes

from api.schemas.endpoints.account import UserUpdate
from api.schemas.account.account import User
@@ -35,3 +35,11 @@ class AllProcessSchemaResponse(Base):


all_process_schema_adapter = TypeAdapter(List[AllProcessSchema])


# DTO object for filtering
class ProcessSchemaFilterDTO(Base):
    pagination: Dict[str, int]  # {page: 1, limit: 10}
    search: Optional[str] = None
    order: Optional[Dict[str, str]] = None  # {field: "id", direction: "asc"}
    filters: Optional[Dict[str, List[str]]] = None  # {"status": ["active"], "owner_id": ["123"]}

mikhail.dev commented:
To avoid writing comments like these, you can use `TypedDict`.
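A minimal sketch of that suggestion (illustrative only, not part of the PR; the TypedDict names are assumptions, and Base is the project's base model), typing the nested dicts so the inline comments become unnecessary:

from typing import Dict, List, Optional, TypedDict


class Pagination(TypedDict):
    page: int
    limit: int


class Order(TypedDict):
    field: str
    direction: str  # "asc" or "desc"


class ProcessSchemaFilterDTO(Base):
    pagination: Pagination
    search: Optional[str] = None
    order: Optional[Order] = None
    filters: Optional[Dict[str, List[str]]] = None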

@@ -1,146 +0,0 @@
from enum import Enum
from typing import Optional
from api.schemas.endpoints.account import UserUpdate
from api.db.tables.account import KeyType, KeyStatus
from api.schemas.endpoints.account_keyring import AccountKeyringUpdate
from api.db.tables.account import AccountRole, AccountStatus
from api.schemas.endpoints.list_events import ListEventUpdate
from api.db.tables.events import EventState, EventStatus
from api.schemas.endpoints.process_schema import ProcessSchemaUpdate
from api.db.tables.process import ProcessStatus


def update_user_data_changes(update_data: UserUpdate, user) -> Optional[dict]:
    """
    Compares the data to be updated with the user's current values.
    Returns:
    - None if there are no changes
    - A dict {field: new_value} of the changed fields
    """
    update_values = {}
    changes = {}

    for field, value in update_data.model_dump(exclude_unset=True).items():
        if value is None:
            continue

        if isinstance(value, (AccountRole, AccountStatus)):
            update_values[field] = value.value
        else:
            update_values[field] = value

    for field, new_value in update_values.items():
        if not hasattr(user, field):
            continue

        current_value = getattr(user, field)

        if isinstance(current_value, Enum):
            current_value = current_value.value

        if current_value != new_value:
            changes[field] = new_value

    return changes if changes else None


def update_key_data_changes(update_data: AccountKeyringUpdate, key) -> Optional[dict]:
    """
    Compares the data to be updated with the key's current values.
    Returns:
    - None if there are no changes
    - A dict {field: new_value} of the changed fields
    """
    update_values = {}
    changes = {}

    for field, value in update_data.model_dump(exclude_unset=True).items():
        if value is None:
            continue

        if isinstance(value, (KeyType, KeyStatus)):
            update_values[field] = value.value
        else:
            update_values[field] = value

    for field, new_value in update_values.items():
        if not hasattr(key, field):
            continue

        current_value = getattr(key, field)

        if isinstance(current_value, Enum):
            current_value = current_value.value

        if current_value != new_value:
            changes[field] = new_value

    return changes if changes else None


def update_listevents_data_changes(update_data: ListEventUpdate, listevents) -> Optional[dict]:
    """
    Compares the data to be updated with the current listevents values.
    Returns:
    - None if there are no changes
    - A dict {field: new_value} of the changed fields
    """
    update_values = {}
    changes = {}

    for field, value in update_data.model_dump(exclude_unset=True).items():
        if value is None:
            continue

        if isinstance(value, (EventState, EventStatus)):
            update_values[field] = value.value
        else:
            update_values[field] = value

    for field, new_value in update_values.items():
        if not hasattr(listevents, field):
            continue

        current_value = getattr(listevents, field)

        if isinstance(current_value, Enum):
            current_value = current_value.value

        if current_value != new_value:
            changes[field] = new_value

    return changes if changes else None


def update_processschema_data_changes(update_data: ProcessSchemaUpdate, processschema) -> Optional[dict]:
    """
    Compares the data to be updated with the current processschema values.
    Returns:
    - None if there are no changes
    - A dict {field: new_value} of the changed fields
    """
    update_values = {}
    changes = {}

    for field, value in update_data.model_dump(exclude_unset=True).items():
        if value is None:
            continue

        if isinstance(value, (ProcessStatus)):
            update_values[field] = value.value
        else:
            update_values[field] = value

    for field, new_value in update_values.items():
        if not hasattr(processschema, field):
            continue

        current_value = getattr(processschema, field)

        if isinstance(current_value, Enum):
            current_value = current_value.value

        if current_value != new_value:
            changes[field] = new_value

    return changes if changes else None
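For illustration, a hedged usage sketch of the last helper (the title field is an assumption; ProcessSchemaUpdate's actual fields are not shown in this diff):

update = ProcessSchemaUpdate(title="New title")               # only title is set
changes = update_processschema_data_changes(update, schema)   # schema.title == "Old title"
# unset and unchanged fields are skipped, so only the differing field comes back:
assert changes == {"title": "New title"}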
Maybe we should rename processschema to process_schema? Three s's in a row look a bit odd.