Compare commits

3 Commits

| Author | SHA1 | Date |
|---|---|---|
| | c86b7eb624 | |
| | a70973032f | |
| | 0330356ea2 | |

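Across the three commits, the diffs below apply one consistent annotation migration: `typing.Optional`, `Dict`, and `List` give way to PEP 604 unions (`X | None`) and built-in generics (`dict[...]`, `list[...]`), which require Python 3.10+. A minimal sketch of the pattern, using a hypothetical function rather than repository code:

```python
from typing import Any

# Illustrative only: a placeholder function, not code from the repository.
# Old style (what the commits remove):  Optional[str], Dict[str, Any], List[int]
# New style (what the commits adopt):   str | None,    dict[str, Any], list[int]

def find_user(users: list[dict[str, Any]], login: str | None = None) -> dict[str, Any] | None:
    """Return the first user whose 'login' matches, or None if absent (placeholder logic)."""
    if login is None:
        return None
    for user in users:
        if user.get("login") == login:
            return user
    return None
```
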
@@ -1,7 +1,6 @@
 import math
 from datetime import datetime, timezone
 from enum import Enum
-from typing import Optional

 from sqlalchemy import insert, select, func, or_, and_, asc, desc
 from sqlalchemy.ext.asyncio import AsyncConnection
@@ -11,9 +10,7 @@ from api.schemas.account.account import User
 from api.schemas.endpoints.account import all_user_adapter, AllUser, AllUserResponse, UserCreate, UserFilterDTO


-async def get_user_account_page_DTO(
-    connection: AsyncConnection, filter_dto: UserFilterDTO
-) -> Optional[AllUserResponse]:
+async def get_user_account_page_DTO(connection: AsyncConnection, filter_dto: UserFilterDTO) -> AllUserResponse | None:
     """
     Получает список пользователей с пагинацией, фильтрацией и сортировкой через DTO объект.
     Поддерживает:
@@ -118,7 +115,7 @@ async def get_user_account_page_DTO(
     )


-async def get_user_by_id(connection: AsyncConnection, user_id: int) -> Optional[AllUser]:
+async def get_user_by_id(connection: AsyncConnection, user_id: int) -> AllUser | None:
     """
     Получает юзера по id.
     """
@@ -133,7 +130,7 @@ async def get_user_by_id(connection: AsyncConnection, user_id: int) -> Optional[
     return User.model_validate(user)


-async def get_user_by_login(connection: AsyncConnection, login: str) -> Optional[User]:
+async def get_user_by_login(connection: AsyncConnection, login: str) -> User | None:
     """
     Получает юзера по login.
     """
@@ -147,7 +144,7 @@ async def get_user_by_login(connection: AsyncConnection, login: str) -> Optional
     return User.model_validate(user_data)


-async def update_user_by_id(connection: AsyncConnection, update_values, user) -> Optional[User]:
+async def update_user_by_id(connection: AsyncConnection, update_values, user) -> User | None:
     """
     Вносит изменеия в нужное поле таблицы account_table.
     """
@@ -156,7 +153,7 @@ async def update_user_by_id(connection: AsyncConnection, update_values, user) ->
     await connection.commit()


-async def create_user(connection: AsyncConnection, user: UserCreate, creator_id: int) -> Optional[AllUser]:
+async def create_user(connection: AsyncConnection, user: UserCreate, creator_id: int) -> AllUser | None:
     """
     Создает нове поле в таблице account_table.
     """

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from sqlalchemy import select, update
 from sqlalchemy.ext.asyncio import AsyncConnection
 from enum import Enum
@@ -15,7 +13,7 @@ from api.utils.key_id_gen import KeyIdGenerator
 from datetime import datetime, timezone


-async def get_user(connection: AsyncConnection, login: str) -> tuple[Optional[AllUser], Optional[AccountKeyring]]:
+async def get_user(connection: AsyncConnection, login: str) -> tuple[AllUser | None, AccountKeyring | None]:
     query = (
         select(account_table, account_keyring_table)
         .join(account_keyring_table, account_table.c.id == account_keyring_table.c.owner_id)
@@ -51,7 +49,7 @@ async def get_user(connection: AsyncConnection, login: str) -> tuple[Optional[Al
     return user, password


-async def upgrade_old_refresh_token(connection: AsyncConnection, refresh_token) -> Optional[User]:
+async def upgrade_old_refresh_token(connection: AsyncConnection, refresh_token) -> User | None:
     new_status = KeyStatus.EXPIRED

     update_query = (
@@ -71,7 +69,7 @@ async def upgrade_old_refresh_token(connection: AsyncConnection, refresh_token)

 async def add_new_refresh_token(
     connection: AsyncConnection, new_refresh_token, new_refresh_token_expires_time, user
-) -> Optional[User]:
+) -> User | None:
     new_refresh_token = account_keyring_table.insert().values(
         owner_id=user.id,
         key_type=KeyType.REFRESH_TOKEN,

@@ -1,6 +1,5 @@
 from datetime import datetime, timedelta, timezone
 from enum import Enum
-from typing import Optional

 from sqlalchemy import insert, select, update
 from sqlalchemy.dialects.mysql import insert as mysql_insert
@@ -11,7 +10,7 @@ from api.schemas.account.account_keyring import AccountKeyring
 from api.utils.hasher import hasher


-async def get_key_by_id(connection: AsyncConnection, key_id: str) -> Optional[AccountKeyring]:
+async def get_key_by_id(connection: AsyncConnection, key_id: str) -> AccountKeyring | None:
     """
     Получает key по key_id.
     """
@@ -26,7 +25,7 @@ async def get_key_by_id(connection: AsyncConnection, key_id: str) -> Optional[Ac
     return AccountKeyring.model_validate(user_data)


-async def update_key_by_id(connection: AsyncConnection, update_values, key) -> Optional[AccountKeyring]:
+async def update_key_by_id(connection: AsyncConnection, update_values, key) -> AccountKeyring | None:
     """
     Вносит изменеия в нужное поле таблицы account_keyring_table.
     """
@@ -37,7 +36,7 @@ async def update_key_by_id(connection: AsyncConnection, update_values, key) -> O
     await connection.commit()


-async def create_key(connection: AsyncConnection, key: AccountKeyring, key_id: str) -> Optional[AccountKeyring]:
+async def create_key(connection: AsyncConnection, key: AccountKeyring, key_id: str) -> AccountKeyring | None:
     """
     Создает нове поле в таблице account_keyring_table).
     """

@@ -1,4 +1,3 @@
|
||||
from typing import Optional
|
||||
import math
|
||||
|
||||
from datetime import datetime, timezone
|
||||
@@ -16,7 +15,7 @@ from api.schemas.endpoints.list_events import all_list_event_adapter, AllListEve
|
||||
|
||||
async def get_list_events_page_DTO(
|
||||
connection: AsyncConnection, filter_dto: ListEventFilterDTO
|
||||
) -> Optional[AllListEventResponse]:
|
||||
) -> AllListEventResponse | None:
|
||||
"""
|
||||
Получает список событий с фильтрацией через DTO объект.
|
||||
Поддерживает:
|
||||
@@ -121,101 +120,7 @@ async def get_list_events_page_DTO(
|
||||
)
|
||||
|
||||
|
||||
async def get_list_events_page_by_creator_id(
|
||||
connection: AsyncConnection, creator_id: int, page: int, limit: int
|
||||
) -> Optional[AllListEventResponse]:
|
||||
"""
|
||||
Получает список событий заданного создателя по значениям page и limit и creator_id.
|
||||
"""
|
||||
|
||||
first_event = page * limit - limit
|
||||
query = (
|
||||
select(
|
||||
list_events_table.c.id,
|
||||
list_events_table.c.name,
|
||||
list_events_table.c.title,
|
||||
list_events_table.c.creator_id,
|
||||
list_events_table.c.created_at,
|
||||
list_events_table.c.schema,
|
||||
list_events_table.c.state,
|
||||
list_events_table.c.status,
|
||||
)
|
||||
.where(list_events_table.c.creator_id == creator_id) # Фильтрация по creator_id
|
||||
.order_by(list_events_table.c.id)
|
||||
.offset(first_event)
|
||||
.limit(limit)
|
||||
)
|
||||
|
||||
count_query = (
|
||||
select(func.count())
|
||||
.select_from(list_events_table)
|
||||
.where(list_events_table.c.creator_id == creator_id) # Фильтрация по creator_id
|
||||
)
|
||||
|
||||
result = await connection.execute(query)
|
||||
count_result = await connection.execute(count_query)
|
||||
|
||||
events_data = result.mappings().all()
|
||||
total_count = count_result.scalar()
|
||||
total_pages = math.ceil(total_count / limit)
|
||||
|
||||
# Здесь предполагается, что all_list_event_adapter.validate_python корректно обрабатывает данные
|
||||
validated_list_event = all_list_event_adapter.validate_python(events_data)
|
||||
|
||||
return AllListEventResponse(
|
||||
list_event=validated_list_event,
|
||||
amount_count=total_count,
|
||||
amount_pages=total_pages,
|
||||
current_page=page,
|
||||
limit=limit,
|
||||
)
|
||||
|
||||
|
||||
async def get_list_events_page(connection: AsyncConnection, page, limit) -> Optional[AllListEventResponse]:
|
||||
"""
|
||||
Получает список событий заданного создателя по значениям page и limit.
|
||||
"""
|
||||
|
||||
first_event = page * limit - (limit)
|
||||
|
||||
query = (
|
||||
select(
|
||||
list_events_table.c.id,
|
||||
list_events_table.c.name,
|
||||
list_events_table.c.title,
|
||||
list_events_table.c.creator_id,
|
||||
list_events_table.c.created_at,
|
||||
list_events_table.c.schema,
|
||||
list_events_table.c.state,
|
||||
list_events_table.c.status,
|
||||
)
|
||||
.order_by(list_events_table.c.id)
|
||||
.offset(first_event)
|
||||
.limit(limit)
|
||||
)
|
||||
|
||||
count_query = select(func.count()).select_from(list_events_table)
|
||||
|
||||
result = await connection.execute(query)
|
||||
count_result = await connection.execute(count_query)
|
||||
|
||||
events_data = result.mappings().all()
|
||||
total_count = count_result.scalar()
|
||||
total_pages = math.ceil(total_count / limit)
|
||||
|
||||
# Здесь предполагается, что all_list_event_adapter.validate_python корректно обрабатывает данные
|
||||
validated_list_event = all_list_event_adapter.validate_python(events_data)
|
||||
|
||||
return AllListEventResponse(
|
||||
list_event=validated_list_event,
|
||||
amount_count=total_count,
|
||||
amount_pages=total_pages,
|
||||
current_page=page,
|
||||
limit=limit,
|
||||
)
|
||||
|
||||
|
||||
async def get_list_events_by_name(connection: AsyncConnection, name: str) -> Optional[ListEvent]:
|
||||
async def get_list_events_by_name(connection: AsyncConnection, name: str) -> ListEvent | None:
|
||||
"""
|
||||
Получает list events по name.
|
||||
"""
|
||||
@@ -230,7 +135,7 @@ async def get_list_events_by_name(connection: AsyncConnection, name: str) -> Opt
|
||||
return ListEvent.model_validate(list_events_data)
|
||||
|
||||
|
||||
async def get_list_events_by_id(connection: AsyncConnection, id: int) -> Optional[ListEvent]:
|
||||
async def get_list_events_by_id(connection: AsyncConnection, id: int) -> ListEvent | None:
|
||||
"""
|
||||
Получает listevent по id.
|
||||
"""
|
||||
@@ -256,9 +161,7 @@ async def update_list_events_by_id(connection: AsyncConnection, update_values, l
|
||||
await connection.commit()
|
||||
|
||||
|
||||
async def create_list_events(
|
||||
connection: AsyncConnection, list_events: ListEvent, creator_id: int
|
||||
) -> Optional[ListEvent]:
|
||||
async def create_list_events(connection: AsyncConnection, list_events: ListEvent, creator_id: int) -> ListEvent | None:
|
||||
"""
|
||||
Создает нове поле в таблице list_events_table.
|
||||
"""
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
from typing import Optional
|
||||
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from sqlalchemy import insert, select, desc
|
||||
@@ -11,7 +9,7 @@ from orm.tables.process import ps_node_table, node_link_table
|
||||
from orm.tables.process import NodeLinkStatus
|
||||
|
||||
|
||||
async def get_last_link_name_by_node_id(connection: AsyncConnection, ps_id: int) -> Optional[str]:
|
||||
async def get_last_link_name_by_node_id(connection: AsyncConnection, ps_id: int) -> str | None:
|
||||
"""
|
||||
Получает link_name из последней записи node_link по ps_id.
|
||||
Находит все node_id в ps_node по ps_id, затем ищет связи в node_link
|
||||
@@ -32,7 +30,7 @@ async def get_last_link_name_by_node_id(connection: AsyncConnection, ps_id: int)
|
||||
|
||||
async def get_last_node_link_by_creator_and_ps_id(
|
||||
connection: AsyncConnection, creator_id: int, node_link_id: int
|
||||
) -> Optional[NodeLink]:
|
||||
) -> NodeLink | None:
|
||||
"""
|
||||
Получает последнюю созданную node_link для данного создателя и процесса.
|
||||
"""
|
||||
@@ -59,7 +57,7 @@ async def create_node_link_schema(
|
||||
connection: AsyncConnection,
|
||||
validated_link_schema,
|
||||
creator_id: int,
|
||||
) -> Optional[NodeLink]:
|
||||
) -> NodeLink | None:
|
||||
"""
|
||||
Создает нове поле в таблице process_schema_table.
|
||||
"""
|
||||
@@ -79,3 +77,20 @@ async def create_node_link_schema(
|
||||
await connection.commit()
|
||||
|
||||
return await get_last_node_link_by_creator_and_ps_id(connection, creator_id, validated_link_schema.from_id)
|
||||
|
||||
|
||||
async def get_all_node_links_by_next_node_ids(connection: AsyncConnection, next_node_ids: list[int]) -> list[NodeLink]:
|
||||
"""
|
||||
Получает все активные node_link для списка next_node_id одним запросом.
|
||||
"""
|
||||
if not next_node_ids:
|
||||
return []
|
||||
|
||||
query = select(node_link_table).where(
|
||||
node_link_table.c.next_node_id.in_(next_node_ids), node_link_table.c.status == NodeLinkStatus.ACTIVE.value
|
||||
)
|
||||
|
||||
node_link_db_cursor = await connection.execute(query)
|
||||
node_links_data = node_link_db_cursor.mappings().all()
|
||||
|
||||
return [NodeLink.model_validate(link_data) for link_data in node_links_data]
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from typing import Optional, Dict, Any
|
||||
from typing import Any
|
||||
import math
|
||||
|
||||
from datetime import datetime, timezone
|
||||
@@ -19,7 +19,7 @@ from api.schemas.endpoints.process_schema import (
|
||||
|
||||
async def get_process_schema_page_DTO(
|
||||
connection: AsyncConnection, filter_dto: ProcessSchemaFilterDTO
|
||||
) -> Optional[AllProcessSchemaResponse]:
|
||||
) -> AllProcessSchemaResponse | None:
|
||||
"""
|
||||
Получает список схем процессов с комплексной фильтрацией через DTO объект.
|
||||
Поддерживает:
|
||||
@@ -115,7 +115,7 @@ async def get_process_schema_page_DTO(
|
||||
)
|
||||
|
||||
|
||||
async def get_process_schema_by_id(connection: AsyncConnection, id: int) -> Optional[ProcessSchema]:
|
||||
async def get_process_schema_by_id(connection: AsyncConnection, id: int) -> ProcessSchema | None:
|
||||
"""
|
||||
Получает process_schema по id.
|
||||
"""
|
||||
@@ -141,54 +141,9 @@ async def update_process_schema_by_id(connection: AsyncConnection, update_values
|
||||
await connection.commit()
|
||||
|
||||
|
||||
async def update_process_schema_settings_by_id(
|
||||
connection: AsyncConnection, process_schema_id: int, node_data: Dict[str, Any]
|
||||
):
|
||||
"""
|
||||
Добавляет новый узел в массив 'nodes' в настройках процесса.
|
||||
Если массив 'nodes' не существует, создает его.
|
||||
"""
|
||||
# Получаем текущие settings
|
||||
query = select(process_schema_table.c.settings).where(process_schema_table.c.id == process_schema_id)
|
||||
result = await connection.execute(query)
|
||||
current_settings = result.scalar_one_or_none()
|
||||
|
||||
# Если settings пустые, создаем пустой словарь
|
||||
if current_settings is None:
|
||||
current_settings = {}
|
||||
|
||||
# Инициализируем массив nodes, если его нет
|
||||
if "nodes" not in current_settings:
|
||||
current_settings["nodes"] = []
|
||||
|
||||
# Добавляем новый узел в массив
|
||||
current_settings["nodes"].append(node_data)
|
||||
|
||||
# Обновляем поле settings
|
||||
await connection.execute(
|
||||
process_schema_table.update()
|
||||
.where(process_schema_table.c.id == process_schema_id)
|
||||
.values(settings=current_settings)
|
||||
)
|
||||
|
||||
await connection.commit()
|
||||
|
||||
|
||||
async def get_last_created_process_schema(connection: AsyncConnection) -> Optional[int]:
|
||||
"""
|
||||
Получает ID последней созданной схемы процесса.
|
||||
"""
|
||||
query = select(process_schema_table.c.id).order_by(desc(process_schema_table.c.id)).limit(1)
|
||||
|
||||
result = await connection.execute(query)
|
||||
last_id = result.scalar_one_or_none()
|
||||
|
||||
return last_id
|
||||
|
||||
|
||||
async def create_process_schema(
|
||||
connection: AsyncConnection, creator_id: int, title: str, description: str
|
||||
) -> Optional[int]:
|
||||
) -> int | None:
|
||||
"""
|
||||
Создает новое поле в таблице process_schema_table.
|
||||
"""
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from typing import Optional, List
|
||||
from typing import Any
|
||||
|
||||
from datetime import datetime, timezone
|
||||
|
||||
@@ -8,11 +8,10 @@ from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
from orm.tables.process import ps_node_table, node_link_table, process_schema_table
|
||||
|
||||
from api.schemas.process.ps_node import Ps_Node
|
||||
from model_nodes.node_listen_models import ListenNodeCoreSchema
|
||||
from orm.tables.process import NodeStatus
|
||||
|
||||
|
||||
async def get_ps_node_by_id(connection: AsyncConnection, id: int) -> Optional[Ps_Node]:
|
||||
async def get_ps_node_by_id(connection: AsyncConnection, id: int) -> Ps_Node | None:
|
||||
"""
|
||||
Получает process_schema по id.
|
||||
"""
|
||||
@@ -29,7 +28,7 @@ async def get_ps_node_by_id(connection: AsyncConnection, id: int) -> Optional[Ps
|
||||
|
||||
async def get_last_ps_node_by_creator_and_ps_id(
|
||||
connection: AsyncConnection, creator_id: int, ps_id: int
|
||||
) -> Optional[Ps_Node]:
|
||||
) -> Ps_Node | None:
|
||||
"""
|
||||
Получает последнюю созданную ps_node для данного создателя и процесса.
|
||||
"""
|
||||
@@ -49,18 +48,34 @@ async def get_last_ps_node_by_creator_and_ps_id(
|
||||
return Ps_Node.model_validate(ps_node_data)
|
||||
|
||||
|
||||
async def get_all_ps_nodes_by_ps_id(connection: AsyncConnection, ps_id: int) -> list[Ps_Node]:
|
||||
"""
|
||||
Получает все активные ps_node для данной process_schema.
|
||||
"""
|
||||
query = select(ps_node_table).where(
|
||||
ps_node_table.c.ps_id == ps_id, ps_node_table.c.status == NodeStatus.ACTIVE.value
|
||||
)
|
||||
|
||||
ps_node_db_cursor = await connection.execute(query)
|
||||
ps_nodes_data = ps_node_db_cursor.mappings().all()
|
||||
|
||||
return [Ps_Node.model_validate(node_data) for node_data in ps_nodes_data]
|
||||
|
||||
|
||||
async def create_ps_node_schema(
|
||||
connection: AsyncConnection,
|
||||
validated_schema,
|
||||
node_descriptor,
|
||||
creator_id: int,
|
||||
) -> Optional[ListenNodeCoreSchema]:
|
||||
settings_payload: dict[str, Any] | None = None,
|
||||
) -> Ps_Node | None:
|
||||
"""
|
||||
Создает нове поле в таблице process_schema_table.
|
||||
"""
|
||||
query = insert(ps_node_table).values(
|
||||
ps_id=validated_schema.ps_id,
|
||||
node_type=validated_schema.node_type,
|
||||
settings=validated_schema.data.model_dump(),
|
||||
settings=settings_payload if settings_payload is not None else node_descriptor.model_dump(),
|
||||
creator_id=creator_id,
|
||||
created_at=datetime.now(timezone.utc),
|
||||
status=NodeStatus.ACTIVE.value,
|
||||
@@ -89,7 +104,7 @@ async def check_node_connection(connection: AsyncConnection, node_id: int, next_
|
||||
return result.mappings().first() is not None
|
||||
|
||||
|
||||
async def get_nodes_for_deletion_ordered(connection: AsyncConnection, node_id: int) -> List[int]:
|
||||
async def get_nodes_for_deletion_ordered(connection: AsyncConnection, node_id: int) -> list[int]:
|
||||
"""
|
||||
Рекурсивно находит ВСЕ дочерние узлы и возвращает их ID в правильном порядке:
|
||||
от самых глубоких к корневым.
|
||||
@@ -129,7 +144,7 @@ async def get_nodes_for_deletion_ordered(connection: AsyncConnection, node_id: i
|
||||
return ordered_node_ids
|
||||
|
||||
|
||||
async def delete_ps_nodes_delete_handler(connection: AsyncConnection, node_ids: List[int]) -> List[int]:
|
||||
async def delete_ps_nodes_delete_handler(connection: AsyncConnection, node_ids: list[int]) -> list[int]:
|
||||
"""
|
||||
Очищает settings и удаляет ноды для каждого ps_id.
|
||||
Возвращает список успешно удаленных ID нод.
|
||||
@@ -162,7 +177,7 @@ async def delete_ps_nodes_delete_handler(connection: AsyncConnection, node_ids:
|
||||
return deleted_all
|
||||
|
||||
|
||||
async def remove_nodes_from_process_schema_settings(connection: AsyncConnection, ps_id: int, node_ids: List[int]):
|
||||
async def remove_nodes_from_process_schema_settings(connection: AsyncConnection, ps_id: int, node_ids: list[int]):
|
||||
"""
|
||||
Удаляет ноды из поля settings в таблице process_schema по списку node_ids.
|
||||
"""
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
from typing import List, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, Query, status
|
||||
from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
|
||||
@@ -23,12 +21,12 @@ api_router = APIRouter(
|
||||
async def get_all_account_endpoint(
|
||||
page: int = Query(1, description="Page number", gt=0),
|
||||
limit: int = Query(10, description="КNumber of items per page", gt=0),
|
||||
search: Optional[str] = Query(None, description="Search term to filter by name or login or email"),
|
||||
status_filter: Optional[List[str]] = Query(None, description="Filter by status"),
|
||||
role_filter: Optional[List[str]] = Query(None, description="Filter by role"),
|
||||
creator_id: Optional[int] = Query(None, description="Filter by creator id"),
|
||||
order_field: Optional[str] = Query("id", description="Field to sort by"),
|
||||
order_direction: Optional[str] = Query("asc", description="Sort direction (asc/desc)"),
|
||||
search: str | None = Query(None, description="Search term to filter by name or login or email"),
|
||||
status_filter: list[str] | None = Query(None, description="Filter by status"),
|
||||
role_filter: list[str] | None = Query(None, description="Filter by role"),
|
||||
creator_id: int | None = Query(None, description="Filter by creator id"),
|
||||
order_field: str | None = Query("id", description="Field to sort by"),
|
||||
order_direction: str | None = Query("asc", description="Sort direction (asc/desc)"),
|
||||
connection: AsyncConnection = Depends(get_connection_dep),
|
||||
current_user=Depends(get_current_user),
|
||||
):
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
from typing import List, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, Query, status
|
||||
from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
|
||||
@@ -23,12 +21,12 @@ api_router = APIRouter(
|
||||
async def get_all_list_events_endpoint(
|
||||
page: int = Query(1, description="Page number", gt=0),
|
||||
limit: int = Query(10, description="Number of items per page", gt=0),
|
||||
search: Optional[str] = Query(None, description="Search term to filter by title or name"),
|
||||
order_field: Optional[str] = Query("id", description="Field to sort by"),
|
||||
order_direction: Optional[str] = Query("asc", description="Sort direction (asc/desc)"),
|
||||
status_filter: Optional[List[str]] = Query(None, description="Filter by status"),
|
||||
state_filter: Optional[List[str]] = Query(None, description="Filter by state"),
|
||||
creator_id: Optional[int] = Query(None, description="Filter by creator id"),
|
||||
search: str | None = Query(None, description="Search term to filter by title or name"),
|
||||
order_field: str | None = Query("id", description="Field to sort by"),
|
||||
order_direction: str | None = Query("asc", description="Sort direction (asc/desc)"),
|
||||
status_filter: list[str] | None = Query(None, description="Filter by status"),
|
||||
state_filter: list[str] | None = Query(None, description="Filter by state"),
|
||||
creator_id: int | None = Query(None, description="Filter by creator id"),
|
||||
connection: AsyncConnection = Depends(get_connection_dep),
|
||||
current_user=Depends(get_current_user),
|
||||
):
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
from typing import List, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, Query, status, HTTPException
|
||||
from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
|
||||
@@ -25,14 +23,14 @@ api_router = APIRouter(
|
||||
async def get_all_process_schema_endpoint(
|
||||
page: int = Query(1, description="Page number", gt=0),
|
||||
limit: int = Query(10, description="Number of items per page", gt=0),
|
||||
search: Optional[str] = Query(None, description="Search term to filter by title or description"),
|
||||
order_field: Optional[str] = Query("id", description="Field to sort by"),
|
||||
order_direction: Optional[str] = Query("asc", description="Sort direction (asc/desc)"),
|
||||
status_filter: Optional[List[str]] = Query(None, description="Filter by status"),
|
||||
owner_id: Optional[List[str]] = Query(None, description="Filter by owner id"),
|
||||
search: str | None = Query(None, description="Search term to filter by title or description"),
|
||||
order_field: str | None = Query("id", description="Field to sort by"),
|
||||
order_direction: str | None = Query("asc", description="Sort direction (asc/desc)"),
|
||||
status_filter: list[str] | None = Query(None, description="Filter by status"),
|
||||
owner_id: list[str] | None = Query(None, description="Filter by owner id"),
|
||||
show_deleted: bool = Query(False, description="Show only deleted schemas"),
|
||||
connection: AsyncConnection = Depends(get_connection_dep),
|
||||
creator_id: Optional[int] = Query(None, description="Filter by creator id"),
|
||||
creator_id: int | None = Query(None, description="Filter by creator id"),
|
||||
current_user=Depends(get_current_user),
|
||||
):
|
||||
if show_deleted:
|
||||
@@ -75,16 +73,16 @@ async def get_all_process_schema_endpoint(
|
||||
return to_camel_dict(process_schema_page.model_dump())
|
||||
|
||||
|
||||
@api_router.get("/{process_schema_id}", dependencies=[Depends(bearer_schema)], response_model=ProcessSchema)
|
||||
@api_router.get("/{process_schema_id}", dependencies=[Depends(bearer_schema)], response_model=ProcessSchemaResponse)
|
||||
async def get_process_schema_endpoint(
|
||||
process_schema_id: int,
|
||||
connection: AsyncConnection = Depends(get_connection_dep),
|
||||
current_user=Depends(get_current_user),
|
||||
):
|
||||
service = ProcessSchemaService(connection)
|
||||
process_schema_validation = await service.get(process_schema_id)
|
||||
process_schema_response = await service.get(process_schema_id)
|
||||
|
||||
if process_schema_validation is None:
|
||||
if process_schema_response is None:
|
||||
raise create_operation_error(
|
||||
message="Process schema not found",
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
@@ -92,16 +90,9 @@ async def get_process_schema_endpoint(
|
||||
)
|
||||
|
||||
validator = UserRoleValidator(connection)
|
||||
await validator.validate_ownership(current_user, process_schema_validation.creator_id)
|
||||
await validator.validate_ownership(current_user, process_schema_response.process_schema.creator_id)
|
||||
|
||||
if process_schema_id is None:
|
||||
raise create_operation_error(
|
||||
message="Process schema not found",
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
details={"process_schema_id": process_schema_id},
|
||||
)
|
||||
|
||||
return to_camel_dict(process_schema_validation.model_dump())
|
||||
return to_camel_dict(process_schema_response.model_dump())
|
||||
|
||||
|
||||
@api_router.post("", dependencies=[Depends(bearer_schema)], response_model=ProcessSchemaResponse)
|
||||
@@ -125,9 +116,9 @@ async def update_process_schema_endpoint(
|
||||
current_user=Depends(get_current_user),
|
||||
):
|
||||
service = ProcessSchemaService(connection)
|
||||
process_schema_validation = await service.get(process_schema_id)
|
||||
process_schema_response = await service.get(process_schema_id)
|
||||
|
||||
if process_schema_validation is None:
|
||||
if process_schema_response is None:
|
||||
raise create_operation_error(
|
||||
message="Process schema not found",
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
@@ -135,14 +126,14 @@ async def update_process_schema_endpoint(
|
||||
)
|
||||
|
||||
validator = UserRoleValidator(connection)
|
||||
await validator.validate_ownership(current_user, process_schema_validation.creator_id)
|
||||
await validator.validate_ownership(current_user, process_schema_response.process_schema.creator_id)
|
||||
|
||||
updated_values = process_schema_update.model_dump(by_alias=True, exclude_none=True)
|
||||
|
||||
if not updated_values:
|
||||
return process_schema_validation
|
||||
return process_schema_response.process_schema
|
||||
|
||||
process_schema = await service.update(process_schema_id, updated_values, process_schema_validation)
|
||||
process_schema = await service.update(process_schema_id, updated_values, process_schema_response.process_schema)
|
||||
|
||||
return process_schema
|
||||
|
||||
@@ -154,9 +145,9 @@ async def delete_process_schema_endpoint(
|
||||
current_user=Depends(get_current_user),
|
||||
):
|
||||
service = ProcessSchemaService(connection)
|
||||
process_schema_validation = await service.get(process_schema_id)
|
||||
process_schema_response = await service.get(process_schema_id)
|
||||
|
||||
if process_schema_validation is None:
|
||||
if process_schema_response is None:
|
||||
raise create_operation_error(
|
||||
message="Process schema not found",
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
@@ -164,8 +155,8 @@ async def delete_process_schema_endpoint(
|
||||
)
|
||||
|
||||
validator = UserRoleValidator(connection)
|
||||
await validator.validate_ownership(current_user, process_schema_validation.creator_id)
|
||||
await validator.validate_ownership(current_user, process_schema_response.process_schema.creator_id)
|
||||
|
||||
await service.delete(process_schema_id, process_schema_validation)
|
||||
await service.delete(process_schema_id, process_schema_response.process_schema)
|
||||
|
||||
return HTTPException(status_code=status.HTTP_200_OK, detail="Process schema deleted successfully")
|
||||
|
||||
@@ -61,21 +61,6 @@ async def delete_ps_node_endpoint(
|
||||
details={"next_node_id": ps_node_delete_data.next_node_id},
|
||||
)
|
||||
|
||||
is_connected = await check_node_connection(
|
||||
connection, ps_node_delete_data.node_id, ps_node_delete_data.next_node_id, int(ps_node_delete_data.port)
|
||||
)
|
||||
|
||||
if not is_connected:
|
||||
raise create_validation_error(
|
||||
message="Node connection validation failed",
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
details={
|
||||
"node_id": ps_node_delete_data.node_id,
|
||||
"next_node_id": ps_node_delete_data.next_node_id,
|
||||
"port": ps_node_delete_data.port,
|
||||
},
|
||||
)
|
||||
|
||||
service = PsNodeService(connection)
|
||||
try:
|
||||
result = await service.delete(ps_node_delete_data.next_node_id)
|
||||
@@ -153,22 +138,6 @@ async def create_ps_node_endpoint(
|
||||
details={"parent_id": parent_id, "expected_ps_id": target_ps_id, "actual_ps_id": parent_node.ps_id},
|
||||
)
|
||||
|
||||
parent_port_number = node_instance_validated.parent_port_number
|
||||
|
||||
parent_settings = parent_node.settings or {}
|
||||
available_port_numbers = []
|
||||
|
||||
for key, value in parent_settings.items():
|
||||
if "port" in key.lower() and isinstance(value, int):
|
||||
available_port_numbers.append(value)
|
||||
|
||||
if parent_port_number not in available_port_numbers:
|
||||
raise create_validation_error(
|
||||
message="Parent port number is invalid",
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
details={"parent_id": parent_id, "parent_settings": parent_settings},
|
||||
)
|
||||
|
||||
service = PsNodeService(connection)
|
||||
try:
|
||||
ps_node_front_response = await service.create(ps_node.data, ps_node.links, user_validation.id)
|
||||
|
||||
@@ -2,14 +2,14 @@
|
||||
Обработчики ошибок для API.
|
||||
"""
|
||||
|
||||
from typing import Optional, Dict, Any
|
||||
from typing import Any
|
||||
from fastapi import HTTPException
|
||||
|
||||
from .error_model.error_types import ServerError, AccessError, OperationError, ValidationError, ErrorType
|
||||
|
||||
|
||||
def handle_api_error(
|
||||
error_type: ErrorType, message: str, status_code: int, details: Optional[Dict[str, Any]] = None
|
||||
error_type: ErrorType, message: str, status_code: int, details: dict[str, Any] | None = None
|
||||
) -> HTTPException:
|
||||
"""
|
||||
Функция для создания HTTPException с правильной структурой ошибки.
|
||||
@@ -30,25 +30,21 @@ def handle_api_error(
|
||||
return HTTPException(status_code=status_code, detail=error.model_dump(mode="json"))
|
||||
|
||||
|
||||
def create_server_error(
|
||||
message: str, status_code: int = 500, details: Optional[Dict[str, Any]] = None
|
||||
) -> HTTPException:
|
||||
def create_server_error(message: str, status_code: int = 500, details: dict[str, Any] | None = None) -> HTTPException:
|
||||
return handle_api_error(error_type=ErrorType.SERVER, message=message, status_code=status_code, details=details)
|
||||
|
||||
|
||||
def create_access_error(
|
||||
message: str, status_code: int = 403, details: Optional[Dict[str, Any]] = None
|
||||
) -> HTTPException:
|
||||
def create_access_error(message: str, status_code: int = 403, details: dict[str, Any] | None = None) -> HTTPException:
|
||||
return handle_api_error(error_type=ErrorType.ACCESS, message=message, status_code=status_code, details=details)
|
||||
|
||||
|
||||
def create_operation_error(
|
||||
message: str, status_code: int = 400, details: Optional[Dict[str, Any]] = None
|
||||
message: str, status_code: int = 400, details: dict[str, Any] | None = None
|
||||
) -> HTTPException:
|
||||
return handle_api_error(error_type=ErrorType.OPERATION, message=message, status_code=status_code, details=details)
|
||||
|
||||
|
||||
def create_validation_error(
|
||||
message: str, status_code: int = 422, details: Optional[Dict[str, Any]] = None
|
||||
message: str, status_code: int = 422, details: dict[str, Any] | None = None
|
||||
) -> HTTPException:
|
||||
return handle_api_error(error_type=ErrorType.VALIDATION, message=message, status_code=status_code, details=details)
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
"""
|
||||
|
||||
from enum import Enum
|
||||
from typing import Optional, Dict, Any
|
||||
from typing import Any
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ class BaseError(BaseModel):
|
||||
|
||||
error_type: ErrorType
|
||||
message: str
|
||||
details: Optional[Dict[str, Any]] = None
|
||||
details: dict[str, Any] | None = None
|
||||
|
||||
|
||||
class ServerError(BaseError):
|
||||
@@ -58,4 +58,4 @@ class ValidationError(BaseError):
|
||||
"""
|
||||
|
||||
error_type: ErrorType = ErrorType.VALIDATION
|
||||
field_errors: Optional[Dict[str, str]] = None
|
||||
field_errors: dict[str, str] | None = None
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from orm.tables.account import AccountRole, AccountStatus
|
||||
from pydantic import EmailStr, Field
|
||||
@@ -8,13 +7,13 @@ from api.schemas.base import Base
|
||||
|
||||
|
||||
class User(Base):
|
||||
id: Optional[int] = None
|
||||
id: int | None = None
|
||||
name: str = Field(..., max_length=100)
|
||||
login: str = Field(..., max_length=100)
|
||||
email: Optional[EmailStr] = Field(None, max_length=100) # Электронная почта (может быть None)
|
||||
bind_tenant_id: Optional[str] = Field(None, max_length=40)
|
||||
email: EmailStr | None = Field(None, max_length=100) # Электронная почта (может быть None)
|
||||
bind_tenant_id: str | None = Field(None, max_length=40)
|
||||
role: AccountRole
|
||||
meta: dict
|
||||
creator_id: Optional[int] = None
|
||||
creator_id: int | None = None
|
||||
created_at: datetime
|
||||
status: AccountStatus
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from orm.tables.account import KeyStatus, KeyType
|
||||
from pydantic import Field
|
||||
@@ -10,8 +9,8 @@ from api.schemas.base import Base
|
||||
class AccountKeyring(Base):
|
||||
owner_id: int
|
||||
key_type: KeyType
|
||||
key_id: Optional[str] = Field(None, max_length=40)
|
||||
key_id: str | None = Field(None, max_length=40)
|
||||
key_value: str = Field(..., max_length=255)
|
||||
created_at: datetime
|
||||
expiry: Optional[datetime] = None
|
||||
expiry: datetime | None = None
|
||||
status: KeyStatus
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
from datetime import datetime
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from orm.tables.account import AccountRole, AccountStatus
|
||||
from pydantic import EmailStr, Field, TypeAdapter
|
||||
@@ -8,24 +7,24 @@ from api.schemas.base import Base
|
||||
|
||||
|
||||
class UserUpdate(Base):
|
||||
name: Optional[str] = Field(None, max_length=100)
|
||||
login: Optional[str] = Field(None, max_length=100)
|
||||
email: Optional[EmailStr] = None
|
||||
password: Optional[str] = None
|
||||
bind_tenant_id: Optional[str] = Field(None, max_length=40)
|
||||
role: Optional[AccountRole] = None
|
||||
meta: Optional[dict] = None
|
||||
status: Optional[AccountStatus] = None
|
||||
name: str | None = Field(None, max_length=100)
|
||||
login: str | None = Field(None, max_length=100)
|
||||
email: EmailStr | None = None
|
||||
password: str | None = None
|
||||
bind_tenant_id: str | None = Field(None, max_length=40)
|
||||
role: AccountRole | None = None
|
||||
meta: dict | None = None
|
||||
status: AccountStatus | None = None
|
||||
|
||||
|
||||
class UserCreate(Base):
|
||||
name: str = Field(max_length=100)
|
||||
login: str = Field(max_length=100)
|
||||
email: Optional[EmailStr] = None
|
||||
password: Optional[str] = None
|
||||
bind_tenant_id: Optional[str] = Field(None, max_length=40)
|
||||
email: EmailStr | None = None
|
||||
password: str | None = None
|
||||
bind_tenant_id: str | None = Field(None, max_length=40)
|
||||
role: AccountRole
|
||||
meta: Optional[dict] = None
|
||||
meta: dict | None = None
|
||||
status: AccountStatus
|
||||
|
||||
|
||||
@@ -33,28 +32,28 @@ class AllUser(Base):
|
||||
id: int
|
||||
name: str
|
||||
login: str
|
||||
email: Optional[EmailStr] = None
|
||||
bind_tenant_id: Optional[str] = None
|
||||
email: EmailStr | None = None
|
||||
bind_tenant_id: str | None = None
|
||||
role: AccountRole
|
||||
meta: Optional[dict] = None
|
||||
creator_id: Optional[int] = None
|
||||
meta: dict | None = None
|
||||
creator_id: int | None = None
|
||||
created_at: datetime
|
||||
status: AccountStatus
|
||||
|
||||
|
||||
class AllUserResponse(Base):
|
||||
users: List[AllUser]
|
||||
users: list[AllUser]
|
||||
amount_count: int
|
||||
amount_pages: int
|
||||
current_page: int
|
||||
limit: int
|
||||
|
||||
|
||||
all_user_adapter = TypeAdapter(List[AllUser])
|
||||
all_user_adapter = TypeAdapter(list[AllUser])
|
||||
|
||||
|
||||
class UserFilterDTO(Base):
|
||||
pagination: Dict[str, int]
|
||||
search: Optional[str] = None
|
||||
order: Optional[Dict[str, str]] = None
|
||||
filters: Optional[Dict[str, List[str]]] = None
|
||||
pagination: dict[str, int]
|
||||
search: str | None = None
|
||||
order: dict[str, str] | None = None
|
||||
filters: dict[str, list[str]] | None = None
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
from typing import Optional
|
||||
|
||||
from orm.tables.account import KeyStatus, KeyType
|
||||
from pydantic import Field
|
||||
|
||||
@@ -7,7 +5,7 @@ from api.schemas.base import Base
|
||||
|
||||
|
||||
class AccountKeyringUpdate(Base):
|
||||
owner_id: Optional[int] = None
|
||||
key_type: Optional[KeyType] = None
|
||||
key_value: Optional[str] = Field(None, max_length=255)
|
||||
status: Optional[KeyStatus] = None
|
||||
owner_id: int | None = None
|
||||
key_type: KeyType | None = None
|
||||
key_value: str | None = Field(None, max_length=255)
|
||||
status: KeyStatus | None = None
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, List, Optional
|
||||
from typing import Any
|
||||
|
||||
from orm.tables.events import EventState, EventStatus
|
||||
from pydantic import Field, TypeAdapter
|
||||
@@ -8,11 +8,11 @@ from api.schemas.base import Base
|
||||
|
||||
|
||||
class ListEventUpdate(Base):
|
||||
name: Optional[str] = Field(None, max_length=40)
|
||||
title: Optional[str] = Field(None, max_length=64)
|
||||
schema_: Optional[Dict[str, Any]] = Field(None, alias="schema")
|
||||
state: Optional[EventState] = None
|
||||
status: Optional[EventStatus] = None
|
||||
name: str | None = Field(None, max_length=40)
|
||||
title: str | None = Field(None, max_length=64)
|
||||
schema_: dict[str, Any] | None = Field(None, alias="schema")
|
||||
state: EventState | None = None
|
||||
status: EventStatus | None = None
|
||||
|
||||
|
||||
class AllListEvent(Base):
|
||||
@@ -21,24 +21,24 @@ class AllListEvent(Base):
|
||||
title: str
|
||||
creator_id: int
|
||||
created_at: datetime
|
||||
schema_: Dict[str, Any] = Field(default={}, alias="schema")
|
||||
schema_: dict[str, Any] = Field(default={}, alias="schema")
|
||||
state: EventState
|
||||
status: EventStatus
|
||||
|
||||
|
||||
class AllListEventResponse(Base):
|
||||
list_event: List[AllListEvent]
|
||||
list_event: list[AllListEvent]
|
||||
amount_count: int
|
||||
amount_pages: int
|
||||
current_page: int
|
||||
limit: int
|
||||
|
||||
|
||||
all_list_event_adapter = TypeAdapter(List[AllListEvent])
|
||||
all_list_event_adapter = TypeAdapter(list[AllListEvent])
|
||||
|
||||
|
||||
class ListEventFilterDTO(Base):
|
||||
pagination: Dict[str, int]
|
||||
search: Optional[str] = None
|
||||
order: Optional[Dict[str, str]] = None
|
||||
filters: Optional[Dict[str, List[str]]] = None
|
||||
pagination: dict[str, int]
|
||||
search: str | None = None
|
||||
order: dict[str, str] | None = None
|
||||
filters: dict[str, list[str]] | None = None
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, List, Optional
|
||||
from typing import Any
|
||||
|
||||
from orm.tables.process import ProcessStatus
|
||||
from pydantic import Field, TypeAdapter
|
||||
@@ -8,11 +8,11 @@ from api.schemas.base import Base
|
||||
|
||||
|
||||
class ProcessSchemaUpdate(Base):
|
||||
title: Optional[str] = Field(None, max_length=100)
|
||||
description: Optional[str] = None
|
||||
# owner_id: Optional[int] = None
|
||||
settings: Optional[Dict[str, Any]] = None
|
||||
status: Optional[ProcessStatus] = None
|
||||
title: str | None = Field(None, max_length=100)
|
||||
description: str | None = None
|
||||
# owner_id: int | None = None
|
||||
settings: dict[str, Any] | None = None
|
||||
status: ProcessStatus | None = None
|
||||
|
||||
|
||||
class AllProcessSchema(Base):
|
||||
@@ -22,23 +22,23 @@ class AllProcessSchema(Base):
|
||||
owner_id: int
|
||||
creator_id: int
|
||||
created_at: datetime
|
||||
settings: Dict[str, Any]
|
||||
settings: dict[str, Any]
|
||||
status: ProcessStatus
|
||||
|
||||
|
||||
class AllProcessSchemaResponse(Base):
|
||||
process_schema: List[AllProcessSchema]
|
||||
process_schema: list[AllProcessSchema]
|
||||
amount_count: int
|
||||
amount_pages: int
|
||||
current_page: int
|
||||
limit: int
|
||||
|
||||
|
||||
all_process_schema_adapter = TypeAdapter(List[AllProcessSchema])
|
||||
all_process_schema_adapter = TypeAdapter(list[AllProcessSchema])
|
||||
|
||||
|
||||
class ProcessSchemaFilterDTO(Base):
|
||||
pagination: Dict[str, int]
|
||||
search: Optional[str] = None
|
||||
order: Optional[Dict[str, str]] = None
|
||||
filters: Optional[Dict[str, List[str]]] = None
|
||||
pagination: dict[str, int]
|
||||
search: str | None = None
|
||||
order: dict[str, str] | None = None
|
||||
filters: dict[str, list[str]] | None = None
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict
|
||||
from typing import Any
|
||||
|
||||
from orm.tables.events import EventState, EventStatus
|
||||
from pydantic import Field
|
||||
@@ -13,6 +13,6 @@ class ListEvent(Base):
|
||||
title: str = Field(..., max_length=64)
|
||||
creator_id: int
|
||||
created_at: datetime
|
||||
schema_: Dict[str, Any] = Field(..., alias="schema")
|
||||
schema_: dict[str, Any] = Field(..., alias="schema")
|
||||
state: EventState
|
||||
status: EventStatus
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict
|
||||
from typing import Any
|
||||
|
||||
from orm.tables.process import NodeStatus
|
||||
from pydantic import Field
|
||||
@@ -13,7 +13,7 @@ class NodeLink(Base):
|
||||
node_id: int
|
||||
link_point_id: int
|
||||
next_node_id: int
|
||||
settings: Dict[str, Any]
|
||||
settings: dict[str, Any]
|
||||
creator_id: int
|
||||
created_at: datetime
|
||||
status: NodeStatus
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, Optional, List
|
||||
from typing import Any
|
||||
|
||||
from orm.tables.process import ProcessStatus, NodeType
|
||||
from orm.tables.process import ProcessStatus
|
||||
from pydantic import Field
|
||||
|
||||
from api.schemas.base import Base
|
||||
@@ -15,26 +15,10 @@ class ProcessSchema(Base):
|
||||
owner_id: int
|
||||
creator_id: int
|
||||
created_at: datetime
|
||||
settings: Dict[str, Any]
|
||||
settings: dict[str, Any]
|
||||
status: ProcessStatus
|
||||
|
||||
|
||||
class ProcessSchemaSettingsNodeLink(Base):
|
||||
id: int
|
||||
link_name: str
|
||||
parent_port_number: int
|
||||
from_id: int
|
||||
to_id: int
|
||||
|
||||
|
||||
class ProcessSchemaSettingsNode(Base):
|
||||
id: int
|
||||
node_type: NodeType
|
||||
from_node: Optional[Dict[str, Any]] = None
|
||||
data: Dict[str, Any] # Переименовано с 'from' на 'from_node'
|
||||
links: Optional[List[Dict[str, Any]]] = None
|
||||
|
||||
|
||||
class ProcessSchemaResponse(Base):
|
||||
process_schema: ProcessSchema
|
||||
node_listen: Ps_NodeFrontResponse
|
||||
nodes: list[Ps_NodeFrontResponse]
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict
|
||||
|
||||
from api.schemas.base import Base
|
||||
|
||||
|
||||
class ProcessStatusSchema(Base):
|
||||
id: int
|
||||
version: int
|
||||
snapshot: Dict[str, Any]
|
||||
owner_id: int
|
||||
created_at: datetime
|
||||
is_last: int
|
||||
@@ -1,5 +1,5 @@
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, Optional, List
|
||||
from typing import Any
|
||||
|
||||
from orm.tables.process import NodeStatus, NodeType
|
||||
|
||||
@@ -14,8 +14,8 @@ class Ps_NodeDeleteRequest(Base):
|
||||
|
||||
|
||||
class Ps_NodeRequest(Base):
|
||||
data: Dict[str, Any]
|
||||
links: Dict[str, Any]
|
||||
data: dict[str, Any]
|
||||
links: dict[str, Any]
|
||||
|
||||
|
||||
class Ps_Node(Base):
|
||||
@@ -28,21 +28,6 @@ class Ps_Node(Base):
|
||||
status: NodeStatus
|
||||
|
||||
|
||||
class Ps_NodeFrontResponseLink(Base):
|
||||
id: int
|
||||
link_name: str
|
||||
parent_port_number: int
|
||||
from_id: int
|
||||
to_id: int
|
||||
|
||||
|
||||
class Ps_NodeFrontResponseNode(Base):
|
||||
id: int
|
||||
node_type: NodeType
|
||||
data: Dict[str, Any] # Переименовано с 'from' на 'from_node'
|
||||
|
||||
|
||||
class Ps_NodeFrontResponse(Base):
|
||||
description: Optional[Dict[str, Any]] = None
|
||||
node: Optional[Ps_NodeFrontResponseNode] = None
|
||||
links: Optional[List[Dict[str, Any]]] = None
|
||||
node: dict[str, Any] | None = None
|
||||
link: list[dict[str, Any]] | None = None
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import HTTPException, Request
|
||||
from orm.tables.account import AccountStatus
|
||||
from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
@@ -15,7 +13,7 @@ async def get_current_user(request: Request) -> str | HTTPException:
|
||||
return request.state.current_user
|
||||
|
||||
|
||||
async def authenticate_user(connection: AsyncConnection, username: str, password: str) -> Optional[AllUser]:
|
||||
async def authenticate_user(connection: AsyncConnection, username: str, password: str) -> AllUser | None:
|
||||
sql_user, sql_password = await get_user(connection, username)
|
||||
|
||||
if not sql_user or sql_user.status != AccountStatus.ACTIVE:
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
from typing import Optional
|
||||
from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
from orm.tables.account import AccountStatus
|
||||
|
||||
@@ -19,13 +18,13 @@ class AccountService:
|
||||
def __init__(self, connection: AsyncConnection):
|
||||
self.connection = connection
|
||||
|
||||
async def list(self, filter_dto: UserFilterDTO) -> Optional[AllUserResponse]:
|
||||
async def list(self, filter_dto: UserFilterDTO) -> AllUserResponse | None:
|
||||
"""
|
||||
Получает список пользователей с пагинацией и фильтрацией.
|
||||
"""
|
||||
return await get_user_account_page_DTO(self.connection, filter_dto)
|
||||
|
||||
async def get(self, user_id: int) -> Optional[User]:
|
||||
async def get(self, user_id: int) -> User | None:
|
||||
"""
|
||||
Получает пользователя по ID.
|
||||
"""
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
from typing import Optional
|
||||
from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
from orm.tables.account import KeyStatus
|
||||
|
||||
@@ -15,7 +14,7 @@ class KeyringService:
|
||||
def __init__(self, connection: AsyncConnection):
|
||||
self.connection = connection
|
||||
|
||||
async def get(self, key_id: str) -> Optional[AccountKeyring]:
|
||||
async def get(self, key_id: str) -> AccountKeyring | None:
|
||||
"""
|
||||
Получает keyring по key_id.
|
||||
"""
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
from typing import Optional
|
||||
from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
from orm.tables.events import EventStatus
|
||||
|
||||
@@ -19,25 +18,25 @@ class ListEventsService:
|
||||
def __init__(self, connection: AsyncConnection):
|
||||
self.connection = connection
|
||||
|
||||
async def list(self, filter_dto: ListEventFilterDTO) -> Optional[AllListEventResponse]:
|
||||
async def list(self, filter_dto: ListEventFilterDTO) -> AllListEventResponse | None:
|
||||
"""
|
||||
Получает список событий с пагинацией и фильтрацией.
|
||||
"""
|
||||
return await get_list_events_page_DTO(self.connection, filter_dto)
|
||||
|
||||
async def get(self, list_events_id: int) -> Optional[ListEvent]:
|
||||
async def get(self, list_events_id: int) -> ListEvent | None:
|
||||
"""
|
||||
Получает событие по ID.
|
||||
"""
|
||||
return await get_list_events_by_id(self.connection, list_events_id)
|
||||
|
||||
async def get_by_name(self, name: str) -> Optional[ListEvent]:
|
||||
async def get_by_name(self, name: str) -> ListEvent | None:
|
||||
"""
|
||||
Получает событие по name.
|
||||
"""
|
||||
return await get_list_events_by_name(self.connection, name)
|
||||
|
||||
async def create(self, list_events_data: ListEventUpdate, creator_id: int) -> Optional[ListEvent]:
|
||||
async def create(self, list_events_data: ListEventUpdate, creator_id: int) -> ListEvent | None:
|
||||
"""
|
||||
Создаёт новое событие.
|
||||
"""
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from typing import Optional, Dict, Any
|
||||
from typing import Any
|
||||
from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
from orm.tables.process import ProcessStatus
|
||||
|
||||
@@ -7,12 +7,12 @@ from api.db.logic.process_schema import (
|
||||
get_process_schema_page_DTO,
|
||||
update_process_schema_by_id,
|
||||
create_process_schema,
|
||||
update_process_schema_settings_by_id,
|
||||
)
|
||||
from api.db.logic.ps_node import create_ps_node_schema
|
||||
from api.schemas.process.process_schema import ProcessSchema, ProcessSchemaSettingsNode
|
||||
from api.db.logic.ps_node import create_ps_node_schema, get_all_ps_nodes_by_ps_id
|
||||
from api.db.logic.node_link import get_all_node_links_by_next_node_ids
|
||||
from api.schemas.process.process_schema import ProcessSchema, ProcessSchemaResponse
|
||||
from api.schemas.endpoints.process_schema import AllProcessSchemaResponse, ProcessSchemaFilterDTO, ProcessSchemaUpdate
|
||||
from api.schemas.process.ps_node import Ps_NodeFrontResponse, Ps_NodeFrontResponseNode
|
||||
from api.schemas.process.ps_node import Ps_NodeFrontResponse
|
||||
from orm.tables.process import NodeType
|
||||
from core import VorkNodeRegistry
|
||||
from model_nodes import ListenNodeData
|
||||
@@ -25,19 +25,48 @@ class ProcessSchemaService:
|
||||
def __init__(self, connection: AsyncConnection):
|
||||
self.connection = connection
|
||||
|
||||
async def list(self, filter_dto: ProcessSchemaFilterDTO) -> Optional[AllProcessSchemaResponse]:
|
||||
async def list(self, filter_dto: ProcessSchemaFilterDTO) -> AllProcessSchemaResponse | None:
|
||||
"""
|
||||
Получает список схем процессов с пагинацией и фильтрацией.
|
||||
"""
|
||||
return await get_process_schema_page_DTO(self.connection, filter_dto)
|
||||
|
||||
async def get(self, process_schema_id: int) -> Optional[ProcessSchema]:
|
||||
async def get(self, process_schema_id: int) -> ProcessSchemaResponse | None:
|
||||
"""
|
||||
Получает схему процесса по ID.
|
||||
Получает схему процесса по ID со всеми нодами и линками.
|
||||
"""
|
||||
return await get_process_schema_by_id(self.connection, process_schema_id)
|
||||
process_schema = await get_process_schema_by_id(self.connection, process_schema_id)
|
||||
if process_schema is None:
|
||||
return None
|
||||
|
||||
async def create(self, creator_id: int) -> Dict[str, Any]:
|
||||
nodes = await get_all_ps_nodes_by_ps_id(self.connection, process_schema_id)
|
||||
|
||||
node_ids = [node.id for node in nodes]
|
||||
all_links = await get_all_node_links_by_next_node_ids(self.connection, node_ids)
|
||||
|
||||
links_by_node_id = {}
|
||||
for link in all_links:
|
||||
if link.next_node_id not in links_by_node_id:
|
||||
links_by_node_id[link.next_node_id] = []
|
||||
links_by_node_id[link.next_node_id].append(link)
|
||||
|
||||
nodes_response = []
|
||||
for node in nodes:
|
||||
node_links = links_by_node_id.get(node.id, [])
|
||||
links_list = [{"link": link.model_dump()} for link in node_links]
|
||||
|
||||
ps_node_front_response = Ps_NodeFrontResponse(
|
||||
node=node.model_dump(),
|
||||
link=links_list,
|
||||
)
|
||||
nodes_response.append(ps_node_front_response)
|
||||
|
||||
return ProcessSchemaResponse(
|
||||
process_schema=process_schema,
|
||||
nodes=nodes_response,
|
||||
)
|
||||
|
||||
async def create(self, creator_id: int) -> dict[str, Any]:
|
||||
"""
|
||||
Создаёт новую схему процесса с начальной нодой LISTEN.
|
||||
"""
|
||||
@@ -63,35 +92,27 @@ class ProcessSchemaService:
|
||||
|
||||
validated_start_schema = start_node.validate()
|
||||
|
||||
db_start_schema = await create_ps_node_schema(self.connection, validated_start_schema, creator_id)
|
||||
|
||||
node = ProcessSchemaSettingsNode(
|
||||
id=db_start_schema.id,
|
||||
node_type=NodeType.LISTEN.value,
|
||||
data=validated_start_schema.data.model_dump(),
|
||||
from_node=None,
|
||||
links=None,
|
||||
)
|
||||
|
||||
settings_dict = {"node": node.model_dump(mode="json")}
|
||||
|
||||
await update_process_schema_settings_by_id(self.connection, process_schema_new.id, settings_dict)
|
||||
|
||||
process_schema_new = await get_process_schema_by_id(self.connection, node_id)
|
||||
|
||||
ps_node_front_response = Ps_NodeFrontResponse(
|
||||
description=node_descriptor.model_dump(),
|
||||
node=Ps_NodeFrontResponseNode(
|
||||
id=db_start_schema.id, node_type=NodeType.LISTEN.value, data=validated_start_schema.data.model_dump()
|
||||
),
|
||||
link=None,
|
||||
)
|
||||
|
||||
response_data = {
|
||||
"process_schema": process_schema_new.model_dump(),
|
||||
"node_listen": ps_node_front_response.model_dump(),
|
||||
start_settings_payload = {
|
||||
**node_descriptor.model_dump(),
|
||||
**validated_start_schema.data.model_dump(),
|
||||
}
|
||||
|
||||
db_start_schema = await create_ps_node_schema(
|
||||
self.connection,
|
||||
validated_start_schema,
|
||||
node_descriptor,
|
||||
creator_id,
|
||||
start_settings_payload,
|
||||
)
|
||||
|
||||
ps_node_front_response = Ps_NodeFrontResponse(
|
||||
node=db_start_schema.model_dump(),
|
||||
link=[],
|
||||
)
|
||||
response_data = {
|
||||
"process_schema": process_schema_new.model_dump(),
|
||||
"nodes": [ps_node_front_response], # Список объектов, а не словарей
|
||||
}
|
||||
return response_data
|
||||
|
||||
async def update(self, process_schema_id: int, update_data: dict, process_schema: ProcessSchema) -> ProcessSchema:
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
from typing import Optional
|
||||
from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
|
||||
from api.db.logic.account import get_user_by_id, get_user_by_login, update_user_by_id
|
||||
@@ -11,7 +10,7 @@ class ProfileService:
|
||||
def __init__(self, connection: AsyncConnection):
|
||||
self.connection = connection
|
||||
|
||||
async def get_by_login(self, login: str) -> Optional[User]:
|
||||
async def get_by_login(self, login: str) -> User | None:
|
||||
"""
|
||||
Получает пользователя по логину.
|
||||
"""
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from typing import List, Dict, Any
|
||||
from typing import Any
|
||||
from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
|
||||
from api.db.logic.ps_node import (
|
||||
@@ -7,12 +7,9 @@ from api.db.logic.ps_node import (
|
||||
create_ps_node_schema,
|
||||
)
|
||||
from api.db.logic.node_link import get_last_link_name_by_node_id, create_node_link_schema
|
||||
from api.db.logic.process_schema import update_process_schema_settings_by_id
|
||||
from api.schemas.process.process_schema import ProcessSchemaSettingsNodeLink, ProcessSchemaSettingsNode
|
||||
from api.schemas.process.ps_node import Ps_NodeFrontResponse, Ps_NodeFrontResponseNode
|
||||
from api.schemas.process.ps_node import Ps_NodeFrontResponse
|
||||
from core import VorkNodeRegistry, VorkNodeLink
|
||||
from model_nodes import VorkNodeLinkData
|
||||
from api.utils.to_camel_dict import to_camel_dict
|
||||
|
||||
|
||||
class PsNodeService:
|
||||
@@ -21,7 +18,7 @@ class PsNodeService:
|
||||
def __init__(self, connection: AsyncConnection):
|
||||
self.connection = connection
|
||||
|
||||
async def delete(self, next_node_id: int) -> Dict[str, List[int]]:
|
||||
async def delete(self, next_node_id: int) -> dict[str, list[int]]:
|
||||
"""
|
||||
Удаляет ноды в правильном порядке.
|
||||
"""
|
||||
@@ -33,7 +30,7 @@ class PsNodeService:
|
||||
}
|
||||
|
||||
async def create(
|
||||
self, ps_node_data: Dict[str, Any], links: Dict[str, Any], creator_id: int
|
||||
self, ps_node_data: dict[str, Any], links: dict[str, Any], creator_id: int
|
||||
) -> Ps_NodeFrontResponse:
|
||||
"""
|
||||
Создаёт новую ноду с линком и обновляет настройки схемы процесса.
|
||||
@@ -45,7 +42,7 @@ class PsNodeService:
|
||||
node_instance = vork_node(data=ps_node_data, links=links)
|
||||
node_instance_validated = node_instance.validate()
|
||||
|
||||
db_ps_node = await create_ps_node_schema(self.connection, node_instance_validated, creator_id)
|
||||
db_ps_node = await create_ps_node_schema(self.connection, node_instance_validated, node_descriptor, creator_id)
|
||||
link_name = await get_last_link_name_by_node_id(self.connection, db_ps_node.ps_id)
|
||||
|
||||
link_data = VorkNodeLinkData(
|
||||
@@ -60,33 +57,9 @@ class PsNodeService:
|
||||
|
||||
db_node_link = await create_node_link_schema(self.connection, validated_link, creator_id)
|
||||
|
||||
links_settings = ProcessSchemaSettingsNodeLink(
|
||||
id=db_node_link.id,
|
||||
link_name=db_node_link.link_name,
|
||||
parent_port_number=db_node_link.link_point_id,
|
||||
from_id=db_node_link.node_id,
|
||||
to_id=db_node_link.next_node_id,
|
||||
)
|
||||
|
||||
node_settings = ProcessSchemaSettingsNode(
|
||||
id=db_ps_node.id,
|
||||
node_type=db_ps_node.node_type,
|
||||
data=node_instance_validated.data.model_dump(),
|
||||
from_node=None,
|
||||
links=[{"links": links_settings.model_dump()}],
|
||||
)
|
||||
|
||||
settings_dict = {"node": node_settings.model_dump(mode="json")}
|
||||
await update_process_schema_settings_by_id(self.connection, db_ps_node.ps_id, settings_dict)
|
||||
|
||||
ps_node_front_response = Ps_NodeFrontResponse(
|
||||
description=node_descriptor.model_dump(),
|
||||
node=Ps_NodeFrontResponseNode(
|
||||
id=db_ps_node.id,
|
||||
node_type=db_ps_node.node_type,
|
||||
data=to_camel_dict(node_instance_validated.data.model_dump()),
|
||||
),
|
||||
links=[{"links": links_settings.model_dump()}],
|
||||
node=db_ps_node.model_dump(),
|
||||
link=[{"link": db_node_link.model_dump()}],
|
||||
)
|
||||
|
||||
return ps_node_front_response
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import json
|
||||
from pathlib import Path
|
||||
from typing import Dict
|
||||
|
||||
|
||||
# Путь к файлу счётчика (в корне проекта)
|
||||
@@ -17,7 +16,7 @@ def get_node_counter() -> int:
|
||||
"""
|
||||
|
||||
if not COUNTER_FILE_PATH.exists():
|
||||
initial_data: Dict[str, int] = {"node_counter": 0}
|
||||
initial_data: dict[str, int] = {"node_counter": 0}
|
||||
with open(COUNTER_FILE_PATH, "w", encoding="utf-8") as f:
|
||||
json.dump(initial_data, f, indent=2, ensure_ascii=False)
|
||||
return 0
|
||||
@@ -45,7 +44,7 @@ def increment_node_counter() -> int:
|
||||
|
||||
new_value = current_value + 1
|
||||
|
||||
data: Dict[str, int] = {"node_counter": new_value}
|
||||
data: dict[str, int] = {"node_counter": new_value}
|
||||
with open(COUNTER_FILE_PATH, "w", encoding="utf-8") as f:
|
||||
json.dump(data, f, indent=2, ensure_ascii=False)
|
||||
|