4 Commits

Author SHA1 Message Date
TheNoxium
a060f46e0a feat: delete ps node 2025-10-22 16:01:22 +05:00
e4a9fe6d01 Merge pull request 'feat: CRUD process schema' (#19) from VORKOUT-21 into master
Reviewed-on: #19
Reviewed-by: cyrussmeat <dr.cyrill@gmail.com>
Reviewed-by: Vladislav Syrochkin <vlad.dev@heado.ru>
2025-10-17 12:28:52 +05:00
TheNoxium
bfcc4651e1 fix: search for ps by id 2025-10-15 17:50:39 +05:00
TheNoxium
3dfae3235d feat: CRUD process schema 2025-10-13 11:50:57 +05:00
18 changed files with 603 additions and 213 deletions

View File

@@ -11,20 +11,18 @@ from orm.tables.process import ps_node_table, node_link_table
from orm.tables.process import NodeLinkStatus
async def get_last_link_name_by_node_id(
connection: AsyncConnection, ps_id: int
) -> Optional[str]:
async def get_last_link_name_by_node_id(connection: AsyncConnection, ps_id: int) -> Optional[str]:
"""
Получает link_name из последней записи node_link по ps_id.
Находит все node_id в ps_node по ps_id, затем ищет связи в node_link
и возвращает link_name из самой последней записи.
"""
query = select(node_link_table.c.link_name).where(
node_link_table.c.node_id.in_(
select(ps_node_table.c.id).where(ps_node_table.c.ps_id == ps_id)
)
).order_by(desc(node_link_table.c.created_at)).limit(1)
query = (
select(node_link_table.c.link_name)
.where(node_link_table.c.node_id.in_(select(ps_node_table.c.id).where(ps_node_table.c.ps_id == ps_id)))
.order_by(desc(node_link_table.c.created_at))
.limit(1)
)
result = await connection.execute(query)
link_name = result.scalar_one_or_none()
@@ -38,12 +36,15 @@ async def get_last_node_link_by_creator_and_ps_id(
"""
Получает последнюю созданную node_link для данного создателя и процесса.
"""
query = select(node_link_table).where(
node_link_table.c.creator_id == creator_id,
node_link_table.c.node_id.in_(
select(ps_node_table.c.id).where(ps_node_table.c.id == node_link_id)
query = (
select(node_link_table)
.where(
node_link_table.c.creator_id == creator_id,
node_link_table.c.node_id.in_(select(ps_node_table.c.id).where(ps_node_table.c.id == node_link_id)),
)
).order_by(desc(node_link_table.c.created_at)).limit(1)
.order_by(desc(node_link_table.c.created_at))
.limit(1)
)
node_link_db_cursor = await connection.execute(query)
node_link_data = node_link_db_cursor.mappings().one_or_none()
@@ -55,7 +56,9 @@ async def get_last_node_link_by_creator_and_ps_id(
async def create_node_link_schema(
connection: AsyncConnection, validated_link_schema, creator_id: int,
connection: AsyncConnection,
validated_link_schema,
creator_id: int,
) -> Optional[NodeLink]:
"""
Создает новое поле в таблице process_schema_table.

View File

@@ -6,7 +6,7 @@ from datetime import datetime, timezone
from sqlalchemy import insert, select, func, or_, and_, asc, desc
from sqlalchemy.ext.asyncio import AsyncConnection
from orm.tables.process import process_schema_table
from orm.tables.process import process_schema_table, ProcessStatus
from api.schemas.process.process_schema import ProcessSchema
@@ -50,8 +50,9 @@ async def get_process_schema_page_DTO(
or_(process_schema_table.c.title.ilike(search_term), process_schema_table.c.description.ilike(search_term))
)
filter_conditions = []
if filter_dto.filters:
filter_conditions = []
for field, values in filter_dto.filters.items():
column = getattr(process_schema_table.c, field, None)
if column is not None and values:
@@ -60,8 +61,11 @@ async def get_process_schema_page_DTO(
else:
filter_conditions.append(column.in_(values))
if filter_conditions:
query = query.where(and_(*filter_conditions))
if filter_dto.filters is None or "status" not in filter_dto.filters:
filter_conditions.append(process_schema_table.c.status != "DELETED")
if filter_conditions:
query = query.where(and_(*filter_conditions))
if filter_dto.order:
order_field = filter_dto.order.get("field", "id")
@@ -86,7 +90,7 @@ async def get_process_schema_page_DTO(
or_(process_schema_table.c.title.ilike(search_term), process_schema_table.c.description.ilike(search_term))
)
if filter_dto.filters and filter_conditions:
if filter_conditions:
count_query = count_query.where(and_(*filter_conditions))
result = await connection.execute(query)
@@ -111,21 +115,6 @@ async def get_process_schema_page_DTO(
)
async def get_process_schema_by_title(connection: AsyncConnection, title: str) -> Optional[ProcessSchema]:
"""
Получает process schema по title.
"""
query = select(process_schema_table).where(process_schema_table.c.title == title)
process_schema_db_cursor = await connection.execute(query)
process_schema_data = process_schema_db_cursor.mappings().one_or_none()
if not process_schema_data:
return None
return ProcessSchema.model_validate(process_schema_data)
async def get_process_schema_by_id(connection: AsyncConnection, id: int) -> Optional[ProcessSchema]:
"""
Получает process_schema по id.
@@ -152,11 +141,8 @@ async def update_process_schema_by_id(connection: AsyncConnection, update_values
await connection.commit()
async def update_process_schema_settings_by_id(
connection: AsyncConnection,
process_schema_id: int,
node_data: Dict[str, Any]
connection: AsyncConnection, process_schema_id: int, node_data: Dict[str, Any]
):
"""
Добавляет новый узел в массив 'nodes' в настройках процесса.
@@ -188,24 +174,35 @@ async def update_process_schema_settings_by_id(
await connection.commit()
async def create_process_schema(
connection: AsyncConnection, process_schema: ProcessSchema, creator_id: int
) -> Optional[ProcessSchema]:
async def get_last_created_process_schema(connection: AsyncConnection) -> Optional[int]:
"""
Создает новое поле в таблице process_schema_table.
Получает ID последней созданной схемы процесса.
"""
query = select(process_schema_table.c.id).order_by(desc(process_schema_table.c.id)).limit(1)
result = await connection.execute(query)
last_id = result.scalar_one_or_none()
return last_id
async def create_process_schema(
connection: AsyncConnection, creator_id: int, title: str, description: str
) -> Optional[int]:
"""
Создает новое поле в таблице process_schema_table.
"""
query = insert(process_schema_table).values(
title=process_schema.title,
description=process_schema.description,
owner_id=process_schema.owner_id,
title=title,
description=description,
owner_id=creator_id,
creator_id=creator_id,
created_at=datetime.now(timezone.utc),
settings=process_schema.settings,
status=process_schema.status.value,
settings={},
status=ProcessStatus.ACTIVE.value,
)
await connection.execute(query)
result = await connection.execute(query)
await connection.commit()
return process_schema
return result.lastrowid

View File

@@ -1,15 +1,15 @@
from typing import Optional
from typing import Optional, List
from datetime import datetime, timezone
from sqlalchemy import insert, select, desc
from sqlalchemy import insert, select, desc, and_, or_, delete, update
from sqlalchemy.ext.asyncio import AsyncConnection
from orm.tables.process import ps_node_table
from orm.tables.process import ps_node_table, node_link_table, process_schema_table
from api.schemas.process.ps_node import Ps_Node
from model_nodes.node_start_models import StartNodeCoreSchema
from orm.tables.process import NodeStatus,NodeType
from model_nodes.node_listen_models import ListenNodeCoreSchema
from orm.tables.process import NodeStatus
async def get_ps_node_by_id(connection: AsyncConnection, id: int) -> Optional[Ps_Node]:
@@ -27,54 +27,18 @@ async def get_ps_node_by_id(connection: AsyncConnection, id: int) -> Optional[Ps
return Ps_Node.model_validate(ps_node_data)
async def get_ps_node_by_type_and_ps_id(connection: AsyncConnection, node_type: str, ps_id: int) -> Optional[Ps_Node]:
"""
Получает ps_node по node_type и ps_id.
"""
query = select(ps_node_table).where(
ps_node_table.c.node_type == node_type,
ps_node_table.c.ps_id == ps_id
)
ps_node_db_cursor = await connection.execute(query)
ps_node_data = ps_node_db_cursor.mappings().one_or_none()
if not ps_node_data:
return None
return Ps_Node.model_validate(ps_node_data)
async def create_ps_node_start_schema(
connection: AsyncConnection, validated_start_schema: StartNodeCoreSchema, creator_id: int
) -> Optional[Ps_Node]:
"""
Создает новое поле в таблице process_schema_table.
"""
query = insert(ps_node_table).values(
ps_id=validated_start_schema.ps_id,
node_type=NodeType.START.value,
settings=validated_start_schema.data.model_dump(),
creator_id=creator_id,
created_at=datetime.now(timezone.utc),
status=NodeStatus.ACTIVE.value,
)
await connection.execute(query)
await connection.commit()
# return validated_start_schema
async def get_last_ps_node_by_creator_and_ps_id(
connection: AsyncConnection, creator_id: int, ps_id: int
) -> Optional[Ps_Node]:
"""
Получает последнюю созданную ps_node для данного создателя и процесса.
"""
query = select(ps_node_table).where(
ps_node_table.c.creator_id == creator_id,
ps_node_table.c.ps_id == ps_id
).order_by(desc(ps_node_table.c.created_at)).limit(1)
query = (
select(ps_node_table)
.where(ps_node_table.c.creator_id == creator_id, ps_node_table.c.ps_id == ps_id)
.order_by(desc(ps_node_table.c.created_at))
.limit(1)
)
ps_node_db_cursor = await connection.execute(query)
ps_node_data = ps_node_db_cursor.mappings().one_or_none()
@@ -84,9 +48,12 @@ async def get_last_ps_node_by_creator_and_ps_id(
return Ps_Node.model_validate(ps_node_data)
async def create_ps_node_schema(
connection: AsyncConnection, validated_schema, creator_id: int,
) -> Optional[Ps_Node]:
connection: AsyncConnection,
validated_schema,
creator_id: int,
) -> Optional[ListenNodeCoreSchema]:
"""
Создает новое поле в таблице process_schema_table.
"""
@@ -104,3 +71,152 @@ async def create_ps_node_schema(
await connection.commit()
return await get_last_ps_node_by_creator_and_ps_id(connection, creator_id, validated_schema.ps_id)
async def check_node_connection(connection: AsyncConnection, node_id: int, next_node_id: int, port: int) -> bool:
"""
Проверяет, подключен ли next_node_id к node_id через указанный порт.
"""
query = select(node_link_table).where(
and_(
node_link_table.c.node_id == node_id,
node_link_table.c.next_node_id == next_node_id,
node_link_table.c.link_point_id == port,
)
)
result = await connection.execute(query)
return result.mappings().first() is not None
async def get_all_child_nodes_with_depth(connection: AsyncConnection, node_id: int) -> List[tuple[Ps_Node, int]]:
"""
Рекурсивно находит ВСЕ дочерние узлы с их уровнем вложенности.
"""
all_child_nodes = []
visited_nodes = set()
async def find_children_with_depth(current_node_id: int, current_depth: int):
if current_node_id in visited_nodes:
return
visited_nodes.add(current_node_id)
query = (
select(ps_node_table)
.join(node_link_table, ps_node_table.c.id == node_link_table.c.next_node_id)
.where(node_link_table.c.node_id == current_node_id)
)
result = await connection.execute(query)
child_nodes = result.mappings().all()
for node_data in child_nodes:
node = Ps_Node.model_validate(node_data)
all_child_nodes.append((node, current_depth + 1))
await find_children_with_depth(node.id, current_depth + 1)
await find_children_with_depth(node_id, 0)
return all_child_nodes
async def get_nodes_for_deletion_ordered(connection: AsyncConnection, node_id: int) -> List[int]:
"""
Возвращает список ID узлов для удаления в правильном порядке:
от самых последних к первым.
"""
child_nodes_with_depth = await get_all_child_nodes_with_depth(connection, node_id)
child_nodes_with_depth.sort(key=lambda x: x[1], reverse=True)
ordered_node_ids = [node.id for node, depth in child_nodes_with_depth]
ordered_node_ids.append(node_id)
return ordered_node_ids
async def delete_ps_node_by_id_completely(connection: AsyncConnection, node_id: int) -> tuple[bool, str]:
"""
Полностью удаляет узел из базы данных по ID.
"""
try:
node_query = select(ps_node_table).where(ps_node_table.c.id == node_id)
node_result = await connection.execute(node_query)
node_data = node_result.mappings().first()
if not node_data:
return False, "Node not found"
ps_id = node_data["ps_id"]
await connection.execute(
delete(node_link_table).where(
or_(node_link_table.c.node_id == node_id, node_link_table.c.next_node_id == node_id)
)
)
await remove_node_from_process_schema_settings(connection, ps_id, node_id)
result = await connection.execute(delete(ps_node_table).where(ps_node_table.c.id == node_id))
if result.rowcount > 0:
await connection.commit()
return True, "Success"
else:
await connection.rollback()
return False, "Node not found"
except Exception as e:
await connection.rollback()
return False, str(e)
async def delete_ps_nodes_sequentially_with_error_handling(
connection: AsyncConnection, node_ids: List[int]
) -> List[int]:
"""
Поочередно удаляет узлы из базы данных.
Возвращает список успешно удаленных ID узлов.
Выбрасывает исключение при первой ошибке.
"""
successfully_deleted = []
for node_id in node_ids:
success, error_message = await delete_ps_node_by_id_completely(connection, node_id)
if success:
successfully_deleted.append(node_id)
else:
raise Exception(f"Failed to delete node {node_id}: {error_message}")
return successfully_deleted
async def remove_node_from_process_schema_settings(connection: AsyncConnection, ps_id: int, node_id: int):
"""
Удаляет ноду из поля settings в таблице process_schema.
"""
from api.db.logic.process_schema import get_process_schema_by_id
process_schema = await get_process_schema_by_id(connection, ps_id)
if not process_schema or not process_schema.settings:
return
settings = process_schema.settings
if "nodes" in settings and isinstance(settings["nodes"], list):
settings["nodes"] = [
node_item
for node_item in settings["nodes"]
if not (
isinstance(node_item, dict)
and "node" in node_item
and isinstance(node_item["node"], dict)
and node_item["node"].get("id") == node_id
)
]
await connection.execute(
update(process_schema_table).where(process_schema_table.c.id == ps_id).values(settings=settings)
)

View File

@@ -6,7 +6,15 @@ from api.endpoints.list_events import api_router as listevents_router
from api.endpoints.process_schema import api_router as processschema_router
from api.endpoints.ps_node import api_router as ps_node_router
list_of_routes = [auth_router, profile_router, account_router, keyring_router, listevents_router, processschema_router, ps_node_router]
list_of_routes = [
auth_router,
profile_router,
account_router,
keyring_router,
listevents_router,
processschema_router,
ps_node_router,
]
__all__ = [
"list_of_routes",

View File

@@ -22,7 +22,7 @@ api_router = APIRouter(
@api_router.get("/{user_id}/{key_id}", dependencies=[Depends(bearer_schema)], response_model=AccountKeyring)
async def get_keyring_endpoint (
async def get_keyring_endpoint(
key_id: str, connection: AsyncConnection = Depends(get_connection_dep), current_user=Depends(get_current_user)
):
authorize_user = await db_user_role_validation(connection, current_user)

View File

@@ -9,7 +9,6 @@ from api.db.logic.account import get_user_by_login
from api.db.logic.process_schema import (
create_process_schema,
get_process_schema_by_id,
get_process_schema_by_title,
get_process_schema_page_DTO,
update_process_schema_by_id,
)
@@ -26,7 +25,6 @@ from api.services.user_role_validation import (
from api.db.logic.ps_node import create_ps_node_schema
from api.db.logic.process_schema import update_process_schema_settings_by_id
from orm.tables.process import NodeType
@@ -37,6 +35,8 @@ from core import VorkNodeRegistry
from model_nodes import ListenNodeData
from api.utils.node_counter import increment_node_counter
api_router = APIRouter(
prefix="/process_schema",
@@ -44,10 +44,7 @@ api_router = APIRouter(
)
@api_router.get("", dependencies=[Depends(bearer_schema)],
# response_model=AllProcessSchemaResponse
)
@api_router.get("", dependencies=[Depends(bearer_schema)], response_model=AllProcessSchemaResponse)
async def get_all_process_schema_endpoint(
page: int = Query(1, description="Page number", gt=0),
limit: int = Query(10, description="Number of items per page", gt=0),
@@ -56,12 +53,20 @@ async def get_all_process_schema_endpoint(
order_direction: Optional[str] = Query("asc", description="Sort direction (asc/desc)"),
status_filter: Optional[List[str]] = Query(None, description="Filter by status"),
owner_id: Optional[List[str]] = Query(None, description="Filter by owner id"),
show_deleted: bool = Query(False, description="Show only deleted schemas"),
connection: AsyncConnection = Depends(get_connection_dep),
creator_id: Optional[int] = Query(None, description="Filter by creator id"),
current_user=Depends(get_current_user),
):
if show_deleted:
status_to_filter = ["DELETED"]
elif status_filter:
status_to_filter = status_filter
else:
status_to_filter = None
filters = {
**({"status": status_filter} if status_filter else {}),
**({"status": status_to_filter} if status_to_filter else {}),
**({"owner_id": owner_id} if owner_id else {}),
**({"creator_id": [str(creator_id)]} if creator_id else {}),
}
@@ -111,78 +116,66 @@ async def get_process_schema_endpoint(
return to_camel_dict(process_schema_validation.model_dump())
@api_router.post("", dependencies=[Depends(bearer_schema)],
response_model=ProcessSchemaResponse
)
@api_router.post("", dependencies=[Depends(bearer_schema)], response_model=ProcessSchemaResponse)
async def create_processschema_endpoint(
process_schema: ProcessSchemaUpdate,
connection: AsyncConnection = Depends(get_connection_dep),
current_user=Depends(get_current_user),
):
user_validation = await get_user_by_login(connection, current_user)
process_schema_validation = await get_process_schema_by_title(connection, process_schema.title)
if process_schema_validation is None:
current_node_counter = increment_node_counter()
title = f"Новая схема {current_node_counter}"
await create_process_schema(connection, process_schema, user_validation.id)
process_schema_new = await get_process_schema_by_title(connection, process_schema.title)
description = "Default description"
start_node_data = ListenNodeData(
ps_id=process_schema_new.id,
node_type=NodeType.START.value,
is_start="True"
)
node_id = await create_process_schema(connection, user_validation.id, title, description)
start_node_links = {}
process_schema_new = await get_process_schema_by_id(connection, node_id)
registery = VorkNodeRegistry()
start_node_data = ListenNodeData(ps_id=process_schema_new.id, node_type=NodeType.START.value, is_start="True")
vork_node = registery.get("LISTEN")
start_node_links = {}
node_descriptor = vork_node.form()
registery = VorkNodeRegistry()
vork_node = registery.get("LISTEN")
start_node = vork_node(data=start_node_data.model_dump(), links=start_node_links)
node_descriptor = vork_node.form()
validated_start_schema = start_node.validate()
start_node = vork_node(data=start_node_data.model_dump(), links=start_node_links)
print(validated_start_schema)
validated_start_schema = start_node.validate()
db_start_schema = await create_ps_node_schema(connection, validated_start_schema, user_validation.id)
db_start_schema = await create_ps_node_schema(connection, validated_start_schema, user_validation.id)
node = ProcessSchemaSettingsNode(
id=db_start_schema.id,
node_type=NodeType.LISTEN.value,
data=validated_start_schema.data.model_dump(),
from_node=None,
links=None)
node = ProcessSchemaSettingsNode(
id=db_start_schema.id,
node_type=NodeType.LISTEN.value,
data=validated_start_schema.data.model_dump(),
from_node=None,
links=None,
)
settings_dict = {"node": node.model_dump(mode='json')}
settings_dict = {"node": node.model_dump(mode="json")}
await update_process_schema_settings_by_id(connection, process_schema_new.id, settings_dict)
await update_process_schema_settings_by_id(connection, process_schema_new.id, settings_dict)
process_schema_new = await get_process_schema_by_title(connection, process_schema.title)
process_schema_new = await get_process_schema_by_id(connection, node_id)
ps_node_front_response = Ps_NodeFrontResponse(
description=node_descriptor,
node=Ps_NodeFrontResponseNode(
id=db_start_schema.id,
node_type=NodeType.LISTEN.value,
data=validated_start_schema.data.model_dump()),
link=None)
ps_node_front_response = Ps_NodeFrontResponse(
description=node_descriptor.model_dump(),
node=Ps_NodeFrontResponseNode(
id=db_start_schema.id, node_type=NodeType.LISTEN.value, data=validated_start_schema.data.model_dump()
),
link=None,
)
response_data = {
"process_schema": process_schema_new.model_dump(),
"node_listen": ps_node_front_response.model_dump(),
}
response_data = {
"process_schema": process_schema_new.model_dump(),
"node_listen": ps_node_front_response.model_dump()}
return to_camel_dict(response_data)
else:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, detail="An process schema with this information already exists."
)
return to_camel_dict(response_data)
@api_router.put("/{process_schema_id}", dependencies=[Depends(bearer_schema)], response_model=ProcessSchema)
@@ -213,7 +206,7 @@ async def update_process_schema_endpoint(
return process_schema
@api_router.delete("/{process_schema_id}", dependencies=[Depends(bearer_schema)], response_model=ProcessSchema)
@api_router.delete("/{process_schema_id}", dependencies=[Depends(bearer_schema)], status_code=status.HTTP_200_OK)
async def delete_process_schema_endpoint(
process_schema_id: int,
connection: AsyncConnection = Depends(get_connection_dep),
@@ -237,6 +230,6 @@ async def delete_process_schema_endpoint(
await update_process_schema_by_id(connection, updated_values, process_schema_validation)
process_schema = await get_process_schema_by_id(connection, process_schema_id)
await get_process_schema_by_id(connection, process_schema_id)
return process_schema
return HTTPException(status_code=status.HTTP_200_OK, detail="Process schema deleted successfully")

View File

@@ -1,4 +1,4 @@
from fastapi import APIRouter, Depends, HTTPException,status
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncConnection
@@ -7,23 +7,30 @@ from api.db.logic.account import get_user_by_login
from api.schemas.base import bearer_schema
from api.schemas.process.process_schema import ProcessSchemaSettingsNodeLink, ProcessSchemaSettingsNode
from api.schemas.process.ps_node import Ps_NodeFrontResponseNode, Ps_NodeRequest
from api.schemas.process.ps_node import Ps_NodeFrontResponseNode, Ps_NodeRequest, Ps_NodeDeleteRequest
from api.schemas.process.ps_node import Ps_NodeFrontResponse
from api.services.auth import get_current_user
from api.db.logic.ps_node import create_ps_node_schema
from api.db.logic.ps_node import (
create_ps_node_schema,
get_ps_node_by_id,
check_node_connection,
get_nodes_for_deletion_ordered,
delete_ps_nodes_sequentially_with_error_handling,
)
from api.db.logic.node_link import get_last_link_name_by_node_id, create_node_link_schema
from api.db.logic.process_schema import update_process_schema_settings_by_id
from api.db.logic.process_schema import update_process_schema_settings_by_id, get_process_schema_by_id
from api.services.user_role_validation import (
db_user_role_validation_for_list_events_and_process_schema_by_list_event_id,
)
from core import VorkNodeRegistry, VorkNodeLink
from model_nodes import VorkNodeLinkData
from api.utils.to_camel_dict import to_camel_dict
from api.error import create_operation_error, create_access_error, create_validation_error, create_server_error
api_router = APIRouter(
@@ -32,16 +39,90 @@ api_router = APIRouter(
)
@api_router.post("", dependencies=[Depends(bearer_schema)],response_model=Ps_NodeFrontResponse)
@api_router.delete("", dependencies=[Depends(bearer_schema)], status_code=status.HTTP_200_OK)
async def delete_ps_node_endpoint(
ps_node_delete_data: Ps_NodeDeleteRequest,
connection: AsyncConnection = Depends(get_connection_dep),
current_user=Depends(get_current_user),
):
process_schema = await get_process_schema_by_id(connection, ps_node_delete_data.schema_id)
if process_schema is None:
raise create_operation_error(
message="Process schema not found",
status_code=status.HTTP_404_NOT_FOUND,
details={"schema_id": ps_node_delete_data.schema_id},
)
try:
await db_user_role_validation_for_list_events_and_process_schema_by_list_event_id(
connection, current_user, process_schema.creator_id
)
except Exception as e:
raise create_access_error(
message="Access denied",
status_code=status.HTTP_403_FORBIDDEN,
details={"user_id": current_user, "schema_creator_id": process_schema.creator_id, "reason": str(e)},
)
ps_node = await get_ps_node_by_id(connection, ps_node_delete_data.node_id)
if ps_node is None:
raise create_operation_error(
message="PS node not found",
status_code=status.HTTP_404_NOT_FOUND,
details={"node_id": ps_node_delete_data.node_id},
)
next_ps_node = await get_ps_node_by_id(connection, ps_node_delete_data.next_node_id)
if next_ps_node is None:
raise create_operation_error(
message="Next PS node not found",
status_code=status.HTTP_400_BAD_REQUEST,
details={"next_node_id": ps_node_delete_data.next_node_id},
)
is_connected = await check_node_connection(
connection, ps_node_delete_data.node_id, ps_node_delete_data.next_node_id, int(ps_node_delete_data.port)
)
if not is_connected:
raise create_validation_error(
message="Node connection validation failed",
status_code=status.HTTP_400_BAD_REQUEST,
details={
"node_id": ps_node_delete_data.node_id,
"next_node_id": ps_node_delete_data.next_node_id,
"port": ps_node_delete_data.port,
},
)
ordered_node_ids = await get_nodes_for_deletion_ordered(connection, ps_node_delete_data.next_node_id)
try:
deleted_node_ids = await delete_ps_nodes_sequentially_with_error_handling(connection, ordered_node_ids)
except Exception as e:
raise create_server_error(
message="Failed to delete nodes",
status_code=500,
details={"error": str(e), "ordered_node_ids": ordered_node_ids},
)
return {
"deleted_node_ids": deleted_node_ids,
}
@api_router.post("", dependencies=[Depends(bearer_schema)], response_model=Ps_NodeFrontResponse)
async def create_ps_node_endpoint(
ps_node: Ps_NodeRequest,
connection: AsyncConnection = Depends(get_connection_dep),
current_user=Depends(get_current_user),
):
user_validation = await get_user_by_login(connection, current_user)
process_schema = await get_process_schema_by_id(connection, ps_node.data["ps_id"])
if process_schema is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Process schema not found")
registery = VorkNodeRegistry()
vork_node = registery.get(ps_node.data["node_type"])
@@ -58,60 +139,50 @@ async def create_ps_node_endpoint(
except Exception as e:
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))
db_ps_node = await create_ps_node_schema(connection, node_instance_validated, user_validation.id)
link_name = await get_last_link_name_by_node_id(connection, db_ps_node.ps_id)
link_data = VorkNodeLinkData(
parent_port_number=node_instance_validated.parent_port_number,
to_id=db_ps_node.id,
from_id=node_instance_validated.parent_id,
last_link_name=link_name)
last_link_name=link_name,
)
link = VorkNodeLink(data=link_data.model_dump())
validated_link = link.validate()
db_node_link = await create_node_link_schema(connection, validated_link, user_validation.id)
links_settings = ProcessSchemaSettingsNodeLink(
id=db_node_link.id,
link_name=db_node_link.link_name,
parent_port_number=db_node_link.link_point_id,
from_id=db_node_link.node_id,
to_id=db_node_link.next_node_id,
)
id=db_node_link.id,
link_name=db_node_link.link_name,
parent_port_number=db_node_link.link_point_id,
from_id=db_node_link.node_id,
to_id=db_node_link.next_node_id,
)
node_settings = ProcessSchemaSettingsNode(
id=db_ps_node.id,
node_type=db_ps_node.node_type,
data=node_instance_validated.data.model_dump(),
from_node=None,
links=links_settings.model_dump())
id=db_ps_node.id,
node_type=db_ps_node.node_type,
data=node_instance_validated.data.model_dump(),
from_node=None,
links=[{"links": links_settings.model_dump()}],
)
settings_dict = {"node": node_settings.model_dump(mode='json')}
settings_dict = {"node": node_settings.model_dump(mode="json")}
await update_process_schema_settings_by_id(connection, db_ps_node.ps_id, settings_dict)
ps_node_front_response = Ps_NodeFrontResponse(
description=node_descriptor,
node=Ps_NodeFrontResponseNode(
id=db_ps_node.id,
node_type=db_ps_node.node_type,
data=to_camel_dict(node_instance_validated.data.model_dump())),
link=links_settings.model_dump())
description=node_descriptor.model_dump(),
node=Ps_NodeFrontResponseNode(
id=db_ps_node.id,
node_type=db_ps_node.node_type,
data=to_camel_dict(node_instance_validated.data.model_dump()),
),
links=[{"links": links_settings.model_dump()}],
)
return ps_node_front_response

26
api/api/error/__init__.py Normal file
View File

@@ -0,0 +1,26 @@
"""
Модуль для обработки ошибок API.
"""
from .error_model.error_types import ServerError, AccessError, OperationError, ValidationError, ErrorType
from .error_handlers import (
handle_api_error,
create_server_error,
create_access_error,
create_operation_error,
create_validation_error,
)
__all__ = [
"ServerError",
"AccessError",
"OperationError",
"ValidationError",
"ErrorType",
"handle_api_error",
"create_server_error",
"create_access_error",
"create_operation_error",
"create_validation_error",
]

View File

@@ -0,0 +1,54 @@
"""
Обработчики ошибок для API.
"""
from typing import Optional, Dict, Any
from fastapi import HTTPException
from .error_model.error_types import ServerError, AccessError, OperationError, ValidationError, ErrorType
def handle_api_error(
error_type: ErrorType, message: str, status_code: int, details: Optional[Dict[str, Any]] = None
) -> HTTPException:
"""
Функция для создания HTTPException с правильной структурой ошибки.
"""
match error_type:
case ErrorType.SERVER:
error = ServerError(message=message, details=details)
case ErrorType.ACCESS:
error = AccessError(message=message, details=details)
case ErrorType.OPERATION:
error = OperationError(message=message, details=details)
case ErrorType.VALIDATION:
error = ValidationError(message=message, details=details)
case _:
error = ServerError(message=message, details=details)
return HTTPException(status_code=status_code, detail=error.model_dump(mode="json"))
def create_server_error(
message: str, status_code: int = 500, details: Optional[Dict[str, Any]] = None
) -> HTTPException:
return handle_api_error(error_type=ErrorType.SERVER, message=message, status_code=status_code, details=details)
def create_access_error(
message: str, status_code: int = 403, details: Optional[Dict[str, Any]] = None
) -> HTTPException:
return handle_api_error(error_type=ErrorType.ACCESS, message=message, status_code=status_code, details=details)
def create_operation_error(
message: str, status_code: int = 400, details: Optional[Dict[str, Any]] = None
) -> HTTPException:
return handle_api_error(error_type=ErrorType.OPERATION, message=message, status_code=status_code, details=details)
def create_validation_error(
message: str, status_code: int = 400, details: Optional[Dict[str, Any]] = None
) -> HTTPException:
return handle_api_error(error_type=ErrorType.VALIDATION, message=message, status_code=status_code, details=details)

View File

@@ -0,0 +1,7 @@
"""
Модели ошибок для API.
"""
from .error_types import ServerError, AccessError, OperationError, ValidationError, ErrorType
__all__ = ["ServerError", "AccessError", "OperationError", "ValidationError", "ErrorType"]

View File

@@ -0,0 +1,56 @@
"""
Типизированные модели ошибок для API.
"""
from enum import Enum
from typing import Optional, Dict, Any
from pydantic import BaseModel
class ErrorType(str, Enum):
"""
Типы ошибок API.
"""
SERVER = "SERVER"
ACCESS = "ACCESS"
OPERATION = "OPERATION"
VALIDATION = "VALIDATION"
class BaseError(BaseModel):
"""
Базовая модель ошибки.
"""
error_type: ErrorType
message: str
details: Optional[Dict[str, Any]] = None
class ServerError(BaseError):
"""
Критические серверные ошибки (БД, соединения и прочие неприятности).
"""
error_type: ErrorType = ErrorType.SERVER
class AccessError(BaseError):
"""
Ошибки доступа (несоответствие тенантности, ролям доступа).
"""
error_type: ErrorType = ErrorType.ACCESS
class OperationError(BaseError):
"""
Ошибки операции (несоответствие прохождению верификации, ошибки в датасете).
"""
error_type: ErrorType = ErrorType.OPERATION
class ValidationError(BaseError):
    """
    Validation errors (failures of primary/initial validation).
    """
    error_type: ErrorType = ErrorType.VALIDATION
    # Per-field error messages; presumably keyed by field name — confirm with callers.
    field_errors: Optional[Dict[str, str]] = None

View File

@@ -10,7 +10,7 @@ from api.schemas.base import Base
class ProcessSchemaUpdate(Base):
    """
    Update payload for a process schema; every field is optional so callers
    can send partial updates.
    """
    title: Optional[str] = Field(None, max_length=100)
    description: Optional[str] = None
    # NOTE(review): owner_id was deliberately commented out in this revision;
    # remove the dead line or restore the field once ownership updates are decided.
    # owner_id: Optional[int] = None
    settings: Optional[Dict[str, Any]] = None
    status: Optional[ProcessStatus] = None

View File

@@ -1,5 +1,5 @@
from datetime import datetime
from typing import Any, Dict, Optional
from typing import Any, Dict, Optional, List
from orm.tables.process import ProcessStatus, NodeType
from pydantic import Field
@@ -31,8 +31,8 @@ class ProcessSchemaSettingsNode(Base):
id: int
node_type: NodeType
from_node: Optional[Dict[str, Any]] = None
data: Dict[str, Any]# Переименовано с 'from' на 'from_node'
links: Optional[ProcessSchemaSettingsNodeLink] = None
data: Dict[str, Any] # Переименовано с 'from' на 'from_node'
links: Optional[List[Dict[str, Any]]] = None
class ProcessSchemaResponse(Base):

View File

@@ -1,16 +1,23 @@
from datetime import datetime
from typing import Any, Dict, Optional
from typing import Any, Dict, Optional, List
from orm.tables.process import NodeStatus, NodeType
from pydantic import Field
from api.schemas.base import Base
class Ps_NodeDeleteRequest(Base):
    """
    Request body for deleting a process-schema node.
    """
    schema_id: int     # id of the process schema containing the node
    node_id: int       # id of the node to delete
    port: str          # NOTE(review): presumably the port name on the node — confirm
    next_node_id: int  # NOTE(review): presumably the node to reconnect to — confirm
class Ps_NodeRequest(Base):
    """
    Request body carrying a node's payload and its link description.
    """
    data: Dict[str, Any]   # free-form node payload; schema not constrained here
    links: Dict[str, Any]  # free-form link description; schema not constrained here
class Ps_Node(Base):
id: int
ps_id: int
@@ -21,7 +28,6 @@ class Ps_Node(Base):
status: NodeStatus
class Ps_NodeFrontResponseLink(Base):
id: int
link_name: str
@@ -33,10 +39,10 @@ class Ps_NodeFrontResponseLink(Base):
class Ps_NodeFrontResponseNode(Base):
id: int
node_type: NodeType
data: Dict[str, Any]# Переименовано с 'from' на 'from_node'
data: Dict[str, Any] # Переименовано с 'from' на 'from_node'
class Ps_NodeFrontResponse(Base):
description: Optional[Dict[str, Any]] = None
node: Optional[Ps_NodeFrontResponseNode] = None
link: Optional[Ps_NodeFrontResponseLink] = None
links: Optional[List[Dict[str, Any]]] = None

View File

@@ -0,0 +1,52 @@
import json
from pathlib import Path
from typing import Dict
# Путь к файлу счётчика (в корне проекта)
COUNTER_FILE_PATH = Path(__file__).parent.parent.parent / "node_counter.json"
def get_node_counter() -> int:
    """
    Return the current node_counter value stored in COUNTER_FILE_PATH.

    Reads the JSON counter file and returns its "node_counter" entry
    (0 if the key is absent). If the file is missing, unreadable, or
    contains invalid JSON, it is (re)initialized to {"node_counter": 0}
    and 0 is returned.

    Returns:
        int: Current node counter value.
    """
    try:
        # EAFP: read directly instead of the previous exists()-then-open
        # sequence, which raced with concurrent deletion of the file and
        # duplicated the initialization code in two branches.
        with open(COUNTER_FILE_PATH, "r", encoding="utf-8") as f:
            data = json.load(f)
        return data.get("node_counter", 0)
    except (OSError, json.JSONDecodeError):
        # Missing or corrupt file: reset it to a known-good initial state.
        initial_data: Dict[str, int] = {"node_counter": 0}
        with open(COUNTER_FILE_PATH, "w", encoding="utf-8") as f:
            json.dump(initial_data, f, indent=2, ensure_ascii=False)
        return 0
def increment_node_counter() -> int:
    """
    Increment node_counter by 1, persist the new value, and return it.

    Returns:
        int: The new counter value (previous value + 1).
    """
    new_value = get_node_counter() + 1
    payload: Dict[str, int] = {"node_counter": new_value}
    with open(COUNTER_FILE_PATH, "w", encoding="utf-8") as counter_file:
        json.dump(payload, counter_file, indent=2, ensure_ascii=False)
    return new_value

View File

@@ -1,5 +1,6 @@
from pydantic.alias_generators import to_camel
def to_camel_dict(obj):
if isinstance(obj, dict):
return {to_camel(key): to_camel_dict(value) for key, value in obj.items()}

2
api/poetry.lock generated
View File

@@ -2020,7 +2020,7 @@ sqlalchemy = "^2.0.43"
type = "git"
url = "http://88.86.199.167:3000/Nox/CORE.git"
reference = "HEAD"
resolved_reference = "b3896e8b5dbed2d609c8ac257419d5492c9e7b8d"
resolved_reference = "43c139512928ab3a4767e771c8e41e39930599ad"
[[package]]
name = "watchfiles"

View File

@@ -18,7 +18,7 @@ dependencies = [
"python-multipart (>=0.0.20,<0.0.21)",
"requests (>=2.31.0,<3.0.0)",
"fastapi-jwt-auth @ git+https://github.com/vvpreo/fastapi-jwt-auth",
"core-library @ git+https://gitea.heado.ru/Vorkout/core.git@VORKOUT-18",
"vork-core @ git+http://88.86.199.167:3000/Nox/CORE.git",
]