Commits (1)

Author: TheNoxium
SHA1: 3dfae3235d
Message: feat: CRUD process schema
Date: 2025-10-13 11:50:57 +05:00

14 changed files with 218 additions and 159 deletions

View File

@@ -11,20 +11,18 @@ from orm.tables.process import ps_node_table, node_link_table
 from orm.tables.process import NodeLinkStatus


-async def get_last_link_name_by_node_id(connection: AsyncConnection, ps_id: int) -> Optional[str]:
+async def get_last_link_name_by_node_id(
+    connection: AsyncConnection, ps_id: int
+) -> Optional[str]:
     """
     Gets link_name from the most recent node_link record for the given ps_id.
     Finds all node_id values in ps_node for the ps_id, then looks up links in node_link
     and returns link_name from the most recent record.
     """
-    query = select(node_link_table.c.link_name).where(
-        node_link_table.c.node_id.in_(
-            select(ps_node_table.c.id).where(ps_node_table.c.ps_id == ps_id)
-        )
-    ).order_by(desc(node_link_table.c.created_at)).limit(1)
+    query = (
+        select(node_link_table.c.link_name)
+        .where(node_link_table.c.node_id.in_(select(ps_node_table.c.id).where(ps_node_table.c.ps_id == ps_id)))
+        .order_by(desc(node_link_table.c.created_at))
+        .limit(1)
+    )

     result = await connection.execute(query)
     link_name = result.scalar_one_or_none()
@@ -38,12 +36,15 @@ async def get_last_node_link_by_creator_and_ps_id(
     """
     Gets the most recently created node_link for the given creator and process.
     """
-    query = select(node_link_table).where(
-        node_link_table.c.creator_id == creator_id,
-        node_link_table.c.node_id.in_(
-            select(ps_node_table.c.id).where(ps_node_table.c.id == node_link_id)
-        )
-    ).order_by(desc(node_link_table.c.created_at)).limit(1)
+    query = (
+        select(node_link_table)
+        .where(
+            node_link_table.c.creator_id == creator_id,
+            node_link_table.c.node_id.in_(select(ps_node_table.c.id).where(ps_node_table.c.id == node_link_id)),
+        )
+        .order_by(desc(node_link_table.c.created_at))
+        .limit(1)
+    )

     node_link_db_cursor = await connection.execute(query)
     node_link_data = node_link_db_cursor.mappings().one_or_none()
@@ -55,7 +56,9 @@ async def get_last_node_link_by_creator_and_ps_id(
 async def create_node_link_schema(
-    connection: AsyncConnection, validated_link_schema, creator_id: int,
+    connection: AsyncConnection,
+    validated_link_schema,
+    creator_id: int,
 ) -> Optional[NodeLink]:
     """
     Creates a new record in process_schema_table.
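A minimal calling sketch for the node_link helpers above (the engine/DSN is an assumption, not part of the diff; the helpers are imported from this module):

    import asyncio
    from sqlalchemy.ext.asyncio import create_async_engine

    async def demo() -> None:
        engine = create_async_engine("postgresql+asyncpg://user:pass@localhost/db")  # assumed DSN
        async with engine.connect() as connection:
            # Most recent link name attached to any node of process schema 1, or None.
            last_name = await get_last_link_name_by_node_id(connection, ps_id=1)
            print(last_name)

    asyncio.run(demo())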

View File

@@ -6,7 +6,7 @@ from datetime import datetime, timezone
 from sqlalchemy import insert, select, func, or_, and_, asc, desc
 from sqlalchemy.ext.asyncio import AsyncConnection

-from orm.tables.process import process_schema_table
+from orm.tables.process import process_schema_table, ProcessStatus
 from api.schemas.process.process_schema import ProcessSchema
@@ -50,8 +50,9 @@ async def get_process_schema_page_DTO(
             or_(process_schema_table.c.title.ilike(search_term), process_schema_table.c.description.ilike(search_term))
         )

+    filter_conditions = []
     if filter_dto.filters:
-        filter_conditions = []
         for field, values in filter_dto.filters.items():
             column = getattr(process_schema_table.c, field, None)
             if column is not None and values:
@@ -60,8 +61,11 @@ async def get_process_schema_page_DTO(
                 else:
                     filter_conditions.append(column.in_(values))

-    if filter_conditions:
-        query = query.where(and_(*filter_conditions))
+    if filter_dto.filters is None or "status" not in filter_dto.filters:
+        filter_conditions.append(process_schema_table.c.status != "DELETED")
+
+    if filter_conditions:
+        query = query.where(and_(*filter_conditions))

     if filter_dto.order:
         order_field = filter_dto.order.get("field", "id")
@@ -86,7 +90,7 @@ async def get_process_schema_page_DTO(
             or_(process_schema_table.c.title.ilike(search_term), process_schema_table.c.description.ilike(search_term))
         )

-    if filter_dto.filters and filter_conditions:
+    if filter_conditions:
         count_query = count_query.where(and_(*filter_conditions))

     result = await connection.execute(query)
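With this change the listing query and the count query reuse the same filter_conditions, so the reported total matches the filtered rows, and soft-deleted schemas are hidden unless the caller filters on status explicitly. A minimal sketch of the idea (the select construction around it is assumed):

    # Sketch: one condition list feeds both the page query and the count query.
    from sqlalchemy import and_, func, select

    conditions = [process_schema_table.c.status != "DELETED"]  # default soft-delete filter
    query = select(process_schema_table).where(and_(*conditions))
    count_query = select(func.count()).select_from(process_schema_table).where(and_(*conditions))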
@@ -152,11 +156,8 @@ async def update_process_schema_by_id(connection: AsyncConnection, update_values
     await connection.commit()


 async def update_process_schema_settings_by_id(
-    connection: AsyncConnection,
-    process_schema_id: int,
-    node_data: Dict[str, Any]
+    connection: AsyncConnection, process_schema_id: int, node_data: Dict[str, Any]
 ):
     """
     Adds a new node to the 'nodes' array in the process settings.
@@ -188,24 +189,34 @@ async def update_process_schema_settings_by_id(
     await connection.commit()


+async def get_last_created_process_schema(connection: AsyncConnection) -> Optional[int]:
+    """
+    Gets the ID of the most recently created process schema.
+    """
+    query = select(process_schema_table.c.id).order_by(desc(process_schema_table.c.id)).limit(1)
+    result = await connection.execute(query)
+    last_id = result.scalar_one_or_none()
+
+    return last_id
+
+
 async def create_process_schema(
-    connection: AsyncConnection, process_schema: ProcessSchema, creator_id: int
+    connection: AsyncConnection, creator_id: int, title: str, description: str
 ) -> Optional[ProcessSchema]:
     """
     Creates a new record in process_schema_table.
     """
     query = insert(process_schema_table).values(
-        title=process_schema.title,
-        description=process_schema.description,
-        owner_id=process_schema.owner_id,
+        title=title,
+        description=description,
+        owner_id=creator_id,
         creator_id=creator_id,
         created_at=datetime.now(timezone.utc),
-        settings=process_schema.settings,
-        status=process_schema.status.value,
+        settings={},
+        status=ProcessStatus.ACTIVE.value,
     )

     await connection.execute(query)
     await connection.commit()
-
-    return process_schema
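A brief sketch of the reworked creation call (the connection and creator id come from elsewhere; values here are illustrative):

    # Sketch: the row is now built from plain arguments instead of a ProcessSchema object.
    await create_process_schema(connection, creator_id=7, title="Новая схема 1", description="Default description")
    new_id = await get_last_created_process_schema(connection)  # id of the most recently inserted schema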

View File

@@ -8,8 +8,8 @@ from sqlalchemy.ext.asyncio import AsyncConnection
 from orm.tables.process import ps_node_table
 from api.schemas.process.ps_node import Ps_Node

-from model_nodes.node_start_models import StartNodeCoreSchema
-from orm.tables.process import NodeStatus,NodeType
+from model_nodes.node_listen_models import ListenNodeCoreSchema
+from orm.tables.process import NodeStatus, NodeType


 async def get_ps_node_by_id(connection: AsyncConnection, id: int) -> Optional[Ps_Node]:
@@ -31,10 +31,7 @@ async def get_ps_node_by_type_and_ps_id(connection: AsyncConnection, node_type:
     """
     Gets a ps_node by node_type and ps_id.
     """
-    query = select(ps_node_table).where(
-        ps_node_table.c.node_type == node_type,
-        ps_node_table.c.ps_id == ps_id
-    )
+    query = select(ps_node_table).where(ps_node_table.c.node_type == node_type, ps_node_table.c.ps_id == ps_id)

     ps_node_db_cursor = await connection.execute(query)
@@ -44,16 +41,17 @@ async def get_ps_node_by_type_and_ps_id(connection: AsyncConnection, node_type:
     return Ps_Node.model_validate(ps_node_data)


 async def create_ps_node_start_schema(
-    connection: AsyncConnection, validated_start_schema: StartNodeCoreSchema, creator_id: int
-) -> Optional[Ps_Node]:
+    connection: AsyncConnection, validated_listen_schema: ListenNodeCoreSchema, creator_id: int
+) -> Optional[ListenNodeCoreSchema]:
     """
     Creates a new record in process_schema_table.
     """
     query = insert(ps_node_table).values(
-        ps_id=validated_start_schema.ps_id,
-        node_type=NodeType.START.value,
-        settings=validated_start_schema.data.model_dump(),
+        ps_id=validated_listen_schema.ps_id,
+        node_type=NodeType.LISTEN.value,
+        settings=validated_listen_schema.data.model_dump(),
         creator_id=creator_id,
         created_at=datetime.now(timezone.utc),
         status=NodeStatus.ACTIVE.value,
@@ -63,7 +61,8 @@ async def create_ps_node_start_schema(
     await connection.commit()

-    # return validated_start_schema
+    # return validated_listen_schema


 async def get_last_ps_node_by_creator_and_ps_id(
     connection: AsyncConnection, creator_id: int, ps_id: int
@@ -71,10 +70,12 @@ async def get_last_ps_node_by_creator_and_ps_id(
     """
     Gets the most recently created ps_node for the given creator and process.
     """
-    query = select(ps_node_table).where(
-        ps_node_table.c.creator_id == creator_id,
-        ps_node_table.c.ps_id == ps_id
-    ).order_by(desc(ps_node_table.c.created_at)).limit(1)
+    query = (
+        select(ps_node_table)
+        .where(ps_node_table.c.creator_id == creator_id, ps_node_table.c.ps_id == ps_id)
+        .order_by(desc(ps_node_table.c.created_at))
+        .limit(1)
+    )

     ps_node_db_cursor = await connection.execute(query)
     ps_node_data = ps_node_db_cursor.mappings().one_or_none()
@@ -84,9 +85,12 @@ async def get_last_ps_node_by_creator_and_ps_id(
     return Ps_Node.model_validate(ps_node_data)


 async def create_ps_node_schema(
-    connection: AsyncConnection, validated_schema, creator_id: int,
-) -> Optional[Ps_Node]:
+    connection: AsyncConnection,
+    validated_schema,
+    creator_id: int,
+) -> Optional[ListenNodeCoreSchema]:
     """
     Creates a new record in process_schema_table.
     """

View File

@@ -6,7 +6,15 @@ from api.endpoints.list_events import api_router as listevents_router
 from api.endpoints.process_schema import api_router as processschema_router
 from api.endpoints.ps_node import api_router as ps_node_router

-list_of_routes = [auth_router, profile_router, account_router, keyring_router, listevents_router, processschema_router, ps_node_router]
+list_of_routes = [
+    auth_router,
+    profile_router,
+    account_router,
+    keyring_router,
+    listevents_router,
+    processschema_router,
+    ps_node_router,
+]

 __all__ = [
     "list_of_routes",

View File

@@ -22,7 +22,7 @@ api_router = APIRouter(
 @api_router.get("/{user_id}/{key_id}", dependencies=[Depends(bearer_schema)], response_model=AccountKeyring)
-async def get_keyring_endpoint (
+async def get_keyring_endpoint(
     key_id: str, connection: AsyncConnection = Depends(get_connection_dep), current_user=Depends(get_current_user)
 ):
     authorize_user = await db_user_role_validation(connection, current_user)

View File

@@ -26,7 +26,6 @@ from api.services.user_role_validation import (
 from api.db.logic.ps_node import create_ps_node_schema
 from api.db.logic.process_schema import update_process_schema_settings_by_id
 from orm.tables.process import NodeType
@@ -37,6 +36,8 @@ from core import VorkNodeRegistry
 from model_nodes import ListenNodeData

+from api.utils.node_counter import increment_node_counter
+

 api_router = APIRouter(
     prefix="/process_schema",
@@ -44,10 +45,7 @@ api_router = APIRouter(
 )


-@api_router.get("", dependencies=[Depends(bearer_schema)],
-                # response_model=AllProcessSchemaResponse
-                )
+@api_router.get("", dependencies=[Depends(bearer_schema)], response_model=AllProcessSchemaResponse)
 async def get_all_process_schema_endpoint(
     page: int = Query(1, description="Page number", gt=0),
     limit: int = Query(10, description="Number of items per page", gt=0),
@@ -56,12 +54,20 @@ async def get_all_process_schema_endpoint(
     order_direction: Optional[str] = Query("asc", description="Sort direction (asc/desc)"),
     status_filter: Optional[List[str]] = Query(None, description="Filter by status"),
     owner_id: Optional[List[str]] = Query(None, description="Filter by owner id"),
+    show_deleted: bool = Query(False, description="Show only deleted schemas"),
     connection: AsyncConnection = Depends(get_connection_dep),
     creator_id: Optional[int] = Query(None, description="Filter by creator id"),
     current_user=Depends(get_current_user),
 ):
+    if show_deleted:
+        status_to_filter = ["DELETED"]
+    elif status_filter:
+        status_to_filter = status_filter
+    else:
+        status_to_filter = None
+
     filters = {
-        **({"status": status_filter} if status_filter else {}),
+        **({"status": status_to_filter} if status_to_filter else {}),
         **({"owner_id": owner_id} if owner_id else {}),
         **({"creator_id": [str(creator_id)]} if creator_id else {}),
     }
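For illustration, here is how the new query parameters map onto the filters dict (example URLs; parameter names follow the endpoint signature):

    GET /process_schema?show_deleted=true    -> filters = {"status": ["DELETED"]}
    GET /process_schema?status_filter=ACTIVE -> filters = {"status": ["ACTIVE"]}
    GET /process_schema                      -> filters = {}  (DELETED schemas are then excluded at the query layer)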
@@ -111,78 +117,68 @@ async def get_process_schema_endpoint(
     return to_camel_dict(process_schema_validation.model_dump())


-@api_router.post("", dependencies=[Depends(bearer_schema)],
-                # response_model=ProcessSchemaResponse
-                )
-async def create_processschema_endpoint(
-    process_schema: ProcessSchemaUpdate,
-    connection: AsyncConnection = Depends(get_connection_dep),
-    current_user=Depends(get_current_user),
-):
-    user_validation = await get_user_by_login(connection, current_user)
-    process_schema_validation = await get_process_schema_by_title(connection, process_schema.title)
-
-    if process_schema_validation is None:
-        await create_process_schema(connection, process_schema, user_validation.id)
-        process_schema_new = await get_process_schema_by_title(connection, process_schema.title)
-
-        start_node_data = ListenNodeData(
-            ps_id=process_schema_new.id,
-            node_type=NodeType.START.value,
-            is_start="True"
-        )
-        start_node_links = {}
-        registery = VorkNodeRegistry()
-        vork_node = registery.get("LISTEN")
-        node_descriptor = vork_node.form()
-
-        start_node = vork_node(data=start_node_data.model_dump(), links=start_node_links)
-        validated_start_schema = start_node.validate()
-        print(validated_start_schema)
-        db_start_schema = await create_ps_node_schema(connection, validated_start_schema, user_validation.id)
-
-        node = ProcessSchemaSettingsNode(
-            id=db_start_schema.id,
-            node_type=NodeType.LISTEN.value,
-            data=validated_start_schema.data.model_dump(),
-            from_node=None,
-            links=None)
-
-        settings_dict = {"node": node.model_dump(mode='json')}
-
-        await update_process_schema_settings_by_id(connection, process_schema_new.id, settings_dict)
-        process_schema_new = await get_process_schema_by_title(connection, process_schema.title)
-
-        ps_node_front_response = Ps_NodeFrontResponse(
-            description=node_descriptor,
-            node=Ps_NodeFrontResponseNode(
-                id=db_start_schema.id,
-                node_type=NodeType.LISTEN.value,
-                data=validated_start_schema.data.model_dump()),
-            link=None)
-
-        response_data = {
-            "process_schema": process_schema_new.model_dump(),
-            "node_listen": ps_node_front_response.model_dump()}
-
-        return to_camel_dict(response_data)
-    else:
-        raise HTTPException(
-            status_code=status.HTTP_400_BAD_REQUEST, detail="An process schema with this information already exists."
-        )
+@api_router.post("", dependencies=[Depends(bearer_schema)], response_model=ProcessSchemaResponse)
+async def create_processschema_endpoint(
+    connection: AsyncConnection = Depends(get_connection_dep),
+    current_user=Depends(get_current_user),
+):
+    user_validation = await get_user_by_login(connection, current_user)
+
+    current_node_counter = increment_node_counter()
+    title = f"Новая схема {current_node_counter}"
+    description = "Default description"
+
+    await create_process_schema(connection, user_validation.id, title, description)
+
+    process_schema_new = await get_process_schema_by_title(connection, title)
+    start_node_data = ListenNodeData(ps_id=process_schema_new.id, node_type=NodeType.START.value, is_start="True")
+    start_node_links = {}
+    registery = VorkNodeRegistry()
+    vork_node = registery.get("LISTEN")
+    node_descriptor = vork_node.form()
+    start_node = vork_node(data=start_node_data.model_dump(), links=start_node_links)
+    validated_start_schema = start_node.validate()
+    print(validated_start_schema)
+    db_start_schema = await create_ps_node_schema(connection, validated_start_schema, user_validation.id)
+
+    node = ProcessSchemaSettingsNode(
+        id=db_start_schema.id,
+        node_type=NodeType.LISTEN.value,
+        data=validated_start_schema.data.model_dump(),
+        from_node=None,
+        links=None,
+    )
+
+    settings_dict = {"node": node.model_dump(mode="json")}
+
+    await update_process_schema_settings_by_id(connection, process_schema_new.id, settings_dict)
+    process_schema_new = await get_process_schema_by_title(connection, title)
+
+    ps_node_front_response = Ps_NodeFrontResponse(
+        description=node_descriptor.model_dump(),
+        node=Ps_NodeFrontResponseNode(
+            id=db_start_schema.id, node_type=NodeType.LISTEN.value, data=validated_start_schema.data.model_dump()
+        ),
+        link=None,
+    )
+
+    response_data = {
+        "process_schema": process_schema_new.model_dump(),
+        "node_listen": ps_node_front_response.model_dump(),
+    }
+
+    return to_camel_dict(response_data)


 @api_router.put("/{process_schema_id}", dependencies=[Depends(bearer_schema)], response_model=ProcessSchema)
@@ -213,7 +209,7 @@ async def update_process_schema_endpoint(
     return process_schema


-@api_router.delete("/{process_schema_id}", dependencies=[Depends(bearer_schema)], response_model=ProcessSchema)
+@api_router.delete("/{process_schema_id}", dependencies=[Depends(bearer_schema)], status_code=status.HTTP_200_OK)
 async def delete_process_schema_endpoint(
     process_schema_id: int,
     connection: AsyncConnection = Depends(get_connection_dep),
@@ -237,6 +233,6 @@ async def delete_process_schema_endpoint(
     await update_process_schema_by_id(connection, updated_values, process_schema_validation)

-    process_schema = await get_process_schema_by_id(connection, process_schema_id)
+    await get_process_schema_by_id(connection, process_schema_id)

-    return process_schema
+    return HTTPException(status_code=status.HTTP_200_OK, detail="Process schema deleted successfully")
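A hedged client-side sketch of the reworked POST and DELETE calls (httpx, the base URL, the token, and the exact response keys are assumptions, not part of the diff):

    import asyncio
    import httpx

    async def demo() -> None:
        headers = {"Authorization": "Bearer <token>"}  # assumed auth header
        async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
            # POST now takes no body: the title is generated from the node counter.
            created = await client.post("/process_schema", headers=headers)
            schema_id = created.json()["processSchema"]["id"]  # keys camelized by to_camel_dict
            # DELETE soft-deletes the schema and answers 200 with a confirmation payload.
            await client.delete(f"/process_schema/{schema_id}", headers=headers)

    asyncio.run(demo())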

View File

@@ -1,4 +1,4 @@
-from fastapi import APIRouter, Depends, HTTPException,status
+from fastapi import APIRouter, Depends, HTTPException, status

 from sqlalchemy.ext.asyncio import AsyncConnection
@@ -23,23 +23,18 @@ from model_nodes import VorkNodeLinkData
 from api.utils.to_camel_dict import to_camel_dict


 api_router = APIRouter(
     prefix="/ps_node",
     tags=["ps node"],
 )


-@api_router.post("", dependencies=[Depends(bearer_schema)],response_model=Ps_NodeFrontResponse)
+@api_router.post("", dependencies=[Depends(bearer_schema)], response_model=Ps_NodeFrontResponse)
 async def create_ps_node_endpoint(
     ps_node: Ps_NodeRequest,
     connection: AsyncConnection = Depends(get_connection_dep),
     current_user=Depends(get_current_user),
 ):
     user_validation = await get_user_by_login(connection, current_user)

     registery = VorkNodeRegistry()
@@ -58,60 +53,50 @@ async def create_ps_node_endpoint(
     except Exception as e:
         raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))

     db_ps_node = await create_ps_node_schema(connection, node_instance_validated, user_validation.id)

     link_name = await get_last_link_name_by_node_id(connection, db_ps_node.ps_id)

     link_data = VorkNodeLinkData(
         parent_port_number=node_instance_validated.parent_port_number,
         to_id=db_ps_node.id,
         from_id=node_instance_validated.parent_id,
-        last_link_name=link_name)
+        last_link_name=link_name,
+    )

     link = VorkNodeLink(data=link_data.model_dump())
     validated_link = link.validate()

     db_node_link = await create_node_link_schema(connection, validated_link, user_validation.id)

     links_settings = ProcessSchemaSettingsNodeLink(
         id=db_node_link.id,
         link_name=db_node_link.link_name,
         parent_port_number=db_node_link.link_point_id,
         from_id=db_node_link.node_id,
         to_id=db_node_link.next_node_id,
     )

     node_settings = ProcessSchemaSettingsNode(
         id=db_ps_node.id,
         node_type=db_ps_node.node_type,
         data=node_instance_validated.data.model_dump(),
         from_node=None,
-        links=links_settings.model_dump())
+        links=[{"links": links_settings.model_dump()}],
+    )

-    settings_dict = {"node": node_settings.model_dump(mode='json')}
+    settings_dict = {"node": node_settings.model_dump(mode="json")}

     await update_process_schema_settings_by_id(connection, db_ps_node.ps_id, settings_dict)

     ps_node_front_response = Ps_NodeFrontResponse(
-        description=node_descriptor,
+        description=node_descriptor.model_dump(),
         node=Ps_NodeFrontResponseNode(
             id=db_ps_node.id,
             node_type=db_ps_node.node_type,
-            data=to_camel_dict(node_instance_validated.data.model_dump())),
-        link=links_settings.model_dump())
+            data=to_camel_dict(node_instance_validated.data.model_dump()),
+        ),
+        links=[{"links": links_settings.model_dump()}],
+    )

     return ps_node_front_response

View File

@@ -10,7 +10,7 @@ from api.schemas.base import Base
 class ProcessSchemaUpdate(Base):
     title: Optional[str] = Field(None, max_length=100)
     description: Optional[str] = None
-    owner_id: Optional[int] = None
+    # owner_id: Optional[int] = None
     settings: Optional[Dict[str, Any]] = None
     status: Optional[ProcessStatus] = None

View File

@@ -1,5 +1,5 @@
 from datetime import datetime
-from typing import Any, Dict, Optional
+from typing import Any, Dict, Optional, List

 from orm.tables.process import ProcessStatus, NodeType
 from pydantic import Field
@@ -31,8 +31,8 @@ class ProcessSchemaSettingsNode(Base):
     id: int
     node_type: NodeType
     from_node: Optional[Dict[str, Any]] = None
-    data: Dict[str, Any]# Renamed from 'from' to 'from_node'
-    links: Optional[ProcessSchemaSettingsNodeLink] = None
+    data: Dict[str, Any]  # Renamed from 'from' to 'from_node'
+    links: Optional[List[Dict[str, Any]]] = None


 class ProcessSchemaResponse(Base):

View File

@@ -1,8 +1,7 @@
 from datetime import datetime
-from typing import Any, Dict, Optional
+from typing import Any, Dict, Optional, List

 from orm.tables.process import NodeStatus, NodeType
-from pydantic import Field

 from api.schemas.base import Base
@@ -11,6 +10,7 @@ class Ps_NodeRequest(Base):
     data: Dict[str, Any]
     links: Dict[str, Any]


 class Ps_Node(Base):
     id: int
     ps_id: int
@@ -21,7 +21,6 @@ class Ps_Node(Base):
     status: NodeStatus


 class Ps_NodeFrontResponseLink(Base):
     id: int
     link_name: str
@@ -33,10 +32,10 @@ class Ps_NodeFrontResponseLink(Base):
 class Ps_NodeFrontResponseNode(Base):
     id: int
     node_type: NodeType
-    data: Dict[str, Any]# Renamed from 'from' to 'from_node'
+    data: Dict[str, Any]  # Renamed from 'from' to 'from_node'


 class Ps_NodeFrontResponse(Base):
     description: Optional[Dict[str, Any]] = None
     node: Optional[Ps_NodeFrontResponseNode] = None
-    link: Optional[Ps_NodeFrontResponseLink] = None
+    links: Optional[List[Dict[str, Any]]] = None
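An illustrative payload for the reshaped Ps_NodeFrontResponse: the single link field becomes links, a list of wrapper dicts (field values and the descriptor contents below are assumed examples; key names follow the updated schemas):

    example_response = {
        "description": {"name": "LISTEN", "ports": []},         # node_descriptor.model_dump(), contents assumed
        "node": {"id": 12, "node_type": "LISTEN", "data": {}},  # Ps_NodeFrontResponseNode
        "links": [
            {"links": {"id": 3, "link_name": "l1", "parent_port_number": 1, "from_id": 11, "to_id": 12}}
        ],
    }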

View File

@@ -0,0 +1,52 @@
+import json
+from pathlib import Path
+from typing import Dict
+
+# Path to the counter file (in the project root)
+COUNTER_FILE_PATH = Path(__file__).parent.parent.parent / "node_counter.json"
+
+
+def get_node_counter() -> int:
+    """
+    Opens the JSON file and returns the node_counter value.
+    If the file does not exist, creates it with a default value of 0.
+
+    Returns:
+        int: the current node counter value
+    """
+    if not COUNTER_FILE_PATH.exists():
+        initial_data: Dict[str, int] = {"node_counter": 0}
+        with open(COUNTER_FILE_PATH, "w", encoding="utf-8") as f:
+            json.dump(initial_data, f, indent=2, ensure_ascii=False)
+        return 0
+
+    try:
+        with open(COUNTER_FILE_PATH, "r", encoding="utf-8") as f:
+            data = json.load(f)
+            return data.get("node_counter", 0)
+    except (json.JSONDecodeError, IOError):
+        initial_data = {"node_counter": 0}
+        with open(COUNTER_FILE_PATH, "w", encoding="utf-8") as f:
+            json.dump(initial_data, f, indent=2, ensure_ascii=False)
+        return 0
+
+
+def increment_node_counter() -> int:
+    """
+    Increments node_counter by 1, saves it to the file, and returns the new value.
+
+    Returns:
+        int: the new counter value (previous value + 1)
+    """
+    current_value = get_node_counter()
+    new_value = current_value + 1
+
+    data: Dict[str, int] = {"node_counter": new_value}
+    with open(COUNTER_FILE_PATH, "w", encoding="utf-8") as f:
+        json.dump(data, f, indent=2, ensure_ascii=False)
+
+    return new_value
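A minimal usage sketch of the counter helpers (the import path follows the diff; the printed values are illustrative for a fresh file):

    from api.utils.node_counter import get_node_counter, increment_node_counter

    print(get_node_counter())        # 0 on first run: the file is created with the default value
    print(increment_node_counter())  # 1: the new value is persisted to node_counter.json
    print(increment_node_counter())  # 2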

View File

@@ -1,5 +1,6 @@
 from pydantic.alias_generators import to_camel


 def to_camel_dict(obj):
     if isinstance(obj, dict):
         return {to_camel(key): to_camel_dict(value) for key, value in obj.items()}
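A quick illustration of what the recursive helper does (input values are assumed examples; to_camel comes from pydantic.alias_generators):

    to_camel_dict({"node_type": "LISTEN", "from_node": {"parent_port_number": 1}})
    # -> {"nodeType": "LISTEN", "fromNode": {"parentPortNumber": 1}}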

api/poetry.lock generated
View File

@@ -2020,7 +2020,7 @@ sqlalchemy = "^2.0.43"
 type = "git"
 url = "http://88.86.199.167:3000/Nox/CORE.git"
 reference = "HEAD"
-resolved_reference = "b3896e8b5dbed2d609c8ac257419d5492c9e7b8d"
+resolved_reference = "43c139512928ab3a4767e771c8e41e39930599ad"

 [[package]]
 name = "watchfiles"

View File

@@ -18,7 +18,7 @@ dependencies = [
     "python-multipart (>=0.0.20,<0.0.21)",
     "requests (>=2.31.0,<3.0.0)",
     "fastapi-jwt-auth @ git+https://github.com/vvpreo/fastapi-jwt-auth",
-    "core-library @ git+https://gitea.heado.ru/Vorkout/core.git@VORKOUT-18",
+    "vork-core @ git+http://88.86.199.167:3000/Nox/CORE.git",
 ]