feat: node endpoint, node_start process_schema
@@ -0,0 +1,7 @@
from sqlalchemy import MetaData

metadata = MetaData()

__all__ = [
    "metadata",
]
api/api/db/alembic/versions/816be8c60ab4_.py (new file, 38 lines)
@@ -0,0 +1,38 @@
"""empty message

Revision ID: 816be8c60ab4
Revises: 93106fbe7d83
Create Date: 2025-09-12 14:48:47.726269

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql

# revision identifiers, used by Alembic.
revision: str = '816be8c60ab4'
down_revision: Union[str, None] = '93106fbe7d83'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('ps_node', 'node_type',
                    existing_type=mysql.ENUM('TYPE1', 'TYPE2', 'TYPE3'),
                    type_=sa.Enum('LISTEN', 'IF', 'START', name='nodetype'),
                    existing_nullable=False)
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('ps_node', 'node_type',
                    existing_type=sa.Enum('LISTEN', 'IF', 'START', name='nodetype'),
                    type_=mysql.ENUM('TYPE1', 'TYPE2', 'TYPE3'),
                    existing_nullable=False)
    # ### end Alembic commands ###
@@ -0,0 +1,32 @@
"""update node_link_table link_point_id default

Revision ID: cc3b95f1f99d
Revises: 816be8c60ab4
Create Date: 2025-09-12 19:17:03.125276

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql

# revision identifiers, used by Alembic.
revision: str = 'cc3b95f1f99d'
down_revision: Union[str, None] = '816be8c60ab4'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('node_link', sa.Column('link_point_id', sa.Integer().with_variant(mysql.INTEGER(unsigned=True), 'mysql'), nullable=False))
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('node_link', 'link_point_id')
    # ### end Alembic commands ###
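Editor's note: the two revisions above chain onto 93106fbe7d83 and can be applied or rolled back with the standard Alembic commands (a minimal sketch, assuming the project's usual alembic.ini/env.py setup):

    alembic upgrade head              # applies 816be8c60ab4, then cc3b95f1f99d
    alembic downgrade 93106fbe7d83    # reverts both revisions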
api/api/db/logic/node_link.py (new file, 78 lines)
@@ -0,0 +1,78 @@
from typing import Optional

from datetime import datetime, timezone

from sqlalchemy import insert, select, desc
from sqlalchemy.ext.asyncio import AsyncConnection

from api.schemas.process.node_link import NodeLink

from orm.tables.process import ps_node_table, node_link_table
from orm.tables.process import NodeLinkStatus


async def get_last_link_name_by_node_id(
    connection: AsyncConnection, ps_id: int
) -> Optional[str]:
    """
    Returns the link_name of the most recent node_link record for the given ps_id.

    Collects all node ids from ps_node for the ps_id, looks up their links in
    node_link and returns the link_name of the latest record.
    """
    query = select(node_link_table.c.link_name).where(
        node_link_table.c.node_id.in_(
            select(ps_node_table.c.id).where(ps_node_table.c.ps_id == ps_id)
        )
    ).order_by(desc(node_link_table.c.created_at)).limit(1)

    result = await connection.execute(query)
    link_name = result.scalar_one_or_none()

    return link_name


async def get_last_node_link_by_creator_and_ps_id(
    connection: AsyncConnection, creator_id: int, node_link_id: int
) -> Optional[NodeLink]:
    """
    Returns the most recently created node_link for the given creator and node.
    """
    query = select(node_link_table).where(
        node_link_table.c.creator_id == creator_id,
        node_link_table.c.node_id.in_(
            select(ps_node_table.c.id).where(ps_node_table.c.id == node_link_id)
        )
    ).order_by(desc(node_link_table.c.created_at)).limit(1)

    node_link_db_cursor = await connection.execute(query)
    node_link_data = node_link_db_cursor.mappings().one_or_none()

    if not node_link_data:
        return None

    return NodeLink.model_validate(node_link_data)


async def create_node_link_schema(
    connection: AsyncConnection, validated_link_schema, creator_id: int,
) -> Optional[NodeLink]:
    """
    Creates a new row in the node_link table.
    """
    query = insert(node_link_table).values(
        link_name=validated_link_schema.link_name,
        node_id=validated_link_schema.from_id,
        link_point_id=validated_link_schema.parent_port_number,
        next_node_id=validated_link_schema.to_id,
        settings={},
        creator_id=creator_id,
        created_at=datetime.now(timezone.utc),
        status=NodeLinkStatus.ACTIVE.value,
    )

    await connection.execute(query)

    await connection.commit()

    return await get_last_node_link_by_creator_and_ps_id(connection, creator_id, validated_link_schema.from_id)
@@ -1,4 +1,4 @@
from typing import Optional
from typing import Optional, Dict, Any
import math

from datetime import datetime, timezone
@@ -152,6 +152,42 @@ async def update_process_schema_by_id(connection: AsyncConnection, update_values
    await connection.commit()


async def update_process_schema_settings_by_id(
    connection: AsyncConnection,
    process_schema_id: int,
    node_data: Dict[str, Any]
):
    """
    Appends a new node to the 'nodes' array in the process settings.
    Creates the 'nodes' array if it does not exist yet.
    """
    # Fetch the current settings
    query = select(process_schema_table.c.settings).where(process_schema_table.c.id == process_schema_id)
    result = await connection.execute(query)
    current_settings = result.scalar_one_or_none()

    # If settings are empty, start from an empty dict
    if current_settings is None:
        current_settings = {}

    # Initialize the 'nodes' array if it is missing
    if "nodes" not in current_settings:
        current_settings["nodes"] = []

    # Append the new node to the array
    current_settings["nodes"].append(node_data)

    # Update the settings column
    await connection.execute(
        process_schema_table.update()
        .where(process_schema_table.c.id == process_schema_id)
        .values(settings=current_settings)
    )

    await connection.commit()


async def create_process_schema(
    connection: AsyncConnection, process_schema: ProcessSchema, creator_id: int
) -> Optional[ProcessSchema]:
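Editor's note: a minimal sketch of the read-modify-write that update_process_schema_settings_by_id performs on the JSON settings column (hypothetical values; the endpoint callers further down pass a {"node": ...} dict as node_data):

    current_settings = {}   # the column may also start out as NULL
    node_data = {"node": {"id": 1, "node_type": "START", "data": {}, "from_node": None, "links": None}}

    current_settings.setdefault("nodes", [])
    current_settings["nodes"].append(node_data)
    # settings now look like {"nodes": [{"node": {...}}]}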
api/api/db/logic/ps_node.py (new file, 106 lines)
@@ -0,0 +1,106 @@
from typing import Optional

from datetime import datetime, timezone

from sqlalchemy import insert, select, desc
from sqlalchemy.ext.asyncio import AsyncConnection

from orm.tables.process import ps_node_table

from api.schemas.process.ps_node import Ps_Node
from model_nodes.node_start_models import StartNodeCoreSchema
from orm.tables.process import NodeStatus, NodeType


async def get_ps_node_by_id(connection: AsyncConnection, id: int) -> Optional[Ps_Node]:
    """
    Fetches a ps_node by id.
    """
    query = select(ps_node_table).where(ps_node_table.c.id == id)

    ps_node_db_cursor = await connection.execute(query)

    ps_node_data = ps_node_db_cursor.mappings().one_or_none()
    if not ps_node_data:
        return None

    return Ps_Node.model_validate(ps_node_data)


async def get_ps_node_by_type_and_ps_id(connection: AsyncConnection, node_type: str, ps_id: int) -> Optional[Ps_Node]:
    """
    Fetches a ps_node by node_type and ps_id.
    """
    query = select(ps_node_table).where(
        ps_node_table.c.node_type == node_type,
        ps_node_table.c.ps_id == ps_id
    )

    ps_node_db_cursor = await connection.execute(query)

    ps_node_data = ps_node_db_cursor.mappings().one_or_none()
    if not ps_node_data:
        return None

    return Ps_Node.model_validate(ps_node_data)


async def create_ps_node_start_schema(
    connection: AsyncConnection, validated_start_schema: StartNodeCoreSchema, creator_id: int
) -> Optional[Ps_Node]:
    """
    Creates a new START node row in the ps_node table.
    """
    query = insert(ps_node_table).values(
        ps_id=validated_start_schema.ps_id,
        node_type=NodeType.START.value,
        settings=validated_start_schema.data.model_dump(),
        creator_id=creator_id,
        created_at=datetime.now(timezone.utc),
        status=NodeStatus.ACTIVE.value,
    )

    await connection.execute(query)

    await connection.commit()

    # return validated_start_schema


async def get_last_ps_node_by_creator_and_ps_id(
    connection: AsyncConnection, creator_id: int, ps_id: int
) -> Optional[Ps_Node]:
    """
    Returns the most recently created ps_node for the given creator and process.
    """
    query = select(ps_node_table).where(
        ps_node_table.c.creator_id == creator_id,
        ps_node_table.c.ps_id == ps_id
    ).order_by(desc(ps_node_table.c.created_at)).limit(1)

    ps_node_db_cursor = await connection.execute(query)
    ps_node_data = ps_node_db_cursor.mappings().one_or_none()

    if not ps_node_data:
        return None

    return Ps_Node.model_validate(ps_node_data)


async def create_ps_node_schema(
    connection: AsyncConnection, validated_schema, creator_id: int,
) -> Optional[Ps_Node]:
    """
    Creates a new row in the ps_node table.
    """
    query = insert(ps_node_table).values(
        ps_id=validated_schema.ps_id,
        node_type=validated_schema.node_type,
        settings=validated_schema.data.model_dump(),
        creator_id=creator_id,
        created_at=datetime.now(timezone.utc),
        status=NodeStatus.ACTIVE.value,
    )

    await connection.execute(query)

    await connection.commit()

    return await get_last_ps_node_by_creator_and_ps_id(connection, creator_id, validated_schema.ps_id)
@@ -4,8 +4,9 @@ from api.endpoints.account import api_router as account_router
from api.endpoints.keyring import api_router as keyring_router
from api.endpoints.list_events import api_router as listevents_router
from api.endpoints.process_schema import api_router as processschema_router
from api.endpoints.ps_node import api_router as ps_node_router

list_of_routes = [auth_router, profile_router, account_router, keyring_router, listevents_router, processschema_router]
list_of_routes = [auth_router, profile_router, account_router, keyring_router, listevents_router, processschema_router, ps_node_router]

__all__ = [
    "list_of_routes",
@@ -26,7 +26,7 @@ api_router = APIRouter(


@api_router.get("", dependencies=[Depends(bearer_schema)], response_model=AllUserResponse)
async def get_all_account(
async def get_all_account_endpoint(
    page: int = Query(1, description="Page number", gt=0),
    limit: int = Query(10, description="Number of items per page", gt=0),
    search: Optional[str] = Query(None, description="Search term to filter by name or login or email"),
@@ -62,7 +62,7 @@ async def get_all_account(


@api_router.get("/{user_id}", dependencies=[Depends(bearer_schema)], response_model=User)
async def get_account(
async def get_account_endpoint(
    user_id: int,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
@@ -78,7 +78,7 @@ async def get_account(


@api_router.post("", dependencies=[Depends(bearer_schema)], response_model=User)
async def create_account(
async def create_account_endpoint(
    user: UserCreate,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
@@ -98,7 +98,7 @@ async def create_account(


@api_router.put("/{user_id}", dependencies=[Depends(bearer_schema)], response_model=UserUpdate)
async def update_account(
async def update_account_endpoint(
    user_id: int,
    user_update: UserUpdate,
    connection: AsyncConnection = Depends(get_connection_dep),
@@ -126,7 +126,7 @@ async def update_account(


@api_router.delete("/{user_id}", dependencies=[Depends(bearer_schema)], response_model=User)
async def delete_account(
async def delete_account_endpoint(
    user_id: int,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
@@ -49,7 +49,7 @@ def get_config():


@api_router.post("", response_model=Tokens)
async def login_for_access_token(
async def login_for_access_token_endpoint(
    user: Auth,
    response: Response,
    connection: AsyncConnection = Depends(get_connection_dep),
@@ -84,7 +84,7 @@ async def login_for_access_token(


@api_router.post("/refresh", response_model=Tokens)
async def refresh(
async def refresh_endpoint(
    request: Request,
    connection: AsyncConnection = Depends(get_connection_dep),
    Authorize: AuthJWT = Depends(),
@@ -22,7 +22,7 @@ api_router = APIRouter(


@api_router.get("/{user_id}/{key_id}", dependencies=[Depends(bearer_schema)], response_model=AccountKeyring)
async def get_keyring(
async def get_keyring_endpoint(
    key_id: str, connection: AsyncConnection = Depends(get_connection_dep), current_user=Depends(get_current_user)
):
    authorize_user = await db_user_role_validation(connection, current_user)
@@ -36,7 +36,7 @@ async def get_keyring(


@api_router.post("/{user_id}/{key_id}", dependencies=[Depends(bearer_schema)], response_model=AccountKeyring)
async def create_keyring(
async def create_keyring_endpoint(
    user_id: int,
    key_id: str,
    key: AccountKeyringUpdate,
@@ -62,7 +62,7 @@ async def create_keyring(


@api_router.put("/{user_id}/{key_id}", dependencies=[Depends(bearer_schema)], response_model=AccountKeyring)
async def update_keyring(
async def update_keyring_endpoint(
    user_id: int,
    key_id: str,
    keyring_update: AccountKeyringUpdate,
@@ -88,7 +88,7 @@ async def update_keyring(


@api_router.delete("/{user_id}/{key_id}", dependencies=[Depends(bearer_schema)], response_model=AccountKeyring)
async def delete_keyring(
async def delete_keyring_endpoint(
    user_id: int,
    key_id: str,
    connection: AsyncConnection = Depends(get_connection_dep),
@@ -29,7 +29,7 @@ api_router = APIRouter(


@api_router.get("", dependencies=[Depends(bearer_schema)], response_model=AllListEventResponse)
async def get_all_list_events(
async def get_all_list_events_endpoint(
    page: int = Query(1, description="Page number", gt=0),
    limit: int = Query(10, description="Number of items per page", gt=0),
    search: Optional[str] = Query(None, description="Search term to filter by title or name"),
@@ -72,7 +72,7 @@ async def get_all_list_events(


@api_router.get("/{list_events_id}", dependencies=[Depends(bearer_schema)], response_model=ListEvent)
async def get_list_events(
async def get_list_events_endpoint(
    list_events_id: int,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
@@ -93,7 +93,7 @@ async def get_list_events(


@api_router.post("", dependencies=[Depends(bearer_schema)], response_model=ListEvent)
async def create_list_events(
async def create_list_events_endpoint(
    list_events: ListEventUpdate,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
@@ -141,7 +141,7 @@ async def update_list_events(


@api_router.delete("/{list_events_id}", dependencies=[Depends(bearer_schema)], response_model=ListEvent)
async def delete_list_events(
async def delete_list_events_endpoint(
    list_events_id: int,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
@@ -15,13 +15,27 @@ from api.db.logic.process_schema import (
)
from api.schemas.base import bearer_schema
from api.schemas.endpoints.process_schema import AllProcessSchemaResponse, ProcessSchemaFilterDTO, ProcessSchemaUpdate
from api.schemas.process.process_schema import ProcessSchema
from api.schemas.process.process_schema import ProcessSchema, ProcessSchemaSettingsNode, ProcessSchemaResponse
from api.schemas.process.ps_node import Ps_NodeFrontResponseNode
from api.schemas.process.ps_node import Ps_NodeFrontResponse
from api.services.auth import get_current_user
from api.services.user_role_validation import (
    db_user_role_validation_for_list_events_and_process_schema,
    db_user_role_validation_for_list_events_and_process_schema_by_list_event_id,
)

from core import VorkNodeStart
from model_nodes import StartNodeData

from api.db.logic.ps_node import create_ps_node_schema

from api.db.logic.process_schema import update_process_schema_settings_by_id

from orm.tables.process import NodeType


api_router = APIRouter(
    prefix="/process_schema",
    tags=["process schema"],
@@ -29,7 +43,7 @@ api_router = APIRouter(


@api_router.get("", dependencies=[Depends(bearer_schema)], response_model=AllProcessSchemaResponse)
async def get_all_process_schema(
async def get_all_process_schema_endpoint(
    page: int = Query(1, description="Page number", gt=0),
    limit: int = Query(10, description="Number of items per page", gt=0),
    search: Optional[str] = Query(None, description="Search term to filter by title or description"),
@@ -72,7 +86,7 @@ async def get_all_process_schema(


@api_router.get("/{process_schema_id}", dependencies=[Depends(bearer_schema)], response_model=ProcessSchema)
async def get_process_schema(
async def get_process_schema_endpoint(
    process_schema_id: int,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
@@ -92,8 +106,8 @@ async def get_process_schema(
    return process_schema_validation


@api_router.post("", dependencies=[Depends(bearer_schema)], response_model=ProcessSchema)
async def create_processschema(
@api_router.post("", dependencies=[Depends(bearer_schema)], response_model=ProcessSchemaResponse)
async def create_processschema_endpoint(
    process_schema: ProcessSchemaUpdate,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
@@ -102,9 +116,50 @@ async def create_processschema(
    process_schema_validation = await get_process_schema_by_title(connection, process_schema.title)

    if process_schema_validation is None:

        await create_process_schema(connection, process_schema, user_validation.id)
        process_schema_new = await get_process_schema_by_title(connection, process_schema.title)
        return process_schema_new

        start_node_data = StartNodeData(
            ps_id=process_schema_new.id,
            node_type=NodeType.START.value
        )
        start_node_links = {}

        start_node = VorkNodeStart(data=start_node_data.model_dump(), links=start_node_links)

        validated_start_schema = start_node.validate()

        db_start_schema = await create_ps_node_schema(connection, validated_start_schema, user_validation.id)

        node = ProcessSchemaSettingsNode(
            id=db_start_schema.id,
            node_type=NodeType.START.value,
            data=validated_start_schema.data.model_dump(),
            from_node=None,
            links=None)

        settings_dict = {"node": node.model_dump(mode='json')}

        await update_process_schema_settings_by_id(connection, process_schema_new.id, settings_dict)

        process_schema_new = await get_process_schema_by_title(connection, process_schema.title)

        ps_node_front_response = Ps_NodeFrontResponse(
            description=None,
            node=Ps_NodeFrontResponseNode(
                id=db_start_schema.id,
                node_type=NodeType.START.value,
                data=validated_start_schema.data.model_dump()),
            link=None)

        response_data = {
            "process_schema": process_schema_new.model_dump(mode='json'),
            "node_start": ps_node_front_response.model_dump(mode='json')}

        return response_data

    else:
        raise HTTPException(
@@ -113,7 +168,7 @@ async def create_processschema(


@api_router.put("/{process_schema_id}", dependencies=[Depends(bearer_schema)], response_model=ProcessSchema)
async def update_process_schema(
async def update_process_schema_endpoint(
    process_schema_id: int,
    process_schema_update: ProcessSchemaUpdate,
    connection: AsyncConnection = Depends(get_connection_dep),
@@ -141,7 +196,7 @@ async def update_process_schema(


@api_router.delete("/{process_schema_id}", dependencies=[Depends(bearer_schema)], response_model=ProcessSchema)
async def delete_process_schema(
async def delete_process_schema_endpoint(
    process_schema_id: int,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
api/api/endpoints/ps_node.py (new file, 116 lines)
@@ -0,0 +1,116 @@
from fastapi import APIRouter, Depends, HTTPException, status

from sqlalchemy.ext.asyncio import AsyncConnection

from api.db.connection.session import get_connection_dep
from api.db.logic.account import get_user_by_login

from api.schemas.base import bearer_schema
from api.schemas.process.process_schema import ProcessSchemaSettingsNodeLink, ProcessSchemaSettingsNode
from api.schemas.process.ps_node import Ps_NodeFrontResponseNode, Ps_NodeRequest
from api.schemas.process.ps_node import Ps_NodeFrontResponse
from api.services.auth import get_current_user

from api.db.logic.ps_node import create_ps_node_schema
from api.db.logic.node_link import get_last_link_name_by_node_id, create_node_link_schema

from api.db.logic.process_schema import update_process_schema_settings_by_id

from core import VorkNodeRegistry, VorkNodeLink

from model_nodes import VorkNodeLinkData


api_router = APIRouter(
    prefix="/ps_node",
    tags=["ps node"],
)


@api_router.post("", dependencies=[Depends(bearer_schema)], response_model=Ps_NodeFrontResponse)
async def create_ps_node_endpoint(
    ps_node: Ps_NodeRequest,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
):
    user_validation = await get_user_by_login(connection, current_user)

    registry = VorkNodeRegistry()

    vork_node = registry.get(ps_node.data["node_type"])

    if vork_node is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Node not found")

    node_descriptor = vork_node.form()
    try:
        node_instance = vork_node(data=ps_node.data, links=ps_node.links)

        node_instance_validated = node_instance.validate()

    except Exception as e:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))

    db_ps_node = await create_ps_node_schema(connection, node_instance_validated, user_validation.id)
    link_name = await get_last_link_name_by_node_id(connection, db_ps_node.ps_id)

    link_data = VorkNodeLinkData(
        parent_port_number=node_instance_validated.parent_port_number,
        to_id=db_ps_node.id,
        from_id=node_instance_validated.parent_id,
        last_link_name=link_name)

    link = VorkNodeLink(data=link_data.model_dump())

    validated_link = link.validate()

    db_node_link = await create_node_link_schema(connection, validated_link, user_validation.id)

    links_settings = ProcessSchemaSettingsNodeLink(
        id=db_node_link.id,
        link_name=db_node_link.link_name,
        parent_port_number=db_node_link.link_point_id,
        from_id=db_node_link.node_id,
        to_id=db_node_link.next_node_id,
    )

    node_settings = ProcessSchemaSettingsNode(
        id=db_ps_node.id,
        node_type=db_ps_node.node_type,
        data=node_instance_validated.data.model_dump(),
        from_node=None,
        links=links_settings)

    settings_dict = {"node": node_settings.model_dump(mode='json')}

    await update_process_schema_settings_by_id(connection, db_ps_node.ps_id, settings_dict)

    ps_node_front_response = Ps_NodeFrontResponse(
        description=node_descriptor.model_dump(),
        node=Ps_NodeFrontResponseNode(
            id=db_ps_node.id,
            node_type=db_ps_node.node_type,
            data=node_instance_validated.data.model_dump()),
        link=links_settings)

    return ps_node_front_response.model_dump(mode='json')
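Editor's note: a minimal request sketch for the new POST /ps_node route (hypothetical host, token, and extra data keys; only the node_type key is required by the handler above, and the body shape follows Ps_NodeRequest):

    import httpx

    payload = {
        "data": {"node_type": "LISTEN"},   # plus whatever fields the chosen node class expects
        "links": {},
    }
    response = httpx.post(
        "http://localhost:8000/ps_node",
        json=payload,
        headers={"Authorization": "Bearer <token>"},
    )
    print(response.json())  # Ps_NodeFrontResponse: description, node, link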
@@ -7,10 +7,11 @@ from pydantic import Field
from api.schemas.base import Base


class MyModel(Base):
class NodeLink(Base):
    id: int
    link_name: str = Field(..., max_length=20)
    node_id: int
    link_point_id: int
    next_node_id: int
    settings: Dict[str, Any]
    creator_id: int
@@ -1,10 +1,11 @@
from datetime import datetime
from typing import Any, Dict
from typing import Any, Dict, Optional

from orm.tables.process import ProcessStatus
from orm.tables.process import ProcessStatus, NodeType
from pydantic import Field

from api.schemas.base import Base
from api.schemas.process.ps_node import Ps_NodeFrontResponse


class ProcessSchema(Base):
@@ -16,3 +17,24 @@ class ProcessSchema(Base):
    created_at: datetime
    settings: Dict[str, Any]
    status: ProcessStatus


class ProcessSchemaSettingsNodeLink(Base):
    id: int
    link_name: str
    parent_port_number: int
    from_id: int
    to_id: int


class ProcessSchemaSettingsNode(Base):
    id: int
    node_type: NodeType
    from_node: Optional[Dict[str, Any]] = None  # renamed from 'from' to 'from_node'
    data: Dict[str, Any]
    links: Optional[ProcessSchemaSettingsNodeLink] = None


class ProcessSchemaResponse(Base):
    process_schema: ProcessSchema
    node_start: Ps_NodeFrontResponse
@@ -1,16 +1,42 @@
from datetime import datetime
from typing import Any, Dict
from typing import Any, Dict, Optional

from orm.tables.process import NodeStatus, NodeType
from pydantic import Field

from api.schemas.base import Base


class Ps_NodeRequest(Base):
    data: Dict[str, Any]
    links: Dict[str, Any]


class Ps_Node(Base):
    id: int
    ps_id: int
    node_type: NodeType
    settings: dict
    creator_id: Dict[str, Any]
    creator_id: int
    created_at: datetime
    status: NodeStatus


class Ps_NodeFrontResponseLink(Base):
    id: int
    link_name: str
    parent_port_number: int
    from_id: int
    to_id: int


class Ps_NodeFrontResponseNode(Base):
    id: int
    node_type: NodeType
    data: Dict[str, Any]


class Ps_NodeFrontResponse(Base):
    description: Optional[Dict[str, Any]] = None
    node: Optional[Ps_NodeFrontResponseNode] = None
    link: Optional[Ps_NodeFrontResponseLink] = None
api/poetry.lock (generated, 9 lines changed)
@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.

[[package]]
name = "aio-pika"
@@ -238,13 +238,14 @@ files = []
develop = false

[package.dependencies]
pydantic = ">=2.0.0,<3.0.0"
sqlalchemy = ">=2.0.43,<3.0.0"

[package.source]
type = "git"
url = "https://gitea.heado.ru/Vorkout/core.git"
reference = "0.1.0"
resolved_reference = "96ddb52582660600dbacead4919e67f948e96898"
reference = "VORKOUT-18"
resolved_reference = "a5a52a64ef6e456e19586e0834d9531d4a88b0db"

[[package]]
name = "cryptography"
@@ -1952,4 +1953,4 @@ propcache = ">=0.2.0"
[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<4.0"
content-hash = "0ac41110571b3bba38672fceef1c107864b8582378c6afba6418e05f99a75da1"
content-hash = "abdeab9600b3023bd1dcf38b64df49bdd3f9baea99b72aa3bb3d3d48f05ed0c2"
@@ -17,7 +17,7 @@ dependencies = [
    "pydantic[email] (>=2.11.3,<3.0.0)",
    "python-multipart (>=0.0.20,<0.0.21)",
    "fastapi-jwt-auth @ git+https://github.com/vvpreo/fastapi-jwt-auth",
    "core-library @ git+https://gitea.heado.ru/Vorkout/core.git@0.1.0",
    "core-library @ git+https://gitea.heado.ru/Vorkout/core.git@VORKOUT-18",
]
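Editor's note: both poetry.lock and pyproject.toml now point the core-library dependency at the VORKOUT-18 branch instead of the 0.1.0 tag; after pulling this change the environment can be refreshed in the usual way (a sketch, assuming Poetry as above):

    poetry install   # re-resolves core-library from the VORKOUT-18 branch per the committed lock file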