Compare commits
15 Commits
VORKOUT-16 ... codestyle-
| Author | SHA1 | Date |
|---|---|---|
| | 3933a05582 | |
| | 453efdd095 | |
| | efabd27a8a | |
| | 7f2d2291b6 | |
| | 43e6053c6d | |
| | 729ef83d86 | |
| | 42741f4d98 | |
| | a060f46e0a | |
| | e4a9fe6d01 | |
| | bfcc4651e1 | |
| | 3dfae3235d | |
| | 59d2d57ee1 | |
| | 7fd0a732b3 | |
| | 571186151b | |
| | 1b95c9825b | |
@@ -1,15 +1,15 @@
import sys
import logging
import sys

import loguru
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from uvicorn import run

from api.config import get_settings, DefaultSettings
from api.config import DefaultSettings, get_settings
from api.endpoints import list_of_routes
from api.utils.common import get_hostname
from api.services.middleware import MiddlewareAccessTokenValidadtion
from api.utils.common import get_hostname

logger = logging.getLogger()
logger.setLevel(logging.DEBUG)

@@ -52,7 +52,6 @@ prod_origins = [""]

origins = dev_origins if get_settings().ENV == "local" else prod_origins


if __name__ == "__main__":
    settings_for_application = get_settings()
    if settings_for_application.ENV == "prod":

@@ -7,7 +7,7 @@ from sqlalchemy import pool

from alembic import context

from api.db import metadata, tables
from orm import metadata, tables

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
@@ -0,0 +1,52 @@
"""add_cascade_delete_to_node_link_foreign_keys

Revision ID: 80840e78631e
Revises: cc3b95f1f99d
Create Date: 2025-10-26 18:47:24.004327

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '80840e78631e'
down_revision: Union[str, None] = 'cc3b95f1f99d'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # Drop existing foreign key constraints
    # Note: These constraint names are MySQL auto-generated names
    # If they differ, check with: SHOW CREATE TABLE node_link;
    op.drop_constraint('node_link_ibfk_2', 'node_link', type_='foreignkey')  # next_node_id
    op.drop_constraint('node_link_ibfk_3', 'node_link', type_='foreignkey')  # node_id

    # Add new foreign key constraints with CASCADE
    op.create_foreign_key(
        'fk_node_link_next_node_id_cascade',
        'node_link', 'ps_node',
        ['next_node_id'], ['id'],
        ondelete='CASCADE'
    )
    op.create_foreign_key(
        'fk_node_link_node_id_cascade',
        'node_link', 'ps_node',
        ['node_id'], ['id'],
        ondelete='CASCADE'
    )


def downgrade() -> None:
    """Downgrade schema."""
    # Drop CASCADE foreign key constraints
    op.drop_constraint('fk_node_link_next_node_id_cascade', 'node_link', type_='foreignkey')
    op.drop_constraint('fk_node_link_node_id_cascade', 'node_link', type_='foreignkey')

    # Restore original foreign key constraints without CASCADE
    op.create_foreign_key('node_link_ibfk_2', 'node_link', 'ps_node', ['next_node_id'], ['id'])
    op.create_foreign_key('node_link_ibfk_3', 'node_link', 'ps_node', ['node_id'], ['id'])
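The constraint names dropped in upgrade() are MySQL's auto-generated ones and, as the comment in the migration admits, they can differ between environments. A minimal standalone sketch for checking the real names before applying the migration (the connection URL below is a placeholder, not something defined in this repository):

```python
from sqlalchemy import create_engine, inspect

engine = create_engine("mysql+pymysql://user:password@localhost/dbname")  # placeholder URL

with engine.connect() as conn:
    for fk in inspect(conn).get_foreign_keys("node_link"):
        # prints e.g.: node_link_ibfk_2 ['next_node_id'] -> ps_node ['id']
        print(fk["name"], fk["constrained_columns"], "->", fk["referred_table"], fk["referred_columns"])
```

If the printed names differ from node_link_ibfk_2 / node_link_ibfk_3, the drop_constraint calls above would need to be adjusted before running the upgrade.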
api/api/db/alembic/versions/816be8c60ab4_.py (Normal file, 38 lines)
@@ -0,0 +1,38 @@
"""empty message

Revision ID: 816be8c60ab4
Revises: 93106fbe7d83
Create Date: 2025-09-12 14:48:47.726269

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql

# revision identifiers, used by Alembic.
revision: str = '816be8c60ab4'
down_revision: Union[str, None] = '93106fbe7d83'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('ps_node', 'node_type',
                    existing_type=mysql.ENUM('TYPE1', 'TYPE2', 'TYPE3'),
                    type_=sa.Enum('LISTEN', 'IF', 'START', name='nodetype'),
                    existing_nullable=False)
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('ps_node', 'node_type',
                    existing_type=sa.Enum('LISTEN', 'IF', 'START', name='nodetype'),
                    type_=mysql.ENUM('TYPE1', 'TYPE2', 'TYPE3'),
                    existing_nullable=False)
    # ### end Alembic commands ###
@@ -0,0 +1,32 @@
"""update node_link_table link_point_id default

Revision ID: cc3b95f1f99d
Revises: 816be8c60ab4
Create Date: 2025-09-12 19:17:03.125276

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql

# revision identifiers, used by Alembic.
revision: str = 'cc3b95f1f99d'
down_revision: Union[str, None] = '816be8c60ab4'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('node_link', sa.Column('link_point_id', sa.Integer().with_variant(mysql.INTEGER(unsigned=True), 'mysql'), nullable=False))
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('node_link', 'link_point_id')
    # ### end Alembic commands ###
@@ -6,7 +6,7 @@ from typing import Optional
from sqlalchemy import insert, select, func, or_, and_, asc, desc
from sqlalchemy.ext.asyncio import AsyncConnection

from api.db.tables.account import account_table
from orm.tables.account import account_table
from api.schemas.account.account import User
from api.schemas.endpoints.account import all_user_adapter, AllUser, AllUserResponse, UserCreate, UserFilterDTO

@@ -4,7 +4,7 @@ from sqlalchemy import select, update
from sqlalchemy.ext.asyncio import AsyncConnection
from enum import Enum

from api.db.tables.account import account_table, account_keyring_table, KeyType, KeyStatus
from orm.tables.account import account_table, account_keyring_table, KeyType, KeyStatus

from api.schemas.account.account import User
from api.schemas.account.account_keyring import AccountKeyring

@@ -6,7 +6,7 @@ from sqlalchemy import insert, select, update
from sqlalchemy.dialects.mysql import insert as mysql_insert
from sqlalchemy.ext.asyncio import AsyncConnection

from api.db.tables.account import account_keyring_table, KeyStatus, KeyType
from orm.tables.account import account_keyring_table, KeyStatus, KeyType
from api.schemas.account.account_keyring import AccountKeyring
from api.utils.hasher import hasher

@@ -37,7 +37,7 @@ async def update_key_by_id(connection: AsyncConnection, update_values, key) -> O
    await connection.commit()


async def create_key(connection: AsyncConnection, key: AccountKeyring, key_id: int) -> Optional[AccountKeyring]:
async def create_key(connection: AsyncConnection, key: AccountKeyring, key_id: str) -> Optional[AccountKeyring]:
    """
    Creates a new record in the account_keyring table.
    """

@@ -7,7 +7,7 @@ from datetime import datetime, timezone
from sqlalchemy import insert, select, func, or_, and_, asc, desc
from sqlalchemy.ext.asyncio import AsyncConnection

from api.db.tables.events import list_events_table
from orm.tables.events import list_events_table

from api.schemas.events.list_events import ListEvent
api/api/db/logic/node_link.py (Normal file, 81 lines)
@@ -0,0 +1,81 @@
from typing import Optional

from datetime import datetime, timezone

from sqlalchemy import insert, select, desc
from sqlalchemy.ext.asyncio import AsyncConnection

from api.schemas.process.node_link import NodeLink

from orm.tables.process import ps_node_table, node_link_table
from orm.tables.process import NodeLinkStatus


async def get_last_link_name_by_node_id(connection: AsyncConnection, ps_id: int) -> Optional[str]:
    """
    Returns the link_name of the most recent node_link record for the given ps_id.
    Finds all node_id values in ps_node for the ps_id, then looks up the links in node_link
    and returns the link_name of the latest record.
    """
    query = (
        select(node_link_table.c.link_name)
        .where(node_link_table.c.node_id.in_(select(ps_node_table.c.id).where(ps_node_table.c.ps_id == ps_id)))
        .order_by(desc(node_link_table.c.created_at))
        .limit(1)
    )

    result = await connection.execute(query)
    link_name = result.scalar_one_or_none()

    return link_name


async def get_last_node_link_by_creator_and_ps_id(
    connection: AsyncConnection, creator_id: int, node_link_id: int
) -> Optional[NodeLink]:
    """
    Returns the most recently created node_link for the given creator and process.
    """
    query = (
        select(node_link_table)
        .where(
            node_link_table.c.creator_id == creator_id,
            node_link_table.c.node_id.in_(select(ps_node_table.c.id).where(ps_node_table.c.id == node_link_id)),
        )
        .order_by(desc(node_link_table.c.created_at))
        .limit(1)
    )

    node_link_db_cursor = await connection.execute(query)
    node_link_data = node_link_db_cursor.mappings().one_or_none()

    if not node_link_data:
        return None

    return NodeLink.model_validate(node_link_data)


async def create_node_link_schema(
    connection: AsyncConnection,
    validated_link_schema,
    creator_id: int,
) -> Optional[NodeLink]:
    """
    Creates a new record in the node_link table.
    """
    query = insert(node_link_table).values(
        link_name=validated_link_schema.link_name,
        node_id=validated_link_schema.from_id,
        link_point_id=validated_link_schema.parent_port_number,
        next_node_id=validated_link_schema.to_id,
        settings={},
        creator_id=creator_id,
        created_at=datetime.now(timezone.utc),
        status=NodeLinkStatus.ACTIVE.value,
    )

    await connection.execute(query)

    await connection.commit()

    return await get_last_node_link_by_creator_and_ps_id(connection, creator_id, validated_link_schema.from_id)
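For orientation, a hedged usage sketch of create_node_link_schema. The payload object below is a hypothetical stand-in that only mirrors the attributes the function reads (link_name, from_id, to_id, parent_port_number); the real request schema and the AsyncConnection come from the application, and the module path is assumed from the file listing above.

```python
from dataclasses import dataclass

from sqlalchemy.ext.asyncio import AsyncConnection

from api.db.logic.node_link import create_node_link_schema  # module path assumed from this PR


@dataclass
class LinkPayload:  # hypothetical stand-in for the validated request schema
    link_name: str
    from_id: int             # stored as node_link.node_id
    to_id: int                # stored as node_link.next_node_id
    parent_port_number: int   # stored as node_link.link_point_id


async def add_demo_link(connection: AsyncConnection, creator_id: int):
    payload = LinkPayload(link_name="next", from_id=1, to_id=2, parent_port_number=0)
    # inserts the row, commits, and returns the freshly created NodeLink (or None)
    return await create_node_link_schema(connection, payload, creator_id)
```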
@@ -1,4 +1,4 @@
from typing import Optional
from typing import Optional, Dict, Any
import math

from datetime import datetime, timezone

@@ -6,7 +6,7 @@ from datetime import datetime, timezone
from sqlalchemy import insert, select, func, or_, and_, asc, desc
from sqlalchemy.ext.asyncio import AsyncConnection

from api.db.tables.process import process_schema_table
from orm.tables.process import process_schema_table, ProcessStatus

from api.schemas.process.process_schema import ProcessSchema

@@ -50,8 +50,9 @@ async def get_process_schema_page_DTO(
            or_(process_schema_table.c.title.ilike(search_term), process_schema_table.c.description.ilike(search_term))
        )

    filter_conditions = []

    if filter_dto.filters:
        filter_conditions = []
        for field, values in filter_dto.filters.items():
            column = getattr(process_schema_table.c, field, None)
            if column is not None and values:

@@ -60,8 +61,11 @@ async def get_process_schema_page_DTO(
                else:
                    filter_conditions.append(column.in_(values))

        if filter_conditions:
            query = query.where(and_(*filter_conditions))
    if filter_dto.filters is None or "status" not in filter_dto.filters:
        filter_conditions.append(process_schema_table.c.status != "DELETED")

    if filter_conditions:
        query = query.where(and_(*filter_conditions))

    if filter_dto.order:
        order_field = filter_dto.order.get("field", "id")

@@ -86,7 +90,7 @@ async def get_process_schema_page_DTO(
            or_(process_schema_table.c.title.ilike(search_term), process_schema_table.c.description.ilike(search_term))
        )

    if filter_dto.filters and filter_conditions:
    if filter_conditions:
        count_query = count_query.where(and_(*filter_conditions))

    result = await connection.execute(query)
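The hunks above change the paging query so that soft-deleted schemas are hidden unless the caller filters on status explicitly. A standalone sketch of how that default composes with a caller-supplied filter (the table definition below is a trimmed stand-in for illustration, not the project's metadata):

```python
from sqlalchemy import Column, Enum, Integer, MetaData, String, Table, and_, select

metadata = MetaData()
process_schema_table = Table(
    "process_schema",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("owner_id", Integer),
    Column("status", Enum("ACTIVE", "STOPPING", "STOPPED", "DELETED", name="processstatus")),
    Column("title", String(100)),
)

filters = {"owner_id": [7]}  # hypothetical request filters, no explicit status filter
filter_conditions = [process_schema_table.c.owner_id.in_(filters["owner_id"])]
if "status" not in filters:
    # same default as the handler above: soft-deleted rows stay hidden
    filter_conditions.append(process_schema_table.c.status != "DELETED")

query = select(process_schema_table).where(and_(*filter_conditions))
print(query)  # the WHERE clause contains both the caller filter and the status != 'DELETED' guard
```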
@@ -111,21 +115,6 @@ async def get_process_schema_page_DTO(
    )


async def get_process_schema_by_title(connection: AsyncConnection, title: str) -> Optional[ProcessSchema]:
    """
    Retrieves a process schema by title.
    """
    query = select(process_schema_table).where(process_schema_table.c.title == title)

    process_schema_db_cursor = await connection.execute(query)

    process_schema_data = process_schema_db_cursor.mappings().one_or_none()
    if not process_schema_data:
        return None

    return ProcessSchema.model_validate(process_schema_data)


async def get_process_schema_by_id(connection: AsyncConnection, id: int) -> Optional[ProcessSchema]:
    """
    Retrieves a process_schema by id.

@@ -152,24 +141,68 @@ async def update_process_schema_by_id(connection: AsyncConnection, update_values
    await connection.commit()


async def create_process_schema(
    connection: AsyncConnection, process_schema: ProcessSchema, creator_id: int
) -> Optional[ProcessSchema]:
async def update_process_schema_settings_by_id(
    connection: AsyncConnection, process_schema_id: int, node_data: Dict[str, Any]
):
    """
    Creates a new record in the process_schema_table.
    Adds a new node to the 'nodes' array in the process settings.
    If the 'nodes' array does not exist, it is created.
    """
    query = insert(process_schema_table).values(
        title=process_schema.title,
        description=process_schema.description,
        owner_id=process_schema.owner_id,
        creator_id=creator_id,
        created_at=datetime.now(timezone.utc),
        settings=process_schema.settings,
        status=process_schema.status.value,
    )
    # Get the current settings
    query = select(process_schema_table.c.settings).where(process_schema_table.c.id == process_schema_id)
    result = await connection.execute(query)
    current_settings = result.scalar_one_or_none()

    await connection.execute(query)
    # If settings is empty, start with an empty dict
    if current_settings is None:
        current_settings = {}

    # Initialize the nodes array if it does not exist yet
    if "nodes" not in current_settings:
        current_settings["nodes"] = []

    # Append the new node to the array
    current_settings["nodes"].append(node_data)

    # Write the settings field back
    await connection.execute(
        process_schema_table.update()
        .where(process_schema_table.c.id == process_schema_id)
        .values(settings=current_settings)
    )

    await connection.commit()

    return process_schema

async def get_last_created_process_schema(connection: AsyncConnection) -> Optional[int]:
    """
    Returns the ID of the most recently created process schema.
    """
    query = select(process_schema_table.c.id).order_by(desc(process_schema_table.c.id)).limit(1)

    result = await connection.execute(query)
    last_id = result.scalar_one_or_none()

    return last_id


async def create_process_schema(
    connection: AsyncConnection, creator_id: int, title: str, description: str
) -> Optional[int]:
    """
    Creates a new record in the process_schema_table.
    """
    query = insert(process_schema_table).values(
        title=title,
        description=description,
        owner_id=creator_id,
        creator_id=creator_id,
        created_at=datetime.now(timezone.utc),
        settings={},
        status=ProcessStatus.ACTIVE.value,
    )

    result = await connection.execute(query)
    await connection.commit()

    return result.lastrowid
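A hedged end-to-end sketch of the two helpers added above: create_process_schema returns the new row's id (lastrowid), and update_process_schema_settings_by_id appends one node description to settings["nodes"]. The engine URL, the node_data layout and the module path are assumptions for illustration; inside the API these functions receive the AsyncConnection from get_connection_dep.

```python
import asyncio

from sqlalchemy.ext.asyncio import create_async_engine

from api.db.logic.process_schema import (  # module path assumed from this PR
    create_process_schema,
    update_process_schema_settings_by_id,
)


async def main() -> None:
    engine = create_async_engine("mysql+asyncmy://user:password@localhost/dbname")  # placeholder URL
    async with engine.connect() as connection:
        ps_id = await create_process_schema(
            connection, creator_id=1, title="demo schema", description="created from a sketch"
        )
        # append one (illustrative) node entry to the schema's settings["nodes"] array
        await update_process_schema_settings_by_id(connection, ps_id, {"node": {"id": 10, "type": "LISTEN"}})
    await engine.dispose()


asyncio.run(main())
```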
api/api/db/logic/ps_node.py (Normal file, 191 lines)
@@ -0,0 +1,191 @@
from typing import Optional, List

from datetime import datetime, timezone

from sqlalchemy import insert, select, desc, and_, delete, update
from sqlalchemy.ext.asyncio import AsyncConnection

from orm.tables.process import ps_node_table, node_link_table, process_schema_table

from api.schemas.process.ps_node import Ps_Node
from model_nodes.node_listen_models import ListenNodeCoreSchema
from orm.tables.process import NodeStatus


async def get_ps_node_by_id(connection: AsyncConnection, id: int) -> Optional[Ps_Node]:
    """
    Retrieves a ps_node by id.
    """
    query = select(ps_node_table).where(ps_node_table.c.id == id)

    ps_node_db_cursor = await connection.execute(query)

    ps_node_data = ps_node_db_cursor.mappings().one_or_none()
    if not ps_node_data:
        return None

    return Ps_Node.model_validate(ps_node_data)


async def get_last_ps_node_by_creator_and_ps_id(
    connection: AsyncConnection, creator_id: int, ps_id: int
) -> Optional[Ps_Node]:
    """
    Retrieves the most recently created ps_node for the given creator and process.
    """
    query = (
        select(ps_node_table)
        .where(ps_node_table.c.creator_id == creator_id, ps_node_table.c.ps_id == ps_id)
        .order_by(desc(ps_node_table.c.created_at))
        .limit(1)
    )

    ps_node_db_cursor = await connection.execute(query)
    ps_node_data = ps_node_db_cursor.mappings().one_or_none()

    if not ps_node_data:
        return None

    return Ps_Node.model_validate(ps_node_data)


async def create_ps_node_schema(
    connection: AsyncConnection,
    validated_schema,
    creator_id: int,
) -> Optional[ListenNodeCoreSchema]:
    """
    Creates a new record in the ps_node table.
    """
    query = insert(ps_node_table).values(
        ps_id=validated_schema.ps_id,
        node_type=validated_schema.node_type,
        settings=validated_schema.data.model_dump(),
        creator_id=creator_id,
        created_at=datetime.now(timezone.utc),
        status=NodeStatus.ACTIVE.value,
    )

    await connection.execute(query)

    await connection.commit()

    return await get_last_ps_node_by_creator_and_ps_id(connection, creator_id, validated_schema.ps_id)


async def check_node_connection(connection: AsyncConnection, node_id: int, next_node_id: int, port: int) -> bool:
    """
    Checks whether next_node_id is connected to node_id through the given port.
    """
    query = select(node_link_table).where(
        and_(
            node_link_table.c.node_id == node_id,
            node_link_table.c.next_node_id == next_node_id,
            node_link_table.c.link_point_id == port,
        )
    )

    result = await connection.execute(query)
    return result.mappings().first() is not None


async def get_nodes_for_deletion_ordered(connection: AsyncConnection, node_id: int) -> List[int]:
    """
    Recursively finds ALL child nodes and returns their IDs in the right order:
    from the deepest ones up to the root.
    """
    all_child_nodes = []
    visited_nodes = set()

    # TODO: looping back to an ancestor node still needs work; currently an ancestor node
    # that has already been visited is not added to the deletion list.
    async def find_children_with_depth(current_node_id: int, current_depth: int):
        if current_node_id in visited_nodes:
            return

        visited_nodes.add(current_node_id)

        query = (
            select(ps_node_table)
            .join(node_link_table, ps_node_table.c.id == node_link_table.c.next_node_id)
            .where(node_link_table.c.node_id == current_node_id)
        )

        result = await connection.execute(query)
        child_nodes = result.mappings().all()

        for node_data in child_nodes:
            node = Ps_Node.model_validate(node_data)
            all_child_nodes.append((node, current_depth + 1))
            await find_children_with_depth(node.id, current_depth + 1)

    await find_children_with_depth(node_id, 0)

    all_child_nodes.sort(key=lambda x: x[1], reverse=True)

    ordered_node_ids = [node.id for node, depth in all_child_nodes]

    ordered_node_ids.append(node_id)

    return ordered_node_ids


async def delete_ps_nodes_delete_handler(connection: AsyncConnection, node_ids: List[int]) -> List[int]:
    """
    Clears settings and deletes the nodes for each ps_id.
    Returns the list of successfully deleted node IDs.
    """
    if not node_ids:
        return []

    ps_id_rows = await connection.execute(
        select(ps_node_table.c.id, ps_node_table.c.ps_id).where(ps_node_table.c.id.in_(node_ids))
    )
    rows = ps_id_rows.mappings().all()
    if not rows:
        return []

    ps_to_node_ids = {}
    for r in rows:
        ps_to_node_ids.setdefault(r["ps_id"], []).append(r["id"])

    deleted_all = []

    for ps_id, ids_in_ps in ps_to_node_ids.items():
        await remove_nodes_from_process_schema_settings(connection, ps_id, ids_in_ps)
        result = await connection.execute(delete(ps_node_table).where(ps_node_table.c.id.in_(ids_in_ps)))
        if result.rowcount and result.rowcount > 0:
            deleted_all.extend(ids_in_ps)
        else:
            raise Exception(f"Failed to delete nodes for ps_id={ps_id}")

    await connection.commit()
    return deleted_all


async def remove_nodes_from_process_schema_settings(connection: AsyncConnection, ps_id: int, node_ids: List[int]):
    """
    Removes the given node_ids from the settings field of the process_schema table.
    """
    from api.db.logic.process_schema import get_process_schema_by_id

    process_schema = await get_process_schema_by_id(connection, ps_id)
    if not process_schema or not process_schema.settings:
        return

    settings = process_schema.settings
    if "nodes" in settings and isinstance(settings["nodes"], list):
        node_ids_set = set(node_ids)
        settings["nodes"] = [
            node_item
            for node_item in settings["nodes"]
            if not (
                isinstance(node_item, dict)
                and "node" in node_item
                and isinstance(node_item["node"], dict)
                and node_item["node"].get("id") in node_ids_set
            )
        ]

    await connection.execute(
        update(process_schema_table).where(process_schema_table.c.id == ps_id).values(settings=settings)
    )
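A short hedged sketch of how the deletion helpers above are meant to compose: get_nodes_for_deletion_ordered returns the descendants deepest-first with the requested node last, so delete_ps_nodes_delete_handler can clean each schema's settings and remove children before their parents. The module path is assumed from the file listing above; the connection comes from the application.

```python
from sqlalchemy.ext.asyncio import AsyncConnection

from api.db.logic.ps_node import (  # module path assumed from this PR
    delete_ps_nodes_delete_handler,
    get_nodes_for_deletion_ordered,
)


async def delete_node_subtree(connection: AsyncConnection, node_id: int) -> list[int]:
    # e.g. for a chain 5 -> 6 -> 7 this yields [7, 6, 5]: deepest child first, the root last
    ordered_ids = await get_nodes_for_deletion_ordered(connection, node_id)
    # removes the ids from process_schema.settings, deletes the rows, and commits
    return await delete_ps_nodes_delete_handler(connection, ordered_ids)
```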
@@ -1,18 +0,0 @@
__all__ = ["BigIntegerPK", "SAEnum", "UnsignedInt"]

from typing import Any

from sqlalchemy import BigInteger, Enum, Integer
from sqlalchemy.dialects import mysql


# class SAEnum(Enum):
#     def __init__(self, *enums: object, **kw: Any):
#         validate_strings = kw.pop("validate_strings", True)
#         super().__init__(*enums, **kw, validate_strings=validate_strings)


# # https://docs.sqlalchemy.org/en/20/dialects/sqlite.html#allowing-autoincrement-behavior-sqlalchemy-types-other-than-integer-integer

# BigIntegerPK = BigInteger().with_variant(Integer, "sqlite")
UnsignedInt = Integer().with_variant(mysql.INTEGER(unsigned=True), "mysql")
@@ -1 +0,0 @@
from . import account, events, process
@@ -1,65 +0,0 @@
import enum

from sqlalchemy import Table, Column, String, Enum as SQLAEnum, JSON, ForeignKey, DateTime, Index
from sqlalchemy.sql import func

from api.db.sql_types import UnsignedInt
from api.db import metadata


class AccountRole(enum.StrEnum):
    OWNER = "OWNER"
    ADMIN = "ADMIN"
    EDITOR = "EDITOR"
    VIEWER = "VIEWER"


class AccountStatus(enum.StrEnum):
    ACTIVE = "ACTIVE"
    DISABLED = "DISABLED"
    BLOCKED = "BLOCKED"
    DELETED = "DELETED"


account_table = Table(
    "account",
    metadata,
    Column("id", UnsignedInt, primary_key=True, autoincrement=True),
    Column("name", String(100), nullable=False),
    Column("login", String(100), nullable=False),
    Column("email", String(100), nullable=True),
    Column("bind_tenant_id", String(40), nullable=True),
    Column("role", SQLAEnum(AccountRole), nullable=False),
    Column("meta", JSON, default={}),
    Column("creator_id", UnsignedInt, ForeignKey("account.id"), nullable=True),
    Column("created_at", DateTime(timezone=True), server_default=func.now()),
    Column("status", SQLAEnum(AccountStatus), nullable=False),
    Index("idx_login", "login"),
    Index("idx_name", "name"),
)


class KeyType(enum.StrEnum):
    PASSWORD = "PASSWORD"
    ACCESS_TOKEN = "ACCESS_TOKEN"
    REFRESH_TOKEN = "REFRESH_TOKEN"
    API_KEY = "API_KEY"


class KeyStatus(enum.StrEnum):
    ACTIVE = "ACTIVE"
    EXPIRED = "EXPIRED"
    DELETED = "DELETED"


account_keyring_table = Table(
    "account_keyring",
    metadata,
    Column("owner_id", UnsignedInt, ForeignKey("account.id"), primary_key=True, nullable=False),
    Column("key_type", SQLAEnum(KeyType), primary_key=True, nullable=False),
    Column("key_id", String(40), primary_key=True, default=None),
    Column("key_value", String(512), nullable=False),
    Column("created_at", DateTime(timezone=True), server_default=func.now()),
    Column("expiry", DateTime(timezone=True), nullable=True),
    Column("status", SQLAEnum(KeyStatus), nullable=False),
)
@@ -1,33 +0,0 @@
import enum

from sqlalchemy import Table, Column, String, Enum as SQLAEnum, JSON, ForeignKey, DateTime
from sqlalchemy.sql import func

from api.db.sql_types import UnsignedInt

from api.db import metadata


class EventState(enum.StrEnum):
    AUTO = "AUTO"
    DESCRIPTED = "DESCRIPTED"


class EventStatus(enum.StrEnum):
    ACTIVE = "ACTIVE"
    DISABLED = "DISABLED"
    DELETED = "DELETED"


list_events_table = Table(
    "list_events",
    metadata,
    Column("id", UnsignedInt, primary_key=True, autoincrement=True),
    Column("name", String(40, collation="latin1_bin"), nullable=False, unique=True),
    Column("title", String(64), nullable=False),
    Column("creator_id", UnsignedInt, ForeignKey("account.id"), nullable=False),
    Column("created_at", DateTime(timezone=True), server_default=func.now()),
    Column("schema", JSON, default={}),
    Column("state", SQLAEnum(EventState), nullable=False),
    Column("status", SQLAEnum(EventStatus), nullable=False),
)
@@ -1,107 +0,0 @@
import enum

from sqlalchemy import (
    Table,
    Column,
    String,
    Text,
    Enum as SQLAEnum,
    JSON,
    ForeignKey,
    DateTime,
    Index,
    PrimaryKeyConstraint,
)
from sqlalchemy.sql import func
from enum import Enum

from api.db.sql_types import UnsignedInt

from api.db import metadata


class ProcessStatus(enum.StrEnum):
    ACTIVE = "ACTIVE"
    STOPPING = "STOPPING"
    STOPPED = "STOPPED"
    DELETED = "DELETED"


process_schema_table = Table(
    "process_schema",
    metadata,
    Column("id", UnsignedInt, primary_key=True, autoincrement=True),
    Column("title", String(100), nullable=False),
    Column("description", Text, nullable=False),
    Column("owner_id", UnsignedInt, ForeignKey("account.id"), nullable=False),
    Column("creator_id", UnsignedInt, ForeignKey("account.id"), nullable=False),
    Column("created_at", DateTime(timezone=True), server_default=func.now()),
    Column("settings", JSON, default={}),
    Column("status", SQLAEnum(ProcessStatus), nullable=False),
    Index(
        "idx_owner_id",
        "owner_id",
    ),
)

process_version_archive_table = Table(
    "process_version_archive",
    metadata,
    Column("id", UnsignedInt, autoincrement=True, nullable=False),
    Column("ps_id", UnsignedInt, ForeignKey("process_schema.id"), nullable=False),
    Column("version", UnsignedInt, default=1, nullable=False),
    Column("snapshot", JSON, default={}),
    Column("owner_id", UnsignedInt, ForeignKey("account.id"), nullable=False),
    Column("created_at", DateTime(timezone=True), server_default=func.now()),
    Column("is_last", UnsignedInt, default=0),
    PrimaryKeyConstraint("id", "version"),
)


class NodeStatus(enum.StrEnum):
    ACTIVE = "ACTIVE"
    DISABLED = "DISABLED"
    DELETED = "DELETED"


class NodeType(Enum):
    TYPE1 = "Type1"
    TYPE2 = "Type2"
    TYPE3 = "Type3"


ps_node_table = Table(
    "ps_node",
    metadata,
    Column("id", UnsignedInt, autoincrement=True, primary_key=True, nullable=False),
    Column("ps_id", UnsignedInt, ForeignKey("process_schema.id"), nullable=False),
    Column("node_type", SQLAEnum(NodeType), nullable=False),
    Column("settings", JSON, default={}),
    Column("creator_id", UnsignedInt, ForeignKey("account.id"), nullable=False),
    Column("created_at", DateTime(timezone=True), server_default=func.now()),
    Column("status", SQLAEnum(NodeStatus), nullable=False),
    Index("idx_ps_id", "ps_id"),
)


class NodeLinkStatus(enum.StrEnum):
    ACTIVE = "ACTIVE"
    STOPPING = "STOPPING"
    STOPPED = "STOPPED"
    DELETED = "DELETED"


node_link_table = Table(
    "node_link",
    metadata,
    Column("id", UnsignedInt, autoincrement=True, primary_key=True, nullable=False),
    Column("link_name", String(20), nullable=False),
    Column("node_id", UnsignedInt, ForeignKey("ps_node.id"), nullable=False),
    Column("next_node_id", UnsignedInt, ForeignKey("ps_node.id"), nullable=False),
    Column("settings", JSON, default={}),
    Column("creator_id", UnsignedInt, ForeignKey("account.id"), nullable=False),
    Column("created_at", DateTime(timezone=True), server_default=func.now()),
    Column("status", SQLAEnum(NodeLinkStatus), nullable=False),
    Index("idx_node_id", "node_id"),
    Index("idx_next_node_id", "next_node_id"),
)
@@ -4,8 +4,17 @@ from api.endpoints.account import api_router as account_router
from api.endpoints.keyring import api_router as keyring_router
from api.endpoints.list_events import api_router as listevents_router
from api.endpoints.process_schema import api_router as processschema_router
from api.endpoints.ps_node import api_router as ps_node_router

list_of_routes = [auth_router, profile_router, account_router, keyring_router, listevents_router, processschema_router]
list_of_routes = [
    auth_router,
    profile_router,
    account_router,
    keyring_router,
    listevents_router,
    processschema_router,
    ps_node_router,
]

__all__ = [
    "list_of_routes",
@@ -1,23 +1,17 @@
from fastapi import APIRouter, Depends, HTTPException, status, Query
from typing import List, Optional

from typing import Optional, List
from fastapi import APIRouter, Depends, Query, status
from sqlalchemy.ext.asyncio import AsyncConnection

from api.db.connection.session import get_connection_dep
from api.db.logic.account import (
    create_user,
    get_user_account_page_DTO,
    get_user_by_id,
    get_user_by_login,
    update_user_by_id,
)
from api.db.logic.keyring import create_password_key, update_password_key
from api.db.tables.account import AccountStatus
from api.db.logic.account import get_user_by_login
from api.schemas.account.account import User
from api.schemas.base import bearer_schema
from api.schemas.endpoints.account import AllUserResponse, UserCreate, UserUpdate, UserFilterDTO
from api.services.auth import get_current_user
from api.services.user_role_validation import db_user_role_validation
from api.services.user_role_validation import UserRoleValidator
from api.error import create_operation_error, create_validation_error
from api.services.endpoints.account import AccountService

api_router = APIRouter(
    prefix="/account",

@@ -26,7 +20,7 @@ api_router = APIRouter(


@api_router.get("", dependencies=[Depends(bearer_schema)], response_model=AllUserResponse)
async def get_all_account(
async def get_all_account_endpoint(
    page: int = Query(1, description="Page number", gt=0),
    limit: int = Query(10, description="Number of items per page", gt=0),
    search: Optional[str] = Query(None, description="Search term to filter by name or login or email"),

@@ -38,7 +32,8 @@ async def get_all_account(
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
):
    authorize_user = await db_user_role_validation(connection, current_user)
    validator = UserRoleValidator(connection)
    await validator.validate_admin(current_user)

    filters = {
        **({"status": status_filter} if status_filter else {}),

@@ -53,99 +48,115 @@ async def get_all_account(
        filters=filters if filters else None,
    )

    user_list = await get_user_account_page_DTO(connection, filter_dto)
    service = AccountService(connection)
    user_list = await service.list(filter_dto)

    if user_list is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Accounts not found")
        raise create_operation_error(
            message="Accounts not found",
            status_code=status.HTTP_404_NOT_FOUND,
        )

    return user_list


@api_router.get("/{user_id}", dependencies=[Depends(bearer_schema)], response_model=User)
async def get_account(
async def get_account_endpoint(
    user_id: int,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
):
    authorize_user = await db_user_role_validation(connection, current_user)
    validator = UserRoleValidator(connection)
    await validator.validate_admin(current_user)

    user = await get_user_by_id(connection, user_id)
    service = AccountService(connection)
    user = await service.get(user_id)

    if user is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Account not found")
        raise create_operation_error(
            message="Account not found",
            status_code=status.HTTP_404_NOT_FOUND,
            details={"user_id": user_id},
        )

    return user


@api_router.post("", dependencies=[Depends(bearer_schema)], response_model=User)
async def create_account(
async def create_account_endpoint(
    user: UserCreate,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
):
    authorize_user = await db_user_role_validation(connection, current_user)
    validator = UserRoleValidator(connection)
    authorize_user = await validator.validate_admin(current_user)

    user_validation = await get_user_by_login(connection, user.login)

    if user_validation is None:
        new_user = await create_user(connection, user, authorize_user.id)
        await create_password_key(connection, user.password, new_user.id)
        service = AccountService(connection)
        new_user = await service.create(user_data=user, creator_id=authorize_user.id)
        return new_user
    else:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST, detail="An account with this information already exists."
        raise create_validation_error(
            message="An account with this information already exists.",
            status_code=status.HTTP_400_BAD_REQUEST,
            details={"login": user.login},
        )


@api_router.put("/{user_id}", dependencies=[Depends(bearer_schema)], response_model=UserUpdate)
async def update_account(
async def update_account_endpoint(
    user_id: int,
    user_update: UserUpdate,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
):
    authorize_user = await db_user_role_validation(connection, current_user)
    validator = UserRoleValidator(connection)
    await validator.validate_admin(current_user)

    service = AccountService(connection)
    user = await service.get(user_id)

    user = await get_user_by_id(connection, user_id)
    if user is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Account not found")

    if user_update.password is not None:
        await update_password_key(connection, user.id, user_update.password)
        raise create_operation_error(
            message="Account not found",
            status_code=status.HTTP_404_NOT_FOUND,
            details={"user_id": user_id},
        )

    updated_values = user_update.model_dump(by_alias=True, exclude_none=True)

    if not updated_values:
        return user

    await update_user_by_id(connection, updated_values, user)
    updated_user = await service.update(
        user_id=user_id,
        user_update_data=updated_values,
        password=user_update.password,
    )

    user = await get_user_by_id(connection, user_id)

    return user
    return updated_user


@api_router.delete("/{user_id}", dependencies=[Depends(bearer_schema)], response_model=User)
async def delete_account(
async def delete_account_endpoint(
    user_id: int,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
):
    authorize_user = await db_user_role_validation(connection, current_user)
    validator = UserRoleValidator(connection)
    await validator.validate_admin(current_user)

    service = AccountService(connection)
    user = await service.get(user_id)

    user = await get_user_by_id(connection, user_id)
    if user is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Account not found")
        raise create_operation_error(
            message="Account not found",
            status_code=status.HTTP_404_NOT_FOUND,
            details={"user_id": user_id},
        )
    deleted_user = await service.delete(user_id=user_id)

    user_update = UserUpdate(status=AccountStatus.DELETED.value)

    updated_values = user_update.model_dump(by_alias=True, exclude_none=True)

    if not updated_values:
        return user

    await update_user_by_id(connection, updated_values, user)

    user = await get_user_by_id(connection, user_id)

    return user
    return deleted_user
@@ -1,15 +1,11 @@
from datetime import datetime, timedelta, timezone

from fastapi import (
    APIRouter,
    Depends,
    HTTPException,
    Response,
    status,
    Request,
)

from loguru import logger
from fastapi_jwt_auth import AuthJWT

from pydantic import BaseModel

@@ -19,10 +15,10 @@ from sqlalchemy.ext.asyncio import AsyncConnection
from api.config import get_settings
from api.db.connection.session import get_connection_dep
from api.services.auth import authenticate_user

from api.db.logic.auth import add_new_refresh_token, upgrade_old_refresh_token
from api.services.endpoints.auth import AuthService

from api.schemas.endpoints.auth import Auth, Tokens
from api.error import create_access_error

api_router = APIRouter(
    prefix="/auth",

@@ -49,7 +45,7 @@ def get_config():


@api_router.post("", response_model=Tokens)
async def login_for_access_token(
async def login_for_access_token_endpoint(
    user: Auth,
    response: Response,
    connection: AsyncConnection = Depends(get_connection_dep),

@@ -57,50 +53,39 @@ async def login_for_access_token(
):
    """Authorizes the user and sets the tokens in cookies."""

    user = await authenticate_user(connection, user.login, user.password)
    authenticated_user = await authenticate_user(connection, user.login, user.password)

    # print("login_for_access_token", user)

    if not user:
        raise HTTPException(
    if not authenticated_user:
        raise create_access_error(
            message="Incorrect username or password",
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect username or password",
            # headers={"WWW-Authenticate": "Bearer"},
        )

    access_token_expires = timedelta(minutes=get_settings().ACCESS_TOKEN_EXPIRE_MINUTES)
    refresh_token_expires = timedelta(days=get_settings().REFRESH_TOKEN_EXPIRE_DAYS)
    service = AuthService(connection)
    tokens = await service.login(authenticated_user, Authorize)

    logger.debug(f"refresh_token_expires {refresh_token_expires}")

    access_token = Authorize.create_access_token(subject=user.login, expires_time=access_token_expires)
    refresh_token = Authorize.create_refresh_token(subject=user.login, expires_time=refresh_token_expires)

    refresh_token_expires_time = datetime.now(timezone.utc) + refresh_token_expires

    await add_new_refresh_token(connection, refresh_token, refresh_token_expires_time, user)

    return Tokens(access_token=access_token, refresh_token=refresh_token)
    return tokens


@api_router.post("/refresh", response_model=Tokens)
async def refresh(
async def refresh_endpoint(
    request: Request,
    connection: AsyncConnection = Depends(get_connection_dep),
    Authorize: AuthJWT = Depends(),
) -> Tokens:
    service = AuthService(connection)

    try:
        Authorize.jwt_refresh_token_required()
        current_user = Authorize.get_jwt_subject()
    except Exception:
        refresh_token = request.headers.get("Authorization").split(" ")[1]
        await upgrade_old_refresh_token(connection, refresh_token)
        raise HTTPException(
        await service.invalidate_refresh_token(refresh_token)
        raise create_access_error(
            message="Invalid refresh token",
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid refresh token",
        )

    access_token_expires = timedelta(minutes=get_settings().ACCESS_TOKEN_EXPIRE_MINUTES)
    new_access_token = Authorize.create_access_token(subject=current_user, expires_time=access_token_expires)
    tokens = await service.refresh(current_user, Authorize)

    return Tokens(access_token=new_access_token)
    return tokens
@@ -1,30 +1,18 @@
from fastapi import (
    APIRouter,
    Body,
    Depends,
    Form,
    HTTPException,
    Response,
    status,
)


from sqlalchemy.ext.asyncio import AsyncConnection

from api.db.connection.session import get_connection_dep

from api.db.logic.keyring import get_key_by_id, create_key, update_key_by_id


from api.db.tables.account import KeyStatus
from api.schemas.account.account_keyring import AccountKeyring
from api.schemas.base import bearer_schema
from api.schemas.endpoints.account_keyring import AccountKeyringUpdate

from api.schemas.account.account_keyring import AccountKeyring
from api.services.auth import get_current_user

from api.services.user_role_validation import db_user_role_validation

from api.services.user_role_validation import UserRoleValidator
from api.error import create_operation_error, create_validation_error
from api.services.endpoints.keyring import KeyringService

api_router = APIRouter(
    prefix="/keyring",

@@ -33,93 +21,102 @@ api_router = APIRouter(


@api_router.get("/{user_id}/{key_id}", dependencies=[Depends(bearer_schema)], response_model=AccountKeyring)
async def get_keyring(
async def get_keyring_endpoint(
    key_id: str, connection: AsyncConnection = Depends(get_connection_dep), current_user=Depends(get_current_user)
):
    authorize_user = await db_user_role_validation(connection, current_user)
    validator = UserRoleValidator(connection)
    await validator.validate_admin(current_user)

    keyring = await get_key_by_id(connection, key_id)
    service = KeyringService(connection)
    keyring = await service.get(key_id)

    if keyring is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Key not found")
        raise create_operation_error(
            message="Key not found",
            status_code=status.HTTP_404_NOT_FOUND,
            details={"key_id": key_id},
        )

    return keyring


@api_router.post("/{user_id}/{key_id}", dependencies=[Depends(bearer_schema)], response_model=AccountKeyring)
async def create_keyring(
async def create_keyring_endpoint(
    user_id: int,
    key_id: str,
    key: AccountKeyringUpdate,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
):
    authorize_user = await db_user_role_validation(connection, current_user)
    validator = UserRoleValidator(connection)
    await validator.validate_admin(current_user)

    keyring = await get_key_by_id(connection, key_id)
    service = KeyringService(connection)
    keyring = await service.get(key_id)

    if keyring is None:
        keyring_new = await create_key(
            connection,
            key,
            key_id,
        )
        keyring_new = await service.create(key, key_id)
        return keyring_new

    else:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST, detail="An keyring with this information already exists."
        raise create_validation_error(
            message="A keyring with this information already exists.",
            status_code=status.HTTP_400_BAD_REQUEST,
            details={"key_id": key_id},
        )


@api_router.put("/{user_id}/{key_id}", dependencies=[Depends(bearer_schema)], response_model=AccountKeyring)
async def update_keyring(
async def update_keyring_endpoint(
    user_id: int,
    key_id: str,
    keyring_update: AccountKeyringUpdate,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
):
    authorize_user = await db_user_role_validation(connection, current_user)
    validator = UserRoleValidator(connection)
    await validator.validate_admin(current_user)

    service = KeyringService(connection)
    keyring = await service.get(key_id)

    keyring = await get_key_by_id(connection, key_id)
    if keyring is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="keyring not found")
        raise create_operation_error(
            message="keyring not found",
            status_code=status.HTTP_404_NOT_FOUND,
            details={"key_id": key_id},
        )

    updated_values = keyring_update.model_dump(by_alias=True, exclude_none=True)

    if not updated_values:
        return keyring

    await update_key_by_id(connection, updated_values, keyring)
    updated_keyring = await service.update(key_id, updated_values)

    keyring = await get_key_by_id(connection, key_id)

    return keyring
    return updated_keyring


@api_router.delete("/{user_id}/{key_id}", dependencies=[Depends(bearer_schema)], response_model=AccountKeyring)
async def delete_keyring(
async def delete_keyring_endpoint(
    user_id: int,
    key_id: str,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
):
    authorize_user = await db_user_role_validation(connection, current_user)
    validator = UserRoleValidator(connection)
    await validator.validate_admin(current_user)

    service = KeyringService(connection)
    keyring = await service.get(key_id)

    keyring = await get_key_by_id(connection, key_id)
    if keyring is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="keyring not found")
        raise create_operation_error(
            message="keyring not found",
            status_code=status.HTTP_404_NOT_FOUND,
            details={"key_id": key_id},
        )

    keyring_update = AccountKeyringUpdate(status=KeyStatus.DELETED.value)
    deleted_keyring = await service.delete(key_id)

    updated_values = keyring_update.model_dump(by_alias=True, exclude_none=True)

    if not updated_values:
        return keyring

    await update_key_by_id(connection, updated_values, keyring)

    keyring = await get_key_by_id(connection, key_id)

    return keyring
    return deleted_keyring
@@ -1,36 +1,17 @@
from fastapi import APIRouter, Depends, HTTPException, status, Query

from typing import Optional, List
from typing import List, Optional

from fastapi import APIRouter, Depends, Query, status
from sqlalchemy.ext.asyncio import AsyncConnection

from api.db.connection.session import get_connection_dep

from api.db.logic.account import get_user_by_login

from api.db.logic.list_events import (
    get_list_events_by_name,
    get_list_events_by_id,
    create_list_events,
    update_list_events_by_id,
    get_list_events_page_DTO,
)


from api.schemas.events.list_events import ListEvent
from api.db.tables.events import EventStatus

from api.schemas.base import bearer_schema

from api.schemas.endpoints.list_events import ListEventUpdate, AllListEventResponse, ListEventFilterDTO

from api.schemas.endpoints.list_events import AllListEventResponse, ListEventFilterDTO, ListEventUpdate
from api.schemas.events.list_events import ListEvent
from api.services.auth import get_current_user

from api.services.user_role_validation import (
    db_user_role_validation_for_list_events_and_process_schema_by_list_event_id,
    db_user_role_validation_for_list_events_and_process_schema,
)

from api.services.user_role_validation import UserRoleValidator
from api.error import create_operation_error, create_validation_error
from api.services.endpoints.list_events import ListEventsService

api_router = APIRouter(
    prefix="/list_events",

@@ -39,7 +20,7 @@ api_router = APIRouter(


@api_router.get("", dependencies=[Depends(bearer_schema)], response_model=AllListEventResponse)
async def get_all_list_events(
async def get_all_list_events_endpoint(
    page: int = Query(1, description="Page number", gt=0),
    limit: int = Query(10, description="Number of items per page", gt=0),
    search: Optional[str] = Query(None, description="Search term to filter by title or name"),

@@ -64,63 +45,82 @@ async def get_all_list_events(
        filters=filters if filters else None,
    )

    authorize_user, page_flag = await db_user_role_validation_for_list_events_and_process_schema(
        connection, current_user
    )
    validator = UserRoleValidator(connection)
    authorize_user, page_flag = await validator.get_user(current_user)

    if not page_flag:
        if filter_dto.filters is None:
            filter_dto.filters = {}
        filter_dto.filters["creator_id"] = [str(authorize_user.id)]

    list_events_page = await get_list_events_page_DTO(connection, filter_dto)
    service = ListEventsService(connection)
    list_events_page = await service.list(filter_dto)

    if list_events_page is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="List events not found")
        raise create_operation_error(
            message="List events not found",
            status_code=status.HTTP_404_NOT_FOUND,
        )

    return list_events_page


@api_router.get("/{list_events_id}", dependencies=[Depends(bearer_schema)], response_model=ListEvent)
async def get_list_events(
async def get_list_events_endpoint(
    list_events_id: int,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
):
    list_events_validation = await get_list_events_by_id(connection, list_events_id)
    service = ListEventsService(connection)
    list_events_validation = await service.get(list_events_id)

    if list_events_validation is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="List events not found")
        raise create_operation_error(
            message="List events not found",
            status_code=status.HTTP_404_NOT_FOUND,
            details={"list_events_id": list_events_id},
        )

    authorize_user = await db_user_role_validation_for_list_events_and_process_schema_by_list_event_id(
        connection, current_user, list_events_validation.creator_id
    )

    if list_events_id is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="List events not found")
    validator = UserRoleValidator(connection)
    await validator.validate_ownership(current_user, list_events_validation.creator_id)

    return list_events_validation


@api_router.post("", dependencies=[Depends(bearer_schema)], response_model=ListEvent)
async def create_list_events(
async def create_list_events_endpoint(
    list_events: ListEventUpdate,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
):
    user_validation = await get_user_by_login(connection, current_user)
    list_events_validation = await get_list_events_by_name(connection, list_events.name)

    if list_events_validation is None:
        await create_list_events(connection, list_events, user_validation.id)
        list_events_new = await get_list_events_by_name(connection, list_events.name)
        return list_events_new

    else:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST, detail="An List events with this information already exists."
    if list_events.name is None:
        raise create_validation_error(
            message="Name is required for list event creation",
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    user_validation = await get_user_by_login(connection, current_user)

    service = ListEventsService(connection)
    list_events_validation = await service.get_by_name(list_events.name)

    if list_events_validation is not None:
        raise create_validation_error(
            message="A List events with this information already exists.",
            status_code=status.HTTP_400_BAD_REQUEST,
            details={"name": list_events.name},
        )

    list_events_new = await service.create(list_events, user_validation.id)

    if list_events_new is None:
        raise create_operation_error(
            message="Failed to create list event",
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )

    return list_events_new


@api_router.put("/{list_events_id}", dependencies=[Depends(bearer_schema)], response_model=ListEvent)
async def update_list_events(

@@ -129,51 +129,48 @@ async def update_list_events(
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
):
    list_events_validation = await get_list_events_by_id(connection, list_events_id)
    service = ListEventsService(connection)
    list_events_validation = await service.get(list_events_id)

    if list_events_validation is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="List events not found")
        raise create_operation_error(
            message="List events not found",
            status_code=status.HTTP_404_NOT_FOUND,
            details={"list_events_id": list_events_id},
        )

    authorize_user = await db_user_role_validation_for_list_events_and_process_schema_by_list_event_id(
        connection, current_user, list_events_validation.creator_id
    )
    validator = UserRoleValidator(connection)
    await validator.validate_ownership(current_user, list_events_validation.creator_id)

    updated_values = list_events_update.model_dump(by_alias=True, exclude_none=True)

    if not updated_values:
        return list_events_validation

    await update_list_events_by_id(connection, updated_values, list_events_validation)
    updated_list_events = await service.update(list_events_id, updated_values)

    list_events = await get_list_events_by_id(connection, list_events_id)

    return list_events
    return updated_list_events


@api_router.delete("/{list_events_id}", dependencies=[Depends(bearer_schema)], response_model=ListEvent)
async def delete_list_events(
async def delete_list_events_endpoint(
    list_events_id: int,
    connection: AsyncConnection = Depends(get_connection_dep),
    current_user=Depends(get_current_user),
):
    list_events_validation = await get_list_events_by_id(connection, list_events_id)
|
||||
service = ListEventsService(connection)
|
||||
list_events_validation = await service.get(list_events_id)
|
||||
|
||||
if list_events_validation is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="List events not found")
|
||||
raise create_operation_error(
|
||||
message="List events not found",
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
details={"list_events_id": list_events_id},
|
||||
)
|
||||
|
||||
authorize_user = await db_user_role_validation_for_list_events_and_process_schema_by_list_event_id(
|
||||
connection, current_user, list_events_validation.creator_id
|
||||
)
|
||||
validator = UserRoleValidator(connection)
|
||||
await validator.validate_ownership(current_user, list_events_validation.creator_id)
|
||||
|
||||
list_events_update = ListEventUpdate(status=EventStatus.DELETED.value)
|
||||
deleted_list_events = await service.delete(list_events_id)
|
||||
|
||||
updated_values = list_events_update.model_dump(by_alias=True, exclude_none=True)
|
||||
|
||||
if not updated_values:
|
||||
return list_events_validation
|
||||
|
||||
await update_list_events_by_id(connection, updated_values, list_events_validation)
|
||||
|
||||
list_events = await get_list_events_by_id(connection, list_events_id)
|
||||
|
||||
return list_events
|
||||
return deleted_list_events
|
||||
|
||||
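The reworked list_events handlers keep the same routes, query parameters and response models; only the internals move into ListEventsService and the shared error helpers. A minimal client-side sketch of the paginated listing call, assuming the API is served locally and a valid bearer token is available (host and token are placeholders):

import asyncio

import httpx


async def fetch_list_events() -> None:
    # Placeholder host and token; adjust to the actual deployment.
    headers = {"Authorization": "Bearer <access_token>"}
    params = {"page": 1, "limit": 10, "search": "daily"}
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        response = await client.get("/list_events", params=params, headers=headers)
        response.raise_for_status()
        print(response.json())


asyncio.run(fetch_list_events())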
@@ -1,34 +1,18 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException, status, Query
|
||||
from typing import List, Optional
|
||||
|
||||
from typing import Optional, List
|
||||
from fastapi import APIRouter, Depends, Query, status, HTTPException
|
||||
from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
|
||||
from api.db.connection.session import get_connection_dep
|
||||
|
||||
from api.db.logic.account import get_user_by_login
|
||||
|
||||
from api.db.logic.process_schema import (
|
||||
get_process_schema_by_title,
|
||||
create_process_schema,
|
||||
get_process_schema_by_id,
|
||||
update_process_schema_by_id,
|
||||
get_process_schema_page_DTO,
|
||||
)
|
||||
|
||||
from api.schemas.process.process_schema import ProcessSchema
|
||||
|
||||
from api.db.tables.process import ProcessStatus
|
||||
|
||||
from api.schemas.base import bearer_schema
|
||||
|
||||
from api.schemas.endpoints.process_schema import ProcessSchemaUpdate, AllProcessSchemaResponse, ProcessSchemaFilterDTO
|
||||
|
||||
from api.schemas.endpoints.process_schema import AllProcessSchemaResponse, ProcessSchemaFilterDTO, ProcessSchemaUpdate
|
||||
from api.schemas.process.process_schema import ProcessSchema, ProcessSchemaResponse
|
||||
from api.services.auth import get_current_user
|
||||
|
||||
from api.services.user_role_validation import (
|
||||
db_user_role_validation_for_list_events_and_process_schema_by_list_event_id,
|
||||
db_user_role_validation_for_list_events_and_process_schema,
|
||||
)
|
||||
from api.services.user_role_validation import UserRoleValidator
|
||||
from api.utils.to_camel_dict import to_camel_dict
|
||||
from api.error import create_operation_error
|
||||
from api.services.endpoints.process_schema import ProcessSchemaService
|
||||
|
||||
|
||||
api_router = APIRouter(
|
||||
@@ -38,7 +22,7 @@ api_router = APIRouter(
|
||||
|
||||
|
||||
@api_router.get("", dependencies=[Depends(bearer_schema)], response_model=AllProcessSchemaResponse)
|
||||
async def get_all_process_schema(
|
||||
async def get_all_process_schema_endpoint(
|
||||
page: int = Query(1, description="Page number", gt=0),
|
||||
limit: int = Query(10, description="Number of items per page", gt=0),
|
||||
search: Optional[str] = Query(None, description="Search term to filter by title or description"),
|
||||
@@ -46,12 +30,20 @@ async def get_all_process_schema(
|
||||
order_direction: Optional[str] = Query("asc", description="Sort direction (asc/desc)"),
|
||||
status_filter: Optional[List[str]] = Query(None, description="Filter by status"),
|
||||
owner_id: Optional[List[str]] = Query(None, description="Filter by owner id"),
|
||||
show_deleted: bool = Query(False, description="Show only deleted schemas"),
|
||||
connection: AsyncConnection = Depends(get_connection_dep),
|
||||
creator_id: Optional[int] = Query(None, description="Filter by creator id"),
|
||||
current_user=Depends(get_current_user),
|
||||
):
|
||||
if show_deleted:
|
||||
status_to_filter = ["DELETED"]
|
||||
elif status_filter:
|
||||
status_to_filter = status_filter
|
||||
else:
|
||||
status_to_filter = None
|
||||
|
||||
filters = {
|
||||
**({"status": status_filter} if status_filter else {}),
|
||||
**({"status": status_to_filter} if status_to_filter else {}),
|
||||
**({"owner_id": owner_id} if owner_id else {}),
|
||||
**({"creator_id": [str(creator_id)]} if creator_id else {}),
|
||||
}
|
||||
@@ -63,116 +55,117 @@ async def get_all_process_schema(
|
||||
filters=filters if filters else None,
|
||||
)
|
||||
|
||||
authorize_user, page_flag = await db_user_role_validation_for_list_events_and_process_schema(
|
||||
connection, current_user
|
||||
)
|
||||
validator = UserRoleValidator(connection)
|
||||
authorize_user, page_flag = await validator.get_user(current_user)
|
||||
|
||||
if not page_flag:
|
||||
if filter_dto.filters is None:
|
||||
filter_dto.filters = {}
|
||||
filter_dto.filters["creator_id"] = [str(authorize_user.id)]
|
||||
|
||||
process_schema_page = await get_process_schema_page_DTO(connection, filter_dto)
|
||||
service = ProcessSchemaService(connection)
|
||||
process_schema_page = await service.list(filter_dto)
|
||||
|
||||
if process_schema_page is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Process schema not found")
|
||||
raise create_operation_error(
|
||||
message="Process schema not found",
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
)
|
||||
|
||||
return process_schema_page
|
||||
return to_camel_dict(process_schema_page.model_dump())
|
||||
|
||||
|
||||
@api_router.get("/{process_schema_id}", dependencies=[Depends(bearer_schema)], response_model=ProcessSchema)
|
||||
async def get_process_schema(
|
||||
async def get_process_schema_endpoint(
|
||||
process_schema_id: int,
|
||||
connection: AsyncConnection = Depends(get_connection_dep),
|
||||
current_user=Depends(get_current_user),
|
||||
):
|
||||
process_schema_validation = await get_process_schema_by_id(connection, process_schema_id)
|
||||
service = ProcessSchemaService(connection)
|
||||
process_schema_validation = await service.get(process_schema_id)
|
||||
|
||||
if process_schema_validation is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Process schema not found")
|
||||
raise create_operation_error(
|
||||
message="Process schema not found",
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
details={"process_schema_id": process_schema_id},
|
||||
)
|
||||
|
||||
authorize_user = await db_user_role_validation_for_list_events_and_process_schema_by_list_event_id(
|
||||
connection, current_user, process_schema_validation.creator_id
|
||||
)
|
||||
validator = UserRoleValidator(connection)
|
||||
await validator.validate_ownership(current_user, process_schema_validation.creator_id)
|
||||
|
||||
if process_schema_id is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Process schema not found")
|
||||
raise create_operation_error(
|
||||
message="Process schema not found",
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
details={"process_schema_id": process_schema_id},
|
||||
)
|
||||
|
||||
return process_schema_validation
|
||||
return to_camel_dict(process_schema_validation.model_dump())
|
||||
|
||||
|
||||
@api_router.post("", dependencies=[Depends(bearer_schema)], response_model=ProcessSchema)
|
||||
async def create_processschema(
|
||||
process_schema: ProcessSchemaUpdate,
|
||||
@api_router.post("", dependencies=[Depends(bearer_schema)], response_model=ProcessSchemaResponse)
|
||||
async def create_processschema_endpoint(
|
||||
connection: AsyncConnection = Depends(get_connection_dep),
|
||||
current_user=Depends(get_current_user),
|
||||
):
|
||||
user_validation = await get_user_by_login(connection, current_user)
|
||||
process_schema_validation = await get_process_schema_by_title(connection, process_schema.title)
|
||||
|
||||
if process_schema_validation is None:
|
||||
await create_process_schema(connection, process_schema, user_validation.id)
|
||||
process_schema_new = await get_process_schema_by_title(connection, process_schema.title)
|
||||
return process_schema_new
|
||||
service = ProcessSchemaService(connection)
|
||||
response_data = await service.create(user_validation.id)
|
||||
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST, detail="An process schema with this information already exists."
|
||||
)
|
||||
return to_camel_dict(response_data)
|
||||
|
||||
|
||||
@api_router.put("/{process_schema_id}", dependencies=[Depends(bearer_schema)], response_model=ProcessSchema)
|
||||
async def update_process_schema(
|
||||
async def update_process_schema_endpoint(
|
||||
process_schema_id: int,
|
||||
process_schema_update: ProcessSchemaUpdate,
|
||||
connection: AsyncConnection = Depends(get_connection_dep),
|
||||
current_user=Depends(get_current_user),
|
||||
):
|
||||
process_schema_validation = await get_process_schema_by_id(connection, process_schema_id)
|
||||
service = ProcessSchemaService(connection)
|
||||
process_schema_validation = await service.get(process_schema_id)
|
||||
|
||||
if process_schema_validation is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Process schema not found")
|
||||
raise create_operation_error(
|
||||
message="Process schema not found",
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
details={"process_schema_id": process_schema_id},
|
||||
)
|
||||
|
||||
authorize_user = await db_user_role_validation_for_list_events_and_process_schema_by_list_event_id(
|
||||
connection, current_user, process_schema_validation.creator_id
|
||||
)
|
||||
validator = UserRoleValidator(connection)
|
||||
await validator.validate_ownership(current_user, process_schema_validation.creator_id)
|
||||
|
||||
updated_values = process_schema_update.model_dump(by_alias=True, exclude_none=True)
|
||||
|
||||
if not updated_values:
|
||||
return process_schema_validation
|
||||
|
||||
await update_process_schema_by_id(connection, updated_values, process_schema_validation)
|
||||
|
||||
process_schema = await get_process_schema_by_id(connection, process_schema_id)
|
||||
process_schema = await service.update(process_schema_id, updated_values, process_schema_validation)
|
||||
|
||||
return process_schema
|
||||
|
||||
|
||||
@api_router.delete("/{process_schema_id}", dependencies=[Depends(bearer_schema)], response_model=ProcessSchema)
|
||||
async def delete_process_schema(
|
||||
@api_router.delete("/{process_schema_id}", dependencies=[Depends(bearer_schema)], status_code=status.HTTP_200_OK)
|
||||
async def delete_process_schema_endpoint(
|
||||
process_schema_id: int,
|
||||
connection: AsyncConnection = Depends(get_connection_dep),
|
||||
current_user=Depends(get_current_user),
|
||||
):
|
||||
process_schema_validation = await get_process_schema_by_id(connection, process_schema_id)
|
||||
service = ProcessSchemaService(connection)
|
||||
process_schema_validation = await service.get(process_schema_id)
|
||||
|
||||
if process_schema_validation is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Process schema not found")
|
||||
raise create_operation_error(
|
||||
message="Process schema not found",
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
details={"process_schema_id": process_schema_id},
|
||||
)
|
||||
|
||||
authorize_user = await db_user_role_validation_for_list_events_and_process_schema_by_list_event_id(
|
||||
connection, current_user, process_schema_validation.creator_id
|
||||
)
|
||||
validator = UserRoleValidator(connection)
|
||||
await validator.validate_ownership(current_user, process_schema_validation.creator_id)
|
||||
|
||||
process_schema_update = ProcessSchemaUpdate(status=ProcessStatus.DELETED.value)
|
||||
await service.delete(process_schema_id, process_schema_validation)
|
||||
|
||||
updated_values = process_schema_update.model_dump(by_alias=True, exclude_none=True)
|
||||
|
||||
if not updated_values:
|
||||
return process_schema_validation
|
||||
|
||||
await update_process_schema_by_id(connection, updated_values, process_schema_validation)
|
||||
|
||||
process_schema = await get_process_schema_by_id(connection, process_schema_id)
|
||||
|
||||
return process_schema
|
||||
return HTTPException(status_code=status.HTTP_200_OK, detail="Process schema deleted successfully")
|
||||
|
||||
@@ -1,21 +1,17 @@
|
||||
from fastapi import (
|
||||
APIRouter,
|
||||
Depends,
|
||||
HTTPException,
|
||||
status,
|
||||
)
|
||||
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
|
||||
from api.db.connection.session import get_connection_dep
|
||||
from api.db.logic.account import get_user_by_id, update_user_by_id, get_user_by_login
|
||||
from api.schemas.base import bearer_schema
|
||||
from api.services.auth import get_current_user
|
||||
|
||||
from api.schemas.endpoints.account import UserUpdate
|
||||
from api.schemas.account.account import User
|
||||
|
||||
from api.schemas.base import bearer_schema
|
||||
from api.schemas.endpoints.account import UserUpdate
|
||||
from api.services.auth import get_current_user
|
||||
from api.error import create_operation_error, create_validation_error
|
||||
from api.services.endpoints.profile import ProfileService
|
||||
|
||||
api_router = APIRouter(
|
||||
prefix="/profile",
|
||||
@@ -27,10 +23,15 @@ api_router = APIRouter(
|
||||
async def get_profile(
|
||||
connection: AsyncConnection = Depends(get_connection_dep), current_user=Depends(get_current_user)
|
||||
):
|
||||
user = await get_user_by_login(connection, current_user)
|
||||
service = ProfileService(connection)
|
||||
user = await service.get_by_login(current_user)
|
||||
|
||||
if user is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Account not found")
|
||||
raise create_operation_error(
|
||||
message="Account not found",
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
details={"user_id": current_user},
|
||||
)
|
||||
|
||||
return user
|
||||
|
||||
@@ -41,20 +42,25 @@ async def update_profile(
|
||||
connection: AsyncConnection = Depends(get_connection_dep),
|
||||
current_user=Depends(get_current_user),
|
||||
):
|
||||
user = await get_user_by_login(connection, current_user)
|
||||
service = ProfileService(connection)
|
||||
user = await service.get_by_login(current_user)
|
||||
|
||||
if user is None:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Account not found")
|
||||
raise create_operation_error(
|
||||
message="Account not found",
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
details={"user_id": current_user},
|
||||
)
|
||||
|
||||
if user_update.role is None and user_update.login is None:
|
||||
updated_values = user_update.model_dump(by_alias=True, exclude_none=True)
|
||||
|
||||
if updated_values is None:
|
||||
return user
|
||||
|
||||
await update_user_by_id(connection, updated_values, user)
|
||||
|
||||
user = await get_user_by_id(connection, user.id)
|
||||
user = await service.update(user.id, updated_values, user)
|
||||
|
||||
return user
|
||||
else:
|
||||
raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail="Bad body")
|
||||
raise create_validation_error(
|
||||
message="Bad body",
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
details={"reason": "role and login fields cannot be updated"},
|
||||
)
|
||||
|
||||
182
api/api/endpoints/ps_node.py
Normal file
@@ -0,0 +1,182 @@
|
||||
from fastapi import APIRouter, Depends, status
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
|
||||
from api.db.connection.session import get_connection_dep
|
||||
from api.db.logic.account import get_user_by_login
|
||||
from api.schemas.base import bearer_schema
|
||||
from api.schemas.process.ps_node import Ps_NodeFrontResponse, Ps_NodeRequest, Ps_NodeDeleteRequest
|
||||
from api.services.auth import get_current_user
|
||||
from api.db.logic.ps_node import get_ps_node_by_id, check_node_connection
|
||||
from api.db.logic.process_schema import get_process_schema_by_id
|
||||
from api.services.user_role_validation import UserRoleValidator
|
||||
from core import VorkNodeRegistry
|
||||
from api.error import create_operation_error, create_access_error, create_validation_error, create_server_error
|
||||
from api.services.endpoints.ps_node import PsNodeService
|
||||
|
||||
|
||||
api_router = APIRouter(
|
||||
prefix="/ps_node",
|
||||
tags=["ps node"],
|
||||
)
|
||||
|
||||
|
||||
@api_router.delete("", dependencies=[Depends(bearer_schema)], status_code=status.HTTP_200_OK)
|
||||
async def delete_ps_node_endpoint(
|
||||
ps_node_delete_data: Ps_NodeDeleteRequest,
|
||||
connection: AsyncConnection = Depends(get_connection_dep),
|
||||
current_user=Depends(get_current_user),
|
||||
):
|
||||
process_schema = await get_process_schema_by_id(connection, ps_node_delete_data.schema_id)
|
||||
if process_schema is None:
|
||||
raise create_operation_error(
|
||||
message="Process schema not found",
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
details={"schema_id": ps_node_delete_data.schema_id},
|
||||
)
|
||||
|
||||
validator = UserRoleValidator(connection)
|
||||
try:
|
||||
await validator.validate_ownership(current_user, process_schema.creator_id)
|
||||
except Exception as e:
|
||||
raise create_access_error(
|
||||
message="Access denied",
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
details={"user_id": current_user, "schema_creator_id": process_schema.creator_id, "reason": str(e)},
|
||||
)
|
||||
|
||||
ps_node = await get_ps_node_by_id(connection, ps_node_delete_data.node_id)
|
||||
if ps_node is None:
|
||||
raise create_operation_error(
|
||||
message="PS node not found",
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
details={"node_id": ps_node_delete_data.node_id},
|
||||
)
|
||||
|
||||
next_ps_node = await get_ps_node_by_id(connection, ps_node_delete_data.next_node_id)
|
||||
if next_ps_node is None:
|
||||
raise create_operation_error(
|
||||
message="Next PS node not found",
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
details={"next_node_id": ps_node_delete_data.next_node_id},
|
||||
)
|
||||
|
||||
is_connected = await check_node_connection(
|
||||
connection, ps_node_delete_data.node_id, ps_node_delete_data.next_node_id, int(ps_node_delete_data.port)
|
||||
)
|
||||
|
||||
if not is_connected:
|
||||
raise create_validation_error(
|
||||
message="Node connection validation failed",
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
details={
|
||||
"node_id": ps_node_delete_data.node_id,
|
||||
"next_node_id": ps_node_delete_data.next_node_id,
|
||||
"port": ps_node_delete_data.port,
|
||||
},
|
||||
)
|
||||
|
||||
service = PsNodeService(connection)
|
||||
try:
|
||||
result = await service.delete(ps_node_delete_data.next_node_id)
|
||||
except Exception as e:
|
||||
raise create_server_error(
|
||||
message="Failed to delete nodes",
|
||||
status_code=500,
|
||||
details={"error": str(e), "next_node_id": ps_node_delete_data.next_node_id},
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@api_router.post("", dependencies=[Depends(bearer_schema)], response_model=Ps_NodeFrontResponse)
|
||||
async def create_ps_node_endpoint(
|
||||
ps_node: Ps_NodeRequest,
|
||||
connection: AsyncConnection = Depends(get_connection_dep),
|
||||
current_user=Depends(get_current_user),
|
||||
):
|
||||
user_validation = await get_user_by_login(connection, current_user)
|
||||
|
||||
process_schema = await get_process_schema_by_id(connection, ps_node.data["ps_id"])
|
||||
if process_schema is None:
|
||||
raise create_operation_error(
|
||||
message="Process schema not found",
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
details={"schema_id": ps_node.data["ps_id"]},
|
||||
)
|
||||
|
||||
validator = UserRoleValidator(connection)
|
||||
try:
|
||||
await validator.validate_ownership(current_user, process_schema.creator_id)
|
||||
except Exception as e:
|
||||
raise create_access_error(
|
||||
message="Access denied",
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
details={"user_id": current_user, "schema_creator_id": process_schema.creator_id, "reason": str(e)},
|
||||
)
|
||||
|
||||
registery = VorkNodeRegistry()
|
||||
|
||||
vork_node = registery.get(ps_node.data["node_type"])
|
||||
|
||||
if vork_node is None:
|
||||
raise create_operation_error(
|
||||
message="Node type not found",
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
details={"node_type": ps_node.data["node_type"]},
|
||||
)
|
||||
|
||||
try:
|
||||
node_instance = vork_node(data=ps_node.data, links=ps_node.links)
|
||||
node_instance_validated = node_instance.validate()
|
||||
except Exception as e:
|
||||
raise create_validation_error(
|
||||
message="Node validation failed",
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
details={"error": str(e)},
|
||||
)
|
||||
|
||||
parent_id = node_instance_validated.parent_id
|
||||
target_ps_id = ps_node.data["ps_id"]
|
||||
|
||||
parent_node = await get_ps_node_by_id(connection, parent_id)
|
||||
if parent_node is None:
|
||||
raise create_operation_error(
|
||||
message="Parent PS node not found",
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
details={"parent_id": parent_id},
|
||||
)
|
||||
if parent_node.ps_id != target_ps_id:
|
||||
raise create_validation_error(
|
||||
message="Parent PS node belongs to another process schema",
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
details={"parent_id": parent_id, "expected_ps_id": target_ps_id, "actual_ps_id": parent_node.ps_id},
|
||||
)
|
||||
|
||||
parent_port_number = node_instance_validated.parent_port_number
|
||||
|
||||
parent_settings = parent_node.settings or {}
|
||||
available_port_numbers = []
|
||||
|
||||
for key, value in parent_settings.items():
|
||||
if "port" in key.lower() and isinstance(value, int):
|
||||
available_port_numbers.append(value)
|
||||
|
||||
if parent_port_number not in available_port_numbers:
|
||||
raise create_validation_error(
|
||||
message="Parent port number is invalid",
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
details={"parent_id": parent_id, "parent_settings": parent_settings},
|
||||
)
|
||||
|
||||
service = PsNodeService(connection)
|
||||
try:
|
||||
ps_node_front_response = await service.create(ps_node.data, ps_node.links, user_validation.id)
|
||||
except Exception as e:
|
||||
raise create_server_error(
|
||||
message="Failed to create node",
|
||||
status_code=500,
|
||||
details={"error": str(e)},
|
||||
)
|
||||
|
||||
return ps_node_front_response
|
||||
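For reference, the DELETE /ps_node handler above takes a Ps_NodeDeleteRequest body rather than a path parameter. A rough sketch of such a call (ids and host are placeholders; note that port travels as a string per the schema):

import httpx

# Placeholder ids; the port is sent as a string per Ps_NodeDeleteRequest.
payload = {"schema_id": 1, "node_id": 5, "port": "2", "next_node_id": 6}
headers = {"Authorization": "Bearer <access_token>"}
response = httpx.request("DELETE", "http://localhost:8000/ps_node", json=payload, headers=headers)
print(response.status_code, response.json())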
26
api/api/error/__init__.py
Normal file
@@ -0,0 +1,26 @@
"""
Error-handling module for the API.
"""

from .error_model.error_types import ServerError, AccessError, OperationError, ValidationError, ErrorType

from .error_handlers import (
    handle_api_error,
    create_server_error,
    create_access_error,
    create_operation_error,
    create_validation_error,
)

__all__ = [
    "ServerError",
    "AccessError",
    "OperationError",
    "ValidationError",
    "ErrorType",
    "handle_api_error",
    "create_server_error",
    "create_access_error",
    "create_operation_error",
    "create_validation_error",
]
54
api/api/error/error_handlers.py
Normal file
@@ -0,0 +1,54 @@
"""
Error handlers for the API.
"""

from typing import Optional, Dict, Any
from fastapi import HTTPException

from .error_model.error_types import ServerError, AccessError, OperationError, ValidationError, ErrorType


def handle_api_error(
    error_type: ErrorType, message: str, status_code: int, details: Optional[Dict[str, Any]] = None
) -> HTTPException:
    """
    Builds an HTTPException with a properly structured error payload.
    """
    match error_type:
        case ErrorType.SERVER:
            error = ServerError(message=message, details=details)
        case ErrorType.ACCESS:
            error = AccessError(message=message, details=details)
        case ErrorType.OPERATION:
            error = OperationError(message=message, details=details)
        case ErrorType.VALIDATION:
            error = ValidationError(message=message, details=details)
        case _:
            error = ServerError(message=message, details=details)

    return HTTPException(status_code=status_code, detail=error.model_dump(mode="json"))


def create_server_error(
    message: str, status_code: int = 500, details: Optional[Dict[str, Any]] = None
) -> HTTPException:
    return handle_api_error(error_type=ErrorType.SERVER, message=message, status_code=status_code, details=details)


def create_access_error(
    message: str, status_code: int = 403, details: Optional[Dict[str, Any]] = None
) -> HTTPException:
    return handle_api_error(error_type=ErrorType.ACCESS, message=message, status_code=status_code, details=details)


def create_operation_error(
    message: str, status_code: int = 400, details: Optional[Dict[str, Any]] = None
) -> HTTPException:
    return handle_api_error(error_type=ErrorType.OPERATION, message=message, status_code=status_code, details=details)


def create_validation_error(
    message: str, status_code: int = 422, details: Optional[Dict[str, Any]] = None
) -> HTTPException:
    return handle_api_error(error_type=ErrorType.VALIDATION, message=message, status_code=status_code, details=details)
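A short sketch of how these helpers are intended to be used inside an endpoint (the route and lookup below are illustrative, not part of this changeset):

from fastapi import APIRouter, status

from api.error import create_operation_error

router = APIRouter()


@router.get("/items/{item_id}")
async def get_item(item_id: int):
    item = None  # stand-in for a real database lookup
    if item is None:
        raise create_operation_error(
            message="Item not found",
            status_code=status.HTTP_404_NOT_FOUND,
            details={"item_id": item_id},
        )
    return item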
7
api/api/error/error_model/__init__.py
Normal file
@@ -0,0 +1,7 @@
"""
Error models for the API.
"""

from .error_types import ServerError, AccessError, OperationError, ValidationError, ErrorType

__all__ = ["ServerError", "AccessError", "OperationError", "ValidationError", "ErrorType"]
61
api/api/error/error_model/error_types.py
Normal file
@@ -0,0 +1,61 @@
"""
Typed error models for the API.
"""

from enum import Enum
from typing import Optional, Dict, Any
from pydantic import BaseModel


class ErrorType(str, Enum):
    """
    API error types.
    """

    SERVER = "SERVER"
    ACCESS = "ACCESS"
    OPERATION = "OPERATION"
    VALIDATION = "VALIDATION"


class BaseError(BaseModel):
    """
    Base error model.
    """

    error_type: ErrorType
    message: str
    details: Optional[Dict[str, Any]] = None


class ServerError(BaseError):
    """
    Critical server-side errors (database, connections and similar failures).
    """

    error_type: ErrorType = ErrorType.SERVER


class AccessError(BaseError):
    """
    Access errors (tenant mismatch, missing role permissions).
    """

    error_type: ErrorType = ErrorType.ACCESS


class OperationError(BaseError):
    """
    Operation errors (failed verification, errors in the dataset).
    """

    error_type: ErrorType = ErrorType.OPERATION


class ValidationError(BaseError):
    """
    Validation errors (failed primary validation).
    """

    error_type: ErrorType = ErrorType.VALIDATION
    field_errors: Optional[Dict[str, str]] = None
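Since handle_api_error dumps one of these models into the HTTPException detail, clients receive a predictable error body. A rough sketch of the serialized shape (the values are examples):

from api.error import OperationError

error = OperationError(message="List events not found", details={"list_events_id": 7})
print(error.model_dump(mode="json"))
# -> {'error_type': 'OPERATION', 'message': 'List events not found', 'details': {'list_events_id': 7}}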
@@ -1,8 +1,8 @@
|
||||
import datetime
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from orm.tables.account import AccountRole, AccountStatus
|
||||
from pydantic import EmailStr, Field
|
||||
from api.db.tables.account import AccountRole, AccountStatus
|
||||
|
||||
from api.schemas.base import Base
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import datetime
|
||||
from typing import Optional
|
||||
from pydantic import Field
|
||||
from datetime import datetime
|
||||
from api.db.tables.account import KeyType, KeyStatus
|
||||
from typing import Optional
|
||||
|
||||
from orm.tables.account import KeyStatus, KeyType
|
||||
from pydantic import Field
|
||||
|
||||
from api.schemas.base import Base
|
||||
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
from datetime import datetime
|
||||
from typing import List, Optional, Dict
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from orm.tables.account import AccountRole, AccountStatus
|
||||
from pydantic import EmailStr, Field, TypeAdapter
|
||||
|
||||
from api.db.tables.account import AccountRole, AccountStatus
|
||||
from api.schemas.base import Base
|
||||
|
||||
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
from typing import Optional
|
||||
|
||||
from orm.tables.account import KeyStatus, KeyType
|
||||
from pydantic import Field
|
||||
from api.db.tables.account import KeyType, KeyStatus
|
||||
|
||||
from api.schemas.base import Base
|
||||
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from api.schemas.base import Base
|
||||
|
||||
|
||||
# Table for extracting information from the request
|
||||
|
||||
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
from pydantic import Field, TypeAdapter
|
||||
from typing import Optional, Dict, Any, List
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from orm.tables.events import EventState, EventStatus
|
||||
from pydantic import Field, TypeAdapter
|
||||
|
||||
from api.schemas.base import Base
|
||||
from api.db.tables.events import EventState, EventStatus
|
||||
|
||||
|
||||
class ListEventUpdate(Base):
|
||||
|
||||
@@ -1,16 +1,16 @@
|
||||
from pydantic import Field, TypeAdapter
|
||||
from typing import Optional, Dict, Any, List
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from orm.tables.process import ProcessStatus
|
||||
from pydantic import Field, TypeAdapter
|
||||
|
||||
from api.schemas.base import Base
|
||||
from api.db.tables.process import ProcessStatus
|
||||
|
||||
|
||||
class ProcessSchemaUpdate(Base):
|
||||
title: Optional[str] = Field(None, max_length=100)
|
||||
description: Optional[str] = None
|
||||
owner_id: Optional[int] = None
|
||||
# owner_id: Optional[int] = None
|
||||
settings: Optional[Dict[str, Any]] = None
|
||||
status: Optional[ProcessStatus] = None
|
||||
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
from pydantic import Field
|
||||
from typing import Dict, Any
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict
|
||||
|
||||
from orm.tables.events import EventState, EventStatus
|
||||
from pydantic import Field
|
||||
|
||||
from api.schemas.base import Base
|
||||
from api.db.tables.events import EventState, EventStatus
|
||||
|
||||
|
||||
class ListEvent(Base):
|
||||
|
||||
@@ -1,15 +1,17 @@
|
||||
from pydantic import Field
|
||||
from typing import Dict, Any
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict
|
||||
|
||||
from orm.tables.process import NodeStatus
|
||||
from pydantic import Field
|
||||
|
||||
from api.schemas.base import Base
|
||||
from api.db.tables.process import NodeStatus
|
||||
|
||||
|
||||
class MyModel(Base):
|
||||
class NodeLink(Base):
|
||||
id: int
|
||||
link_name: str = Field(..., max_length=20)
|
||||
node_id: int
|
||||
link_point_id: int
|
||||
next_node_id: int
|
||||
settings: Dict[str, Any]
|
||||
creator_id: int
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
from pydantic import Field
|
||||
from typing import Dict, Any
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, Optional, List
|
||||
|
||||
from orm.tables.process import ProcessStatus, NodeType
|
||||
from pydantic import Field
|
||||
|
||||
from api.schemas.base import Base
|
||||
from api.db.tables.process import ProcessStatus
|
||||
from api.schemas.process.ps_node import Ps_NodeFrontResponse
|
||||
|
||||
|
||||
class ProcessSchema(Base):
|
||||
@@ -15,3 +17,24 @@ class ProcessSchema(Base):
|
||||
created_at: datetime
|
||||
settings: Dict[str, Any]
|
||||
status: ProcessStatus
|
||||
|
||||
|
||||
class ProcessSchemaSettingsNodeLink(Base):
|
||||
id: int
|
||||
link_name: str
|
||||
parent_port_number: int
|
||||
from_id: int
|
||||
to_id: int
|
||||
|
||||
|
||||
class ProcessSchemaSettingsNode(Base):
|
||||
id: int
|
||||
node_type: NodeType
|
||||
from_node: Optional[Dict[str, Any]] = None
|
||||
data: Dict[str, Any]  # Renamed from 'from' to 'from_node'
|
||||
links: Optional[List[Dict[str, Any]]] = None
|
||||
|
||||
|
||||
class ProcessSchemaResponse(Base):
|
||||
process_schema: ProcessSchema
|
||||
node_listen: Ps_NodeFrontResponse
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from typing import Dict, Any
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict
|
||||
|
||||
from api.schemas.base import Base
|
||||
|
||||
|
||||
@@ -1,8 +1,21 @@
|
||||
from datetime import datetime
|
||||
from typing import Dict, Any
|
||||
from typing import Any, Dict, Optional, List
|
||||
|
||||
from orm.tables.process import NodeStatus, NodeType
|
||||
|
||||
from api.schemas.base import Base
|
||||
from api.db.tables.process import NodeType, NodeStatus
|
||||
|
||||
|
||||
class Ps_NodeDeleteRequest(Base):
|
||||
schema_id: int
|
||||
node_id: int
|
||||
port: str
|
||||
next_node_id: int
|
||||
|
||||
|
||||
class Ps_NodeRequest(Base):
|
||||
data: Dict[str, Any]
|
||||
links: Dict[str, Any]
|
||||
|
||||
|
||||
class Ps_Node(Base):
|
||||
@@ -10,6 +23,26 @@ class Ps_Node(Base):
|
||||
ps_id: int
|
||||
node_type: NodeType
|
||||
settings: dict
|
||||
creator_id: Dict[str, Any]
|
||||
creator_id: int
|
||||
created_at: datetime
|
||||
status: NodeStatus
|
||||
|
||||
|
||||
class Ps_NodeFrontResponseLink(Base):
|
||||
id: int
|
||||
link_name: str
|
||||
parent_port_number: int
|
||||
from_id: int
|
||||
to_id: int
|
||||
|
||||
|
||||
class Ps_NodeFrontResponseNode(Base):
|
||||
id: int
|
||||
node_type: NodeType
|
||||
data: Dict[str, Any]  # Renamed from 'from' to 'from_node'
|
||||
|
||||
|
||||
class Ps_NodeFrontResponse(Base):
|
||||
description: Optional[Dict[str, Any]] = None
|
||||
node: Optional[Ps_NodeFrontResponseNode] = None
|
||||
links: Optional[List[Dict[str, Any]]] = None
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import HTTPException, Request
|
||||
from orm.tables.account import AccountStatus
|
||||
from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
|
||||
from api.db.logic.auth import get_user
|
||||
from api.db.tables.account import AccountStatus
|
||||
from api.schemas.endpoints.account import AllUser
|
||||
from api.utils.hasher import hasher
|
||||
|
||||
|
||||
0
api/api/services/endpoints/__init__.py
Normal file
67
api/api/services/endpoints/account.py
Normal file
@@ -0,0 +1,67 @@
|
||||
from typing import Optional
|
||||
from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
from orm.tables.account import AccountStatus
|
||||
|
||||
from api.db.logic.account import (
|
||||
get_user_by_id,
|
||||
update_user_by_id,
|
||||
get_user_account_page_DTO,
|
||||
create_user,
|
||||
)
|
||||
from api.db.logic.keyring import update_password_key, create_password_key
|
||||
from api.schemas.account.account import User
|
||||
from api.schemas.endpoints.account import AllUserResponse, UserCreate, UserFilterDTO, AllUser
|
||||
|
||||
|
||||
class AccountService:
|
||||
"""Сервис для работы с аккаунтами пользователей"""
|
||||
|
||||
def __init__(self, connection: AsyncConnection):
|
||||
self.connection = connection
|
||||
|
||||
async def list(self, filter_dto: UserFilterDTO) -> Optional[AllUserResponse]:
|
||||
"""
|
||||
Returns a paginated, filtered list of users.
|
||||
"""
|
||||
return await get_user_account_page_DTO(self.connection, filter_dto)
|
||||
|
||||
async def get(self, user_id: int) -> Optional[User]:
|
||||
"""
|
||||
Returns a user by ID.
|
||||
"""
|
||||
return await get_user_by_id(self.connection, user_id)
|
||||
|
||||
async def create(self, user_data: UserCreate, creator_id: int) -> AllUser:
|
||||
"""
|
||||
Creates a new user.
|
||||
"""
|
||||
new_user = await create_user(self.connection, user_data, creator_id)
|
||||
await create_password_key(self.connection, user_data.password, new_user.id)
|
||||
return new_user
|
||||
|
||||
async def update(self, user_id: int, user_update_data: dict, password: str | None = None) -> User:
|
||||
"""
|
||||
Updates the user's account data.
|
||||
|
||||
"""
|
||||
if password is not None:
|
||||
await update_password_key(self.connection, user_id, password)
|
||||
|
||||
if user_update_data:
|
||||
user = await get_user_by_id(self.connection, user_id)
|
||||
await update_user_by_id(self.connection, user_update_data, user)
|
||||
|
||||
updated_user = await get_user_by_id(self.connection, user_id)
|
||||
return updated_user
|
||||
|
||||
async def delete(self, user_id: int) -> User:
|
||||
"""
|
||||
Marks the user's account as deleted.
|
||||
"""
|
||||
user = await get_user_by_id(self.connection, user_id)
|
||||
|
||||
update_data = {"status": AccountStatus.DELETED.value}
|
||||
await update_user_by_id(self.connection, update_data, user)
|
||||
|
||||
deleted_user = await get_user_by_id(self.connection, user_id)
|
||||
return deleted_user
|
||||
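As a sketch, an endpoint would wire AccountService up roughly as follows (the route is illustrative; the dependency and error helpers mirror the patterns used elsewhere in this changeset):

from fastapi import APIRouter, Depends, status
from sqlalchemy.ext.asyncio import AsyncConnection

from api.db.connection.session import get_connection_dep
from api.error import create_operation_error
from api.services.endpoints.account import AccountService

router = APIRouter()


@router.get("/account/{user_id}")
async def get_account(user_id: int, connection: AsyncConnection = Depends(get_connection_dep)):
    service = AccountService(connection)
    user = await service.get(user_id)
    if user is None:
        raise create_operation_error(
            message="Account not found",
            status_code=status.HTTP_404_NOT_FOUND,
            details={"user_id": user_id},
        )
    return user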
47
api/api/services/endpoints/auth.py
Normal file
@@ -0,0 +1,47 @@
from datetime import datetime, timedelta, timezone

from fastapi_jwt_auth import AuthJWT
from sqlalchemy.ext.asyncio import AsyncConnection

from api.config import get_settings
from api.db.logic.auth import add_new_refresh_token, upgrade_old_refresh_token
from api.schemas.endpoints.account import AllUser
from api.schemas.endpoints.auth import Tokens


class AuthService:
    """Service for authentication"""

    def __init__(self, connection: AsyncConnection):
        self.connection = connection

    async def login(self, user: AllUser, authorize: AuthJWT) -> Tokens:
        """
        Creates access and refresh tokens for the user.
        """
        access_token_expires = timedelta(minutes=get_settings().ACCESS_TOKEN_EXPIRE_MINUTES)
        refresh_token_expires = timedelta(days=get_settings().REFRESH_TOKEN_EXPIRE_DAYS)

        access_token = authorize.create_access_token(subject=user.login, expires_time=access_token_expires)
        refresh_token = authorize.create_refresh_token(subject=user.login, expires_time=refresh_token_expires)

        refresh_token_expires_time = datetime.now(timezone.utc) + refresh_token_expires

        await add_new_refresh_token(self.connection, refresh_token, refresh_token_expires_time, user)

        return Tokens(access_token=access_token, refresh_token=refresh_token)

    async def refresh(self, current_user: str, authorize: AuthJWT) -> Tokens:
        """
        Creates a new access token based on the refresh token.
        """
        access_token_expires = timedelta(minutes=get_settings().ACCESS_TOKEN_EXPIRE_MINUTES)
        new_access_token = authorize.create_access_token(subject=current_user, expires_time=access_token_expires)

        return Tokens(access_token=new_access_token)

    async def invalidate_refresh_token(self, refresh_token: str) -> None:
        """
        Marks the refresh token as invalid.
        """
        await upgrade_old_refresh_token(self.connection, refresh_token)
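A rough sketch of how a login endpoint could delegate token issuance to AuthService (credential verification is elided and the route name is an assumption):

from fastapi import APIRouter, Depends
from fastapi_jwt_auth import AuthJWT
from sqlalchemy.ext.asyncio import AsyncConnection

from api.db.connection.session import get_connection_dep
from api.services.endpoints.auth import AuthService

router = APIRouter()


@router.post("/auth/login")
async def login(
    authorize: AuthJWT = Depends(),
    connection: AsyncConnection = Depends(get_connection_dep),
):
    user = ...  # stand-in for a user that was looked up and verified elsewhere
    service = AuthService(connection)
    return await service.login(user, authorize)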
68
api/api/services/endpoints/keyring.py
Normal file
@@ -0,0 +1,68 @@
|
||||
from typing import Optional
|
||||
from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
from orm.tables.account import KeyStatus
|
||||
|
||||
from api.db.logic.keyring import get_key_by_id, update_key_by_id, create_key
|
||||
from api.schemas.account.account_keyring import AccountKeyring
|
||||
from api.schemas.endpoints.account_keyring import AccountKeyringUpdate
|
||||
from api.error import create_validation_error
|
||||
from fastapi import status
|
||||
|
||||
|
||||
class KeyringService:
|
||||
"""Сервис для работы с keyring"""
|
||||
|
||||
def __init__(self, connection: AsyncConnection):
|
||||
self.connection = connection
|
||||
|
||||
async def get(self, key_id: str) -> Optional[AccountKeyring]:
|
||||
"""
|
||||
Returns a keyring entry by key_id.
|
||||
"""
|
||||
return await get_key_by_id(self.connection, key_id)
|
||||
|
||||
async def create(self, key: AccountKeyringUpdate, key_id: str) -> AccountKeyring:
|
||||
"""
|
||||
Creates a new keyring entry.
|
||||
"""
|
||||
from api.schemas.account.account_keyring import AccountKeyring
|
||||
from datetime import datetime, timezone
|
||||
|
||||
if key.owner_id is None or key.key_type is None or key.key_value is None or key.status is None:
|
||||
raise create_validation_error(
|
||||
message="All required fields must be provided for keyring creation",
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
account_keyring = AccountKeyring(
|
||||
owner_id=key.owner_id,
|
||||
key_type=key.key_type,
|
||||
key_value=key.key_value,
|
||||
status=key.status,
|
||||
expiry=None,
|
||||
created_at=datetime.now(timezone.utc),
|
||||
)
|
||||
|
||||
return await create_key(self.connection, account_keyring, key_id)
|
||||
|
||||
async def update(self, key_id: str, update_data: dict) -> AccountKeyring:
|
||||
"""
|
||||
Updates keyring data.
|
||||
"""
|
||||
keyring = await get_key_by_id(self.connection, key_id)
|
||||
await update_key_by_id(self.connection, update_data, keyring)
|
||||
|
||||
updated_keyring = await get_key_by_id(self.connection, key_id)
|
||||
return updated_keyring
|
||||
|
||||
async def delete(self, key_id: str) -> AccountKeyring:
|
||||
"""
|
||||
Marks the keyring entry as deleted.
|
||||
"""
|
||||
keyring = await get_key_by_id(self.connection, key_id)
|
||||
|
||||
update_data = {"status": KeyStatus.DELETED.value}
|
||||
await update_key_by_id(self.connection, update_data, keyring)
|
||||
|
||||
deleted_keyring = await get_key_by_id(self.connection, key_id)
|
||||
return deleted_keyring
|
||||
63
api/api/services/endpoints/list_events.py
Normal file
@@ -0,0 +1,63 @@
|
||||
from typing import Optional
|
||||
from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
from orm.tables.events import EventStatus
|
||||
|
||||
from api.db.logic.list_events import (
|
||||
get_list_events_by_id,
|
||||
get_list_events_by_name,
|
||||
get_list_events_page_DTO,
|
||||
update_list_events_by_id,
|
||||
create_list_events,
|
||||
)
|
||||
from api.schemas.events.list_events import ListEvent
|
||||
from api.schemas.endpoints.list_events import AllListEventResponse, ListEventFilterDTO, ListEventUpdate
|
||||
|
||||
|
||||
class ListEventsService:
|
||||
"""Сервис для работы с list events"""
|
||||
|
||||
def __init__(self, connection: AsyncConnection):
|
||||
self.connection = connection
|
||||
|
||||
async def list(self, filter_dto: ListEventFilterDTO) -> Optional[AllListEventResponse]:
|
||||
"""
|
||||
Returns a paginated, filtered list of events.
|
||||
"""
|
||||
return await get_list_events_page_DTO(self.connection, filter_dto)
|
||||
|
||||
async def get(self, list_events_id: int) -> Optional[ListEvent]:
|
||||
"""
|
||||
Returns a list event by ID.
|
||||
"""
|
||||
return await get_list_events_by_id(self.connection, list_events_id)
|
||||
|
||||
async def get_by_name(self, name: str) -> Optional[ListEvent]:
|
||||
"""
|
||||
Returns a list event by name.
|
||||
"""
|
||||
return await get_list_events_by_name(self.connection, name)
|
||||
|
||||
async def create(self, list_events_data: ListEventUpdate, creator_id: int) -> Optional[ListEvent]:
|
||||
"""
|
||||
Creates a new list event.
|
||||
"""
|
||||
|
||||
await create_list_events(self.connection, list_events_data, creator_id)
|
||||
return await get_list_events_by_name(self.connection, list_events_data.name)
|
||||
|
||||
async def update(self, list_events_id: int, update_data: dict) -> ListEvent:
|
||||
"""
|
||||
Updates list event data.
|
||||
"""
|
||||
list_events = await get_list_events_by_id(self.connection, list_events_id)
|
||||
await update_list_events_by_id(self.connection, update_data, list_events)
|
||||
return await get_list_events_by_id(self.connection, list_events_id)
|
||||
|
||||
async def delete(self, list_events_id: int) -> ListEvent:
|
||||
"""
|
||||
Marks the list event as deleted.
|
||||
"""
|
||||
list_events = await get_list_events_by_id(self.connection, list_events_id)
|
||||
update_data = {"status": EventStatus.DELETED.value}
|
||||
await update_list_events_by_id(self.connection, update_data, list_events)
|
||||
return await get_list_events_by_id(self.connection, list_events_id)
|
||||
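Note that ListEventsService.delete is a soft delete: it rewrites the event's status to EventStatus.DELETED and returns the refreshed row. A minimal sketch of that call pattern (obtaining the AsyncConnection is elided):

from orm.tables.events import EventStatus

from api.services.endpoints.list_events import ListEventsService


async def soft_delete_list_event(connection, list_events_id: int):
    service = ListEventsService(connection)
    deleted = await service.delete(list_events_id)
    # The row is still present; only its status changed.
    assert deleted.status == EventStatus.DELETED
    return deleted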
110
api/api/services/endpoints/process_schema.py
Normal file
@@ -0,0 +1,110 @@
|
||||
from typing import Optional, Dict, Any
|
||||
from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
from orm.tables.process import ProcessStatus
|
||||
|
||||
from api.db.logic.process_schema import (
|
||||
get_process_schema_by_id,
|
||||
get_process_schema_page_DTO,
|
||||
update_process_schema_by_id,
|
||||
create_process_schema,
|
||||
update_process_schema_settings_by_id,
|
||||
)
|
||||
from api.db.logic.ps_node import create_ps_node_schema
|
||||
from api.schemas.process.process_schema import ProcessSchema, ProcessSchemaSettingsNode
|
||||
from api.schemas.endpoints.process_schema import AllProcessSchemaResponse, ProcessSchemaFilterDTO, ProcessSchemaUpdate
|
||||
from api.schemas.process.ps_node import Ps_NodeFrontResponse, Ps_NodeFrontResponseNode
|
||||
from orm.tables.process import NodeType
|
||||
from core import VorkNodeRegistry
|
||||
from model_nodes import ListenNodeData
|
||||
from api.utils.node_counter import increment_node_counter
|
||||
|
||||
|
||||
class ProcessSchemaService:
|
||||
"""Сервис для работы с process schema"""
|
||||
|
||||
def __init__(self, connection: AsyncConnection):
|
||||
self.connection = connection
|
||||
|
||||
async def list(self, filter_dto: ProcessSchemaFilterDTO) -> Optional[AllProcessSchemaResponse]:
|
||||
"""
|
||||
Returns a paginated, filtered list of process schemas.
|
||||
"""
|
||||
return await get_process_schema_page_DTO(self.connection, filter_dto)
|
||||
|
||||
async def get(self, process_schema_id: int) -> Optional[ProcessSchema]:
|
||||
"""
|
||||
Returns a process schema by ID.
|
||||
"""
|
||||
return await get_process_schema_by_id(self.connection, process_schema_id)
|
||||
|
||||
async def create(self, creator_id: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Creates a new process schema with an initial LISTEN node.
|
||||
"""
|
||||
current_node_counter = increment_node_counter()
|
||||
title = f"Новая схема {current_node_counter}"
|
||||
description = "Default description"
|
||||
|
||||
node_id = await create_process_schema(self.connection, creator_id, title, description)
|
||||
|
||||
process_schema_new = await get_process_schema_by_id(self.connection, node_id)
|
||||
|
||||
start_node_data = ListenNodeData(ps_id=process_schema_new.id, node_type=NodeType.START.value, is_start="True")
|
||||
|
||||
start_node_links = {}
|
||||
|
||||
registery = VorkNodeRegistry()
|
||||
|
||||
vork_node = registery.get("LISTEN")
|
||||
|
||||
node_descriptor = vork_node.form()
|
||||
|
||||
start_node = vork_node(data=start_node_data.model_dump(), links=start_node_links)
|
||||
|
||||
validated_start_schema = start_node.validate()
|
||||
|
||||
db_start_schema = await create_ps_node_schema(self.connection, validated_start_schema, creator_id)
|
||||
|
||||
node = ProcessSchemaSettingsNode(
|
||||
id=db_start_schema.id,
|
||||
node_type=NodeType.LISTEN.value,
|
||||
data=validated_start_schema.data.model_dump(),
|
||||
from_node=None,
|
||||
links=None,
|
||||
)
|
||||
|
||||
settings_dict = {"node": node.model_dump(mode="json")}
|
||||
|
||||
await update_process_schema_settings_by_id(self.connection, process_schema_new.id, settings_dict)
|
||||
|
||||
process_schema_new = await get_process_schema_by_id(self.connection, node_id)
|
||||
|
||||
ps_node_front_response = Ps_NodeFrontResponse(
|
||||
description=node_descriptor.model_dump(),
|
||||
node=Ps_NodeFrontResponseNode(
|
||||
id=db_start_schema.id, node_type=NodeType.LISTEN.value, data=validated_start_schema.data.model_dump()
|
||||
),
|
||||
link=None,
|
||||
)
|
||||
|
||||
response_data = {
|
||||
"process_schema": process_schema_new.model_dump(),
|
||||
"node_listen": ps_node_front_response.model_dump(),
|
||||
}
|
||||
|
||||
return response_data
|
||||
|
||||
async def update(self, process_schema_id: int, update_data: dict, process_schema: ProcessSchema) -> ProcessSchema:
|
||||
"""
|
||||
Updates process schema data.
|
||||
"""
|
||||
await update_process_schema_by_id(self.connection, update_data, process_schema)
|
||||
return await get_process_schema_by_id(self.connection, process_schema_id)
|
||||
|
||||
async def delete(self, process_schema_id: int, process_schema: ProcessSchema) -> None:
|
||||
"""
|
||||
Marks the process schema as deleted.
|
||||
"""
|
||||
process_schema_update = ProcessSchemaUpdate(status=ProcessStatus.DELETED.value)
|
||||
update_data = process_schema_update.model_dump(by_alias=True, exclude_none=True)
|
||||
await update_process_schema_by_id(self.connection, update_data, process_schema)
|
||||
30
api/api/services/endpoints/profile.py
Normal file
@@ -0,0 +1,30 @@
from typing import Optional
from sqlalchemy.ext.asyncio import AsyncConnection

from api.db.logic.account import get_user_by_id, get_user_by_login, update_user_by_id
from api.schemas.account.account import User


class ProfileService:
    """Service for working with the user profile"""

    def __init__(self, connection: AsyncConnection):
        self.connection = connection

    async def get_by_login(self, login: str) -> Optional[User]:
        """
        Returns the user for the given login.
        """
        return await get_user_by_login(self.connection, login)

    async def update(self, user_id: int, update_data: dict, user: User) -> User:
        """
        Updates the user's profile data.
        """
        if update_data is None or not update_data:
            return user

        await update_user_by_id(self.connection, update_data, user)

        updated_user = await get_user_by_id(self.connection, user_id)
        return updated_user
92
api/api/services/endpoints/ps_node.py
Normal file
@@ -0,0 +1,92 @@
|
||||
from typing import List, Dict, Any
|
||||
from sqlalchemy.ext.asyncio import AsyncConnection
|
||||
|
||||
from api.db.logic.ps_node import (
|
||||
get_nodes_for_deletion_ordered,
|
||||
delete_ps_nodes_delete_handler,
|
||||
create_ps_node_schema,
|
||||
)
|
||||
from api.db.logic.node_link import get_last_link_name_by_node_id, create_node_link_schema
|
||||
from api.db.logic.process_schema import update_process_schema_settings_by_id
|
||||
from api.schemas.process.process_schema import ProcessSchemaSettingsNodeLink, ProcessSchemaSettingsNode
|
||||
from api.schemas.process.ps_node import Ps_NodeFrontResponse, Ps_NodeFrontResponseNode
|
||||
from core import VorkNodeRegistry, VorkNodeLink
|
||||
from model_nodes import VorkNodeLinkData
|
||||
from api.utils.to_camel_dict import to_camel_dict
|
||||
|
||||
|
||||
class PsNodeService:
|
||||
"""Сервис для работы с ps nodes"""
|
||||
|
||||
def __init__(self, connection: AsyncConnection):
|
||||
self.connection = connection
|
||||
|
||||
async def delete(self, next_node_id: int) -> Dict[str, List[int]]:
|
||||
"""
|
||||
Deletes nodes in the correct order.
|
||||
"""
|
||||
ordered_node_ids = await get_nodes_for_deletion_ordered(self.connection, next_node_id)
|
||||
deleted_node_ids = await delete_ps_nodes_delete_handler(self.connection, ordered_node_ids)
|
||||
|
||||
return {
|
||||
"deleted_node_ids": deleted_node_ids,
|
||||
}
|
||||
|
||||
async def create(
|
||||
self, ps_node_data: Dict[str, Any], links: Dict[str, Any], creator_id: int
|
||||
) -> Ps_NodeFrontResponse:
|
||||
"""
|
||||
Создаёт новую ноду с линком и обновляет настройки схемы процесса.
|
||||
"""
|
||||
registery = VorkNodeRegistry()
|
||||
vork_node = registery.get(ps_node_data["node_type"])
|
||||
node_descriptor = vork_node.form()
|
||||
|
||||
node_instance = vork_node(data=ps_node_data, links=links)
|
||||
node_instance_validated = node_instance.validate()
|
||||
|
||||
db_ps_node = await create_ps_node_schema(self.connection, node_instance_validated, creator_id)
|
||||
link_name = await get_last_link_name_by_node_id(self.connection, db_ps_node.ps_id)
|
||||
|
||||
link_data = VorkNodeLinkData(
|
||||
parent_port_number=node_instance_validated.parent_port_number,
|
||||
to_id=db_ps_node.id,
|
||||
from_id=node_instance_validated.parent_id,
|
||||
last_link_name=link_name,
|
||||
)
|
||||
|
||||
link = VorkNodeLink(data=link_data.model_dump())
|
||||
validated_link = link.validate()
|
||||
|
||||
db_node_link = await create_node_link_schema(self.connection, validated_link, creator_id)
|
||||
|
||||
links_settings = ProcessSchemaSettingsNodeLink(
|
||||
id=db_node_link.id,
|
||||
link_name=db_node_link.link_name,
|
||||
parent_port_number=db_node_link.link_point_id,
|
||||
from_id=db_node_link.node_id,
|
||||
to_id=db_node_link.next_node_id,
|
||||
)
|
||||
|
||||
node_settings = ProcessSchemaSettingsNode(
|
||||
id=db_ps_node.id,
|
||||
node_type=db_ps_node.node_type,
|
||||
data=node_instance_validated.data.model_dump(),
|
||||
from_node=None,
|
||||
links=[{"links": links_settings.model_dump()}],
|
||||
)
|
||||
|
||||
settings_dict = {"node": node_settings.model_dump(mode="json")}
|
||||
await update_process_schema_settings_by_id(self.connection, db_ps_node.ps_id, settings_dict)
|
||||
|
||||
ps_node_front_response = Ps_NodeFrontResponse(
|
||||
description=node_descriptor.model_dump(),
|
||||
node=Ps_NodeFrontResponseNode(
|
||||
id=db_ps_node.id,
|
||||
node_type=db_ps_node.node_type,
|
||||
data=to_camel_dict(node_instance_validated.data.model_dump()),
|
||||
),
|
||||
links=[{"links": links_settings.model_dump()}],
|
||||
)
|
||||
|
||||
return ps_node_front_response
|
||||
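create() runs a fixed pipeline: resolve the node class from VorkNodeRegistry, validate the node, persist it, build and validate a VorkNodeLink back to the parent, persist the link, then merge node and link into the process schema settings. A hedged example of calling it; the payload keys are inferred from the attributes create() reads (node_type, parent_id, parent_port_number) rather than from a documented contract:

async def create_node_example(connection, creator_id: int):
    service = PsNodeService(connection)

    # Assumed payload shape, for illustration only.
    ps_node_data = {"node_type": "start", "parent_id": 1, "parent_port_number": 0}
    links = {}  # passed through to the Vork node constructor as-is

    response = await service.create(ps_node_data, links, creator_id)
    return response.node.id  # id of the newly persisted ps_node row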
@@ -1,15 +1,15 @@
from fastapi_jwt_auth import AuthJWT
from starlette.middleware.base import BaseHTTPMiddleware
import re
from re import escape

from fastapi import (
    Request,
    status,
)

from fastapi.responses import JSONResponse
from api.config import get_settings
from fastapi_jwt_auth import AuthJWT
from starlette.middleware.base import BaseHTTPMiddleware

import re
from re import escape
from api.config import get_settings


class MiddlewareAccessTokenValidadtion(BaseHTTPMiddleware):
@@ -1,31 +1,75 @@
from fastapi import (
    HTTPException,
    status,
)
from fastapi import status
from orm.tables.account import AccountRole
from api.db.logic.account import get_user_by_login
from api.db.tables.account import AccountRole
from api.error import create_operation_error, create_access_error


async def db_user_role_validation(connection, current_user):
    authorize_user = await get_user_by_login(connection, current_user)
    if authorize_user.role not in {AccountRole.OWNER, AccountRole.ADMIN}:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="You do not have enough permissions")
    return authorize_user
class UserRoleValidator:
    """User role validator."""

    def __init__(self, connection):
        self.connection = connection

async def db_user_role_validation_for_list_events_and_process_schema_by_list_event_id(
    connection, current_user, current_listevents_creator_id
):
    authorize_user = await get_user_by_login(connection, current_user)
    if authorize_user.role not in {AccountRole.OWNER, AccountRole.ADMIN}:
        if authorize_user.id != current_listevents_creator_id:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="You do not have enough permissions")
    return authorize_user
    async def validate_admin(self, current_user: int):
        """Checks the user's permissions."""
        try:
            authorize_user = await get_user_by_login(self.connection, current_user)
        except Exception as e:
            raise create_operation_error(
                message="User not found",
                status_code=status.HTTP_404_NOT_FOUND,
                details={"user_id": current_user, "error": str(e)},
            )

        if authorize_user.role not in {AccountRole.OWNER, AccountRole.ADMIN}:
            raise create_access_error(
                message="Insufficient permissions",
                status_code=status.HTTP_403_FORBIDDEN,
                details={
                    "user_id": current_user,
                    "user_role": authorize_user.role.value,
                    "required_roles": [AccountRole.OWNER.value, AccountRole.ADMIN.value],
                },
            )

async def db_user_role_validation_for_list_events_and_process_schema(connection, current_user):
    authorize_user = await get_user_by_login(connection, current_user)
    if authorize_user.role not in {AccountRole.OWNER, AccountRole.ADMIN}:
        return authorize_user, False
    else:
        return authorize_user, True
        return authorize_user

    async def validate_ownership(self, current_user: int, resource_owner_id: int):
        """Checks resource ownership or admin permissions."""
        try:
            authorize_user = await get_user_by_login(self.connection, current_user)
        except Exception as e:
            raise create_operation_error(
                message="User not found",
                status_code=status.HTTP_404_NOT_FOUND,
                details={"user_id": current_user, "error": str(e)},
            )

        if authorize_user.role not in {AccountRole.OWNER, AccountRole.ADMIN}:
            if authorize_user.id != resource_owner_id:
                raise create_access_error(
                    message="Access denied",
                    status_code=status.HTTP_403_FORBIDDEN,
                    details={
                        "user_id": current_user,
                        "resource_owner_id": resource_owner_id,
                        "user_role": authorize_user.role.value,
                        "reason": "User is not the owner and does not have admin privileges",
                    },
                )

        return authorize_user

    async def get_user(self, current_user: int):
        """Returns the user together with an admin flag."""
        try:
            authorize_user = await get_user_by_login(self.connection, current_user)
        except Exception as e:
            raise create_operation_error(
                message="User not found",
                status_code=status.HTTP_404_NOT_FOUND,
                details={"user_id": current_user, "error": str(e)},
            )

        is_admin = authorize_user.role in {AccountRole.OWNER, AccountRole.ADMIN}
        return authorize_user, is_admin

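Call sites that used the removed module-level helpers now construct a UserRoleValidator per request connection; a rough mapping, with illustrative wiring:

async def authorize_request(connection, current_user, owner_id):
    validator = UserRoleValidator(connection)

    # Replaces db_user_role_validation: raises create_access_error (403) for non-admins.
    admin_user = await validator.validate_admin(current_user)

    # Replaces db_user_role_validation_for_list_events_and_process_schema_by_list_event_id:
    # admins bypass the ownership check.
    owner_or_admin = await validator.validate_ownership(current_user, owner_id)

    # Replaces db_user_role_validation_for_list_events_and_process_schema:
    # returns the user plus an is_admin flag instead of raising.
    user, is_admin = await validator.get_user(current_user)
    return user, is_admin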
@@ -1,8 +1,9 @@
import asyncio
import os

from orm.tables.account import account_keyring_table, account_table, AccountRole, KeyStatus, KeyType

from api.db.connection.session import get_connection
from api.db.tables.account import account_keyring_table, account_table, AccountRole, KeyStatus, KeyType
from api.utils.hasher import hasher
from api.utils.key_id_gen import KeyIdGenerator

52 api/api/utils/node_counter.py Normal file
@@ -0,0 +1,52 @@
import json
from pathlib import Path
from typing import Dict


# Path to the counter file (at the project root)
COUNTER_FILE_PATH = Path(__file__).parent.parent.parent / "node_counter.json"


def get_node_counter() -> int:
    """
    Opens the JSON file and returns the node_counter value.
    If the file does not exist, creates it with a default value of 0.

    Returns:
        int: The current node counter value
    """
    if not COUNTER_FILE_PATH.exists():
        initial_data: Dict[str, int] = {"node_counter": 0}
        with open(COUNTER_FILE_PATH, "w", encoding="utf-8") as f:
            json.dump(initial_data, f, indent=2, ensure_ascii=False)
        return 0

    try:
        with open(COUNTER_FILE_PATH, "r", encoding="utf-8") as f:
            data = json.load(f)
        return data.get("node_counter", 0)
    except (json.JSONDecodeError, IOError):
        initial_data = {"node_counter": 0}
        with open(COUNTER_FILE_PATH, "w", encoding="utf-8") as f:
            json.dump(initial_data, f, indent=2, ensure_ascii=False)
        return 0


def increment_node_counter() -> int:
    """
    Increments node_counter by 1, saves it to the file and returns the new value.

    Returns:
        int: The new counter value (previous value + 1)
    """
    current_value = get_node_counter()

    new_value = current_value + 1

    data: Dict[str, int] = {"node_counter": new_value}
    with open(COUNTER_FILE_PATH, "w", encoding="utf-8") as f:
        json.dump(data, f, indent=2, ensure_ascii=False)

    return new_value
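A quick usage sketch for the counter helpers; note that the read-modify-write in increment_node_counter() is not protected by a lock, so two concurrent requests could read the same value and persist the same result:

from api.utils.node_counter import get_node_counter, increment_node_counter

before = get_node_counter()       # creates node_counter.json with 0 on first use
after = increment_node_counter()  # persists before + 1 and returns it
assert after == before + 1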
10 api/api/utils/to_camel_dict.py Normal file
@@ -0,0 +1,10 @@
from pydantic.alias_generators import to_camel


def to_camel_dict(obj):
    if isinstance(obj, dict):
        return {to_camel(key): to_camel_dict(value) for key, value in obj.items()}
    elif isinstance(obj, list):
        return [to_camel_dict(item) for item in obj]
    else:
        return obj
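The helper converts keys recursively and leaves values untouched, e.g.:

payload = {"node_type": "start", "node_links": [{"next_node_id": 7}]}
print(to_camel_dict(payload))
# -> {"nodeType": "start", "nodeLinks": [{"nextNodeId": 7}]}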
2155 api/poetry.lock generated
File diff suppressed because it is too large
@@ -16,7 +16,9 @@ dependencies = [
    "cryptography (>=44.0.2,<45.0.0)",
    "pydantic[email] (>=2.11.3,<3.0.0)",
    "python-multipart (>=0.0.20,<0.0.21)",
    "requests (>=2.31.0,<3.0.0)",
    "fastapi-jwt-auth @ git+https://github.com/vvpreo/fastapi-jwt-auth",
    "vork-core @ git+http://88.86.199.167:3000/Nox/CORE.git",
]