Folders, Persistent Import Progress Tracking & UX Enhancements (#3841)
Co-authored-by: Jonathan Dobson <jon.m.dobson@gmail.com>
This commit is contained in:
@@ -27,6 +27,7 @@ from skyvern.forge.sdk.db.models import (
|
||||
CredentialModel,
|
||||
CredentialParameterModel,
|
||||
DebugSessionModel,
|
||||
FolderModel,
|
||||
OnePasswordCredentialParameterModel,
|
||||
OrganizationAuthTokenModel,
|
||||
OrganizationBitwardenCollectionModel,
|
||||
@@ -1405,6 +1406,7 @@ class AgentDB:
|
||||
cache_key: str | None = None,
|
||||
run_sequentially: bool = False,
|
||||
sequential_key: str | None = None,
|
||||
folder_id: str | None = None,
|
||||
) -> Workflow:
|
||||
async with self.Session() as session:
|
||||
workflow = WorkflowModel(
|
||||
@@ -1427,6 +1429,7 @@ class AgentDB:
|
||||
cache_key=cache_key or DEFAULT_SCRIPT_RUN_ID,
|
||||
run_sequentially=run_sequentially,
|
||||
sequential_key=sequential_key,
|
||||
folder_id=folder_id,
|
||||
)
|
||||
if workflow_permanent_id:
|
||||
workflow.workflow_permanent_id = workflow_permanent_id
|
||||
@@ -1605,14 +1608,15 @@ class AgentDB:
|
||||
only_saved_tasks: bool = False,
|
||||
only_workflows: bool = False,
|
||||
search_key: str | None = None,
|
||||
folder_id: str | None = None,
|
||||
statuses: list[WorkflowStatus] | None = None,
|
||||
) -> list[Workflow]:
|
||||
"""
|
||||
Get all workflows with the latest version for the organization.
|
||||
|
||||
Search semantics:
|
||||
- If `search_key` is provided, its value is used as a unified search term for both
|
||||
`workflows.title` and workflow parameter metadata (key, description, and default_value).
|
||||
- If `search_key` is provided, its value is used as a unified search term for
|
||||
`workflows.title`, `folders.title`, and workflow parameter metadata (key, description, and default_value).
|
||||
- If `search_key` is not provided, no search filtering is applied.
|
||||
- Parameter metadata search excludes soft-deleted parameter rows across parameter tables.
|
||||
"""
|
||||
@@ -1635,11 +1639,19 @@ class AgentDB:
|
||||
)
|
||||
.subquery()
|
||||
)
|
||||
main_query = select(WorkflowModel).join(
|
||||
subquery,
|
||||
(WorkflowModel.organization_id == subquery.c.organization_id)
|
||||
& (WorkflowModel.workflow_permanent_id == subquery.c.workflow_permanent_id)
|
||||
& (WorkflowModel.version == subquery.c.max_version),
|
||||
main_query = (
|
||||
select(WorkflowModel)
|
||||
.join(
|
||||
subquery,
|
||||
(WorkflowModel.organization_id == subquery.c.organization_id)
|
||||
& (WorkflowModel.workflow_permanent_id == subquery.c.workflow_permanent_id)
|
||||
& (WorkflowModel.version == subquery.c.max_version),
|
||||
)
|
||||
.outerjoin(
|
||||
FolderModel,
|
||||
(WorkflowModel.folder_id == FolderModel.folder_id)
|
||||
& (FolderModel.organization_id == WorkflowModel.organization_id),
|
||||
)
|
||||
)
|
||||
if only_saved_tasks:
|
||||
main_query = main_query.where(WorkflowModel.is_saved_task.is_(True))
|
||||
@@ -1647,9 +1659,12 @@ class AgentDB:
|
||||
main_query = main_query.where(WorkflowModel.is_saved_task.is_(False))
|
||||
if statuses:
|
||||
main_query = main_query.where(WorkflowModel.status.in_(statuses))
|
||||
if folder_id:
|
||||
main_query = main_query.where(WorkflowModel.folder_id == folder_id)
|
||||
if search_key:
|
||||
search_like = f"%{search_key}%"
|
||||
title_like = WorkflowModel.title.ilike(search_like)
|
||||
folder_title_like = FolderModel.title.ilike(search_like)
|
||||
|
||||
parameter_filters = [
|
||||
# WorkflowParameterModel
|
||||
@@ -1771,7 +1786,7 @@ class AgentDB:
|
||||
)
|
||||
),
|
||||
]
|
||||
main_query = main_query.where(or_(title_like, or_(*parameter_filters)))
|
||||
main_query = main_query.where(or_(title_like, folder_title_like, or_(*parameter_filters)))
|
||||
main_query = (
|
||||
main_query.order_by(WorkflowModel.created_at.desc()).limit(page_size).offset(db_page * page_size)
|
||||
)
|
||||
@@ -1791,6 +1806,8 @@ class AgentDB:
|
||||
version: int | None = None,
|
||||
run_with: str | None = None,
|
||||
cache_key: str | None = None,
|
||||
status: str | None = None,
|
||||
import_error: str | None = None,
|
||||
) -> Workflow:
|
||||
try:
|
||||
async with self.Session() as session:
|
||||
@@ -1812,6 +1829,10 @@ class AgentDB:
|
||||
workflow.run_with = run_with
|
||||
if cache_key is not None:
|
||||
workflow.cache_key = cache_key
|
||||
if status is not None:
|
||||
workflow.status = status
|
||||
if import_error is not None:
|
||||
workflow.import_error = import_error
|
||||
await session.commit()
|
||||
await session.refresh(workflow)
|
||||
return convert_to_workflow(workflow, self.debug_enabled)
|
||||
@@ -1846,6 +1867,372 @@ class AgentDB:
|
||||
await session.execute(update_deleted_at_query)
|
||||
await session.commit()
|
||||
|
||||
async def create_folder(
    self,
    organization_id: str,
    title: str,
    description: str | None = None,
) -> FolderModel:
    """Insert a new folder row for the given organization and return it."""
    try:
        async with self.Session() as session:
            new_folder = FolderModel(
                organization_id=organization_id,
                title=title,
                description=description,
            )
            session.add(new_folder)
            await session.commit()
            # Refresh so generated fields (folder_id, timestamps) are populated.
            await session.refresh(new_folder)
            return new_folder
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in create_folder", exc_info=True)
        raise
|
||||
|
||||
async def get_folders(
    self,
    organization_id: str,
    page: int = 1,
    page_size: int = 10,
    search_query: str | None = None,
) -> list[FolderModel]:
    """List non-deleted folders for an organization, most recently modified first.

    Supports 1-indexed pagination and an optional case-insensitive substring
    search over folder title and description.
    """
    try:
        async with self.Session() as session:
            query = select(FolderModel).filter_by(organization_id=organization_id)
            query = query.filter(FolderModel.deleted_at.is_(None))

            if search_query:
                pattern = f"%{search_query}%"
                query = query.filter(
                    or_(
                        FolderModel.title.ilike(pattern),
                        FolderModel.description.ilike(pattern),
                    )
                )

            query = (
                query.order_by(FolderModel.modified_at.desc())
                .offset((page - 1) * page_size)
                .limit(page_size)
            )

            rows = await session.execute(query)
            return list(rows.scalars().all())
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in get_folders", exc_info=True)
        raise
|
||||
|
||||
async def get_folder(
    self,
    folder_id: str,
    organization_id: str,
) -> FolderModel | None:
    """Fetch a single non-deleted folder by ID, scoped to the organization."""
    try:
        async with self.Session() as session:
            query = (
                select(FolderModel)
                .filter_by(folder_id=folder_id, organization_id=organization_id)
                .filter(FolderModel.deleted_at.is_(None))
            )
            return (await session.execute(query)).scalar_one_or_none()
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in get_folder", exc_info=True)
        raise
|
||||
|
||||
async def update_folder(
    self,
    folder_id: str,
    organization_id: str,
    title: str | None = None,
    description: str | None = None,
) -> FolderModel | None:
    """Update a folder's title and/or description.

    Fields left as None are not modified. Returns the refreshed folder, or
    None when no matching non-deleted folder exists.
    """
    try:
        async with self.Session() as session:
            query = (
                select(FolderModel)
                .filter_by(folder_id=folder_id, organization_id=organization_id)
                .filter(FolderModel.deleted_at.is_(None))
            )
            folder = (await session.execute(query)).scalar_one_or_none()
            if folder is None:
                return None

            if title is not None:
                folder.title = title
            if description is not None:
                folder.description = description

            # Touch modified_at explicitly so the folder surfaces at the top
            # of recency-ordered listings even if no column value changed.
            folder.modified_at = datetime.utcnow()
            await session.commit()
            await session.refresh(folder)
            return folder
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in update_folder", exc_info=True)
        raise
|
||||
|
||||
async def get_workflow_permanent_ids_in_folder(
    self,
    folder_id: str,
    organization_id: str,
) -> list[str]:
    """Get workflow permanent IDs (latest versions only) in a folder.

    A workflow counts as "in" the folder only when its latest non-deleted
    version row has folder_id set to the given folder.
    """
    try:
        async with self.Session() as session:
            # Subquery to get the latest version for each workflow
            # (max version per permanent ID, excluding soft-deleted rows).
            subquery = (
                select(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                    func.max(WorkflowModel.version).label("max_version"),
                )
                .where(WorkflowModel.organization_id == organization_id)
                .where(WorkflowModel.deleted_at.is_(None))
                .group_by(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                )
                .subquery()
            )

            # Get workflow_permanent_ids where the latest version is in this folder.
            # Joining on version == max_version restricts each permanent ID to
            # exactly its latest row before applying the folder filter.
            stmt = (
                select(WorkflowModel.workflow_permanent_id)
                .join(
                    subquery,
                    (WorkflowModel.organization_id == subquery.c.organization_id)
                    & (WorkflowModel.workflow_permanent_id == subquery.c.workflow_permanent_id)
                    & (WorkflowModel.version == subquery.c.max_version),
                )
                .where(WorkflowModel.folder_id == folder_id)
            )
            result = await session.execute(stmt)
            return list(result.scalars().all())
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in get_workflow_permanent_ids_in_folder", exc_info=True)
        raise
|
||||
|
||||
async def soft_delete_folder(
    self,
    folder_id: str,
    organization_id: str,
    delete_workflows: bool = False,
) -> bool:
    """Soft delete a folder. Optionally delete all workflows in the folder.

    When delete_workflows is True, every version of each workflow whose
    latest version lives in this folder is soft-deleted alongside the folder.
    When False, workflows are detached (folder_id set to NULL) and kept.
    Returns True on success, False when the folder does not exist or is
    already deleted.
    """
    try:
        async with self.Session() as session:
            # Check if folder exists
            folder_stmt = (
                select(FolderModel)
                .filter_by(folder_id=folder_id, organization_id=organization_id)
                .filter(FolderModel.deleted_at.is_(None))
            )
            folder_result = await session.execute(folder_stmt)
            folder = folder_result.scalar_one_or_none()
            if not folder:
                return False

            # If delete_workflows is True, delete all workflows in the folder
            if delete_workflows:
                # Get workflow permanent IDs in the folder (inline logic —
                # same query as get_workflow_permanent_ids_in_folder, kept
                # inline so it runs inside this session/transaction).
                subquery = (
                    select(
                        WorkflowModel.organization_id,
                        WorkflowModel.workflow_permanent_id,
                        func.max(WorkflowModel.version).label("max_version"),
                    )
                    .where(WorkflowModel.organization_id == organization_id)
                    .where(WorkflowModel.deleted_at.is_(None))
                    .group_by(
                        WorkflowModel.organization_id,
                        WorkflowModel.workflow_permanent_id,
                    )
                    .subquery()
                )

                workflow_permanent_ids_stmt = (
                    select(WorkflowModel.workflow_permanent_id)
                    .join(
                        subquery,
                        (WorkflowModel.organization_id == subquery.c.organization_id)
                        & (WorkflowModel.workflow_permanent_id == subquery.c.workflow_permanent_id)
                        & (WorkflowModel.version == subquery.c.max_version),
                    )
                    .where(WorkflowModel.folder_id == folder_id)
                )
                result = await session.execute(workflow_permanent_ids_stmt)
                workflow_permanent_ids = list(result.scalars().all())

                # Soft delete all workflows with these permanent IDs in a single bulk update
                # (note: this targets ALL versions of each permanent ID, not
                # just the latest one).
                if workflow_permanent_ids:
                    update_workflows_query = (
                        update(WorkflowModel)
                        .where(WorkflowModel.workflow_permanent_id.in_(workflow_permanent_ids))
                        .where(WorkflowModel.organization_id == organization_id)
                        .where(WorkflowModel.deleted_at.is_(None))
                        .values(deleted_at=datetime.utcnow())
                    )
                    await session.execute(update_workflows_query)
            else:
                # Just remove folder_id from all workflows in this folder
                update_workflows_query = (
                    update(WorkflowModel)
                    .where(WorkflowModel.folder_id == folder_id)
                    .where(WorkflowModel.organization_id == organization_id)
                    .values(folder_id=None, modified_at=datetime.utcnow())
                )
                await session.execute(update_workflows_query)

            # Soft delete the folder
            folder.deleted_at = datetime.utcnow()
            await session.commit()
            return True
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in soft_delete_folder", exc_info=True)
        raise
|
||||
|
||||
async def get_folder_workflow_count(
    self,
    folder_id: str,
    organization_id: str,
) -> int:
    """Get the count of workflows (latest versions only) in a folder.

    Counts only workflows whose latest non-deleted version has folder_id
    set to the given folder; returns 0 when the folder is empty or unknown.
    """
    try:
        async with self.Session() as session:
            # Subquery to get the latest version for each workflow (same pattern as get_workflows_by_organization_id)
            subquery = (
                select(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                    func.max(WorkflowModel.version).label("max_version"),
                )
                .where(WorkflowModel.organization_id == organization_id)
                .where(WorkflowModel.deleted_at.is_(None))
                .group_by(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                )
                .subquery()
            )

            # Count workflows where the latest version is in this folder
            stmt = (
                select(func.count(WorkflowModel.workflow_permanent_id))
                .join(
                    subquery,
                    (WorkflowModel.organization_id == subquery.c.organization_id)
                    & (WorkflowModel.workflow_permanent_id == subquery.c.workflow_permanent_id)
                    & (WorkflowModel.version == subquery.c.max_version),
                )
                .where(WorkflowModel.folder_id == folder_id)
            )
            result = await session.execute(stmt)
            # COUNT(...) always yields exactly one row, so scalar_one is safe.
            return result.scalar_one()
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in get_folder_workflow_count", exc_info=True)
        raise
|
||||
|
||||
async def get_folder_workflow_counts_batch(
    self,
    folder_ids: list[str],
    organization_id: str,
) -> dict[str, int]:
    """Get workflow counts for multiple folders in a single query.

    Returns a mapping of folder_id -> number of workflows whose latest
    non-deleted version lives in that folder. Every requested folder gets
    an entry, defaulting to 0 for folders with no workflows (previously
    empty folders were silently omitted from the result, contradicting the
    documented behavior and forcing callers to handle missing keys).
    """
    # Seed every requested folder with 0 so callers can index unconditionally.
    counts: dict[str, int] = {fid: 0 for fid in folder_ids}
    if not folder_ids:
        # Avoid issuing a query with an empty IN () clause.
        return counts
    try:
        async with self.Session() as session:
            # Subquery to get the latest version for each workflow
            subquery = (
                select(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                    func.max(WorkflowModel.version).label("max_version"),
                )
                .where(WorkflowModel.organization_id == organization_id)
                .where(WorkflowModel.deleted_at.is_(None))
                .group_by(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                )
                .subquery()
            )

            # Count workflows grouped by folder_id
            stmt = (
                select(
                    WorkflowModel.folder_id,
                    func.count(WorkflowModel.workflow_permanent_id).label("count"),
                )
                .join(
                    subquery,
                    (WorkflowModel.organization_id == subquery.c.organization_id)
                    & (WorkflowModel.workflow_permanent_id == subquery.c.workflow_permanent_id)
                    & (WorkflowModel.version == subquery.c.max_version),
                )
                .where(WorkflowModel.folder_id.in_(folder_ids))
                .group_by(WorkflowModel.folder_id)
            )
            result = await session.execute(stmt)

            # Overlay actual counts on the zero-filled defaults.
            for row in result.all():
                counts[row.folder_id] = row.count
            return counts
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in get_folder_workflow_counts_batch", exc_info=True)
        raise
|
||||
|
||||
async def update_workflow_folder(
    self,
    workflow_permanent_id: str,
    organization_id: str,
    folder_id: str | None,
) -> Workflow | None:
    """Update folder assignment for the latest version of a workflow.

    Pass folder_id=None to remove the workflow from its current folder.
    Returns the updated Workflow, or None when the workflow (or its model
    row) cannot be found. Raises ValueError when a non-None folder_id does
    not refer to an existing, non-deleted folder in this organization.
    """
    try:
        # Get the latest version of the workflow
        latest_workflow = await self.get_workflow_by_permanent_id(
            workflow_permanent_id=workflow_permanent_id,
            organization_id=organization_id,
        )

        if not latest_workflow:
            return None

        async with self.Session() as session:
            # Validate folder exists in-org if folder_id is provided
            if folder_id:
                stmt = (
                    select(FolderModel.folder_id)
                    .where(FolderModel.folder_id == folder_id)
                    .where(FolderModel.organization_id == organization_id)
                    .where(FolderModel.deleted_at.is_(None))
                )
                if (await session.scalar(stmt)) is None:
                    raise ValueError(f"Folder {folder_id} not found")

            # Re-fetch the row by primary key inside this session so the
            # mutation is tracked by this transaction.
            workflow_model = await session.get(WorkflowModel, latest_workflow.workflow_id)
            if workflow_model:
                workflow_model.folder_id = folder_id
                workflow_model.modified_at = datetime.utcnow()

                # Update folder's modified_at in the same transaction
                if folder_id:
                    folder_model = await session.get(FolderModel, folder_id)
                    if folder_model:
                        folder_model.modified_at = datetime.utcnow()

                await session.commit()
                await session.refresh(workflow_model)

                return convert_to_workflow(workflow_model, self.debug_enabled)
            return None
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in update_workflow_folder", exc_info=True)
        raise
|
||||
|
||||
async def create_workflow_run(
|
||||
self,
|
||||
workflow_permanent_id: str,
|
||||
|
||||
@@ -39,6 +39,7 @@ CREDENTIAL_AZURE_VAULT_PARAMETER_PREFIX = "azcp"
|
||||
CREDENTIAL_PARAMETER_PREFIX = "cp"
|
||||
CREDENTIAL_PREFIX = "cred"
|
||||
DEBUG_SESSION_PREFIX = "ds"
|
||||
FOLDER_PREFIX = "fld"
|
||||
BROWSER_PROFILE_PREFIX = "bp"
|
||||
ORGANIZATION_BITWARDEN_COLLECTION_PREFIX = "obc"
|
||||
TASK_V2_ID = "tsk_v2"
|
||||
@@ -220,6 +221,11 @@ def generate_debug_session_id() -> str:
|
||||
return f"{DEBUG_SESSION_PREFIX}_{int_id}"
|
||||
|
||||
|
||||
def generate_folder_id() -> str:
    """Return a new unique folder ID prefixed with FOLDER_PREFIX ("fld")."""
    return f"{FOLDER_PREFIX}_{generate_id()}"
|
||||
|
||||
|
||||
def generate_organization_bitwarden_collection_id() -> str:
|
||||
int_id = generate_id()
|
||||
return f"{ORGANIZATION_BITWARDEN_COLLECTION_PREFIX}_{int_id}"
|
||||
|
||||
@@ -32,6 +32,7 @@ from skyvern.forge.sdk.db.id import (
|
||||
generate_credential_id,
|
||||
generate_credential_parameter_id,
|
||||
generate_debug_session_id,
|
||||
generate_folder_id,
|
||||
generate_onepassword_credential_parameter_id,
|
||||
generate_org_id,
|
||||
generate_organization_auth_token_id,
|
||||
@@ -218,6 +219,28 @@ class ArtifactModel(Base):
|
||||
)
|
||||
|
||||
|
||||
class FolderModel(Base):
    """ORM model for workflow folders: org-scoped groupings of workflows,
    soft-deleted via deleted_at rather than removed."""

    __tablename__ = "folders"
    __table_args__ = (
        Index("folder_organization_id_idx", "organization_id"),
        # Composite index supports org-scoped title lookups/search.
        Index("folder_organization_title_idx", "organization_id", "title"),
    )

    # Primary key generated with the "fld" prefix (see generate_folder_id).
    folder_id = Column(String, primary_key=True, default=generate_folder_id)
    # ON DELETE CASCADE: removing the organization removes its folders.
    organization_id = Column(String, ForeignKey("organizations.organization_id", ondelete="CASCADE"), nullable=False)
    title = Column(String, nullable=False)
    description = Column(String, nullable=True)

    created_at = Column(DateTime, default=datetime.datetime.utcnow, nullable=False)
    modified_at = Column(
        DateTime,
        default=datetime.datetime.utcnow,
        onupdate=datetime.datetime.utcnow,
        nullable=False,
    )
    # Soft-delete marker; NULL means the folder is active.
    deleted_at = Column(DateTime, nullable=True)
|
||||
|
||||
|
||||
class WorkflowModel(Base):
|
||||
__tablename__ = "workflows"
|
||||
__table_args__ = (
|
||||
@@ -230,6 +253,7 @@ class WorkflowModel(Base):
|
||||
Index("permanent_id_version_idx", "workflow_permanent_id", "version"),
|
||||
Index("organization_id_title_idx", "organization_id", "title"),
|
||||
Index("workflow_oid_status_idx", "organization_id", "status"),
|
||||
Index("workflow_folder_id_idx", "folder_id"),
|
||||
)
|
||||
|
||||
workflow_id = Column(String, primary_key=True, default=generate_workflow_id)
|
||||
@@ -252,6 +276,8 @@ class WorkflowModel(Base):
|
||||
cache_key = Column(String, nullable=True)
|
||||
run_sequentially = Column(Boolean, nullable=True)
|
||||
sequential_key = Column(String, nullable=True)
|
||||
folder_id = Column(String, ForeignKey("folders.folder_id", ondelete="SET NULL"), nullable=True)
|
||||
import_error = Column(String, nullable=True) # Error message if import failed
|
||||
|
||||
created_at = Column(DateTime, default=datetime.datetime.utcnow, nullable=False)
|
||||
modified_at = Column(
|
||||
|
||||
@@ -288,6 +288,8 @@ def convert_to_workflow(workflow_model: WorkflowModel, debug_enabled: bool = Fal
|
||||
cache_key=workflow_model.cache_key,
|
||||
run_sequentially=workflow_model.run_sequentially,
|
||||
sequential_key=workflow_model.sequential_key,
|
||||
folder_id=workflow_model.folder_id,
|
||||
import_error=workflow_model.import_error,
|
||||
)
|
||||
|
||||
|
||||
|
||||
Reference in New Issue
Block a user