-
-
Notifications
You must be signed in to change notification settings - Fork 502
Add/model dataset group #1481
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Add/model dataset group #1481
Changes from 19 commits
b7f3e42
0085f7a
ff23303
cea7182
2932b65
a4a826d
929bddf
361ee4a
cf65d08
4d9b36c
7216854
048fad2
bc1d8c4
af276e8
4cc8e42
47e604e
df7059c
bd82eb0
c4ae4ee
d3b04b5
ff09e24
64f93fc
95654d4
e134fec
c9beb7f
cf0b63f
a768a2e
6a81ec5
486ba4d
5e5102a
6430b5d
3609538
3730526
d3f1820
de09a30
81d67e3
47cc2e6
381e4ea
5010b4f
0f1bf42
07e13ad
ba28351
359c223
6d39e5b
f6fddf8
4983e88
cf2d46c
9d47020
a532aa5
a232fda
573f5ca
5fe5ccf
65d42aa
5495c7f
a5a629d
db6ea3e
b72e103
9e00a8a
c1131b1
008bd0e
890dc37
576897a
e45e694
97f0809
d264f0c
8c9f65b
c6cfb5d
b8a4668
9d5e99c
8a86bde
d62d451
454be16
1e208a4
b6a540b
2e5109a
8af076f
6e0a768
1571740
a89875d
4bfac62
a93f186
dc283d0
435b474
8c0ff17
1ffac84
d88d566
03df35b
6cfcd2d
887f040
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,63 @@ | ||
| """create_asset_versions_table | ||
|
|
||
| Revision ID: a3d2e5f8c901 | ||
| Revises: a1b2c3d4e5f6 | ||
| Create Date: 2026-03-06 12:00:00.000000 | ||
|
|
||
| """ | ||
|
|
||
| from typing import Sequence, Union | ||
|
|
||
| from alembic import op | ||
| import sqlalchemy as sa | ||
|
|
||
|
|
||
| # revision identifiers, used by Alembic. | ||
| revision: str = "a3d2e5f8c901" | ||
| down_revision: Union[str, Sequence[str], None] = "a1b2c3d4e5f6" | ||
| branch_labels: Union[str, Sequence[str], None] = None | ||
| depends_on: Union[str, Sequence[str], None] = None | ||
|
|
||
|
|
||
| def upgrade() -> None: | ||
| """Create asset_versions table for tracking versioned groups of models and datasets.""" | ||
| connection = op.get_bind() | ||
|
|
||
| # Helper function to check if table exists | ||
| def table_exists(table_name: str) -> bool: | ||
| result = connection.execute( | ||
| sa.text("SELECT name FROM sqlite_master WHERE type='table' AND name=:name"), {"name": table_name} | ||
| ) | ||
| return result.fetchone() is not None | ||
|
|
||
| if not table_exists("asset_versions"): | ||
| op.create_table( | ||
| "asset_versions", | ||
| sa.Column("id", sa.String(), nullable=False), | ||
| sa.Column("asset_type", sa.String(), nullable=False), | ||
| sa.Column("group_name", sa.String(), nullable=False), | ||
| sa.Column("version", sa.Integer(), nullable=False), | ||
| sa.Column("asset_id", sa.String(), nullable=False), | ||
| sa.Column("tag", sa.String(), nullable=True), | ||
| sa.Column("job_id", sa.String(), nullable=True), | ||
| sa.Column("description", sa.String(), nullable=True), | ||
| sa.Column("created_at", sa.DateTime(), server_default=sa.text("(CURRENT_TIMESTAMP)"), nullable=False), | ||
| sa.PrimaryKeyConstraint("id"), | ||
| ) | ||
| op.create_index("idx_asset_versions_group", "asset_versions", ["asset_type", "group_name"], unique=False) | ||
| op.create_index("idx_asset_versions_tag", "asset_versions", ["asset_type", "group_name", "tag"], unique=False) | ||
| op.create_index("idx_asset_versions_asset_id", "asset_versions", ["asset_id"], unique=False) | ||
| op.create_index(op.f("ix_asset_versions_asset_type"), "asset_versions", ["asset_type"], unique=False) | ||
| op.create_index(op.f("ix_asset_versions_group_name"), "asset_versions", ["group_name"], unique=False) | ||
| op.create_index(op.f("ix_asset_versions_tag_col"), "asset_versions", ["tag"], unique=False) | ||
|
|
||
|
|
||
| def downgrade() -> None: | ||
| """Drop asset_versions table.""" | ||
| op.drop_index(op.f("ix_asset_versions_tag_col"), table_name="asset_versions") | ||
| op.drop_index(op.f("ix_asset_versions_group_name"), table_name="asset_versions") | ||
| op.drop_index(op.f("ix_asset_versions_asset_type"), table_name="asset_versions") | ||
| op.drop_index("idx_asset_versions_asset_id", table_name="asset_versions") | ||
| op.drop_index("idx_asset_versions_tag", table_name="asset_versions") | ||
| op.drop_index("idx_asset_versions_group", table_name="asset_versions") | ||
| op.drop_table("asset_versions") | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Just adding this here — it's what we discussed on Discord.
|
| Original file line number | Diff line number | Diff line change | ||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| @@ -0,0 +1,179 @@ | ||||||||||||
| """ | ||||||||||||
| asset_versions.py | ||||||||||||
|
|
||||||||||||
| API router for managing versioned groups of models and datasets. | ||||||||||||
| """ | ||||||||||||
|
|
||||||||||||
| from typing import Optional | ||||||||||||
|
|
||||||||||||
| from fastapi import APIRouter, HTTPException, Query | ||||||||||||
| from pydantic import BaseModel | ||||||||||||
|
|
||||||||||||
| from transformerlab.services import asset_version_service | ||||||||||||
|
|
||||||||||||
|
|
||||||||||||
| router = APIRouter(prefix="/asset_versions", tags=["asset_versions"]) | ||||||||||||
|
|
||||||||||||
|
|
||||||||||||
| # ─── Request / Response schemas ─────────────────────────────────────────────── | ||||||||||||
|
|
||||||||||||
|
|
||||||||||||
| class CreateVersionRequest(BaseModel): | ||||||||||||
|
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Bug: CreateVersionRequest lacks validation on asset_type and group_name fieldsCreateVersionRequest lacks validation on asset_type and group_name fields. Any string is accepted by Pydantic. Add Literal type or field validator. View DetailsLocation: AnalysisCreateVersionRequest lacks validation on asset_type and group_name fields
How to reproducePOST /asset_versions/versions with body {"asset_type": "model", "group_name": "", "asset_id": "test"}. Empty group_name is accepted and creates a version with empty group.Patch Details-class CreateVersionRequest(BaseModel):
- asset_type: str # 'model' or 'dataset'
- group_name: str
+class CreateVersionRequest(BaseModel):
+ asset_type: Literal["model", "dataset"]
+ group_name: str = Field(..., min_length=1, max_length=255)AI Fix PromptTip: Reply with |
||||||||||||
| asset_type: str # 'model' or 'dataset' | ||||||||||||
| group_name: str | ||||||||||||
| asset_id: str | ||||||||||||
| job_id: Optional[str] = None | ||||||||||||
| description: Optional[str] = None | ||||||||||||
| tag: Optional[str] = "latest" | ||||||||||||
|
|
||||||||||||
|
|
||||||||||||
| class SetTagRequest(BaseModel): | ||||||||||||
|
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Bug: SetTagRequest has no validation on tag value at schema levelSetTagRequest has no validation on tag value at schema level. Arbitrary strings bypass Pydantic, relying only on service-layer check. Add Literal constraint to the schema. View DetailsLocation: AnalysisSetTagRequest has no validation on tag value at schema level
How to reproducePUT /asset_versions/versions/model/group/1/tag with body {"tag": "invalid"}. Pydantic accepts it; only the service raises ValueError.Patch Details-class SetTagRequest(BaseModel):
- tag: str # 'latest', 'production', 'draft'
+class SetTagRequest(BaseModel):
+ tag: Literal["latest", "production", "draft"]AI Fix PromptTip: Reply with |
||||||||||||
| tag: str # 'latest', 'production', 'draft' | ||||||||||||
|
|
||||||||||||
|
|
||||||||||||
| # ─── Group endpoints ───────────────────────────────────────────────────────── | ||||||||||||
|
|
||||||||||||
|
|
||||||||||||
| @router.get("/groups", summary="List all version groups for a given asset type.") | ||||||||||||
| async def list_groups(asset_type: str = Query(..., description="'model' or 'dataset'")): | ||||||||||||
| try: | ||||||||||||
| return await asset_version_service.list_groups(asset_type) | ||||||||||||
| except ValueError as e: | ||||||||||||
| raise HTTPException(status_code=400, detail=str(e)) | ||||||||||||
|
|
||||||||||||
|
|
||||||||||||
| @router.delete( | ||||||||||||
| "/groups/{asset_type}/{group_name}", | ||||||||||||
| summary="Delete all versions in a group.", | ||||||||||||
| ) | ||||||||||||
| async def delete_group(asset_type: str, group_name: str): | ||||||||||||
|
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Security: delete_group and delete_version have no authorization checksdelete_group and delete_version have no authorization checks. Any authenticated user can delete any group's versions. Add ownership or role-based access control. View DetailsLocation: Analysisdelete_group and delete_version have no authorization checks
How to reproduceAs any authenticated user, call DELETE /asset_versions/groups/model/some_group. The group is deleted regardless of who created it.AI Fix PromptTip: Reply with |
||||||||||||
| try: | ||||||||||||
| count = await asset_version_service.delete_group(asset_type, group_name) | ||||||||||||
| except ValueError as e: | ||||||||||||
| raise HTTPException(status_code=400, detail=str(e)) | ||||||||||||
| return {"status": "success", "deleted_count": count} | ||||||||||||
|
|
||||||||||||
|
|
||||||||||||
| # ─── Version CRUD ───────────────────────────────────────────────────────────── | ||||||||||||
|
|
||||||||||||
|
|
||||||||||||
| @router.post("/versions", summary="Create a new version in a group.") | ||||||||||||
| async def create_version(body: CreateVersionRequest): | ||||||||||||
| try: | ||||||||||||
| result = await asset_version_service.create_version( | ||||||||||||
| asset_type=body.asset_type, | ||||||||||||
| group_name=body.group_name, | ||||||||||||
| asset_id=body.asset_id, | ||||||||||||
| job_id=body.job_id, | ||||||||||||
| description=body.description, | ||||||||||||
| tag=body.tag, | ||||||||||||
| ) | ||||||||||||
| except ValueError as e: | ||||||||||||
| raise HTTPException(status_code=400, detail=str(e)) | ||||||||||||
| return result | ||||||||||||
|
|
||||||||||||
|
|
||||||||||||
| @router.get( | ||||||||||||
| "/versions/{asset_type}/{group_name}", | ||||||||||||
| summary="List all versions in a group.", | ||||||||||||
| ) | ||||||||||||
| async def list_versions(asset_type: str, group_name: str): | ||||||||||||
| try: | ||||||||||||
| return await asset_version_service.list_versions(asset_type, group_name) | ||||||||||||
| except ValueError as e: | ||||||||||||
| raise HTTPException(status_code=400, detail=str(e)) | ||||||||||||
|
|
||||||||||||
|
|
||||||||||||
| @router.get( | ||||||||||||
| "/versions/{asset_type}/{group_name}/{version}", | ||||||||||||
| summary="Get a specific version by number.", | ||||||||||||
| ) | ||||||||||||
| async def get_version(asset_type: str, group_name: str, version: int): | ||||||||||||
| try: | ||||||||||||
| result = await asset_version_service.get_version(asset_type, group_name, version) | ||||||||||||
| except ValueError as e: | ||||||||||||
| raise HTTPException(status_code=400, detail=str(e)) | ||||||||||||
| if result is None: | ||||||||||||
| raise HTTPException(status_code=404, detail="Version not found") | ||||||||||||
| return result | ||||||||||||
|
|
||||||||||||
|
|
||||||||||||
| @router.delete( | ||||||||||||
| "/versions/{asset_type}/{group_name}/{version}", | ||||||||||||
| summary="Delete a specific version.", | ||||||||||||
| ) | ||||||||||||
| async def delete_version(asset_type: str, group_name: str, version: int): | ||||||||||||
| try: | ||||||||||||
| deleted = await asset_version_service.delete_version(asset_type, group_name, version) | ||||||||||||
| except ValueError as e: | ||||||||||||
| raise HTTPException(status_code=400, detail=str(e)) | ||||||||||||
| if not deleted: | ||||||||||||
| raise HTTPException(status_code=404, detail="Version not found") | ||||||||||||
| return {"status": "success"} | ||||||||||||
|
|
||||||||||||
|
|
||||||||||||
| # ─── Tag management ────────────────────────────────────────────────────────── | ||||||||||||
|
|
||||||||||||
|
|
||||||||||||
| @router.put( | ||||||||||||
| "/versions/{asset_type}/{group_name}/{version}/tag", | ||||||||||||
| summary="Set a tag on a specific version. Moves the tag from any other version in the group.", | ||||||||||||
| ) | ||||||||||||
| async def set_tag(asset_type: str, group_name: str, version: int, body: SetTagRequest): | ||||||||||||
| try: | ||||||||||||
| result = await asset_version_service.set_tag(asset_type, group_name, version, body.tag) | ||||||||||||
| except ValueError as e: | ||||||||||||
| raise HTTPException(status_code=400, detail=str(e)) | ||||||||||||
| if result is None: | ||||||||||||
| raise HTTPException(status_code=404, detail="Version not found") | ||||||||||||
| return result | ||||||||||||
|
|
||||||||||||
|
|
||||||||||||
| @router.delete( | ||||||||||||
| "/versions/{asset_type}/{group_name}/{version}/tag", | ||||||||||||
| summary="Clear the tag from a specific version.", | ||||||||||||
| ) | ||||||||||||
| async def clear_tag(asset_type: str, group_name: str, version: int): | ||||||||||||
| try: | ||||||||||||
| result = await asset_version_service.clear_tag(asset_type, group_name, version) | ||||||||||||
| except ValueError as e: | ||||||||||||
| raise HTTPException(status_code=400, detail=str(e)) | ||||||||||||
| if result is None: | ||||||||||||
| raise HTTPException(status_code=404, detail="Version not found") | ||||||||||||
| return result | ||||||||||||
|
|
||||||||||||
|
|
||||||||||||
| # ─── Resolution ────────────────────────────────────────────────────────────── | ||||||||||||
|
|
||||||||||||
|
|
||||||||||||
| @router.get( | ||||||||||||
| "/resolve/{asset_type}/{group_name}", | ||||||||||||
| summary="Resolve a group to a specific version. Defaults to 'latest' tag.", | ||||||||||||
| ) | ||||||||||||
| async def resolve( | ||||||||||||
| asset_type: str, | ||||||||||||
| group_name: str, | ||||||||||||
| tag: Optional[str] = Query(None, description="Tag to resolve: 'latest', 'production', 'draft'"), | ||||||||||||
| version: Optional[int] = Query(None, description="Exact version number to resolve"), | ||||||||||||
| ): | ||||||||||||
| try: | ||||||||||||
| result = await asset_version_service.resolve(asset_type, group_name, tag=tag, version=version) | ||||||||||||
| except ValueError as e: | ||||||||||||
| raise HTTPException(status_code=400, detail=str(e)) | ||||||||||||
| if result is None: | ||||||||||||
| raise HTTPException(status_code=404, detail="No matching version found") | ||||||||||||
| return result | ||||||||||||
|
|
||||||||||||
|
|
||||||||||||
| # ─── Bulk lookups (used by list views) ──────────────────────────────────────── | ||||||||||||
|
|
||||||||||||
|
|
||||||||||||
| @router.get( | ||||||||||||
| "/map/{asset_type}", | ||||||||||||
| summary="Get a map of asset_id -> group memberships for annotating list views.", | ||||||||||||
| ) | ||||||||||||
| async def get_asset_group_map(asset_type: str): | ||||||||||||
| try: | ||||||||||||
| return await asset_version_service.get_all_asset_group_map(asset_type) | ||||||||||||
| except ValueError as e: | ||||||||||||
| raise HTTPException(status_code=400, detail=str(e)) | ||||||||||||
| Original file line number | Diff line number | Diff line change | ||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|
|
|
@@ -35,6 +35,8 @@ | |||||||||||
| get_job_models_dir, | ||||||||||||
| get_models_dir, | ||||||||||||
| ) | ||||||||||||
| from transformerlab.services import asset_version_service | ||||||||||||
|
|
||||||||||||
| from transformerlab.services.cache_service import cache, cached | ||||||||||||
|
|
||||||||||||
| router = APIRouter(prefix="/jobs", tags=["train"]) | ||||||||||||
|
|
@@ -1103,7 +1105,7 @@ async def get_artifacts(job_id: str, request: Request): | |||||||||||
| from lab.dirs import get_job_artifacts_dir | ||||||||||||
|
|
||||||||||||
| artifacts_dir = await get_job_artifacts_dir(job_id) | ||||||||||||
| artifacts = await get_artifacts_from_directory(artifacts_dir, storage) | ||||||||||||
| artifacts = await get_artifacts_from_directory(artifacts_dir) | ||||||||||||
| except Exception as e: | ||||||||||||
| print(f"Error getting artifacts for job {job_id}: {e}") | ||||||||||||
| artifacts = [] | ||||||||||||
|
|
@@ -1419,7 +1421,11 @@ async def save_dataset_to_registry( | |||||||||||
| If a dataset with that name already exists, a timestamped suffix is added. | ||||||||||||
| - mode='existing': Merge into an existing dataset in the registry. target_name must be provided and must | ||||||||||||
| refer to an existing dataset. Files from the job dataset are copied into the existing dataset directory. | ||||||||||||
|
|
||||||||||||
| In both modes a new version entry is recorded in the asset_versions table | ||||||||||||
| so the asset can be tracked as part of a versioned group. | ||||||||||||
| """ | ||||||||||||
| from transformerlab.services import asset_version_service | ||||||||||||
|
|
||||||||||||
| try: | ||||||||||||
|
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Bug: Version entry created even when file copy failsVersion entry created even when file copy fails. Dataset/model save catches copy error but proceeds to create_version. Move versioning inside the copy try block. View DetailsLocation: Analysis**Version entry created even when file copy fails. Dataset/model save catches copy error but proceeds **
How to reproduce1. Trigger a save_dataset_to_registry where the copy fails (e.g., disk full, permissions error). 2. Observe the version entry is still created and 'success' is returned.AI Fix PromptTip: Reply with |
||||||||||||
| # Secure the source dataset name | ||||||||||||
|
|
@@ -1471,7 +1477,21 @@ async def save_dataset_to_registry( | |||||||||||
| except Exception as copy_err: | ||||||||||||
| print(f"Storage.copy_dir failed: {copy_err}") | ||||||||||||
|
|
||||||||||||
| return {"status": "success", "message": f"Dataset saved to registry as '{final_name}'"} | ||||||||||||
| # Create a version entry for the dataset | ||||||||||||
| group_name = dataset_name_secure | ||||||||||||
| version_entry = await asset_version_service.create_version( | ||||||||||||
| asset_type="dataset", | ||||||||||||
| group_name=group_name, | ||||||||||||
| asset_id=final_name, | ||||||||||||
| job_id=job_id, | ||||||||||||
| description=f"Created from job {job_id}", | ||||||||||||
| ) | ||||||||||||
|
|
||||||||||||
| return { | ||||||||||||
| "status": "success", | ||||||||||||
| "message": f"Dataset saved to registry as '{final_name}'", | ||||||||||||
| "version": version_entry, | ||||||||||||
| } | ||||||||||||
|
|
||||||||||||
| except HTTPException: | ||||||||||||
| raise | ||||||||||||
|
|
@@ -1548,7 +1568,21 @@ async def save_model_to_registry( | |||||||||||
| except Exception as copy_err: | ||||||||||||
| print(f"storage.copy_dir failed: {copy_err}") | ||||||||||||
|
|
||||||||||||
| return {"status": "success", "message": f"Model saved to registry as '{final_name}'"} | ||||||||||||
| # Create a version entry for the model | ||||||||||||
| group_name = model_name_secure | ||||||||||||
| version_entry = await asset_version_service.create_version( | ||||||||||||
| asset_type="model", | ||||||||||||
| group_name=group_name, | ||||||||||||
| asset_id=final_name, | ||||||||||||
| job_id=job_id, | ||||||||||||
| description=f"Created from job {job_id}", | ||||||||||||
| ) | ||||||||||||
|
|
||||||||||||
| return { | ||||||||||||
| "status": "success", | ||||||||||||
| "message": f"Model saved to registry as '{final_name}'", | ||||||||||||
| "version": version_entry, | ||||||||||||
| } | ||||||||||||
|
|
||||||||||||
| except HTTPException: | ||||||||||||
| raise | ||||||||||||
|
|
||||||||||||
| Original file line number | Diff line number | Diff line change | ||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|
|
|
@@ -818,7 +818,23 @@ async def get_model_prompt_template(model: str): | |||||||||||
| @router.get("/model/list") | ||||||||||||
| async def model_local_list(embedding=False): | ||||||||||||
| # the model list is a combination of downloaded hugging face models and locally generated models | ||||||||||||
| return await model_helper.list_installed_models(embedding) | ||||||||||||
| models = await model_helper.list_installed_models(embedding) | ||||||||||||
|
|
||||||||||||
| # Augment each model with version group info if any | ||||||||||||
| try: | ||||||||||||
| from transformerlab.services import asset_version_service | ||||||||||||
|
|
||||||||||||
| group_map = await asset_version_service.get_all_asset_group_map("model") | ||||||||||||
|
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Bug: loop reuses `model` as iterator over `models` list
|
||||||||||||
| What fails | The for loop for model in models shadows the outer models variable's items. After the loop, model points to the last item. While the return uses models (the list), this shadowing makes the code fragile and confusing. |
| Result | After the loop, model references the last element of the list. While this doesn't immediately break since return models uses the list, it's error-prone for future edits. |
| Expected | Use a different loop variable name like entry or m to avoid shadowing and reduce confusion. |
| Impact | Code fragility: any future code added after the loop that references model will get the last item, not what was intended. |
How to reproduce
Read model.py:820-836. The variable `models` is assigned from list_installed_models, then `for model in models` reuses `model` as the loop var.Patch Details
- for model in models:
- model_id = model.get("model_id", "")
- if model_id in group_map:
- model["version_groups"] = group_map[model_id]
+ for entry in models:
+ model_id = entry.get("model_id", "")
+ if model_id in group_map:
+ entry["version_groups"] = group_map[model_id]AI Fix Prompt
Fix this issue: Variable shadowing: loop reuses `model` as iterator over `models` list. Outer return still works but hides bugs. Use `m` or `entry` as loop variable.
Location: api/transformerlab/routers/model.py (lines 826)
Problem: The for loop `for model in models` shadows the outer `models` variable's items. After the loop, `model` points to the last item. While the return uses `models` (the list), this shadowing makes the code fragile and confusing.
Current behavior: After the loop, `model` references the last element of the list. While this doesn't immediately break since `return models` uses the list, it's error-prone for future edits.
Expected: Use a different loop variable name like `entry` or `m` to avoid shadowing and reduce confusion.
Steps to reproduce: Read model.py:820-836. The variable `models` is assigned from list_installed_models, then `for model in models` reuses `model` as the loop var.
Provide a code fix.
Tip: Reply with @paragon-run to automatically fix this issue
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Bug: Silent failure on version group augmentation in model and data list
Silent failure on version group augmentation in model and data list. Bare except catches all errors and only prints. Add proper logging instead of print.
View Details
Location: api/transformerlab/routers/model.py (lines 833)
Analysis
Silent failure on version group augmentation in model and data list
| What fails | Both model_local_list and dataset_list catch all exceptions from version group augmentation with a broad except Exception and only print a warning. Database connection failures, import errors, or schema mismatches are silently swallowed. |
| Result | Version group data silently missing from responses. Only a print statement indicates the failure. No structured logging for monitoring/alerting. |
| Expected | Use the application logger (not print) at WARNING level, and consider whether some errors (like DB connection failures) should propagate. |
| Impact | Debugging difficulty: silent failures in production with no structured logging. Missing version data without any user-visible indication. |
How to reproduce
Break the asset_versions table (e.g., drop it). Call GET /model/list. The warning is printed to stdout but no error is visible to API callers or monitoring.AI Fix Prompt
Fix this issue: Silent failure on version group augmentation in model and data list. Bare except catches all errors and only prints. Add proper logging instead of print.
Location: api/transformerlab/routers/model.py (lines 833)
Problem: Both model_local_list and dataset_list catch all exceptions from version group augmentation with a broad `except Exception` and only print a warning. Database connection failures, import errors, or schema mismatches are silently swallowed.
Current behavior: Version group data silently missing from responses. Only a print statement indicates the failure. No structured logging for monitoring/alerting.
Expected: Use the application logger (not print) at WARNING level, and consider whether some errors (like DB connection failures) should propagate.
Steps to reproduce: Break the asset_versions table (e.g., drop it). Call GET /model/list. The warning is printed to stdout but no error is visible to API callers or monitoring.
Provide a code fix.
Tip: Reply with @paragon-run to automatically fix this issue



There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Bug: Migration uses sqlite_master, breaking Postgres
Migration uses sqlite_master, breaking Postgres. AGENTS.md says both SQLite and Postgres are supported. Use dialect-agnostic table existence check via sa.inspect.
View Details
Location:
api/alembic/versions/a3d2e5f8c901_create_asset_versions_table.py (lines 27) — Analysis
Migration uses sqlite_master, breaking Postgres. AGENTS.md says both SQLite and Postgres are supported.
How to reproduce
Patch Details
AI Fix Prompt
Tip: Reply with
@paragon-runto automatically fix this issue