Compare commits
81 Commits
28e8722a10
...
compare
| Author | SHA1 | Date |
|---|---|---|
| 0c2504ff83 | |||
| 43772aba68 | |||
| 145368692e | |||
| bf1134e47f | |||
| 7580036c9d | |||
| 47a75b428e | |||
| b91a2f0a31 | |||
| 66795471a8 | |||
| 67acb8e08a | |||
| af0cc52d89 | |||
| 3669814731 | |||
| a8edc251a2 | |||
| d43813cc2a | |||
| 97424ea0af | |||
| eafc5de6f2 | |||
| fa710e914e | |||
| e3e337af88 | |||
| 15ca74ad4b | |||
| a11d76fd5f | |||
| cf1238bbff | |||
| d3b7f31730 | |||
| 52c06c4db7 | |||
| 3a320f3187 | |||
| c37e2bd5e0 | |||
| 9418661be9 | |||
| 7349015177 | |||
| 918a6e9414 | |||
| 5909c0ec99 | |||
| 286b0410ff | |||
| 0c18f570d4 | |||
| 3f2160405a | |||
| f3f57f7c53 | |||
| 957aab0656 | |||
| 0a94548f5e | |||
| 124fbacd2a | |||
| 0f0aeed2f1 | |||
| fe6e55de16 | |||
| dd454ebf6f | |||
| 2854907359 | |||
| 48417b6d73 | |||
| ce7abd8a29 | |||
| df12413c5d | |||
| c56b07f999 | |||
| c89cecd43f | |||
| 37f6166b37 | |||
| dc31b0bebb | |||
| f0b0114fc5 | |||
| 0c9446b3f8 | |||
| 4c49635018 | |||
| 826ae384df | |||
| 54ba10d4e5 | |||
| 0e6de4ae0b | |||
| b919c52255 | |||
| 8fc8372a9b | |||
| 246b78719e | |||
| 0d5f393aff | |||
| 4fb038eda1 | |||
| 690aaafacf | |||
| 3e9ff43bc9 | |||
| 91a0cc5138 | |||
| 588822f856 | |||
| 1cbad1a3ed | |||
| b5794e9db5 | |||
| b938dc68fa | |||
| dde0e90442 | |||
| 0b5e9377e4 | |||
| 091936069a | |||
| 0d1eca4ef3 | |||
| 39153d3493 | |||
| af2c148747 | |||
| bf845292ee | |||
| 40453dad94 | |||
| 13818737e2 | |||
| e4b126075d | |||
| 04a29d7424 | |||
| a6314cadd9 | |||
| 024caac5e5 | |||
| c9a2817f41 | |||
| 69f34a84c4 | |||
| 470e3114c4 | |||
| ac189d75ba |
61
CLAUDE.md
Normal file
61
CLAUDE.md
Normal file
@@ -0,0 +1,61 @@
|
||||
# CLAUDE.md
|
||||
|
||||
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
||||
|
||||
## Project Overview
|
||||
|
||||
Turbo Sorter Pro v12.5 - A dual-interface image organization tool combining Streamlit (admin dashboard) and NiceGUI (gallery interface) for managing large image collections through time-sync matching, ID collision resolution, category-based sorting, and gallery tagging with pairing capabilities.
|
||||
|
||||
## Commands
|
||||
|
||||
```bash
|
||||
# Install dependencies
|
||||
pip install -r requirements.txt
|
||||
|
||||
# Run Streamlit dashboard (port 8501)
|
||||
streamlit run app.py --server.port=8501 --server.address=0.0.0.0
|
||||
|
||||
# Run NiceGUI gallery (port 8080)
|
||||
python3 gallery_app.py
|
||||
|
||||
# Both services (container startup)
|
||||
./start.sh
|
||||
|
||||
# Syntax check all Python files
|
||||
python3 -m py_compile *.py
|
||||
```
|
||||
|
||||
## Architecture
|
||||
|
||||
### Dual-Framework Design
|
||||
- **Streamlit (app.py, port 8501)**: Administrative dashboard with 5 modular tabs for management workflows
|
||||
- **NiceGUI (gallery_app.py, port 8080)**: Modern gallery interface for image tagging and pairing operations
|
||||
- **Shared Backend**: Both UIs use `SorterEngine` (engine.py) and the same SQLite database
|
||||
|
||||
### Core Components
|
||||
|
||||
| File | Purpose |
|
||||
|------|---------|
|
||||
| `engine.py` | Static `SorterEngine` class - all DB operations, file handling, image compression |
|
||||
| `gallery_app.py` | NiceGUI gallery with `AppState` class for centralized state management |
|
||||
| `app.py` | Streamlit entry point, loads tab modules |
|
||||
| `tab_*.py` | Independent tab modules for each workflow |
|
||||
|
||||
### Database
|
||||
SQLite at `/app/sorter_database.db` with tables: profiles, folder_ids, categories, staging_area, processed_log, folder_tags, profile_categories, pairing_settings.
|
||||
|
||||
### Tab Workflows
|
||||
1. **Time-Sync Discovery** - Match images by timestamp across folders
|
||||
2. **ID Review** - Resolve ID collisions between target/control folders
|
||||
3. **Unused Archive** - Manage rejected image pairs
|
||||
4. **Category Sorter** - One-to-many categorization
|
||||
5. **Gallery Staged** - Grid-based tagging with Gallery/Pairing dual modes
|
||||
|
||||
## Key Patterns
|
||||
|
||||
- **ID Format**: `id001_`, `id002_` (zero-padded 3-digit prefix)
|
||||
- **Staging Pattern**: Two-phase commit (stage → commit) with undo support
|
||||
- **Image Formats**: .jpg, .jpeg, .png, .webp, .bmp, .tiff
|
||||
- **Compression**: WebP with ThreadPoolExecutor (8 workers)
|
||||
- **Permissions**: chmod 0o777 applied to committed files
|
||||
- **Default Paths**: `/storage` when not configured
|
||||
BIN
__pycache__/engine.cpython-312.pyc
Normal file
BIN
__pycache__/engine.cpython-312.pyc
Normal file
Binary file not shown.
BIN
__pycache__/gallery_app.cpython-312.pyc
Normal file
BIN
__pycache__/gallery_app.cpython-312.pyc
Normal file
Binary file not shown.
813
engine.py
813
engine.py
@@ -1,12 +1,31 @@
|
||||
import os
|
||||
import shutil
|
||||
import sqlite3
|
||||
import base64
|
||||
import requests
|
||||
from datetime import datetime
|
||||
from contextlib import contextmanager
|
||||
from PIL import Image
|
||||
from io import BytesIO
|
||||
|
||||
class SorterEngine:
|
||||
DB_PATH = "/app/sorter_database.db"
|
||||
|
||||
@staticmethod
@contextmanager
def get_db():
    """Context manager yielding a SQLite connection to the app database.

    Commits on clean exit, rolls back and re-raises on any exception,
    and always closes the connection.

    Yields:
        sqlite3.Connection: open connection to ``SorterEngine.DB_PATH``.
    """
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    try:
        yield conn
        conn.commit()
    except Exception:
        conn.rollback()
        raise
    finally:
        conn.close()
|
||||
|
||||
# --- 1. DATABASE INITIALIZATION ---
|
||||
@staticmethod
|
||||
def init_db():
|
||||
@@ -27,21 +46,85 @@ class SorterEngine:
|
||||
cursor.execute('''CREATE TABLE IF NOT EXISTS processed_log
|
||||
(source_path TEXT PRIMARY KEY, category TEXT, action_type TEXT)''')
|
||||
|
||||
# Seed categories if empty
|
||||
# --- NEW: FOLDER TAGS TABLE (persists tags by folder) ---
|
||||
# Check if old schema exists (without profile column) and migrate
|
||||
cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='folder_tags'")
|
||||
if cursor.fetchone():
|
||||
cursor.execute("PRAGMA table_info(folder_tags)")
|
||||
columns = [row[1] for row in cursor.fetchall()]
|
||||
if 'profile' not in columns:
|
||||
# Migrate: drop old table and recreate with profile column
|
||||
cursor.execute("DROP TABLE folder_tags")
|
||||
conn.commit()
|
||||
|
||||
cursor.execute('''CREATE TABLE IF NOT EXISTS folder_tags
|
||||
(profile TEXT, folder_path TEXT, filename TEXT, category TEXT, tag_index INTEGER,
|
||||
PRIMARY KEY (profile, folder_path, filename))''')
|
||||
|
||||
# --- NEW: PROFILE CATEGORIES TABLE (each profile has its own categories) ---
|
||||
cursor.execute('''CREATE TABLE IF NOT EXISTS profile_categories
|
||||
(profile TEXT, category TEXT, PRIMARY KEY (profile, category))''')
|
||||
|
||||
# --- NEW: PAIRING SETTINGS TABLE ---
|
||||
cursor.execute('''CREATE TABLE IF NOT EXISTS pairing_settings
|
||||
(profile TEXT PRIMARY KEY,
|
||||
adjacent_folder TEXT,
|
||||
main_category TEXT,
|
||||
adj_category TEXT,
|
||||
main_output TEXT,
|
||||
adj_output TEXT,
|
||||
time_window INTEGER)''')
|
||||
|
||||
# Seed categories if empty (legacy table)
|
||||
cursor.execute("SELECT COUNT(*) FROM categories")
|
||||
if cursor.fetchone()[0] == 0:
|
||||
for cat in ["_TRASH", "Default", "Action", "Solo"]:
|
||||
for cat in ["_TRASH", "control", "Default", "Action", "Solo"]:
|
||||
cursor.execute("INSERT OR IGNORE INTO categories VALUES (?)", (cat,))
|
||||
|
||||
# --- CAPTION TABLES ---
|
||||
# Per-category prompt templates
|
||||
cursor.execute('''CREATE TABLE IF NOT EXISTS category_prompts
|
||||
(profile TEXT, category TEXT, prompt_template TEXT,
|
||||
PRIMARY KEY (profile, category))''')
|
||||
|
||||
# Stored captions
|
||||
cursor.execute('''CREATE TABLE IF NOT EXISTS image_captions
|
||||
(image_path TEXT PRIMARY KEY, caption TEXT, model TEXT,
|
||||
generated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP)''')
|
||||
|
||||
# Caption API settings per profile
|
||||
cursor.execute('''CREATE TABLE IF NOT EXISTS caption_settings
|
||||
(profile TEXT PRIMARY KEY,
|
||||
api_endpoint TEXT DEFAULT 'http://localhost:8080/v1/chat/completions',
|
||||
model_name TEXT DEFAULT 'local-model',
|
||||
max_tokens INTEGER DEFAULT 300,
|
||||
temperature REAL DEFAULT 0.7,
|
||||
timeout_seconds INTEGER DEFAULT 60,
|
||||
batch_size INTEGER DEFAULT 4)''')
|
||||
|
||||
# --- PERFORMANCE INDEXES ---
|
||||
# Index for staging_area queries filtered by category
|
||||
cursor.execute("CREATE INDEX IF NOT EXISTS idx_staging_category ON staging_area(target_category)")
|
||||
# Index for folder_tags queries filtered by profile and folder_path
|
||||
cursor.execute("CREATE INDEX IF NOT EXISTS idx_folder_tags_profile ON folder_tags(profile, folder_path)")
|
||||
# Index for profile_categories lookups
|
||||
cursor.execute("CREATE INDEX IF NOT EXISTS idx_profile_categories ON profile_categories(profile)")
|
||||
# Index for caption lookups by image path
|
||||
cursor.execute("CREATE INDEX IF NOT EXISTS idx_image_captions ON image_captions(image_path)")
|
||||
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
# --- 2. PROFILE & PATH MANAGEMENT ---
|
||||
@staticmethod
|
||||
def save_tab_paths(profile_name, t1_t=None, t2_t=None, t2_c=None, t4_s=None, t4_o=None, mode=None, t5_s=None, t5_o=None):
|
||||
def save_tab_paths(profile_name, t1_t=None, t2_t=None, t2_c=None, t4_s=None, t4_o=None, mode=None, t5_s=None, t5_o=None,
|
||||
pair_adjacent_folder=None, pair_main_category=None, pair_adj_category=None,
|
||||
pair_main_output=None, pair_adj_output=None, pair_time_window=None):
|
||||
"""Updates specific tab paths in the database while preserving others."""
|
||||
conn = sqlite3.connect(SorterEngine.DB_PATH)
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Save main profile settings
|
||||
cursor.execute("SELECT * FROM profiles WHERE name = ?", (profile_name,))
|
||||
row = cursor.fetchone()
|
||||
|
||||
@@ -61,6 +144,38 @@ class SorterEngine:
|
||||
t5_o if t5_o is not None else row[8]
|
||||
)
|
||||
cursor.execute("INSERT OR REPLACE INTO profiles VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", new_values)
|
||||
|
||||
# Save pairing settings if any are provided
|
||||
if any(x is not None for x in [pair_adjacent_folder, pair_main_category, pair_adj_category,
|
||||
pair_main_output, pair_adj_output, pair_time_window]):
|
||||
# Ensure table exists
|
||||
cursor.execute('''CREATE TABLE IF NOT EXISTS pairing_settings
|
||||
(profile TEXT PRIMARY KEY,
|
||||
adjacent_folder TEXT,
|
||||
main_category TEXT,
|
||||
adj_category TEXT,
|
||||
main_output TEXT,
|
||||
adj_output TEXT,
|
||||
time_window INTEGER)''')
|
||||
|
||||
# Get existing values
|
||||
cursor.execute("SELECT * FROM pairing_settings WHERE profile = ?", (profile_name,))
|
||||
pair_row = cursor.fetchone()
|
||||
|
||||
if not pair_row:
|
||||
pair_row = (profile_name, "", "control", "control", "/storage", "/storage", 60)
|
||||
|
||||
pair_values = (
|
||||
profile_name,
|
||||
pair_adjacent_folder if pair_adjacent_folder is not None else pair_row[1],
|
||||
pair_main_category if pair_main_category is not None else pair_row[2],
|
||||
pair_adj_category if pair_adj_category is not None else pair_row[3],
|
||||
pair_main_output if pair_main_output is not None else pair_row[4],
|
||||
pair_adj_output if pair_adj_output is not None else pair_row[5],
|
||||
pair_time_window if pair_time_window is not None else pair_row[6]
|
||||
)
|
||||
cursor.execute("INSERT OR REPLACE INTO pairing_settings VALUES (?, ?, ?, ?, ?, ?, ?)", pair_values)
|
||||
|
||||
conn.commit()
|
||||
conn.close()
|
||||
@staticmethod
|
||||
@@ -91,33 +206,90 @@ class SorterEngine:
|
||||
|
||||
@staticmethod
def load_profiles():
    """Load all workspace presets including pairing settings.

    Uses a single LEFT JOIN query so each profile and its pairing
    settings come back in one round trip (fixes the N+1 pattern).
    Pairing columns missing from the JOIN (NULL) fall back to defaults.

    Returns:
        dict: profile name -> dict of tab paths, mode and pair_* settings.
    """
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    cursor = conn.cursor()

    # Older databases may not have pairing_settings yet; create before JOIN.
    cursor.execute('''CREATE TABLE IF NOT EXISTS pairing_settings
                      (profile TEXT PRIMARY KEY,
                       adjacent_folder TEXT,
                       main_category TEXT,
                       adj_category TEXT,
                       main_output TEXT,
                       adj_output TEXT,
                       time_window INTEGER)''')

    # Single query with LEFT JOIN - eliminates N+1 queries.
    cursor.execute('''
        SELECT p.name, p.tab1_target, p.tab2_target, p.tab2_control,
               p.tab4_source, p.tab4_out, p.mode, p.tab5_source, p.tab5_out,
               ps.adjacent_folder, ps.main_category, ps.adj_category,
               ps.main_output, ps.adj_output, ps.time_window
        FROM profiles p
        LEFT JOIN pairing_settings ps ON p.name = ps.profile
    ''')
    rows = cursor.fetchall()

    profiles = {}
    for r in rows:
        profiles[r[0]] = {
            "tab1_target": r[1], "tab2_target": r[2], "tab2_control": r[3],
            "tab4_source": r[4], "tab4_out": r[5], "mode": r[6],
            "tab5_source": r[7], "tab5_out": r[8],
            # Pairing settings from the JOIN; NULLs become defaults.
            "pair_adjacent_folder": r[9] or "",
            "pair_main_category": r[10] or "control",
            "pair_adj_category": r[11] or "control",
            "pair_main_output": r[12] or "/storage",
            "pair_adj_output": r[13] or "/storage",
            "pair_time_window": r[14] or 60,
        }

    conn.close()
    return profiles
|
||||
|
||||
# --- 3. CATEGORY MANAGEMENT (Profile-based) ---
|
||||
@staticmethod
def get_categories(profile=None):
    """Return category names sorted A-Z (case-insensitive).

    Args:
        profile: When given, reads that profile's own category set from
            ``profile_categories`` (seeding "_TRASH"/"control" on first
            use). When None, falls back to the legacy global table.

    Returns:
        list[str]: sorted category names.
    """
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    cursor = conn.cursor()

    # Ensure table exists (callers may hit this before init_db runs).
    cursor.execute('''CREATE TABLE IF NOT EXISTS profile_categories
                      (profile TEXT, category TEXT, PRIMARY KEY (profile, category))''')

    if profile:
        cursor.execute("SELECT category FROM profile_categories WHERE profile = ? ORDER BY category COLLATE NOCASE ASC", (profile,))
        cats = [r[0] for r in cursor.fetchall()]
        # If no categories for this profile, seed with defaults.
        if not cats:
            for cat in ["_TRASH", "control"]:
                cursor.execute("INSERT OR IGNORE INTO profile_categories VALUES (?, ?)", (profile, cat))
            conn.commit()
            cats = ["_TRASH", "control"]
    else:
        # Fallback to legacy table.
        cursor.execute("SELECT name FROM categories ORDER BY name COLLATE NOCASE ASC")
        cats = [r[0] for r in cursor.fetchall()]

    conn.close()
    return cats
|
||||
|
||||
@staticmethod
def add_category(name, profile=None):
    """Add a category, ignoring duplicates.

    Args:
        name: Category name to insert.
        profile: When given, the category is added to that profile's set
            in ``profile_categories``; otherwise to the legacy global table.
    """
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    cursor = conn.cursor()

    if profile:
        cursor.execute('''CREATE TABLE IF NOT EXISTS profile_categories
                          (profile TEXT, category TEXT, PRIMARY KEY (profile, category))''')
        cursor.execute("INSERT OR IGNORE INTO profile_categories VALUES (?, ?)", (profile, name))
    else:
        cursor.execute("INSERT OR IGNORE INTO categories VALUES (?)", (name,))

    conn.commit()
    conn.close()
|
||||
|
||||
@@ -153,20 +325,42 @@ class SorterEngine:
|
||||
|
||||
# --- 4. IMAGE & ID OPERATIONS ---
|
||||
@staticmethod
def get_images(path, recursive=False, exclude_paths=None):
    """Image scanner with optional recursive subfolder support.

    Args:
        path: Directory to scan.
        recursive: Whether to scan subdirectories (``_DELETED`` trash
            folders are always skipped).
        exclude_paths: Optional list of directory paths to exclude from
            scanning (the directories and everything beneath them).

    Returns:
        Sorted list of file paths whose extension matches a known image
        format. Returns [] when *path* is falsy or does not exist.
    """
    exts = ('.jpg', '.jpeg', '.png', '.webp', '.bmp', '.tiff')
    if not path or not os.path.exists(path):
        return []

    exclude_paths = exclude_paths or []
    # Normalize exclude paths once so comparisons are canonical.
    exclude_normalized = [os.path.normpath(os.path.abspath(p)) for p in exclude_paths]

    def _is_excluded(candidate):
        # Exact dir or true subpath only. A bare startswith() would also
        # (wrongly) exclude siblings sharing a prefix, e.g. /data2 vs /data.
        return any(candidate == exc or candidate.startswith(exc + os.sep)
                   for exc in exclude_normalized)

    image_list = []
    if recursive:
        for root, dirs, files in os.walk(path):
            # Skip the trash folder from scanning.
            if "_DELETED" in root:
                continue

            root_normalized = os.path.normpath(os.path.abspath(root))
            # Prune excluded dirs so os.walk never descends into them.
            dirs[:] = [d for d in dirs
                       if not _is_excluded(os.path.normpath(os.path.abspath(os.path.join(root, d))))]
            if _is_excluded(root_normalized):
                continue

            for f in files:
                if f.lower().endswith(exts):
                    image_list.append(os.path.join(root, f))
    else:
        for f in os.listdir(path):
            if f.lower().endswith(exts):
                image_list.append(os.path.join(path, f))
    return sorted(image_list)
|
||||
|
||||
@staticmethod
|
||||
@@ -226,36 +420,45 @@ class SorterEngine:
|
||||
@staticmethod
def stage_image(original_path, category, new_name):
    """Record a pending rename/move in the staging area (upsert).

    The trailing 1 is the "marked" flag stored alongside the entry.
    """
    with SorterEngine.get_db() as conn:
        cursor = conn.cursor()
        cursor.execute("INSERT OR REPLACE INTO staging_area VALUES (?, ?, ?, 1)",
                       (original_path, category, new_name))
|
||||
|
||||
@staticmethod
def clear_staged_item(original_path):
    """Remove a single item from the pending staging area."""
    with SorterEngine.get_db() as conn:
        cursor = conn.cursor()
        cursor.execute("DELETE FROM staging_area WHERE original_path = ?", (original_path,))
|
||||
|
||||
@staticmethod
def clear_staging_area():
    """Clear ALL items from the staging area."""
    with SorterEngine.get_db() as conn:
        cursor = conn.cursor()
        cursor.execute("DELETE FROM staging_area")
|
||||
|
||||
@staticmethod
def get_staged_data():
    """Retrieve current tagged/staged images.

    Returns:
        dict: original_path -> {"cat": category, "name": new filename,
        "marked": marked flag}.
    """
    with SorterEngine.get_db() as conn:
        cursor = conn.cursor()
        cursor.execute("SELECT * FROM staging_area")
        rows = cursor.fetchall()
    return {r[0]: {"cat": r[1], "name": r[2], "marked": r[3]} for r in rows}
|
||||
|
||||
@staticmethod
|
||||
def commit_global(output_root, cleanup_mode, operation="Copy", source_root=None):
|
||||
"""Commits ALL staged files and fixes permissions."""
|
||||
def commit_global(output_root, cleanup_mode, operation="Copy", source_root=None, profile=None):
|
||||
"""Commits ALL staged files and fixes permissions.
|
||||
Returns dict mapping original_path -> {dest, cat} for committed files."""
|
||||
data = SorterEngine.get_staged_data()
|
||||
committed = {}
|
||||
|
||||
# Save folder tags BEFORE processing (so we can restore them later)
|
||||
if source_root:
|
||||
SorterEngine.save_folder_tags(source_root, profile)
|
||||
|
||||
conn = sqlite3.connect(SorterEngine.DB_PATH)
|
||||
cursor = conn.cursor()
|
||||
|
||||
@@ -281,6 +484,9 @@ class SorterEngine:
|
||||
# --- FIX PERMISSIONS ---
|
||||
SorterEngine.fix_permissions(final_dst)
|
||||
|
||||
# Track actual destination
|
||||
committed[old_p] = {"dest": final_dst, "cat": info['cat']}
|
||||
|
||||
# Log History
|
||||
cursor.execute("INSERT OR REPLACE INTO processed_log VALUES (?, ?, ?)",
|
||||
(old_p, info['cat'], operation))
|
||||
@@ -304,6 +510,7 @@ class SorterEngine:
|
||||
cursor.execute("DELETE FROM staging_area")
|
||||
conn.commit()
|
||||
conn.close()
|
||||
return committed
|
||||
|
||||
# --- 6. CORE UTILITIES (SYNC & UNDO) ---
|
||||
@staticmethod
|
||||
@@ -415,10 +622,12 @@ class SorterEngine:
|
||||
|
||||
@staticmethod
|
||||
def commit_batch(file_list, output_root, cleanup_mode, operation="Copy"):
|
||||
"""Commits files and fixes permissions."""
|
||||
"""Commits files and fixes permissions.
|
||||
Returns dict mapping original_path -> actual_dest_path for committed files."""
|
||||
data = SorterEngine.get_staged_data()
|
||||
conn = sqlite3.connect(SorterEngine.DB_PATH)
|
||||
cursor = conn.cursor()
|
||||
committed = {}
|
||||
|
||||
if not os.path.exists(output_root): os.makedirs(output_root, exist_ok=True)
|
||||
|
||||
@@ -447,6 +656,9 @@ class SorterEngine:
|
||||
# --- FIX PERMISSIONS ---
|
||||
SorterEngine.fix_permissions(final_dst)
|
||||
|
||||
# Track actual destination
|
||||
committed[file_path] = {"dest": final_dst, "cat": info['cat']}
|
||||
|
||||
# Update DB
|
||||
cursor.execute("DELETE FROM staging_area WHERE original_path = ?", (file_path,))
|
||||
cursor.execute("INSERT OR REPLACE INTO processed_log VALUES (?, ?, ?)",
|
||||
@@ -467,6 +679,7 @@ class SorterEngine:
|
||||
|
||||
conn.commit()
|
||||
conn.close()
|
||||
return committed
|
||||
|
||||
@staticmethod
|
||||
def rename_category(old_name, new_name):
|
||||
@@ -494,11 +707,553 @@ class SorterEngine:
|
||||
conn.close()
|
||||
|
||||
@staticmethod
def delete_category(name, profile=None):
    """Delete a category and clear any staged tags associated with it.

    Args:
        name: Category to delete.
        profile: When given, removes the category from that profile's set;
            otherwise from the legacy global ``categories`` table. Staged
            rows for the category are purged in both cases.
    """
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    cursor = conn.cursor()

    if profile:
        cursor.execute("DELETE FROM profile_categories WHERE profile = ? AND category = ?", (profile, name))
    else:
        cursor.execute("DELETE FROM categories WHERE name = ?", (name,))

    cursor.execute("DELETE FROM staging_area WHERE target_category = ?", (name,))
    conn.commit()
    conn.close()
|
||||
|
||||
# In engine.py / SorterEngine class
|
||||
@staticmethod
def get_tagged_page_indices(all_images, page_size):
    """Return the set of 0-based page numbers that contain a staged image.

    Args:
        all_images: Ordered list of image paths as shown in the gallery.
        page_size: Images per page (must be > 0).
    """
    staged = SorterEngine.get_staged_data()
    if not staged:
        return set()
    staged_keys = set(staged.keys())
    return {idx // page_size
            for idx, img_path in enumerate(all_images)
            if img_path in staged_keys}
|
||||
|
||||
# --- 7. FOLDER TAG PERSISTENCE ---
|
||||
@staticmethod
def save_folder_tags(folder_path, profile=None):
    """Persist current staging tags for files under *folder_path*.

    Call this BEFORE clearing the staging area so the tags can later be
    restored with ``restore_folder_tags``.

    Args:
        folder_path: Folder whose staged files should be recorded.
        profile: Profile to save under; defaults to "Default".

    Returns:
        Number of tags saved.
    """
    import re
    staged = SorterEngine.get_staged_data()
    if not staged:
        return 0

    conn = sqlite3.connect(SorterEngine.DB_PATH)
    cursor = conn.cursor()

    # Ensure table exists with profile column.
    cursor.execute('''CREATE TABLE IF NOT EXISTS folder_tags
                      (profile TEXT, folder_path TEXT, filename TEXT, category TEXT, tag_index INTEGER,
                       PRIMARY KEY (profile, folder_path, filename))''')

    # Pre-profile schema is migrated by drop-and-recreate (old tags lost).
    cursor.execute("PRAGMA table_info(folder_tags)")
    columns = [row[1] for row in cursor.fetchall()]
    if 'profile' not in columns:
        cursor.execute("DROP TABLE folder_tags")
        cursor.execute('''CREATE TABLE folder_tags
                          (profile TEXT, folder_path TEXT, filename TEXT, category TEXT, tag_index INTEGER,
                           PRIMARY KEY (profile, folder_path, filename))''')
        conn.commit()

    profile = profile or "Default"
    saved_count = 0
    for orig_path, info in staged.items():
        # Only save tags for files in this folder or its subfolders.
        # NOTE(review): plain prefix match — a sibling folder sharing the
        # prefix (e.g. /x/ab vs /x/a) also matches; confirm acceptable.
        if orig_path.startswith(folder_path):
            filename = os.path.basename(orig_path)
            category = info['cat']

            # Extract index from the new_name (e.g., "Action_042.jpg" -> 42).
            match = re.search(r'_(\d+)', info['name'])
            tag_index = int(match.group(1)) if match else 0

            cursor.execute(
                "INSERT OR REPLACE INTO folder_tags VALUES (?, ?, ?, ?, ?)",
                (profile, folder_path, filename, category, tag_index)
            )
            saved_count += 1

    conn.commit()
    conn.close()
    return saved_count
|
||||
|
||||
@staticmethod
def restore_folder_tags(folder_path, all_images, profile=None):
    """Restore previously saved tags for a folder into the staging area.

    Call this when loading/reloading a folder. Saved tags are matched to
    *all_images* by basename; files already staged in this session are
    left untouched.

    Args:
        folder_path: Folder whose saved tags should be restored.
        all_images: Current image paths of the folder (used for matching).
        profile: Profile to restore from; defaults to "Default".

    Returns:
        Number of tags restored (0 on any error, which is printed).
    """
    try:
        conn = sqlite3.connect(SorterEngine.DB_PATH)
        cursor = conn.cursor()

        # Ensure table exists with profile column.
        cursor.execute('''CREATE TABLE IF NOT EXISTS folder_tags
                          (profile TEXT, folder_path TEXT, filename TEXT, category TEXT, tag_index INTEGER,
                           PRIMARY KEY (profile, folder_path, filename))''')

        # Pre-profile schema is migrated by drop-and-recreate.
        cursor.execute("PRAGMA table_info(folder_tags)")
        columns = [row[1] for row in cursor.fetchall()]
        if 'profile' not in columns:
            cursor.execute("DROP TABLE folder_tags")
            cursor.execute('''CREATE TABLE folder_tags
                              (profile TEXT, folder_path TEXT, filename TEXT, category TEXT, tag_index INTEGER,
                               PRIMARY KEY (profile, folder_path, filename))''')
            conn.commit()

        profile = profile or "Default"

        # Get saved tags for this folder and profile.
        cursor.execute(
            "SELECT filename, category, tag_index FROM folder_tags WHERE profile = ? AND folder_path = ?",
            (profile, folder_path)
        )
        saved_tags = {row[0]: {"cat": row[1], "index": row[2]} for row in cursor.fetchall()}

        if not saved_tags:
            conn.close()
            return 0

        # Map basename -> full path; first occurrence wins on duplicates.
        filename_to_path = {}
        for img_path in all_images:
            fname = os.path.basename(img_path)
            if fname not in filename_to_path:
                filename_to_path[fname] = img_path

        restored = 0
        for filename, tag_info in saved_tags.items():
            if filename in filename_to_path:
                full_path = filename_to_path[filename]
                # Don't clobber an entry the user staged this session.
                cursor.execute("SELECT 1 FROM staging_area WHERE original_path = ?", (full_path,))
                if not cursor.fetchone():
                    ext = os.path.splitext(filename)[1]
                    new_name = f"{tag_info['cat']}_{tag_info['index']:03d}{ext}"
                    cursor.execute(
                        "INSERT OR REPLACE INTO staging_area VALUES (?, ?, ?, 1)",
                        (full_path, tag_info['cat'], new_name)
                    )
                    restored += 1

        conn.commit()
        conn.close()
        return restored
    except Exception as e:
        print(f"Error restoring folder tags: {e}")
        return 0
|
||||
|
||||
@staticmethod
def clear_folder_tags(folder_path, profile=None):
    """Clear saved tags for a specific folder.

    Args:
        folder_path: Folder whose saved tags are removed.
        profile: Optional profile to restrict the delete to. By default
            (None) tags for the folder are removed across ALL profiles,
            matching the legacy behavior of this method even though the
            table is keyed by (profile, folder_path, filename).
    """
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    cursor = conn.cursor()
    if profile:
        cursor.execute("DELETE FROM folder_tags WHERE profile = ? AND folder_path = ?",
                       (profile, folder_path))
    else:
        cursor.execute("DELETE FROM folder_tags WHERE folder_path = ?", (folder_path,))
    conn.commit()
    conn.close()
|
||||
|
||||
@staticmethod
def get_saved_folder_tags(folder_path, profile=None):
    """Return saved tags for a folder (for debugging/display).

    Args:
        folder_path: Folder to look up.
        profile: Optional profile to restrict results to. By default
            (None) tags from all profiles are returned, matching the
            legacy behavior; duplicate filenames across profiles then
            collapse to one entry.

    Returns:
        dict: filename -> {"cat": category, "index": tag_index}.
    """
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    cursor = conn.cursor()
    if profile:
        cursor.execute(
            "SELECT filename, category, tag_index FROM folder_tags WHERE profile = ? AND folder_path = ?",
            (profile, folder_path)
        )
    else:
        cursor.execute(
            "SELECT filename, category, tag_index FROM folder_tags WHERE folder_path = ?",
            (folder_path,)
        )
    result = {row[0]: {"cat": row[1], "index": row[2]} for row in cursor.fetchall()}
    conn.close()
    return result
|
||||
|
||||
# --- 8. CAPTION SETTINGS & PROMPTS ---
|
||||
@staticmethod
def get_caption_settings(profile):
    """Get caption API settings for a profile.

    Returns stored settings when present, otherwise the defaults below
    (kept in one place so stored/unstored paths cannot drift apart).

    Returns:
        dict with keys: profile, api_endpoint, model_name, max_tokens,
        temperature, timeout_seconds, batch_size.
    """
    defaults = {
        "api_endpoint": "http://localhost:8080/v1/chat/completions",
        "model_name": "local-model",
        "max_tokens": 300,
        "temperature": 0.7,
        "timeout_seconds": 60,
        "batch_size": 4,
    }

    conn = sqlite3.connect(SorterEngine.DB_PATH)
    cursor = conn.cursor()

    # Ensure table exists.
    cursor.execute('''CREATE TABLE IF NOT EXISTS caption_settings
                      (profile TEXT PRIMARY KEY,
                       api_endpoint TEXT DEFAULT 'http://localhost:8080/v1/chat/completions',
                       model_name TEXT DEFAULT 'local-model',
                       max_tokens INTEGER DEFAULT 300,
                       temperature REAL DEFAULT 0.7,
                       timeout_seconds INTEGER DEFAULT 60,
                       batch_size INTEGER DEFAULT 4)''')

    cursor.execute("SELECT * FROM caption_settings WHERE profile = ?", (profile,))
    row = cursor.fetchone()
    conn.close()

    if row:
        return {
            "profile": row[0],
            "api_endpoint": row[1],
            "model_name": row[2],
            "max_tokens": row[3],
            "temperature": row[4],
            "timeout_seconds": row[5],
            "batch_size": row[6],
        }
    return {"profile": profile, **defaults}
|
||||
|
||||
@staticmethod
def save_caption_settings(profile, api_endpoint=None, model_name=None, max_tokens=None,
                          temperature=None, timeout_seconds=None, batch_size=None):
    """Save caption API settings for a profile.

    Any argument left as None keeps the currently stored value (or the
    built-in default when no row exists yet).
    """
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    cursor = conn.cursor()

    # Ensure table exists.
    cursor.execute('''CREATE TABLE IF NOT EXISTS caption_settings
                      (profile TEXT PRIMARY KEY,
                       api_endpoint TEXT DEFAULT 'http://localhost:8080/v1/chat/completions',
                       model_name TEXT DEFAULT 'local-model',
                       max_tokens INTEGER DEFAULT 300,
                       temperature REAL DEFAULT 0.7,
                       timeout_seconds INTEGER DEFAULT 60,
                       batch_size INTEGER DEFAULT 4)''')

    # Get existing values so unspecified fields are preserved.
    cursor.execute("SELECT * FROM caption_settings WHERE profile = ?", (profile,))
    row = cursor.fetchone()
    if not row:
        row = (profile, "http://localhost:8080/v1/chat/completions", "local-model", 300, 0.7, 60, 4)

    new_values = (
        profile,
        api_endpoint if api_endpoint is not None else row[1],
        model_name if model_name is not None else row[2],
        max_tokens if max_tokens is not None else row[3],
        temperature if temperature is not None else row[4],
        timeout_seconds if timeout_seconds is not None else row[5],
        batch_size if batch_size is not None else row[6],
    )

    cursor.execute("INSERT OR REPLACE INTO caption_settings VALUES (?, ?, ?, ?, ?, ?, ?)", new_values)
    conn.commit()
    conn.close()
|
||||
|
||||
@staticmethod
def get_category_prompt(profile, category):
    """Get the prompt template for a category.

    Returns the stored template if one exists and is non-empty,
    otherwise a generic default prompt.
    """
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    cursor = conn.cursor()

    cursor.execute('''CREATE TABLE IF NOT EXISTS category_prompts
                      (profile TEXT, category TEXT, prompt_template TEXT,
                       PRIMARY KEY (profile, category))''')

    cursor.execute(
        "SELECT prompt_template FROM category_prompts WHERE profile = ? AND category = ?",
        (profile, category)
    )
    row = cursor.fetchone()
    conn.close()

    if row and row[0]:
        return row[0]
    # Default prompt (also used when the stored template is empty).
    return "Describe this image in detail for training purposes. Include subjects, actions, setting, colors, and composition."
|
||||
|
||||
@staticmethod
def save_category_prompt(profile, category, prompt):
    """Upsert the prompt template for a (profile, category) pairing."""
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    cur = conn.cursor()

    # Create on demand so a fresh database works without migration.
    cur.execute('''CREATE TABLE IF NOT EXISTS category_prompts
                (profile TEXT, category TEXT, prompt_template TEXT,
                 PRIMARY KEY (profile, category))''')

    # The composite primary key makes INSERT OR REPLACE an upsert.
    cur.execute("INSERT OR REPLACE INTO category_prompts VALUES (?, ?, ?)",
                (profile, category, prompt))
    conn.commit()
    conn.close()
|
||||
|
||||
@staticmethod
def get_all_category_prompts(profile):
    """Return {category: prompt_template} for every row under *profile*."""
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    cur = conn.cursor()

    # Ensure the table exists before querying (fresh-DB safety).
    cur.execute('''CREATE TABLE IF NOT EXISTS category_prompts
                (profile TEXT, category TEXT, prompt_template TEXT,
                 PRIMARY KEY (profile, category))''')

    cur.execute(
        "SELECT category, prompt_template FROM category_prompts WHERE profile = ?",
        (profile,)
    )
    # Rows are (category, template) pairs, so dict() builds the mapping directly.
    prompts = dict(cur.fetchall())
    conn.close()
    return prompts
|
||||
|
||||
# --- 9. CAPTION STORAGE ---
|
||||
@staticmethod
def save_caption(image_path, caption, model):
    """Persist (or overwrite) the generated caption for *image_path*."""
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    cur = conn.cursor()

    # Create on demand; image_path is the primary key, so re-saving replaces.
    cur.execute('''CREATE TABLE IF NOT EXISTS image_captions
                (image_path TEXT PRIMARY KEY, caption TEXT, model TEXT,
                 generated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP)''')

    record = (image_path, caption, model, datetime.now().isoformat())
    cur.execute("INSERT OR REPLACE INTO image_captions VALUES (?, ?, ?, ?)", record)
    conn.commit()
    conn.close()
|
||||
|
||||
@staticmethod
def get_caption(image_path):
    """Fetch the stored caption record for an image.

    Returns a dict with 'caption', 'model' and 'generated_at' keys,
    or None when nothing has been saved for this path.
    """
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    cur = conn.cursor()

    # Ensure the table exists before querying (fresh-DB safety).
    cur.execute('''CREATE TABLE IF NOT EXISTS image_captions
                (image_path TEXT PRIMARY KEY, caption TEXT, model TEXT,
                 generated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP)''')

    cur.execute(
        "SELECT caption, model, generated_at FROM image_captions WHERE image_path = ?",
        (image_path,)
    )
    row = cur.fetchone()
    conn.close()

    if row is None:
        return None
    return dict(zip(("caption", "model", "generated_at"), row))
|
||||
|
||||
@staticmethod
def get_captions_batch(image_paths):
    """Fetch caption records for many images in a single query.

    Returns {path: {"caption", "model", "generated_at"}} containing only
    the paths that actually have stored captions; others are absent.
    """
    if not image_paths:
        return {}

    conn = sqlite3.connect(SorterEngine.DB_PATH)
    cur = conn.cursor()

    # Ensure the table exists before querying (fresh-DB safety).
    cur.execute('''CREATE TABLE IF NOT EXISTS image_captions
                (image_path TEXT PRIMARY KEY, caption TEXT, model TEXT,
                 generated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP)''')

    # One parameterized IN (...) query instead of a round-trip per path.
    marks = ','.join('?' * len(image_paths))
    cur.execute(
        f"SELECT image_path, caption, model, generated_at FROM image_captions WHERE image_path IN ({marks})",
        image_paths
    )

    found = {}
    for path, caption, model, generated_at in cur.fetchall():
        found[path] = {"caption": caption, "model": model, "generated_at": generated_at}
    conn.close()
    return found
|
||||
|
||||
@staticmethod
def delete_caption(image_path):
    """Delete the stored caption for *image_path* (no-op if none exists).

    Creates the image_captions table on demand, matching every other
    caption helper in this class — without the guard, calling this on a
    fresh database raised sqlite3.OperationalError
    ("no such table: image_captions").
    """
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    cursor = conn.cursor()

    # Same on-demand schema creation as save_caption/get_caption.
    cursor.execute('''CREATE TABLE IF NOT EXISTS image_captions
                   (image_path TEXT PRIMARY KEY, caption TEXT, model TEXT,
                    generated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP)''')

    cursor.execute("DELETE FROM image_captions WHERE image_path = ?", (image_path,))
    conn.commit()
    conn.close()
|
||||
|
||||
@staticmethod
def get_all_caption_paths():
    """Return the set of image paths that currently have stored captions."""
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    cur = conn.cursor()

    # Ensure the table exists before querying (fresh-DB safety).
    cur.execute('''CREATE TABLE IF NOT EXISTS image_captions
                (image_path TEXT PRIMARY KEY, caption TEXT, model TEXT,
                 generated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP)''')

    cur.execute("SELECT image_path FROM image_captions")
    paths = set()
    for (path,) in cur.fetchall():
        paths.add(path)
    conn.close()
    return paths
|
||||
|
||||
# --- 10. VLLM API CAPTIONING ---
|
||||
@staticmethod
def caption_image_vllm(image_path, prompt, settings):
    """
    Generate a caption for one image via an OpenAI-compatible VLLM endpoint.

    Args:
        image_path: Path to the image file
        prompt: Text prompt for captioning
        settings: Dict with api_endpoint, model_name, max_tokens,
            temperature, timeout_seconds

    Returns:
        Tuple of (caption_text, error_message); error_message is None
        on success, caption_text is None on failure.
    """
    try:
        # Inline the image as base64 so no file upload endpoint is needed.
        with open(image_path, 'rb') as fh:
            b64_image = base64.b64encode(fh.read()).decode('utf-8')

        # Extension -> MIME type, defaulting to JPEG for anything unknown.
        ext = os.path.splitext(image_path)[1].lower()
        mime_type = {
            '.jpg': 'image/jpeg',
            '.jpeg': 'image/jpeg',
            '.png': 'image/png',
            '.webp': 'image/webp',
            '.bmp': 'image/bmp',
            '.tiff': 'image/tiff',
        }.get(ext, 'image/jpeg')

        # OpenAI chat-completions style payload with an image_url part.
        data_url = f"data:{mime_type};base64,{b64_image}"
        message = {
            "role": "user",
            "content": [
                {"type": "text", "text": prompt},
                {"type": "image_url", "image_url": {"url": data_url}}
            ]
        }
        payload = {
            "model": settings.get('model_name', 'local-model'),
            "messages": [message],
            "max_tokens": settings.get('max_tokens', 300),
            "temperature": settings.get('temperature', 0.7)
        }

        response = requests.post(
            settings.get('api_endpoint', 'http://localhost:8080/v1/chat/completions'),
            json=payload,
            timeout=settings.get('timeout_seconds', 60)
        )
        # Turn HTTP-level failures into RequestException for the handler below.
        response.raise_for_status()

        body = response.json()
        return body['choices'][0]['message']['content'].strip(), None

    except requests.Timeout:
        return None, f"API timeout after {settings.get('timeout_seconds', 60)}s"
    except requests.RequestException as e:
        return None, f"API error: {str(e)}"
    except KeyError as e:
        return None, f"Invalid API response: missing {str(e)}"
    except Exception as e:
        return None, f"Error: {str(e)}"
|
||||
|
||||
@staticmethod
def caption_batch_vllm(image_paths, get_prompt_fn, settings, progress_cb=None):
    """
    Caption a list of images sequentially through the VLLM API.

    Args:
        image_paths: List of (image_path, category) tuples
        get_prompt_fn: Function(category) -> prompt string
        settings: Caption settings dict
        progress_cb: Optional callback(current, total, status_msg)

    Returns:
        {"success": int, "failed": int, "captions": {path: caption},
         "errors": {path: error_message}}
    """
    summary = {"success": 0, "failed": 0, "captions": {}, "errors": {}}
    total = len(image_paths)
    # Model name is loop-invariant; look it up once.
    model = settings.get('model_name', 'local-model')

    for pos, (image_path, category) in enumerate(image_paths):
        if progress_cb:
            progress_cb(pos, total, f"Captioning {os.path.basename(image_path)}...")

        caption, error = SorterEngine.caption_image_vllm(
            image_path, get_prompt_fn(category), settings
        )

        if caption:
            SorterEngine.save_caption(image_path, caption, model)
            summary["captions"][image_path] = caption
            summary["success"] += 1
        else:
            # Persist a marker caption so the failure is visible later.
            SorterEngine.save_caption(image_path, f"[ERROR] {error}", model)
            summary["errors"][image_path] = error
            summary["failed"] += 1

    if progress_cb:
        progress_cb(total, total, "Complete!")

    return summary
|
||||
|
||||
@staticmethod
def write_caption_sidecar(image_path, caption):
    """
    Write *caption* to a .txt sidecar file sitting next to the image.

    Args:
        image_path: Path to the image file
        caption: Caption text to write

    Returns:
        Path of the sidecar file, or None if writing failed.
    """
    try:
        # Same basename, .txt extension.
        base, _ext = os.path.splitext(image_path)
        sidecar_path = base + ".txt"

        with open(sidecar_path, 'w', encoding='utf-8') as fh:
            fh.write(caption)

        # Keep ownership/mode consistent with the rest of the output tree.
        SorterEngine.fix_permissions(sidecar_path)
        return sidecar_path
    except Exception as e:
        # Best-effort: a failed sidecar write is reported, not fatal.
        print(f"Warning: Could not write sidecar for {image_path}: {e}")
        return None
|
||||
|
||||
@staticmethod
|
||||
def read_caption_sidecar(image_path):
|
||||
"""
|
||||
Read caption from a .txt sidecar file if it exists.
|
||||
|
||||
Args:
|
||||
image_path: Path to the image file
|
||||
|
||||
Returns:
|
||||
Caption text or None if no sidecar exists
|
||||
"""
|
||||
try:
|
||||
base_path = os.path.splitext(image_path)[0]
|
||||
sidecar_path = f"{base_path}.txt"
|
||||
|
||||
if os.path.exists(sidecar_path):
|
||||
with open(sidecar_path, 'r', encoding='utf-8') as f:
|
||||
return f.read().strip()
|
||||
except Exception:
|
||||
pass
|
||||
return None
|
||||
1877
gallery_app.py
Normal file
1877
gallery_app.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,2 +1,4 @@
|
||||
streamlit
|
||||
Pillow
|
||||
nicegui
|
||||
requests
|
||||
25
start.sh
Executable file
25
start.sh
Executable file
@@ -0,0 +1,25 @@
|
||||
#!/bin/bash

# NiceSorter - Start Script
# Runs the NiceGUI gallery interface on port 8080.

# Abort immediately if any command below fails.
set -e

# Inside the container the app lives at /app; on bare metal we stay
# in the current directory.
if [ -d "/app" ]; then
    cd /app
fi

# Install Python dependencies when a requirements file is present.
if [ -f "requirements.txt" ]; then
    echo "📦 Installing dependencies..."
    pip install --no-cache-dir -q -r requirements.txt
fi

# Create/upgrade the SQLite schema before the UI starts.
echo "🗄️ Initializing database..."
python3 -c "from engine import SorterEngine; SorterEngine.init_db()"

# exec replaces this shell so signals (SIGTERM etc.) reach the app directly.
echo "🚀 Starting NiceSorter on http://0.0.0.0:8080"
exec python3 gallery_app.py
|
||||
@@ -1,34 +1,134 @@
|
||||
import streamlit as st
|
||||
import os
|
||||
import math
|
||||
import concurrent.futures
|
||||
from typing import Dict, Set, List, Optional, Tuple
|
||||
from engine import SorterEngine
|
||||
|
||||
# ==========================================
|
||||
# 1. CACHED DATA LOADER (The Fix)
|
||||
# STATE MANAGEMENT
|
||||
# ==========================================
|
||||
@st.cache_data(show_spinner=False)
|
||||
def get_cached_images(path, mutation_id):
|
||||
"""
|
||||
Scans the folder ONLY when 'path' or 'mutation_id' changes.
|
||||
Navigating pages does NOT change these, so it remains instant.
|
||||
"""
|
||||
return SorterEngine.get_images(path, recursive=True)
|
||||
|
||||
class StreamlitState:
|
||||
"""Centralized state management with type hints."""
|
||||
|
||||
# ==========================================
|
||||
# 2. CALLBACKS (Updated with Refresh Logic)
|
||||
# ==========================================
|
||||
def trigger_refresh():
|
||||
"""Increments the mutation counter to force a file re-scan."""
|
||||
if 't5_file_id' not in st.session_state: st.session_state.t5_file_id = 0
|
||||
@staticmethod
|
||||
def init():
|
||||
"""Initialize all session state variables."""
|
||||
defaults = {
|
||||
't5_file_id': 0,
|
||||
't5_page': 0,
|
||||
't5_active_cat': 'Default',
|
||||
't5_next_index': 1,
|
||||
't5_op_mode': 'Copy',
|
||||
't5_cleanup_mode': 'Keep',
|
||||
't5_page_size': 24,
|
||||
't5_grid_cols': 4,
|
||||
't5_quality': 50,
|
||||
}
|
||||
for key, value in defaults.items():
|
||||
if key not in st.session_state:
|
||||
st.session_state[key] = value
|
||||
|
||||
@staticmethod
|
||||
def trigger_refresh():
|
||||
"""Force file cache invalidation."""
|
||||
st.session_state.t5_file_id += 1
|
||||
|
||||
def cb_tag_image(img_path, selected_cat, index_val, path_o):
|
||||
@staticmethod
|
||||
def change_page(delta: int):
|
||||
"""Navigate pages by delta."""
|
||||
st.session_state.t5_page = max(0, st.session_state.t5_page + delta)
|
||||
|
||||
@staticmethod
|
||||
def set_page(page_idx: int):
|
||||
"""Jump to specific page."""
|
||||
st.session_state.t5_page = page_idx
|
||||
|
||||
@staticmethod
|
||||
def slider_change(key: str):
|
||||
"""Handle slider-based page navigation (1-based to 0-based)."""
|
||||
st.session_state.t5_page = st.session_state[key] - 1
|
||||
|
||||
# ==========================================
|
||||
# CACHING & DATA LOADING
|
||||
# ==========================================
|
||||
|
||||
@st.cache_data(show_spinner=False)
def get_cached_images(path: str, mutation_id: int) -> List[str]:
    """Recursively scan *path* for images (memoized by Streamlit).

    ``mutation_id`` is deliberately unused in the body: it participates in
    the cache key only, so incrementing it forces a fresh disk scan while
    ordinary page navigation keeps hitting the cache.
    """
    return SorterEngine.get_images(path, recursive=True)
|
||||
|
||||
@st.cache_data(show_spinner=False, max_entries=2000)
def get_cached_thumbnail(path: str, quality: int, target_size: int, mtime: float) -> Optional[bytes]:
    """Load and compress one thumbnail (memoized, bounded to 2000 entries).

    ``mtime`` participates only in the cache key: when the file changes on
    disk its new mtime invalidates the stale cached bytes.
    Returns None when the image cannot be loaded/compressed.
    """
    try:
        return SorterEngine.compress_for_web(path, quality, target_size)
    except Exception:
        return None
|
||||
|
||||
@st.cache_data(show_spinner=False)
def get_cached_green_dots(all_images: List[str], page_size: int, staged_keys: frozenset) -> Set[int]:
    """
    Calculate which pages have tagged images (cached).
    Returns set of page indices with staged images.
    """
    # frozenset keeps the argument hashable for st.cache_data; use a plain
    # set for fast membership checks in the loop.
    staged_set = set(staged_keys)

    # Integer division maps each image index to its page number.
    return {idx // page_size
            for idx, img_path in enumerate(all_images)
            if img_path in staged_set}
|
||||
|
||||
@st.cache_data(show_spinner=False)
def build_index_map(active_cat: str, path_o: str, staged_data_frozen: frozenset) -> Dict[int, str]:
    """
    Map index numbers to file paths for the active category.

    Returns e.g. {1: '/path/to/Cat_001.jpg', 2: '/path/to/Cat_002.jpg', ...}.
    Staged (not yet committed) entries take priority over files already
    on disk for the same index.
    """
    index_map: Dict[int, str] = {}

    # frozenset of (path, info) pairs -> plain dict; the frozenset form
    # exists only to make the argument hashable for st.cache_data.
    staged = dict(staged_data_frozen)

    # 1) Entries currently staged for this category.
    for orig_path, info in staged.items():
        if info['cat'] != active_cat:
            continue
        idx = _extract_index(info['name'])
        if idx is not None:
            index_map[idx] = orig_path

    # 2) Files already committed to the category folder on disk;
    #    staged entries keep priority via the "not in" guard.
    cat_path = os.path.join(path_o, active_cat)
    if os.path.exists(cat_path):
        for filename in os.listdir(cat_path):
            if not filename.startswith(active_cat):
                continue
            idx = _extract_index(filename)
            if idx is not None and idx not in index_map:
                index_map[idx] = os.path.join(cat_path, filename)

    return index_map
|
||||
|
||||
def _extract_index(filename: str) -> Optional[int]:
|
||||
"""Extract numeric index from filename (e.g., 'Cat_042.jpg' -> 42)."""
|
||||
try:
|
||||
parts = filename.rsplit('_', 1)
|
||||
if len(parts) > 1:
|
||||
num_str = parts[1].split('.')[0]
|
||||
return int(num_str)
|
||||
except (ValueError, IndexError):
|
||||
pass
|
||||
return None
|
||||
|
||||
# ==========================================
|
||||
# ACTIONS
|
||||
# ==========================================
|
||||
|
||||
def action_tag(img_path: str, selected_cat: str, index_val: int, path_o: str):
|
||||
"""Tag image with category and index, handling collisions."""
|
||||
if selected_cat.startswith("---") or not selected_cat:
|
||||
st.toast("⚠️ Select a valid category first!", icon="🚫")
|
||||
return
|
||||
|
||||
@@ -36,316 +136,608 @@ def cb_tag_image(img_path, selected_cat, index_val, path_o):
|
||||
base_name = f"{selected_cat}_{index_val:03d}"
|
||||
new_name = f"{base_name}{ext}"
|
||||
|
||||
# --- COLLISION DETECTION ---
|
||||
# 1. Check Staging DB
|
||||
# Collision detection
|
||||
staged = SorterEngine.get_staged_data()
|
||||
# Get all names currently staged for this category
|
||||
staged_names = {v['name'] for v in staged.values() if v['cat'] == selected_cat}
|
||||
|
||||
# 2. Check Hard Drive
|
||||
dest_path = os.path.join(path_o, selected_cat, new_name)
|
||||
|
||||
collision = False
|
||||
suffix = 1
|
||||
|
||||
# Loop until we find a free name
|
||||
while new_name in staged_names or os.path.exists(dest_path):
|
||||
collision = True
|
||||
new_name = f"{base_name}_{suffix}{ext}"
|
||||
dest_path = os.path.join(path_o, selected_cat, new_name)
|
||||
suffix += 1
|
||||
|
||||
# --- SAVE ---
|
||||
SorterEngine.stage_image(img_path, selected_cat, new_name)
|
||||
|
||||
if collision:
|
||||
st.toast(f"⚠️ Conflict! Saved as variant: {new_name}", icon="🔀")
|
||||
st.toast(f"⚠️ Conflict! Saved as: {new_name}", icon="🔀")
|
||||
|
||||
# REMOVED: st.session_state.t5_next_index += 1
|
||||
# The numbers in the input boxes will now stay static.
|
||||
# Auto-increment index
|
||||
st.session_state.t5_next_index = index_val + 1
|
||||
|
||||
def cb_untag_image(img_path):
|
||||
def action_untag(img_path: str):
|
||||
"""Remove staging from image."""
|
||||
SorterEngine.clear_staged_item(img_path)
|
||||
|
||||
def cb_delete_image(img_path):
|
||||
def action_delete(img_path: str):
|
||||
"""Delete image to trash."""
|
||||
SorterEngine.delete_to_trash(img_path)
|
||||
trigger_refresh() # Force re-scan so the image disappears from the list
|
||||
StreamlitState.trigger_refresh()
|
||||
|
||||
def cb_apply_batch(current_batch, path_o, cleanup_mode, operation):
|
||||
def action_apply_batch(current_batch: List[str], path_o: str, cleanup_mode: str, operation: str):
|
||||
"""Apply staged changes for current page."""
|
||||
SorterEngine.commit_batch(current_batch, path_o, cleanup_mode, operation)
|
||||
trigger_refresh() # Force re-scan to remove moved files
|
||||
StreamlitState.trigger_refresh()
|
||||
|
||||
def cb_apply_global(path_o, cleanup_mode, operation, path_s):
|
||||
def action_apply_global(path_o: str, cleanup_mode: str, operation: str, path_s: str):
|
||||
"""Apply all staged changes globally."""
|
||||
SorterEngine.commit_global(path_o, cleanup_mode, operation, source_root=path_s)
|
||||
trigger_refresh() # Force re-scan
|
||||
StreamlitState.trigger_refresh()
|
||||
|
||||
def cb_change_page(delta):
|
||||
if 't5_page' not in st.session_state: st.session_state.t5_page = 0
|
||||
st.session_state.t5_page += delta
|
||||
# No trigger_refresh() here -> This is why page turning is now instant!
|
||||
def action_add_category(name: str):
|
||||
"""Add new category."""
|
||||
if name:
|
||||
SorterEngine.add_category(name)
|
||||
st.session_state.t5_active_cat = name
|
||||
|
||||
def cb_jump_page(k):
|
||||
val = st.session_state[k]
|
||||
st.session_state.t5_page = val - 1
|
||||
def action_rename_category(old_name: str, new_name: str):
|
||||
"""Rename category."""
|
||||
if new_name and new_name != old_name:
|
||||
SorterEngine.rename_category(old_name, new_name)
|
||||
st.session_state.t5_active_cat = new_name
|
||||
|
||||
def action_delete_category(cat_name: str):
|
||||
"""Delete category."""
|
||||
SorterEngine.delete_category(cat_name)
|
||||
# Reset to first available category
|
||||
cats = SorterEngine.get_categories() or ["Default"]
|
||||
st.session_state.t5_active_cat = cats[0]
|
||||
|
||||
# ==========================================
|
||||
# 3. FRAGMENTS (Sidebar, Grid, Batch)
|
||||
# DIALOGS
|
||||
# ==========================================
|
||||
# ... (Sidebar code remains exactly the same) ...
|
||||
|
||||
@st.dialog("🔍 Full Resolution", width="large")
def view_high_res(img_path: str):
    """Modal dialog for inspecting an image at full resolution."""
    # quality=90, target_size=None: recompress for transport without resizing.
    img_data = SorterEngine.compress_for_web(img_path, quality=90, target_size=None)
    if not img_data:
        st.error(f"Could not load: {img_path}")
        return
    st.image(img_data, use_container_width=True)
    st.caption(f"📁 {img_path}")
|
||||
|
||||
@st.dialog("🖼️ Tag Preview", width="large")
def view_tag_preview(img_path: str, title: str):
    """Modal dialog showing the image behind a numbered tag."""
    st.subheader(title)

    # Preview-sized render: moderate quality, capped at 800px.
    img_data = SorterEngine.compress_for_web(img_path, quality=80, target_size=800)
    if not img_data:
        st.error(f"Could not load: {img_path}")
        return
    st.image(img_data, use_container_width=True)
    st.caption(f"📁 {img_path}")
|
||||
|
||||
# ==========================================
|
||||
# UI COMPONENTS
|
||||
# ==========================================
|
||||
|
||||
@st.fragment
|
||||
def render_sidebar_content():
|
||||
def render_sidebar_content(path_o: str):
|
||||
"""Render category management sidebar."""
|
||||
st.divider()
|
||||
st.subheader("🏷️ Category Manager")
|
||||
cats = SorterEngine.get_categories()
|
||||
processed_cats = []
|
||||
|
||||
# Get and process categories with separators
|
||||
cats = SorterEngine.get_categories() or ["Default"]
|
||||
processed_cats = _add_category_separators(cats)
|
||||
|
||||
# Sync radio selection immediately
|
||||
if "t5_radio_select" in st.session_state:
|
||||
new_selection = st.session_state.t5_radio_select
|
||||
if not new_selection.startswith("---"):
|
||||
st.session_state.t5_active_cat = new_selection
|
||||
|
||||
if "t5_active_cat" not in st.session_state:
|
||||
st.session_state.t5_active_cat = cats[0]
|
||||
|
||||
current_cat = st.session_state.t5_active_cat
|
||||
|
||||
# NUMBER GRID (1-25) with previews
|
||||
if current_cat and not current_cat.startswith("---"):
|
||||
st.caption(f"**{current_cat}** Index Map")
|
||||
|
||||
# Build index map (cached)
|
||||
staged = SorterEngine.get_staged_data()
|
||||
staged_frozen = frozenset(staged.items())
|
||||
index_map = build_index_map(current_cat, path_o, staged_frozen)
|
||||
|
||||
# Render 5x5 grid
|
||||
grid_cols = st.columns(5, gap="small")
|
||||
for i in range(1, 26):
|
||||
is_used = i in index_map
|
||||
btn_type = "primary" if is_used else "secondary"
|
||||
|
||||
with grid_cols[(i - 1) % 5]:
|
||||
if st.button(f"{i}", key=f"grid_{i}", type=btn_type, use_container_width=True):
|
||||
st.session_state.t5_next_index = i
|
||||
if is_used:
|
||||
view_tag_preview(index_map[i], f"{current_cat} #{i}")
|
||||
else:
|
||||
st.toast(f"Next index set to #{i}")
|
||||
|
||||
st.divider()
|
||||
|
||||
# CATEGORY SELECTOR
|
||||
st.radio("Active Category", processed_cats, key="t5_radio_select")
|
||||
|
||||
# INDEX CONTROLS
|
||||
st.caption("Tagging Settings")
|
||||
c_num1, c_num2 = st.columns([3, 1], vertical_alignment="bottom")
|
||||
|
||||
c_num1.number_input("Next Index #", min_value=1, step=1, key="t5_next_index")
|
||||
|
||||
if c_num2.button("🔄", help="Auto-detect next index"):
|
||||
used_indices = list(index_map.keys()) if index_map else []
|
||||
st.session_state.t5_next_index = max(used_indices) + 1 if used_indices else 1
|
||||
st.rerun()
|
||||
|
||||
st.divider()
|
||||
|
||||
# CATEGORY MANAGEMENT TABS
|
||||
tab_add, tab_edit = st.tabs(["➕ Add", "✏️ Edit"])
|
||||
|
||||
with tab_add:
|
||||
c1, c2 = st.columns([3, 1])
|
||||
new_cat = c1.text_input(
|
||||
"New Category",
|
||||
label_visibility="collapsed",
|
||||
placeholder="Enter name...",
|
||||
key="t5_new_cat"
|
||||
)
|
||||
if c2.button("Add", key="btn_add_cat"):
|
||||
action_add_category(new_cat)
|
||||
st.rerun()
|
||||
|
||||
with tab_edit:
|
||||
if current_cat and not current_cat.startswith("---") and current_cat in cats:
|
||||
st.caption(f"Editing: **{current_cat}**")
|
||||
|
||||
rename_val = st.text_input(
|
||||
"Rename to:",
|
||||
value=current_cat,
|
||||
key=f"ren_{current_cat}"
|
||||
)
|
||||
|
||||
if st.button("💾 Save", key=f"save_{current_cat}", use_container_width=True):
|
||||
action_rename_category(current_cat, rename_val)
|
||||
st.rerun()
|
||||
|
||||
st.markdown("---")
|
||||
|
||||
if st.button(
|
||||
"🗑️ Delete Category",
|
||||
key=f"del_cat_{current_cat}",
|
||||
type="primary",
|
||||
use_container_width=True
|
||||
):
|
||||
action_delete_category(current_cat)
|
||||
st.rerun()
|
||||
|
||||
def _add_category_separators(cats: List[str]) -> List[str]:
|
||||
"""Add alphabetical separators between categories."""
|
||||
processed = []
|
||||
last_char = ""
|
||||
if cats:
|
||||
|
||||
for cat in cats:
|
||||
current_char = cat[0].upper()
|
||||
if last_char and current_char != last_char:
|
||||
processed_cats.append(f"--- {current_char} ---")
|
||||
processed_cats.append(cat)
|
||||
processed.append(f"--- {current_char} ---")
|
||||
processed.append(cat)
|
||||
last_char = current_char
|
||||
|
||||
if "t5_active_cat" not in st.session_state: st.session_state.t5_active_cat = cats[0] if cats else "Default"
|
||||
current_selection = st.session_state.t5_active_cat
|
||||
if not current_selection.startswith("---") and current_selection not in cats:
|
||||
st.session_state.t5_active_cat = cats[0] if cats else "Default"
|
||||
return processed
|
||||
|
||||
selection = st.radio("Active Tag", processed_cats, key="t5_radio_select")
|
||||
if not selection.startswith("---"): st.session_state.t5_active_cat = selection
|
||||
def render_pagination_carousel(key_suffix: str, total_pages: int, current_page: int, tagged_pages: Set[int]):
|
||||
"""Render pagination controls with green dot indicators."""
|
||||
if total_pages <= 1:
|
||||
return
|
||||
|
||||
st.divider()
|
||||
tab_add, tab_edit = st.tabs(["➕ Add", "✏️ Edit"])
|
||||
with tab_add:
|
||||
c1, c2 = st.columns([3, 1])
|
||||
new_cat = c1.text_input("New Name", label_visibility="collapsed", placeholder="New...", key="t5_new_cat")
|
||||
if c2.button("Add", key="btn_add_cat"):
|
||||
if new_cat:
|
||||
SorterEngine.add_category(new_cat)
|
||||
st.rerun()
|
||||
with tab_edit:
|
||||
target_cat = st.session_state.t5_active_cat
|
||||
if target_cat and not target_cat.startswith("---") and target_cat in cats:
|
||||
st.caption(f"Editing: **{target_cat}**")
|
||||
rename_val = st.text_input("Rename to:", value=target_cat, key=f"ren_{target_cat}")
|
||||
if st.button("💾 Save", key=f"save_{target_cat}", use_container_width=True):
|
||||
if rename_val and rename_val != target_cat:
|
||||
SorterEngine.rename_category(target_cat, rename_val)
|
||||
st.session_state.t5_active_cat = rename_val
|
||||
st.rerun()
|
||||
st.markdown("---")
|
||||
if st.button("🗑️ Delete", key=f"del_cat_{target_cat}", type="primary", use_container_width=True):
|
||||
SorterEngine.delete_category(target_cat)
|
||||
st.rerun()
|
||||
else:
|
||||
st.info("Select a valid category to edit.")
|
||||
# Rapid navigation slider (1-based)
|
||||
st.slider(
|
||||
"Page Navigator",
|
||||
min_value=1,
|
||||
max_value=total_pages,
|
||||
value=current_page + 1,
|
||||
step=1,
|
||||
key=f"slider_{key_suffix}",
|
||||
label_visibility="collapsed",
|
||||
on_change=StreamlitState.slider_change,
|
||||
args=(f"slider_{key_suffix}",)
|
||||
)
|
||||
|
||||
@st.dialog("🔍 High-Res Inspection", width="large")
|
||||
def view_high_res(img_path):
|
||||
"""
|
||||
Opens a modal and loads the ORIGINAL size image on demand.
|
||||
We still compress to WebP (q=90) to ensure it sends fast,
|
||||
but we do NOT resize the dimensions.
|
||||
"""
|
||||
# Load with target_size=None to keep original dimensions
|
||||
# Quality=90 for high fidelity
|
||||
img_data = SorterEngine.compress_for_web(img_path, quality=90, target_size=None)
|
||||
# Calculate button window (show current ±2 pages)
|
||||
window_radius = 2
|
||||
start_p = max(0, current_page - window_radius)
|
||||
end_p = min(total_pages, current_page + window_radius + 1)
|
||||
|
||||
if img_data:
|
||||
st.image(img_data, use_container_width=True)
|
||||
st.caption(f"Filename: {os.path.basename(img_path)}")
|
||||
else:
|
||||
st.error("Could not load full resolution image.")
|
||||
# Adjust near edges to maintain consistent width
|
||||
if current_page < window_radius:
|
||||
end_p = min(total_pages, 5)
|
||||
elif current_page > total_pages - window_radius - 1:
|
||||
start_p = max(0, total_pages - 5)
|
||||
|
||||
# ... (Gallery Grid code remains exactly the same) ...
|
||||
# --- UPDATED CACHE FUNCTION ---
|
||||
@st.cache_data(show_spinner=False, max_entries=2000)
|
||||
def get_cached_thumbnail(path, quality, target_size, mtime):
|
||||
# We pass the dynamic target_size here
|
||||
return SorterEngine.compress_for_web(path, quality, target_size)
|
||||
num_buttons = end_p - start_p
|
||||
if num_buttons < 1:
|
||||
start_p = 0
|
||||
end_p = total_pages
|
||||
num_buttons = total_pages
|
||||
|
||||
# Render button row: [Prev] [1] [2] [3] ... [Next]
|
||||
cols = st.columns([1] + [1] * num_buttons + [1])
|
||||
|
||||
# Previous button
|
||||
with cols[0]:
|
||||
st.button(
|
||||
"◀",
|
||||
disabled=(current_page == 0),
|
||||
on_click=StreamlitState.change_page,
|
||||
args=(-1,),
|
||||
key=f"prev_{key_suffix}",
|
||||
use_container_width=True
|
||||
)
|
||||
|
||||
# Page number buttons
|
||||
for i, p_idx in enumerate(range(start_p, end_p)):
|
||||
with cols[i + 1]:
|
||||
label = str(p_idx + 1)
|
||||
if p_idx in tagged_pages:
|
||||
label += " 🟢"
|
||||
|
||||
btn_type = "primary" if p_idx == current_page else "secondary"
|
||||
|
||||
st.button(
|
||||
label,
|
||||
type=btn_type,
|
||||
key=f"btn_p{p_idx}_{key_suffix}",
|
||||
use_container_width=True,
|
||||
on_click=StreamlitState.set_page,
|
||||
args=(p_idx,)
|
||||
)
|
||||
|
||||
# Next button
|
||||
with cols[-1]:
|
||||
st.button(
|
||||
"▶",
|
||||
disabled=(current_page >= total_pages - 1),
|
||||
on_click=StreamlitState.change_page,
|
||||
args=(1,),
|
||||
key=f"next_{key_suffix}",
|
||||
use_container_width=True
|
||||
)
|
||||
|
||||
# --- UPDATED GALLERY FRAGMENT ---
|
||||
@st.fragment
|
||||
def render_gallery_grid(current_batch, quality, grid_cols, path_o): # <--- 1. Added path_o
|
||||
def render_gallery_grid(
|
||||
current_batch: List[str],
|
||||
quality: int,
|
||||
grid_cols: int,
|
||||
path_o: str
|
||||
):
|
||||
"""Render image gallery grid with parallel loading."""
|
||||
staged = SorterEngine.get_staged_data()
|
||||
history = SorterEngine.get_processed_log()
|
||||
selected_cat = st.session_state.get("t5_active_cat", "Default")
|
||||
selected_cat = st.session_state.t5_active_cat
|
||||
tagging_disabled = selected_cat.startswith("---")
|
||||
|
||||
# 2. Ensure global counter exists (default to 1)
|
||||
if "t5_next_index" not in st.session_state: st.session_state.t5_next_index = 1
|
||||
|
||||
# 3. Smart Resolution (Wide screen assumption)
|
||||
target_size = int(2400 / grid_cols)
|
||||
|
||||
# 4. Parallel Load (16 threads for WebP)
|
||||
import concurrent.futures
|
||||
# Parallel thumbnail loading
|
||||
batch_cache = _load_thumbnails_parallel(current_batch, quality, target_size)
|
||||
|
||||
# Render grid
|
||||
cols = st.columns(grid_cols)
|
||||
|
||||
for idx, img_path in enumerate(current_batch):
|
||||
with cols[idx % grid_cols]:
|
||||
_render_image_card(
|
||||
img_path=img_path,
|
||||
batch_cache=batch_cache,
|
||||
staged=staged,
|
||||
history=history,
|
||||
selected_cat=selected_cat,
|
||||
tagging_disabled=tagging_disabled,
|
||||
path_o=path_o
|
||||
)
|
||||
|
||||
def _load_thumbnails_parallel(
|
||||
batch: List[str],
|
||||
quality: int,
|
||||
target_size: int
|
||||
) -> Dict[str, Optional[bytes]]:
|
||||
"""Load thumbnails in parallel using ThreadPoolExecutor."""
|
||||
batch_cache = {}
|
||||
|
||||
def fetch_one(p):
|
||||
def fetch_one(path: str) -> Tuple[str, Optional[bytes]]:
|
||||
try:
|
||||
mtime = os.path.getmtime(p)
|
||||
return p, get_cached_thumbnail(p, quality, target_size, mtime)
|
||||
except:
|
||||
return p, None
|
||||
mtime = os.path.getmtime(path)
|
||||
data = get_cached_thumbnail(path, quality, target_size, mtime)
|
||||
return path, data
|
||||
except Exception:
|
||||
return path, None
|
||||
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=16) as executor:
|
||||
future_to_path = {executor.submit(fetch_one, p): p for p in current_batch}
|
||||
for future in concurrent.futures.as_completed(future_to_path):
|
||||
p, data = future.result()
|
||||
batch_cache[p] = data
|
||||
futures = {executor.submit(fetch_one, p): p for p in batch}
|
||||
for future in concurrent.futures.as_completed(futures):
|
||||
path, data = future.result()
|
||||
batch_cache[path] = data
|
||||
|
||||
# 5. Render Grid
|
||||
cols = st.columns(grid_cols)
|
||||
for idx, img_path in enumerate(current_batch):
|
||||
return batch_cache
|
||||
|
||||
def _render_image_card(
|
||||
img_path: str,
|
||||
batch_cache: Dict[str, Optional[bytes]],
|
||||
staged: Dict,
|
||||
history: Dict,
|
||||
selected_cat: str,
|
||||
tagging_disabled: bool,
|
||||
path_o: str
|
||||
):
|
||||
"""Render individual image card."""
|
||||
unique_key = f"frag_{os.path.basename(img_path)}"
|
||||
with cols[idx % grid_cols]:
|
||||
is_staged = img_path in staged
|
||||
is_processed = img_path in history
|
||||
|
||||
with st.container(border=True):
|
||||
# Header: [Name] [Zoom] [Delete]
|
||||
# Header: filename + zoom + delete
|
||||
c_name, c_zoom, c_del = st.columns([4, 1, 1])
|
||||
c_name.caption(os.path.basename(img_path)[:10])
|
||||
c_name.caption(os.path.basename(img_path)[:15])
|
||||
|
||||
if c_zoom.button("🔍", key=f"zoom_{unique_key}"):
|
||||
view_high_res(img_path)
|
||||
|
||||
c_del.button("❌", key=f"del_{unique_key}", on_click=cb_delete_image, args=(img_path,))
|
||||
c_del.button(
|
||||
"❌",
|
||||
key=f"del_{unique_key}",
|
||||
on_click=action_delete,
|
||||
args=(img_path,)
|
||||
)
|
||||
|
||||
# Status Banners
|
||||
# Status indicator
|
||||
if is_staged:
|
||||
st.success(f"🏷️ {staged[img_path]['cat']}")
|
||||
staged_info = staged[img_path]
|
||||
idx = _extract_index(staged_info['name'])
|
||||
idx_str = f" #{idx}" if idx else ""
|
||||
st.success(f"🏷️ {staged_info['cat']}{idx_str}")
|
||||
elif is_processed:
|
||||
st.info(f"✅ {history[img_path]['action']}")
|
||||
|
||||
# Image (Cached)
|
||||
# Thumbnail
|
||||
img_data = batch_cache.get(img_path)
|
||||
if img_data:
|
||||
st.image(img_data, use_container_width=True)
|
||||
else:
|
||||
st.error("Failed to load")
|
||||
|
||||
# Action Area
|
||||
# Action buttons
|
||||
if not is_staged:
|
||||
# 6. Split Row: [Idx Input] [Tag Button]
|
||||
c_idx, c_tag = st.columns([1, 2], vertical_alignment="bottom")
|
||||
|
||||
# Manual Override Box (Defaults to global session value)
|
||||
card_index = c_idx.number_input(
|
||||
"Idx",
|
||||
min_value=1, step=1,
|
||||
"Index",
|
||||
min_value=1,
|
||||
step=1,
|
||||
value=st.session_state.t5_next_index,
|
||||
label_visibility="collapsed",
|
||||
key=f"idx_{unique_key}"
|
||||
)
|
||||
|
||||
# Tag Button (Passes path_o for conflict check)
|
||||
c_tag.button(
|
||||
"Tag",
|
||||
key=f"tag_{unique_key}",
|
||||
disabled=tagging_disabled,
|
||||
use_container_width=True,
|
||||
on_click=cb_tag_image,
|
||||
# Passing card_index + path_o is vital here
|
||||
on_click=action_tag,
|
||||
args=(img_path, selected_cat, card_index, path_o)
|
||||
)
|
||||
else:
|
||||
st.button("Untag", key=f"untag_{unique_key}", use_container_width=True,
|
||||
on_click=cb_untag_image, args=(img_path,))
|
||||
# Show untag with index number
|
||||
staged_name = staged[img_path]['name']
|
||||
idx = _extract_index(staged_name)
|
||||
untag_label = f"Untag (#{idx})" if idx else "Untag"
|
||||
|
||||
st.button(
|
||||
untag_label,
|
||||
key=f"untag_{unique_key}",
|
||||
use_container_width=True,
|
||||
on_click=action_untag,
|
||||
args=(img_path,)
|
||||
)
|
||||
|
||||
# ... (Batch Actions code remains exactly the same) ...
|
||||
@st.fragment
|
||||
def render_batch_actions(current_batch, path_o, page_num, path_s):
|
||||
st.write(f"### 🚀 Processing Actions")
|
||||
st.caption("Settings apply to both Page and Global actions.")
|
||||
def render_batch_actions(
|
||||
current_batch: List[str],
|
||||
path_o: str,
|
||||
page_num: int,
|
||||
path_s: str
|
||||
):
|
||||
"""Render batch processing controls."""
|
||||
st.write("### 🚀 Processing Actions")
|
||||
st.caption("Settings apply to both Page and Global actions")
|
||||
|
||||
c_set1, c_set2 = st.columns(2)
|
||||
|
||||
# CHANGED: "Copy" is now first, making it the default
|
||||
op_mode = c_set1.radio("Tagged Files:", ["Copy", "Move"], horizontal=True, key="t5_op_mode")
|
||||
c_set1.radio(
|
||||
"Tagged Files:",
|
||||
["Copy", "Move"],
|
||||
horizontal=True,
|
||||
key="t5_op_mode"
|
||||
)
|
||||
|
||||
cleanup = c_set2.radio("Untagged Files:", ["Keep", "Move to Unused", "Delete"], horizontal=True, key="t5_cleanup_mode")
|
||||
c_set2.radio(
|
||||
"Untagged Files:",
|
||||
["Keep", "Move to Unused", "Delete"],
|
||||
horizontal=True,
|
||||
key="t5_cleanup_mode"
|
||||
)
|
||||
|
||||
st.divider()
|
||||
|
||||
c_btn1, c_btn2 = st.columns(2)
|
||||
|
||||
# BUTTON 1: APPLY PAGE
|
||||
if c_btn1.button(f"APPLY PAGE {page_num}", type="secondary", use_container_width=True,
|
||||
on_click=cb_apply_batch, args=(current_batch, path_o, cleanup, op_mode)):
|
||||
st.toast(f"Page {page_num} Applied!")
|
||||
# Apply Page button
|
||||
if c_btn1.button(
|
||||
f"APPLY PAGE {page_num}",
|
||||
type="secondary",
|
||||
use_container_width=True,
|
||||
on_click=action_apply_batch,
|
||||
args=(
|
||||
current_batch,
|
||||
path_o,
|
||||
st.session_state.t5_cleanup_mode,
|
||||
st.session_state.t5_op_mode
|
||||
)
|
||||
):
|
||||
st.toast(f"Page {page_num} applied!")
|
||||
st.rerun()
|
||||
|
||||
# BUTTON 2: APPLY GLOBAL
|
||||
if c_btn2.button("APPLY ALL (GLOBAL)", type="primary", use_container_width=True,
|
||||
help="Process ALL tagged files across all pages.",
|
||||
on_click=cb_apply_global, args=(path_o, cleanup, op_mode, path_s)):
|
||||
st.toast("Global Apply Complete!")
|
||||
# Apply Global button
|
||||
if c_btn2.button(
|
||||
"APPLY ALL (GLOBAL)",
|
||||
type="primary",
|
||||
use_container_width=True,
|
||||
help="Process ALL tagged files",
|
||||
on_click=action_apply_global,
|
||||
args=(
|
||||
path_o,
|
||||
st.session_state.t5_cleanup_mode,
|
||||
st.session_state.t5_op_mode,
|
||||
path_s
|
||||
)
|
||||
):
|
||||
st.toast("Global apply complete!")
|
||||
st.rerun()
|
||||
|
||||
# ==========================================
|
||||
# 4. MAIN RENDERER
|
||||
# MAIN RENDER FUNCTION
|
||||
# ==========================================
|
||||
def render(quality, profile_name):
|
||||
|
||||
def render(quality: int, profile_name: str):
|
||||
"""Main render function for Streamlit app."""
|
||||
st.subheader("🖼️ Gallery Staging Sorter")
|
||||
|
||||
# Init Mutation ID (This triggers the scanner cache refresh)
|
||||
if 't5_file_id' not in st.session_state: st.session_state.t5_file_id = 0
|
||||
if 't5_page' not in st.session_state: st.session_state.t5_page = 0
|
||||
# Initialize state
|
||||
StreamlitState.init()
|
||||
|
||||
# Load profiles and paths
|
||||
profiles = SorterEngine.load_profiles()
|
||||
p_data = profiles.get(profile_name, {})
|
||||
c1, c2 = st.columns(2)
|
||||
path_s = c1.text_input("Source Folder", value=p_data.get("tab5_source", "/storage"), key="t5_s")
|
||||
path_o = c2.text_input("Output Folder", value=p_data.get("tab5_out", "/storage"), key="t5_o")
|
||||
|
||||
if path_s != p_data.get("tab5_source") or path_o != p_data.get("tab5_out"):
|
||||
if st.button("💾 Save Settings"):
|
||||
c1, c2, c3 = st.columns([3, 3, 1])
|
||||
|
||||
path_s = c1.text_input(
|
||||
"Source Folder",
|
||||
value=p_data.get("tab5_source", "/storage"),
|
||||
key="t5_s"
|
||||
)
|
||||
|
||||
path_o = c2.text_input(
|
||||
"Output Folder",
|
||||
value=p_data.get("tab5_out", "/storage"),
|
||||
key="t5_o"
|
||||
)
|
||||
|
||||
# Save settings button
|
||||
if c3.button("💾 Save", use_container_width=True):
|
||||
SorterEngine.save_tab_paths(profile_name, t5_s=path_s, t5_o=path_o)
|
||||
# Saving settings might mean new folder, so we trigger refresh
|
||||
trigger_refresh()
|
||||
StreamlitState.trigger_refresh()
|
||||
st.toast("Settings saved!")
|
||||
st.rerun()
|
||||
|
||||
if not os.path.exists(path_s): return
|
||||
# Validate source path
|
||||
if not os.path.exists(path_s):
|
||||
st.warning("⚠️ Source path does not exist")
|
||||
return
|
||||
|
||||
# Render sidebar
|
||||
with st.sidebar:
|
||||
render_sidebar_content()
|
||||
render_sidebar_content(path_o)
|
||||
|
||||
with st.expander("👀 View Settings"):
|
||||
c_v1, c_v2 = st.columns(2)
|
||||
page_size = c_v1.slider("Images per Page", 12, 100, 24, 4)
|
||||
grid_cols = c_v2.slider("Grid Columns", 2, 8, 4)
|
||||
# View settings
|
||||
with st.expander("👀 View Settings", expanded=False):
|
||||
c_v1, c_v2, c_v3 = st.columns(3)
|
||||
|
||||
# --- USING CACHED LOADER ---
|
||||
# We pass the mutation ID. If ID is same as last run, scan is SKIPPED.
|
||||
st.session_state.t5_page_size = c_v1.slider(
|
||||
"Images/Page",
|
||||
12, 100,
|
||||
st.session_state.t5_page_size,
|
||||
4
|
||||
)
|
||||
|
||||
st.session_state.t5_grid_cols = c_v2.slider(
|
||||
"Grid Columns",
|
||||
2, 8,
|
||||
st.session_state.t5_grid_cols
|
||||
)
|
||||
|
||||
st.session_state.t5_quality = c_v3.slider(
|
||||
"Preview Quality",
|
||||
10, 100,
|
||||
st.session_state.t5_quality,
|
||||
10
|
||||
)
|
||||
|
||||
# Load images (cached)
|
||||
all_images = get_cached_images(path_s, st.session_state.t5_file_id)
|
||||
|
||||
if not all_images:
|
||||
st.info("No images found.")
|
||||
st.info("📂 No images found in source folder")
|
||||
return
|
||||
|
||||
total_items = len(all_images)
|
||||
total_pages = math.ceil(total_items / page_size)
|
||||
if st.session_state.t5_page >= total_pages: st.session_state.t5_page = max(0, total_pages - 1)
|
||||
if st.session_state.t5_page < 0: st.session_state.t5_page = 0
|
||||
# Pagination calculations
|
||||
page_size = st.session_state.t5_page_size
|
||||
total_pages = math.ceil(len(all_images) / page_size)
|
||||
|
||||
start_idx = st.session_state.t5_page * page_size
|
||||
end_idx = start_idx + page_size
|
||||
current_batch = all_images[start_idx:end_idx]
|
||||
# Bounds checking
|
||||
if st.session_state.t5_page >= total_pages:
|
||||
st.session_state.t5_page = max(0, total_pages - 1)
|
||||
if st.session_state.t5_page < 0:
|
||||
st.session_state.t5_page = 0
|
||||
|
||||
def nav_controls(key_suffix):
|
||||
c1, c2, c3, c4 = st.columns([1.5, 1, 0.5, 1.5], vertical_alignment="center")
|
||||
c1.button("⬅️ Prev", disabled=(st.session_state.t5_page == 0), on_click=cb_change_page, args=(-1,), key=f"p_{key_suffix}", use_container_width=True)
|
||||
c2.number_input("Page", min_value=1, max_value=total_pages, value=st.session_state.t5_page + 1, step=1, label_visibility="collapsed", key=f"jump_{key_suffix}", on_change=cb_jump_page, args=(f"jump_{key_suffix}",))
|
||||
c3.markdown(f"<div style='text-align: left; font-weight: bold;'>/ {total_pages}</div>", unsafe_allow_html=True)
|
||||
c4.button("Next ➡️", disabled=(st.session_state.t5_page >= total_pages - 1), on_click=cb_change_page, args=(1,), key=f"n_{key_suffix}", use_container_width=True)
|
||||
current_page = st.session_state.t5_page
|
||||
start_idx = current_page * page_size
|
||||
current_batch = all_images[start_idx : start_idx + page_size]
|
||||
|
||||
st.divider()
|
||||
nav_controls("top")
|
||||
render_gallery_grid(current_batch, quality, grid_cols, path_o)
|
||||
st.divider()
|
||||
nav_controls("bottom")
|
||||
# Calculate green dots (cached)
|
||||
staged = SorterEngine.get_staged_data()
|
||||
green_dots = get_cached_green_dots(
|
||||
all_images,
|
||||
page_size,
|
||||
frozenset(staged.keys())
|
||||
)
|
||||
|
||||
# Render UI components
|
||||
st.divider()
|
||||
|
||||
render_batch_actions(current_batch, path_o, st.session_state.t5_page + 1, path_s)
|
||||
# Top pagination
|
||||
render_pagination_carousel("top", total_pages, current_page, green_dots)
|
||||
|
||||
# Gallery grid
|
||||
render_gallery_grid(
|
||||
current_batch,
|
||||
st.session_state.t5_quality,
|
||||
st.session_state.t5_grid_cols,
|
||||
path_o
|
||||
)
|
||||
|
||||
st.divider()
|
||||
|
||||
# Bottom pagination
|
||||
render_pagination_carousel("bot", total_pages, current_page, green_dots)
|
||||
|
||||
st.divider()
|
||||
|
||||
# Batch actions
|
||||
render_batch_actions(current_batch, path_o, current_page + 1, path_s)
|
||||
Reference in New Issue
Block a user