Compare commits

..

89 Commits

Author SHA1 Message Date
216ca1e460 revert 0db2ee4587
revert claude
2026-01-28 15:22:18 +01:00
0db2ee4587 claude 2026-01-28 15:16:08 +01:00
d43813cc2a Merge pull request 'hotkey' (#8) from hotkey into main
Reviewed-on: #8
2026-01-23 13:19:26 +01:00
97424ea0af Update gallery_app.py 2026-01-23 12:54:08 +01:00
eafc5de6f2 Update gallery_app.py 2026-01-23 12:49:56 +01:00
fa710e914e Update gallery_app.py 2026-01-23 12:41:48 +01:00
e3e337af88 Update gallery_app.py 2026-01-23 12:39:49 +01:00
15ca74ad4b Merge pull request 'profile' (#7) from profile into main
Reviewed-on: #7
2026-01-23 12:39:02 +01:00
a11d76fd5f revert cf1238bbff
revert Update gallery_app.py
2026-01-22 15:39:03 +01:00
cf1238bbff Update gallery_app.py 2026-01-22 15:35:32 +01:00
d3b7f31730 Update gallery_app.py 2026-01-22 15:30:16 +01:00
52c06c4db7 Update gallery_app.py 2026-01-22 15:24:22 +01:00
3a320f3187 Merge pull request 'profile' (#6) from profile into main
Reviewed-on: #6
2026-01-22 15:21:15 +01:00
c37e2bd5e0 Update gallery_app.py 2026-01-22 15:17:04 +01:00
9418661be9 Update engine.py 2026-01-22 15:14:43 +01:00
7349015177 Update gallery_app.py 2026-01-22 15:14:30 +01:00
918a6e9414 revert 5909c0ec99
revert Update gallery_app.py
2026-01-22 15:14:19 +01:00
5909c0ec99 Update gallery_app.py 2026-01-22 15:14:01 +01:00
286b0410ff Update gallery_app.py 2026-01-22 13:03:00 +01:00
0c18f570d4 revert 3f2160405a
revert Update gallery_app.py
2026-01-22 12:59:06 +01:00
3f2160405a Update gallery_app.py 2026-01-22 12:58:48 +01:00
f3f57f7c53 Update gallery_app.py 2026-01-22 12:57:23 +01:00
957aab0656 Update gallery_app.py 2026-01-22 12:50:51 +01:00
0a94548f5e Update engine.py 2026-01-22 12:50:16 +01:00
124fbacd2a Merge pull request 'tag' (#5) from tag into main
Reviewed-on: #5
2026-01-22 12:49:02 +01:00
0f0aeed2f1 Update gallery_app.py 2026-01-20 16:08:21 +01:00
fe6e55de16 Update gallery_app.py 2026-01-20 16:03:03 +01:00
dd454ebf6f Update engine.py 2026-01-20 13:54:06 +01:00
2854907359 Update gallery_app.py 2026-01-20 13:53:51 +01:00
48417b6d73 Update engine.py 2026-01-20 13:27:41 +01:00
ce7abd8a29 Update gallery_app.py 2026-01-20 13:27:30 +01:00
df12413c5d Update engine.py 2026-01-20 13:23:34 +01:00
c56b07f999 Update engine.py 2026-01-20 13:21:11 +01:00
c89cecd43f Update engine.py 2026-01-20 13:19:02 +01:00
37f6166b37 Update gallery_app.py 2026-01-20 13:17:22 +01:00
dc31b0bebb Update engine.py 2026-01-20 13:17:06 +01:00
f0b0114fc5 Merge pull request 'nicegui' (#4) from nicegui into main
Reviewed-on: #4
2026-01-20 11:48:11 +01:00
0c9446b3f8 Update gallery_app.py 2026-01-20 11:27:57 +01:00
4c49635018 Update gallery_app.py 2026-01-20 10:47:09 +01:00
826ae384df Update tab_gallery_sorter.py 2026-01-20 01:32:34 +01:00
54ba10d4e5 clause sonet 2026-01-20 01:27:24 +01:00
0e6de4ae0b Update gallery_app.py 2026-01-19 22:55:10 +01:00
b919c52255 Update gallery_app.py 2026-01-19 22:35:08 +01:00
8fc8372a9b Update gallery_app.py 2026-01-19 22:29:48 +01:00
246b78719e Update gallery_app.py 2026-01-19 21:00:55 +01:00
0d5f393aff Update gallery_app.py 2026-01-19 20:56:33 +01:00
4fb038eda1 Update gallery_app.py 2026-01-19 20:24:32 +01:00
690aaafacf Update gallery_app.py 2026-01-19 20:21:46 +01:00
3e9ff43bc9 Update gallery_app.py 2026-01-19 20:08:03 +01:00
91a0cc5138 Update gallery_app.py 2026-01-19 20:06:23 +01:00
588822f856 Update gallery_app.py 2026-01-19 20:03:47 +01:00
1cbad1a3ed Update gallery_app.py 2026-01-19 19:59:55 +01:00
b5794e9db5 Update gallery_app.py 2026-01-19 19:54:42 +01:00
b938dc68fa Update gallery_app.py 2026-01-19 19:36:47 +01:00
dde0e90442 Update gallery_app.py 2026-01-19 19:34:08 +01:00
0b5e9377e4 Add start.sh 2026-01-19 19:31:29 +01:00
091936069a Update gallery_app.py 2026-01-19 19:26:23 +01:00
0d1eca4ef3 Add gallery_app.py 2026-01-19 19:25:01 +01:00
39153d3493 Update requirements.txt 2026-01-19 19:24:25 +01:00
af2c148747 Merge pull request 'carrousel' (#3) from carrousel into main
Reviewed-on: #3
2026-01-19 17:29:57 +01:00
bf845292ee Update tab_gallery_sorter.py 2026-01-19 16:46:13 +01:00
40453dad94 Update tab_gallery_sorter.py 2026-01-19 16:32:35 +01:00
13818737e2 Update tab_gallery_sorter.py 2026-01-19 16:14:17 +01:00
e4b126075d Update tab_gallery_sorter.py 2026-01-19 16:10:27 +01:00
04a29d7424 Update tab_gallery_sorter.py 2026-01-19 16:03:30 +01:00
a6314cadd9 Update tab_gallery_sorter.py 2026-01-19 15:46:19 +01:00
024caac5e5 Update tab_gallery_sorter.py 2026-01-19 15:39:41 +01:00
c9a2817f41 Update engine.py 2026-01-19 15:38:34 +01:00
69f34a84c4 Update tab_gallery_sorter.py 2026-01-19 15:36:25 +01:00
470e3114c4 Update tab_gallery_sorter.py 2026-01-19 15:32:22 +01:00
ac189d75ba Update engine.py 2026-01-19 15:29:38 +01:00
28e8722a10 Merge pull request 'speed' (#2) from speed into main
Reviewed-on: #2
2026-01-19 15:27:43 +01:00
b909069174 Update tab_gallery_sorter.py 2026-01-19 15:21:55 +01:00
9c86eb4b72 Update engine.py 2026-01-19 14:59:43 +01:00
4636a79ada Update tab_gallery_sorter.py 2026-01-19 14:58:52 +01:00
ff27a3bc83 Update engine.py 2026-01-19 14:50:00 +01:00
7eb71cab56 Update tab_gallery_sorter.py 2026-01-19 14:32:56 +01:00
758125a60b webp 2026-01-19 14:28:22 +01:00
e7144eb6cf webp 2026-01-19 14:27:25 +01:00
8328e4d3b4 Update tab_gallery_sorter.py 2026-01-19 14:24:25 +01:00
6363ea4590 Update engine.py 2026-01-19 14:22:24 +01:00
d30414d972 Merge pull request 'global-button' (#1) from global-button into main
Reviewed-on: #1
2026-01-19 14:19:42 +01:00
c25e71a4c7 Update tab_gallery_sorter.py 2026-01-19 14:17:37 +01:00
bbc784c720 Update engine.py 2026-01-19 14:17:01 +01:00
b155d90853 Update tab_gallery_sorter.py 2026-01-19 14:13:51 +01:00
c30a346a1e Update engine.py 2026-01-19 14:11:28 +01:00
19852d5353 Update engine.py 2026-01-19 14:10:00 +01:00
8662e61690 Update tab_gallery_sorter.py 2026-01-19 14:07:22 +01:00
612da36a77 Update engine.py 2026-01-19 14:06:54 +01:00
5 changed files with 2040 additions and 275 deletions

402
engine.py
View File

@@ -10,33 +10,36 @@ class SorterEngine:
# --- 1. DATABASE INITIALIZATION ---
@staticmethod
def init_db():
"""Initializes all SQLite tables for the multi-tab system."""
"""Initializes tables, including the new HISTORY log."""
conn = sqlite3.connect(SorterEngine.DB_PATH)
cursor = conn.cursor()
# Profiles Table: 9 columns for independent tab paths
# Existing tables...
cursor.execute('''CREATE TABLE IF NOT EXISTS profiles
(name TEXT PRIMARY KEY,
tab1_target TEXT,
tab2_target TEXT, tab2_control TEXT,
tab4_source TEXT, tab4_out TEXT,
mode TEXT,
tab5_source TEXT, tab5_out TEXT)''')
(name TEXT PRIMARY KEY, tab1_target TEXT, tab2_target TEXT, tab2_control TEXT,
tab4_source TEXT, tab4_out TEXT, mode TEXT, tab5_source TEXT, tab5_out TEXT)''')
cursor.execute('''CREATE TABLE IF NOT EXISTS folder_ids (path TEXT PRIMARY KEY, folder_id INTEGER)''')
cursor.execute('''CREATE TABLE IF NOT EXISTS categories (name TEXT PRIMARY KEY)''')
# Staging Area: Tracks pending renames for the Gallery Tab
cursor.execute('''CREATE TABLE IF NOT EXISTS staging_area
(original_path TEXT PRIMARY KEY,
target_category TEXT,
new_name TEXT,
is_marked INTEGER DEFAULT 0)''')
(original_path TEXT PRIMARY KEY, target_category TEXT, new_name TEXT, is_marked INTEGER DEFAULT 0)''')
# Seed default categories
# --- NEW: HISTORY TABLE ---
cursor.execute('''CREATE TABLE IF NOT EXISTS processed_log
(source_path TEXT PRIMARY KEY, category TEXT, action_type TEXT)''')
# --- NEW: FOLDER TAGS TABLE (persists tags by folder) ---
cursor.execute('''CREATE TABLE IF NOT EXISTS folder_tags
(folder_path TEXT, filename TEXT, category TEXT, tag_index INTEGER,
PRIMARY KEY (folder_path, filename))''')
# --- NEW: PROFILE CATEGORIES TABLE (each profile has its own categories) ---
cursor.execute('''CREATE TABLE IF NOT EXISTS profile_categories
(profile TEXT, category TEXT, PRIMARY KEY (profile, category))''')
# Seed categories if empty (legacy table)
cursor.execute("SELECT COUNT(*) FROM categories")
if cursor.fetchone()[0] == 0:
for cat in ["_TRASH", "Default", "Action", "Solo"]:
for cat in ["_TRASH", "control", "Default", "Action", "Solo"]:
cursor.execute("INSERT OR IGNORE INTO categories VALUES (?)", (cat,))
conn.commit()
@@ -69,6 +72,31 @@ class SorterEngine:
cursor.execute("INSERT OR REPLACE INTO profiles VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", new_values)
conn.commit()
conn.close()
@staticmethod
def load_batch_parallel(image_paths, quality):
    """Compress several images concurrently.

    IO-bound Pillow work releases the GIL, so a thread pool gives a real
    speed-up when preparing a page of thumbnails.

    Args:
        image_paths: iterable of image file paths.
        quality: compression quality forwarded to compress_for_web.

    Returns:
        dict mapping each input path to the compressed payload
        (or None when compress_for_web failed for that path).
    """
    import concurrent.futures

    def _compress(img_path):
        # Each worker returns its own (path, payload) pair so results
        # can be collected into a dict regardless of completion order.
        return img_path, SorterEngine.compress_for_web(img_path, quality)

    compressed = {}
    with concurrent.futures.ThreadPoolExecutor(max_workers=8) as pool:
        for img_path, payload in pool.map(_compress, image_paths):
            compressed[img_path] = payload
    return compressed
@staticmethod
def load_profiles():
@@ -84,21 +112,45 @@ class SorterEngine:
"tab5_source": r[7], "tab5_out": r[8]
} for r in rows}
# --- 3. CATEGORY MANAGEMENT (Sorted A-Z) ---
# --- 3. CATEGORY MANAGEMENT (Profile-based) ---
@staticmethod
def get_categories():
def get_categories(profile=None):
conn = sqlite3.connect(SorterEngine.DB_PATH)
cursor = conn.cursor()
cursor.execute("SELECT name FROM categories ORDER BY name COLLATE NOCASE ASC")
cats = [r[0] for r in cursor.fetchall()]
# Ensure table exists
cursor.execute('''CREATE TABLE IF NOT EXISTS profile_categories
(profile TEXT, category TEXT, PRIMARY KEY (profile, category))''')
if profile:
cursor.execute("SELECT category FROM profile_categories WHERE profile = ? ORDER BY category COLLATE NOCASE ASC", (profile,))
cats = [r[0] for r in cursor.fetchall()]
# If no categories for this profile, seed with defaults
if not cats:
for cat in ["_TRASH", "control"]:
cursor.execute("INSERT OR IGNORE INTO profile_categories VALUES (?, ?)", (profile, cat))
conn.commit()
cats = ["_TRASH", "control"]
else:
# Fallback to legacy table
cursor.execute("SELECT name FROM categories ORDER BY name COLLATE NOCASE ASC")
cats = [r[0] for r in cursor.fetchall()]
conn.close()
return cats
@staticmethod
def add_category(name):
def add_category(name, profile=None):
conn = sqlite3.connect(SorterEngine.DB_PATH)
cursor = conn.cursor()
cursor.execute("INSERT OR IGNORE INTO categories VALUES (?)", (name,))
if profile:
cursor.execute('''CREATE TABLE IF NOT EXISTS profile_categories
(profile TEXT, category TEXT, PRIMARY KEY (profile, category))''')
cursor.execute("INSERT OR IGNORE INTO profile_categories VALUES (?, ?)", (profile, name))
else:
cursor.execute("INSERT OR IGNORE INTO categories VALUES (?)", (name,))
conn.commit()
conn.close()
@@ -222,6 +274,15 @@ class SorterEngine:
conn.commit()
conn.close()
@staticmethod
def clear_staging_area():
    """Wipe every pending rename/tag from the staging_area table."""
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    # Connection.execute creates the cursor implicitly.
    conn.execute("DELETE FROM staging_area")
    conn.commit()
    conn.close()
@staticmethod
def get_staged_data():
"""Retrieves current tagged/staged images."""
@@ -234,22 +295,24 @@ class SorterEngine:
return {r[0]: {"cat": r[1], "name": r[2], "marked": r[3]} for r in rows}
@staticmethod
def commit_staging(output_root, cleanup_mode, source_root=None):
"""Global commit directly to output root (No Subfolders)."""
def commit_global(output_root, cleanup_mode, operation="Copy", source_root=None, profile=None):
"""Commits ALL staged files and fixes permissions."""
data = SorterEngine.get_staged_data()
# Save folder tags BEFORE processing (so we can restore them later)
if source_root:
SorterEngine.save_folder_tags(source_root, profile)
conn = sqlite3.connect(SorterEngine.DB_PATH)
cursor = conn.cursor()
staged_paths = set(data.keys())
if not os.path.exists(output_root):
os.makedirs(output_root, exist_ok=True)
if not os.path.exists(output_root): os.makedirs(output_root, exist_ok=True)
# 1. Process all Staged Items
for old_p, info in data.items():
if os.path.exists(old_p):
# CHANGED: Direct move to root
final_dst = os.path.join(output_root, info['name'])
# Collision Safety for global commit
if os.path.exists(final_dst):
root, ext = os.path.splitext(info['name'])
c = 1
@@ -257,16 +320,33 @@ class SorterEngine:
final_dst = os.path.join(output_root, f"{root}_{c}{ext}")
c += 1
shutil.move(old_p, final_dst)
if operation == "Copy":
shutil.copy2(old_p, final_dst)
else:
shutil.move(old_p, final_dst)
# --- FIX PERMISSIONS ---
SorterEngine.fix_permissions(final_dst)
# Log History
cursor.execute("INSERT OR REPLACE INTO processed_log VALUES (?, ?, ?)",
(old_p, info['cat'], operation))
# 2. Global Cleanup
if cleanup_mode != "Keep" and source_root:
for img_p in SorterEngine.get_images(source_root, recursive=True):
if img_p not in staged_paths:
all_imgs = SorterEngine.get_images(source_root, recursive=True)
for img_p in all_imgs:
if img_p not in data:
if cleanup_mode == "Move to Unused":
un_dir = os.path.join(source_root, "unused")
os.makedirs(un_dir, exist_ok=True)
shutil.move(img_p, os.path.join(un_dir, os.path.basename(img_p)))
elif cleanup_mode == "Delete": os.remove(img_p)
unused_dir = os.path.join(source_root, "unused")
os.makedirs(unused_dir, exist_ok=True)
dest_unused = os.path.join(unused_dir, os.path.basename(img_p))
shutil.move(img_p, dest_unused)
SorterEngine.fix_permissions(dest_unused)
elif cleanup_mode == "Delete":
os.remove(img_p)
cursor.execute("DELETE FROM staging_area")
conn.commit()
@@ -327,14 +407,29 @@ class SorterEngine:
return t_dst, c_dst
@staticmethod
def compress_for_web(path, quality):
"""Compresses images for UI performance."""
def compress_for_web(path, quality, target_size=None):
"""
Loads image, resizes smart, and saves as WebP.
"""
try:
with Image.open(path) as img:
# 1. Convert to RGB (WebP handles RGBA, but RGB is safer for consistency)
if img.mode not in ('RGB', 'RGBA'):
img = img.convert("RGB")
# 2. Smart Resize (Only if target_size is provided)
if target_size:
# Only resize if the original is actually bigger
if img.width > target_size or img.height > target_size:
img.thumbnail((target_size, target_size), Image.Resampling.LANCZOS)
# 3. Save as WebP
buf = BytesIO()
img.convert("RGB").save(buf, format="JPEG", quality=quality)
return buf
except: return None
# WebP is faster to decode in browser and smaller on disk
img.save(buf, format="WEBP", quality=quality)
return buf.getvalue()
except Exception:
return None
@staticmethod
def revert_action(action):
@@ -347,27 +442,42 @@ class SorterEngine:
shutil.move(action['c_dst'], action['c_src'])
@staticmethod
def commit_batch(file_list, output_root, cleanup_mode, operation="Move"):
"""
Commits files with support for both MOVE and COPY.
"""
def get_processed_log():
    """Return the history of committed files.

    Returns:
        dict {source_path: {"cat": category, "action": "Copy"|"Move"}}
        built from the processed_log table.
    """
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    entries = conn.execute("SELECT * FROM processed_log").fetchall()
    conn.close()
    return {src: {"cat": cat, "action": act} for src, cat, act in entries}
@staticmethod
def fix_permissions(path):
    """Best-effort chmod to rwxrwxrwx so any user/process can access the file."""
    try:
        os.chmod(path, 0o777)  # read/write/execute for owner, group and others
    except Exception:
        # Some filesystems (e.g. certain Windows setups) reject chmod;
        # permissions are a nice-to-have, so swallow the failure.
        pass
@staticmethod
def commit_batch(file_list, output_root, cleanup_mode, operation="Copy"):
"""Commits files and fixes permissions."""
data = SorterEngine.get_staged_data()
conn = sqlite3.connect(SorterEngine.DB_PATH)
cursor = conn.cursor()
# Ensure output root exists
if not os.path.exists(output_root):
os.makedirs(output_root, exist_ok=True)
if not os.path.exists(output_root): os.makedirs(output_root, exist_ok=True)
for file_path in file_list:
if not os.path.exists(file_path): continue
# --- CASE A: File is TAGGED ---
# --- CASE A: Tagged ---
if file_path in data and data[file_path]['marked']:
info = data[file_path]
final_dst = os.path.join(output_root, info['name'])
# Collision Safety
# Collision Check
if os.path.exists(final_dst):
root, ext = os.path.splitext(info['name'])
c = 1
@@ -375,24 +485,30 @@ class SorterEngine:
final_dst = os.path.join(output_root, f"{root}_{c}{ext}")
c += 1
# OPERATION CHECK: Move vs Copy
# Perform Action
if operation == "Copy":
shutil.copy2(file_path, final_dst) # copy2 preserves metadata
shutil.copy2(file_path, final_dst)
else:
shutil.move(file_path, final_dst)
# Remove from staging database
cursor.execute("DELETE FROM staging_area WHERE original_path = ?", (file_path,))
# --- FIX PERMISSIONS ---
SorterEngine.fix_permissions(final_dst)
# --- CASE B: File is UNTAGGED (Cleanup) ---
# Note: If we COPIED a tagged file, the original stays in source,
# but this 'elif' ensures we don't accidentally delete it as 'untagged'.
# Update DB
cursor.execute("DELETE FROM staging_area WHERE original_path = ?", (file_path,))
cursor.execute("INSERT OR REPLACE INTO processed_log VALUES (?, ?, ?)",
(file_path, info['cat'], operation))
# --- CASE B: Cleanup ---
elif cleanup_mode != "Keep":
if cleanup_mode == "Move to Unused":
parent = os.path.dirname(file_path)
unused_dir = os.path.join(parent, "unused")
unused_dir = os.path.join(os.path.dirname(file_path), "unused")
os.makedirs(unused_dir, exist_ok=True)
shutil.move(file_path, os.path.join(unused_dir, os.path.basename(file_path)))
dest_unused = os.path.join(unused_dir, os.path.basename(file_path))
shutil.move(file_path, dest_unused)
SorterEngine.fix_permissions(dest_unused) # Fix here too
elif cleanup_mode == "Delete":
os.remove(file_path)
@@ -425,11 +541,171 @@ class SorterEngine:
conn.close()
@staticmethod
def delete_category(name):
def delete_category(name, profile=None):
"""Deletes a category and clears any staged tags associated with it."""
conn = sqlite3.connect(SorterEngine.DB_PATH)
cursor = conn.cursor()
cursor.execute("DELETE FROM categories WHERE name = ?", (name,))
if profile:
cursor.execute("DELETE FROM profile_categories WHERE profile = ? AND category = ?", (profile, name))
else:
cursor.execute("DELETE FROM categories WHERE name = ?", (name,))
cursor.execute("DELETE FROM staging_area WHERE target_category = ?", (name,))
conn.commit()
conn.close()
@staticmethod
def get_tagged_page_indices(all_images, page_size, staged=None):
    """Return the set of page indices that contain at least one staged image.

    Used to draw "green dot" markers on the pager.

    Args:
        all_images: ordered list of image paths as shown in the gallery.
        page_size: images per page; must be > 0 (a page index is idx // page_size).
        staged: optional pre-fetched staging dict {path: info}. When None the
            staging area is read from the database; passing it in avoids a
            redundant DB round-trip when the caller already holds the data.

    Returns:
        set of 0-based page indices; empty set when nothing is staged.
    """
    if staged is None:
        staged = SorterEngine.get_staged_data()
    if not staged:
        return set()
    staged_keys = set(staged)
    # A page is "tagged" as soon as any image on it sits in the staging area.
    return {idx // page_size
            for idx, img_path in enumerate(all_images)
            if img_path in staged_keys}
# --- 7. FOLDER TAG PERSISTENCE ---
@staticmethod
def save_folder_tags(folder_path, profile=None):
    """
    Saves current staging data associated with a folder for later restoration.
    Call this BEFORE clearing the staging area.

    Args:
        folder_path: root folder whose staged images should be remembered
            (matched by string prefix against staged paths).
        profile: owning profile name; defaults to "Default" when falsy.

    Returns:
        Number of tag rows written (0 when the staging area is empty).
    """
    import re
    staged = SorterEngine.get_staged_data()
    if not staged:
        return 0
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    cursor = conn.cursor()
    # Ensure table exists with profile column
    cursor.execute('''CREATE TABLE IF NOT EXISTS folder_tags
        (profile TEXT, folder_path TEXT, filename TEXT, category TEXT, tag_index INTEGER,
        PRIMARY KEY (profile, folder_path, filename))''')
    # Check if old schema (without profile) - migrate if needed.
    # NOTE(review): this "migration" drops the legacy table outright, so any
    # tags saved under the old profile-less schema are lost — confirm intended.
    cursor.execute("PRAGMA table_info(folder_tags)")
    columns = [row[1] for row in cursor.fetchall()]
    if 'profile' not in columns:
        cursor.execute("DROP TABLE folder_tags")
        cursor.execute('''CREATE TABLE folder_tags
            (profile TEXT, folder_path TEXT, filename TEXT, category TEXT, tag_index INTEGER,
            PRIMARY KEY (profile, folder_path, filename))''')
        conn.commit()
    profile = profile or "Default"
    saved_count = 0
    for orig_path, info in staged.items():
        # Only save tags for files that are in this folder (or subfolders).
        # NOTE(review): plain prefix match — "/a/b" also matches "/a/bc/x.jpg";
        # consider a path-aware check if sibling folders share prefixes.
        if orig_path.startswith(folder_path):
            filename = os.path.basename(orig_path)
            category = info['cat']
            # Extract index from the new_name (e.g., "Action_042.jpg" -> 42);
            # defaults to 0 when the name carries no _NNN suffix.
            new_name = info['name']
            match = re.search(r'_(\d+)', new_name)
            tag_index = int(match.group(1)) if match else 0
            # Upsert: one row per (profile, folder, filename).
            cursor.execute(
                "INSERT OR REPLACE INTO folder_tags VALUES (?, ?, ?, ?, ?)",
                (profile, folder_path, filename, category, tag_index)
            )
            saved_count += 1
    conn.commit()
    conn.close()
    return saved_count
@staticmethod
def restore_folder_tags(folder_path, all_images, profile=None):
    """
    Restores previously saved tags for a folder back into the staging area.
    Call this when loading/reloading a folder.

    Args:
        folder_path: folder key the tags were saved under.
        all_images: current image paths of the folder; tags are re-attached
            by matching basenames against these paths.
        profile: owning profile; defaults to "Default" when falsy.

    Returns the number of tags restored (0 on any error — errors are printed,
    not raised, so a corrupt tags table never blocks folder loading).
    """
    try:
        conn = sqlite3.connect(SorterEngine.DB_PATH)
        cursor = conn.cursor()
        # Ensure table exists with profile column
        cursor.execute('''CREATE TABLE IF NOT EXISTS folder_tags
            (profile TEXT, folder_path TEXT, filename TEXT, category TEXT, tag_index INTEGER,
            PRIMARY KEY (profile, folder_path, filename))''')
        # Check if old schema (without profile) - migrate if needed.
        # NOTE(review): like save_folder_tags, this drops the legacy table
        # and its data instead of migrating rows — confirm intended.
        cursor.execute("PRAGMA table_info(folder_tags)")
        columns = [row[1] for row in cursor.fetchall()]
        if 'profile' not in columns:
            cursor.execute("DROP TABLE folder_tags")
            cursor.execute('''CREATE TABLE folder_tags
                (profile TEXT, folder_path TEXT, filename TEXT, category TEXT, tag_index INTEGER,
                PRIMARY KEY (profile, folder_path, filename))''')
            conn.commit()
        profile = profile or "Default"
        # Get saved tags for this folder and profile
        cursor.execute(
            "SELECT filename, category, tag_index FROM folder_tags WHERE profile = ? AND folder_path = ?",
            (profile, folder_path)
        )
        saved_tags = {row[0]: {"cat": row[1], "index": row[2]} for row in cursor.fetchall()}
        if not saved_tags:
            conn.close()
            return 0
        # Build a map of filename -> full path from current images.
        # First occurrence wins when duplicate basenames exist in subfolders.
        filename_to_path = {}
        for img_path in all_images:
            fname = os.path.basename(img_path)
            if fname not in filename_to_path:
                filename_to_path[fname] = img_path
        # Restore tags to staging area (is_marked forced to 1).
        restored = 0
        for filename, tag_info in saved_tags.items():
            if filename in filename_to_path:
                full_path = filename_to_path[filename]
                # Don't clobber an entry the user already staged this session.
                cursor.execute("SELECT 1 FROM staging_area WHERE original_path = ?", (full_path,))
                if not cursor.fetchone():
                    ext = os.path.splitext(filename)[1]
                    # Rebuild "<Category>_<NNN><ext>" from the saved index.
                    new_name = f"{tag_info['cat']}_{tag_info['index']:03d}{ext}"
                    cursor.execute(
                        "INSERT OR REPLACE INTO staging_area VALUES (?, ?, ?, 1)",
                        (full_path, tag_info['cat'], new_name)
                    )
                    restored += 1
        conn.commit()
        conn.close()
        return restored
    except Exception as e:
        print(f"Error restoring folder tags: {e}")
        return 0
@staticmethod
def clear_folder_tags(folder_path):
    """Forget all saved tags belonging to *folder_path* (all profiles)."""
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    conn.execute("DELETE FROM folder_tags WHERE folder_path = ?", (folder_path,))
    conn.commit()
    conn.close()
@staticmethod
def get_saved_folder_tags(folder_path):
    """Return saved tags for a folder (for debugging/display).

    Returns:
        dict {filename: {"cat": category, "index": tag_index}}.
    """
    conn = sqlite3.connect(SorterEngine.DB_PATH)
    rows = conn.execute(
        "SELECT filename, category, tag_index FROM folder_tags WHERE folder_path = ?",
        (folder_path,)
    ).fetchall()
    conn.close()
    return {fname: {"cat": cat, "index": idx} for fname, cat, idx in rows}

1016
gallery_app.py Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -1,2 +1,3 @@
streamlit
Pillow
nicegui

18
start.sh Normal file
View File

@@ -0,0 +1,18 @@
#!/bin/bash
# 1. Navigate to app directory
cd /app
# 2. Install dependencies (Including NiceGUI if missing)
# This checks your requirements.txt AND ensures nicegui is present
pip install --no-cache-dir -r requirements.txt
# 3. Start NiceGUI in the Background (&)
# This runs silently while the script continues
echo "🚀 Starting NiceGUI on Port 8080..."
python3 gallery_app.py &
# 4. Start Streamlit in the Foreground
# This keeps the container running
echo "🚀 Starting Streamlit on Port 8501..."
streamlit run app.py --server.port=8501 --server.address=0.0.0.0

View File

@@ -1,289 +1,743 @@
import streamlit as st
import os
import math
import concurrent.futures
from typing import Dict, Set, List, Optional, Tuple
from engine import SorterEngine
# ==========================================
# 1. GLOBAL CALLBACKS (Prevents Page Refresh)
# STATE MANAGEMENT
# ==========================================
def cb_tag_image(img_path, selected_cat):
"""Tags an image. Updates DB immediately."""
if selected_cat.startswith("---") or selected_cat == "":
class StreamlitState:
    """Centralized helpers around st.session_state for the gallery tab."""

    @staticmethod
    def init():
        """Seed every t5_* session key with its default value (idempotent)."""
        defaults = {
            't5_file_id': 0,
            't5_page': 0,
            't5_active_cat': 'Default',
            't5_next_index': 1,
            't5_op_mode': 'Copy',
            't5_cleanup_mode': 'Keep',
            't5_page_size': 24,
            't5_grid_cols': 4,
            't5_quality': 50,
        }
        for name, default in defaults.items():
            if name not in st.session_state:
                st.session_state[name] = default

    @staticmethod
    def trigger_refresh():
        """Bump the cache generation so cached folder scans are discarded."""
        st.session_state.t5_file_id += 1

    @staticmethod
    def change_page(delta: int):
        """Move *delta* pages forward/backward, clamped at page 0."""
        target = st.session_state.t5_page + delta
        st.session_state.t5_page = target if target > 0 else 0

    @staticmethod
    def set_page(page_idx: int):
        """Jump straight to *page_idx* (0-based)."""
        st.session_state.t5_page = page_idx

    @staticmethod
    def slider_change(key: str):
        """Sync the page from a 1-based slider widget stored under *key*."""
        st.session_state.t5_page = st.session_state[key] - 1
# ==========================================
# CACHING & DATA LOADING
# ==========================================
@st.cache_data(show_spinner=False)
def get_cached_images(path: str, mutation_id: int) -> List[str]:
    """Scan folder for images (recursive), memoized by Streamlit.

    mutation_id is deliberately unused in the body: it only participates in
    the cache key, so bumping it (StreamlitState.trigger_refresh) forces a
    fresh scan of the same path.
    """
    return SorterEngine.get_images(path, recursive=True)
@st.cache_data(show_spinner=False, max_entries=2000)
def get_cached_thumbnail(path: str, quality: int, target_size: int, mtime: float) -> Optional[bytes]:
    """Load and compress a thumbnail, memoized by Streamlit.

    mtime is a cache-key-only parameter: passing the file's modification time
    makes the cached thumbnail refresh automatically when the image changes
    on disk. Returns None when the image cannot be read/compressed.
    """
    try:
        return SorterEngine.compress_for_web(path, quality, target_size)
    except Exception:
        return None
@st.cache_data(show_spinner=False)
def get_cached_green_dots(all_images: List[str], page_size: int, staged_keys: frozenset) -> Set[int]:
    """Return the set of page indices holding at least one staged image.

    Cached: staged_keys is a frozenset so Streamlit can hash the arguments.
    """
    staged = set(staged_keys)
    # Page index of position i is i // page_size.
    return {pos // page_size for pos, img in enumerate(all_images) if img in staged}
@st.cache_data(show_spinner=False)
def build_index_map(active_cat: str, path_o: str, staged_data_frozen: frozenset) -> Dict[int, str]:
    """Map tag index numbers to file paths for the active category.

    Staged entries take precedence over files already committed to disk.
    staged_data_frozen is a frozenset of (path, info) pairs so Streamlit
    can hash it for caching.

    Returns: {1: '/path/to/Cat_001.jpg', 2: '/path/to/Cat_002.jpg', ...}
    """
    mapping: Dict[int, str] = {}
    # 1) Staging area first.
    for src_path, info in dict(staged_data_frozen).items():
        if info['cat'] != active_cat:
            continue
        number = _extract_index(info['name'])
        if number is not None:
            mapping[number] = src_path
    # 2) Then anything already sitting in the category's output folder.
    cat_dir = os.path.join(path_o, active_cat)
    if os.path.exists(cat_dir):
        for entry in os.listdir(cat_dir):
            if not entry.startswith(active_cat):
                continue
            number = _extract_index(entry)
            if number is not None and number not in mapping:
                mapping[number] = os.path.join(cat_dir, entry)
    return mapping
def _extract_index(filename: str) -> Optional[int]:
    """Extract the numeric index from a filename (e.g., 'Cat_042.jpg' -> 42).

    Returns None when the name has no '_' separator or the trailing
    segment is not a number.
    """
    _, sep, tail = filename.rpartition('_')
    if not sep:
        return None
    try:
        # Drop the extension (everything after the first dot) before parsing.
        return int(tail.split('.', 1)[0])
    except ValueError:
        return None
# ==========================================
# ACTIONS
# ==========================================
def action_tag(img_path: str, selected_cat: str, index_val: int, path_o: str):
"""Tag image with category and index, handling collisions."""
if selected_cat.startswith("---") or not selected_cat:
st.toast("⚠️ Select a valid category first!", icon="🚫")
return
staged = SorterEngine.get_staged_data()
ext = os.path.splitext(img_path)[1]
base_name = f"{selected_cat}_{index_val:03d}"
new_name = f"{base_name}{ext}"
# Auto-increment logic
count = len([v for v in staged.values() if v['cat'] == selected_cat]) + 1
new_name = f"{selected_cat}_{count:03d}{ext}"
# Collision detection
staged = SorterEngine.get_staged_data()
staged_names = {v['name'] for v in staged.values() if v['cat'] == selected_cat}
dest_path = os.path.join(path_o, selected_cat, new_name)
collision = False
suffix = 1
while new_name in staged_names or os.path.exists(dest_path):
collision = True
new_name = f"{base_name}_{suffix}{ext}"
dest_path = os.path.join(path_o, selected_cat, new_name)
suffix += 1
SorterEngine.stage_image(img_path, selected_cat, new_name)
def cb_untag_image(img_path):
"""Untags an image."""
if collision:
st.toast(f"⚠️ Conflict! Saved as: {new_name}", icon="🔀")
# Auto-increment index
st.session_state.t5_next_index = index_val + 1
def action_untag(img_path: str):
    """Clear any staged category/rename for *img_path*."""
    SorterEngine.clear_staged_item(img_path)
def cb_delete_image(img_path):
"""Moves image to trash."""
def action_delete(img_path: str):
    """Send *img_path* to trash, then invalidate the cached folder scan."""
    SorterEngine.delete_to_trash(img_path)
    StreamlitState.trigger_refresh()
def cb_apply_batch(current_batch, path_o, cleanup_mode, operation):
"""Commits the batch with the specified operation (Move/Copy)."""
def action_apply_batch(current_batch: List[str], path_o: str, cleanup_mode: str, operation: str):
    """Commit the staged files of the current page, then refresh caches."""
    SorterEngine.commit_batch(current_batch, path_o, cleanup_mode, operation)
    StreamlitState.trigger_refresh()
def cb_change_page(delta):
"""Updates page number (-1 or +1)."""
if 't5_page' not in st.session_state:
st.session_state.t5_page = 0
st.session_state.t5_page += delta
def action_apply_global(path_o: str, cleanup_mode: str, operation: str, path_s: str):
    """Commit every staged file across all pages, then refresh caches."""
    SorterEngine.commit_global(path_o, cleanup_mode, operation, source_root=path_s)
    StreamlitState.trigger_refresh()
def cb_jump_page(k):
"""Updates page number from direct input box."""
val = st.session_state[k]
st.session_state.t5_page = val - 1
def action_add_category(name: str):
    """Create *name* as a category and make it the active tag (no-op if empty)."""
    if not name:
        return
    SorterEngine.add_category(name)
    st.session_state.t5_active_cat = name
def action_rename_category(old_name: str, new_name: str):
    """Rename a category, keeping the active selection pointed at it."""
    if not new_name or new_name == old_name:
        return  # nothing to do for empty or unchanged names
    SorterEngine.rename_category(old_name, new_name)
    st.session_state.t5_active_cat = new_name
def action_delete_category(cat_name: str):
    """Delete *cat_name* and fall back to the first remaining category."""
    SorterEngine.delete_category(cat_name)
    remaining = SorterEngine.get_categories() or ["Default"]
    st.session_state.t5_active_cat = remaining[0]
# ==========================================
# 2. FRAGMENT: SIDEBAR (Category Manager)
# DIALOGS
# ==========================================
@st.dialog("🔍 Full Resolution", width="large")
def view_high_res(img_path: str):
    """Modal showing the image at high quality with no resize cap."""
    payload = SorterEngine.compress_for_web(img_path, quality=90, target_size=None)
    if not payload:
        st.error(f"Could not load: {img_path}")
        return
    st.image(payload, use_container_width=True)
    st.caption(f"📁 {img_path}")
@st.dialog("🖼️ Tag Preview", width="large")
def view_tag_preview(img_path: str, title: str):
    """Modal previewing the image behind a numbered tag (capped at 800px)."""
    st.subheader(title)
    payload = SorterEngine.compress_for_web(img_path, quality=80, target_size=800)
    if not payload:
        st.error(f"Could not load: {img_path}")
        return
    st.image(payload, use_container_width=True)
    st.caption(f"📁 {img_path}")
# ==========================================
# UI COMPONENTS
# ==========================================
@st.fragment
def render_sidebar_content(path_o: str):
    """Render the category-management sidebar.

    Shows the active category's 1-25 index map, the category radio selector,
    next-index controls, and the add/edit category tabs.

    Args:
        path_o: Output folder root, used when building the tag index map.

    Fixes vs. previous revision: removes the duplicated legacy category-
    processing/edit code left by a bad merge, and pre-initializes
    ``index_map`` so the auto-detect button cannot raise NameError when no
    valid category is active.
    """
    st.divider()
    st.subheader("🏷️ Category Manager")

    # Get and process categories with alphabetical separators
    cats = SorterEngine.get_categories() or ["Default"]
    processed_cats = _add_category_separators(cats)

    # Sync the radio selection into the active category BEFORE rendering,
    # so the index map below reflects the latest click.
    if "t5_radio_select" in st.session_state:
        new_selection = st.session_state.t5_radio_select
        if not new_selection.startswith("---"):
            st.session_state.t5_active_cat = new_selection

    if "t5_active_cat" not in st.session_state:
        st.session_state.t5_active_cat = cats[0]

    # Fallback if the previously active category was deleted/renamed
    current_selection = st.session_state.t5_active_cat
    if not current_selection.startswith("---") and current_selection not in cats:
        st.session_state.t5_active_cat = cats[0]

    current_cat = st.session_state.t5_active_cat

    # Always defined so the auto-detect button below never hits NameError
    # when the grid section is skipped.
    index_map = {}

    # NUMBER GRID (1-25) with previews
    if current_cat and not current_cat.startswith("---"):
        st.caption(f"**{current_cat}** Index Map")

        # Build index map (cached); frozenset makes the staged dict hashable
        staged = SorterEngine.get_staged_data()
        staged_frozen = frozenset(staged.items())
        index_map = build_index_map(current_cat, path_o, staged_frozen)

        # Render 5x5 grid: used slots open a preview, free slots set next index
        grid_cols = st.columns(5, gap="small")
        for i in range(1, 26):
            is_used = i in index_map
            btn_type = "primary" if is_used else "secondary"
            with grid_cols[(i - 1) % 5]:
                if st.button(f"{i}", key=f"grid_{i}", type=btn_type, use_container_width=True):
                    st.session_state.t5_next_index = i
                    if is_used:
                        view_tag_preview(index_map[i], f"{current_cat} #{i}")
                    else:
                        st.toast(f"Next index set to #{i}")

    st.divider()

    # CATEGORY SELECTOR
    st.radio("Active Category", processed_cats, key="t5_radio_select")

    # INDEX CONTROLS
    st.caption("Tagging Settings")
    c_num1, c_num2 = st.columns([3, 1], vertical_alignment="bottom")
    c_num1.number_input("Next Index #", min_value=1, step=1, key="t5_next_index")
    if c_num2.button("🔄", help="Auto-detect next index"):
        used_indices = list(index_map.keys()) if index_map else []
        st.session_state.t5_next_index = max(used_indices) + 1 if used_indices else 1
        st.rerun()

    st.divider()

    # CATEGORY MANAGEMENT TABS
    tab_add, tab_edit = st.tabs([" Add", "✏️ Edit"])
    with tab_add:
        c1, c2 = st.columns([3, 1])
        new_cat = c1.text_input(
            "New Category",
            label_visibility="collapsed",
            placeholder="Enter name...",
            key="t5_new_cat"
        )
        if c2.button("Add", key="btn_add_cat"):
            action_add_category(new_cat)
            st.rerun()
    with tab_edit:
        target_cat = st.session_state.t5_active_cat
        is_valid = target_cat and not target_cat.startswith("---") and target_cat in cats
        if is_valid:
            st.caption(f"Editing: **{target_cat}**")
            # RENAME
            rename_val = st.text_input("Rename to:", value=target_cat, key=f"ren_{target_cat}")
            if st.button("💾 Save Name", key=f"save_{target_cat}", use_container_width=True):
                action_rename_category(target_cat, rename_val)
                st.rerun()
            st.markdown("---")
            # DELETE
            if st.button("🗑️ Delete Category", key=f"del_cat_{target_cat}", type="primary", use_container_width=True):
                action_delete_category(target_cat)
                st.rerun()
        else:
            st.info("Select a valid category to edit.")
def _add_category_separators(cats: List[str]) -> List[str]:
"""Add alphabetical separators between categories."""
processed = []
last_char = ""
for cat in cats:
current_char = cat[0].upper()
if last_char and current_char != last_char:
processed.append(f"--- {current_char} ---")
processed.append(cat)
last_char = current_char
return processed
def render_pagination_carousel(key_suffix: str, total_pages: int, current_page: int, tagged_pages: Set[int]):
    """Render pagination controls with green dot indicators.

    Draws a quick-jump slider plus a row of page buttons windowed around the
    current page, with Prev/Next buttons on either end.

    Args:
        key_suffix: Unique suffix for widget keys so the top and bottom
            carousels can coexist on one page.
        total_pages: Total page count (1-based count).
        current_page: Currently displayed page (0-based index).
        tagged_pages: 0-based page indices containing staged images; their
            buttons get a 🟢 marker.
    """
    # Single page: no navigation needed.
    if total_pages <= 1:
        return
    # Rapid navigation slider (1-based)
    st.slider(
        "Page Navigator",
        min_value=1,
        max_value=total_pages,
        value=current_page + 1,
        step=1,
        key=f"slider_{key_suffix}",
        label_visibility="collapsed",
        on_change=StreamlitState.slider_change,
        args=(f"slider_{key_suffix}",)
    )
    # Calculate button window (show current ±2 pages)
    window_radius = 2
    start_p = max(0, current_page - window_radius)
    end_p = min(total_pages, current_page + window_radius + 1)
    # Adjust near edges to maintain consistent width (always up to 5 buttons)
    if current_page < window_radius:
        end_p = min(total_pages, 5)
    elif current_page > total_pages - window_radius - 1:
        start_p = max(0, total_pages - 5)
    num_buttons = end_p - start_p
    # Degenerate window (shouldn't normally happen): fall back to all pages.
    if num_buttons < 1:
        start_p = 0
        end_p = total_pages
        num_buttons = total_pages
    # Render button row: [Prev] [1] [2] [3] ... [Next]
    cols = st.columns([1] + [1] * num_buttons + [1])
    # Previous button
    with cols[0]:
        st.button(
            "",
            disabled=(current_page == 0),
            on_click=StreamlitState.change_page,
            args=(-1,),
            key=f"prev_{key_suffix}",
            use_container_width=True
        )
    # Page number buttons (labels are 1-based; indices stay 0-based)
    for i, p_idx in enumerate(range(start_p, end_p)):
        with cols[i + 1]:
            label = str(p_idx + 1)
            if p_idx in tagged_pages:
                label += " 🟢"
            # Highlight the current page with the primary style.
            btn_type = "primary" if p_idx == current_page else "secondary"
            st.button(
                label,
                type=btn_type,
                key=f"btn_p{p_idx}_{key_suffix}",
                use_container_width=True,
                on_click=StreamlitState.set_page,
                args=(p_idx,)
            )
    # Next button
    with cols[-1]:
        st.button(
            "",
            disabled=(current_page >= total_pages - 1),
            on_click=StreamlitState.change_page,
            args=(1,),
            key=f"next_{key_suffix}",
            use_container_width=True
        )
# ==========================================
# 3. FRAGMENT: GALLERY GRID
# ==========================================
@st.fragment
def render_gallery_grid(
    current_batch: List[str],
    quality: int,
    grid_cols: int,
    path_o: str
):
    """Render the image gallery grid with parallel thumbnail loading.

    Args:
        current_batch: Image paths for the currently visible page.
        quality: JPEG quality for thumbnails.
        grid_cols: Number of grid columns.
        path_o: Output folder root, forwarded to the per-card tag action.

    Fixes vs. previous revision: removes the duplicated legacy ``def`` line
    and the dead inline card-rendering fragments left by a bad merge; card
    rendering is fully delegated to ``_render_image_card``.
    """
    staged = SorterEngine.get_staged_data()
    history = SorterEngine.get_processed_log()
    # .get with a default keeps the fragment safe before the sidebar has run.
    selected_cat = st.session_state.get("t5_active_cat", "Default")
    tagging_disabled = selected_cat.startswith("---")
    # Scale thumbnail size down as more columns are shown.
    target_size = int(2400 / grid_cols)

    # Parallel thumbnail loading
    batch_cache = _load_thumbnails_parallel(current_batch, quality, target_size)

    # Render grid, filling columns round-robin
    cols = st.columns(grid_cols)
    for idx, img_path in enumerate(current_batch):
        with cols[idx % grid_cols]:
            _render_image_card(
                img_path=img_path,
                batch_cache=batch_cache,
                staged=staged,
                history=history,
                selected_cat=selected_cat,
                tagging_disabled=tagging_disabled,
                path_o=path_o
            )
def _load_thumbnails_parallel(
batch: List[str],
quality: int,
target_size: int
) -> Dict[str, Optional[bytes]]:
"""Load thumbnails in parallel using ThreadPoolExecutor."""
batch_cache = {}
# DELETE (Callback)
c_head2.button("", key=f"del_{unique_key}",
on_click=cb_delete_image, args=(img_path,))
def fetch_one(path: str) -> Tuple[str, Optional[bytes]]:
try:
mtime = os.path.getmtime(path)
data = get_cached_thumbnail(path, quality, target_size, mtime)
return path, data
except Exception:
return path, None
# STATUS
if is_staged:
st.success(f"🏷️ {staged[img_path]['cat']}")
with concurrent.futures.ThreadPoolExecutor(max_workers=16) as executor:
futures = {executor.submit(fetch_one, p): p for p in batch}
for future in concurrent.futures.as_completed(futures):
path, data = future.result()
batch_cache[path] = data
# IMAGE
img_data = SorterEngine.compress_for_web(img_path, quality)
if img_data:
st.image(img_data, use_container_width=True)
return batch_cache
# ACTIONS (Callbacks)
if not is_staged:
st.button("Tag", key=f"tag_{unique_key}",
disabled=tagging_disabled,
use_container_width=True,
on_click=cb_tag_image, args=(img_path, selected_cat))
else:
st.button("Untag", key=f"untag_{unique_key}",
use_container_width=True,
on_click=cb_untag_image, args=(img_path,))
def _render_image_card(
    img_path: str,
    batch_cache: Dict[str, Optional[bytes]],
    staged: Dict,
    history: Dict,
    selected_cat: str,
    tagging_disabled: bool,
    path_o: str
):
    """Draw one gallery card: header row, status badge, thumbnail, tag controls.

    Args:
        img_path: Path of the image this card represents.
        batch_cache: Pre-fetched thumbnail bytes keyed by path (None = failed).
        staged: Staged-changes mapping from the engine.
        history: Processed-files log from the engine.
        selected_cat: Currently active category for new tags.
        tagging_disabled: True when a separator row is selected.
        path_o: Output folder root forwarded to the tag action.
    """
    base_name = os.path.basename(img_path)
    card_key = f"frag_{base_name}"
    entry = staged.get(img_path)

    with st.container(border=True):
        # Header: truncated filename + zoom dialog + delete button
        c_name, c_zoom, c_del = st.columns([4, 1, 1])
        c_name.caption(base_name[:15])
        if c_zoom.button("🔍", key=f"zoom_{card_key}"):
            view_high_res(img_path)
        c_del.button(
            "",
            key=f"del_{card_key}",
            on_click=action_delete,
            args=(img_path,)
        )

        # Status badge: staged wins over processed-history
        if entry is not None:
            badge_idx = _extract_index(entry['name'])
            suffix = f" #{badge_idx}" if badge_idx else ""
            st.success(f"🏷️ {entry['cat']}{suffix}")
        elif img_path in history:
            st.info(f"{history[img_path]['action']}")

        # Thumbnail (already fetched by the parallel loader)
        thumb = batch_cache.get(img_path)
        if thumb:
            st.image(thumb, use_container_width=True)
        else:
            st.error("Failed to load")

        # Action controls: tag with an index, or untag
        if entry is None:
            c_idx, c_tag = st.columns([1, 2], vertical_alignment="bottom")
            card_index = c_idx.number_input(
                "Index",
                min_value=1,
                step=1,
                value=st.session_state.t5_next_index,
                label_visibility="collapsed",
                key=f"idx_{card_key}"
            )
            c_tag.button(
                "Tag",
                key=f"tag_{card_key}",
                disabled=tagging_disabled,
                use_container_width=True,
                on_click=action_tag,
                args=(img_path, selected_cat, card_index, path_o)
            )
        else:
            slot = _extract_index(entry['name'])
            untag_label = f"Untag (#{slot})" if slot else "Untag"
            st.button(
                untag_label,
                key=f"untag_{card_key}",
                use_container_width=True,
                on_click=action_untag,
                args=(img_path,)
            )
# ==========================================
# 4. FRAGMENT: BATCH ACTIONS
# ==========================================
@st.fragment
def render_batch_actions(
    current_batch: List[str],
    path_o: str,
    page_num: int,
    path_s: str
):
    """Render batch processing controls (per-page and global apply).

    Args:
        current_batch: Image paths on the current page.
        path_o: Output folder root.
        page_num: 1-based page number, used only for labels/toasts.
        path_s: Source folder root, forwarded to the global apply action.

    Fixes vs. previous revision: removes the duplicated legacy ``def`` line
    and the legacy radio/apply widgets that a bad merge had left in place —
    they reused the keys ``t5_op_mode``/``t5_cleanup_mode`` and would raise
    Streamlit's duplicate-element-key error.
    """
    st.write("### 🚀 Processing Actions")
    st.caption("Settings apply to both Page and Global actions")

    c_set1, c_set2 = st.columns(2)

    # 1. Operation for TAGGED files
    c_set1.radio(
        "Tagged Files:",
        ["Copy", "Move"],
        horizontal=True,
        key="t5_op_mode"
    )

    # 2. Action for UNTAGGED files
    c_set2.radio(
        "Untagged Files:",
        ["Keep", "Move to Unused", "Delete"],
        horizontal=True,
        key="t5_cleanup_mode"
    )

    st.divider()
    c_btn1, c_btn2 = st.columns(2)

    # Apply current page only
    if c_btn1.button(
        f"APPLY PAGE {page_num}",
        type="secondary",
        use_container_width=True,
        on_click=action_apply_batch,
        args=(
            current_batch,
            path_o,
            st.session_state.t5_cleanup_mode,
            st.session_state.t5_op_mode
        )
    ):
        st.toast(f"Page {page_num} applied!")
        st.rerun()

    # Apply everything staged, across all pages
    if c_btn2.button(
        "APPLY ALL (GLOBAL)",
        type="primary",
        use_container_width=True,
        help="Process ALL tagged files",
        on_click=action_apply_global,
        args=(
            path_o,
            st.session_state.t5_cleanup_mode,
            st.session_state.t5_op_mode,
            path_s
        )
    ):
        st.toast("Global apply complete!")
        st.rerun()
# ==========================================
# MAIN RENDER FUNCTION
# ==========================================
def render(quality: int, profile_name: str):
    """Main render function for the gallery sorter tab.

    Args:
        quality: Legacy preview-quality argument, kept for caller
            compatibility; the session-state slider value is used instead.
        profile_name: Profile whose saved source/output paths are loaded.

    Fixes vs. previous revision: removes the duplicated legacy ``def`` line,
    the second set of path text_inputs reusing keys ``t5_s``/``t5_o``
    (duplicate-element-key error), and the dead ``nav_controls`` closure that
    a bad merge had interleaved with the new pagination code.
    """
    st.subheader("🖼️ Gallery Staging Sorter")

    # Initialize session-state defaults (page, sliders, file id, ...)
    StreamlitState.init()

    # Load profiles and paths
    profiles = SorterEngine.load_profiles()
    p_data = profiles.get(profile_name, {})

    c1, c2, c3 = st.columns([3, 3, 1])
    path_s = c1.text_input(
        "Source Folder",
        value=p_data.get("tab5_source", "/storage"),
        key="t5_s"
    )
    path_o = c2.text_input(
        "Output Folder",
        value=p_data.get("tab5_out", "/storage"),
        key="t5_o"
    )

    # Save settings button
    if c3.button("💾 Save", use_container_width=True):
        SorterEngine.save_tab_paths(profile_name, t5_s=path_s, t5_o=path_o)
        StreamlitState.trigger_refresh()
        st.toast("Settings saved!")
        st.rerun()

    # Validate source path before doing any listing work
    if not os.path.exists(path_s):
        st.warning("⚠️ Source path does not exist")
        return

    # Render sidebar (category manager needs the output root for index maps)
    with st.sidebar:
        render_sidebar_content(path_o)

    # View settings, persisted in session state
    with st.expander("👀 View Settings", expanded=False):
        c_v1, c_v2, c_v3 = st.columns(3)
        st.session_state.t5_page_size = c_v1.slider(
            "Images/Page",
            12, 100,
            st.session_state.t5_page_size,
            4
        )
        st.session_state.t5_grid_cols = c_v2.slider(
            "Grid Columns",
            2, 8,
            st.session_state.t5_grid_cols
        )
        st.session_state.t5_quality = c_v3.slider(
            "Preview Quality",
            10, 100,
            st.session_state.t5_quality,
            10
        )

    # Load images (cached; file id busts the cache after refreshes)
    all_images = get_cached_images(path_s, st.session_state.t5_file_id)
    if not all_images:
        st.info("📂 No images found in source folder")
        return

    # Pagination calculations
    page_size = st.session_state.t5_page_size
    total_pages = math.ceil(len(all_images) / page_size)

    # Clamp the page index after page-size changes or deletions
    if st.session_state.t5_page >= total_pages:
        st.session_state.t5_page = max(0, total_pages - 1)
    if st.session_state.t5_page < 0:
        st.session_state.t5_page = 0

    current_page = st.session_state.t5_page
    start_idx = current_page * page_size
    current_batch = all_images[start_idx : start_idx + page_size]

    # Pages containing staged images get a green-dot marker (cached)
    staged = SorterEngine.get_staged_data()
    green_dots = get_cached_green_dots(
        all_images,
        page_size,
        frozenset(staged.keys())
    )

    st.divider()

    # Top pagination
    render_pagination_carousel("top", total_pages, current_page, green_dots)

    # Gallery grid
    render_gallery_grid(
        current_batch,
        st.session_state.t5_quality,
        st.session_state.t5_grid_cols,
        path_o
    )

    st.divider()

    # Bottom pagination
    render_pagination_carousel("bot", total_pages, current_page, green_dots)

    st.divider()

    # Batch actions
    render_batch_actions(current_batch, path_o, current_page + 1, path_s)