Compare commits
114 Commits
sql
...
4b5fff5c6e
| Author | SHA1 | Date | |
|---|---|---|---|
| 4b5fff5c6e | |||
| d07a308865 | |||
| c4d107206f | |||
| b499eb4dfd | |||
| ba8f104bc1 | |||
| 6b7e9ea682 | |||
| c15bec98ce | |||
| 0d8e84ea36 | |||
| e2f30b0332 | |||
| 24f9b7d955 | |||
| d56f6d8170 | |||
| f2980a9f94 | |||
| 4e3ff63f6a | |||
| 6e01cab5cd | |||
| 16ed81f0db | |||
| d98cee8015 | |||
| 2ebf3a4fcd | |||
| a4cb979131 | |||
| 9a3f7b7b94 | |||
| d8597f201a | |||
| 8911323832 | |||
| af5eafaf4d | |||
| 29750acf58 | |||
| da789e68ad | |||
| 79755c286b | |||
| 39a1b98924 | |||
| d3dbd4645a | |||
| d795671763 | |||
| 9f141ba42f | |||
| 7931060d43 | |||
| 3264845e68 | |||
| fe2c6445ef | |||
| 710a8407d2 | |||
| 97748ab8ff | |||
| b0125133f1 | |||
| a8c9a0376d | |||
| 9c171627d8 | |||
| b7a7d8c379 | |||
| 3928f4d225 | |||
| a0d58d8982 | |||
| b6f31786c6 | |||
| f48098c646 | |||
| 3bbbdc827c | |||
| 79a47e034e | |||
| d5fbfe765e | |||
| f6d5ebfe34 | |||
| bdcc05f388 | |||
| 31da900502 | |||
| f8f71b002d | |||
| bc75e7f341 | |||
| 6a3b72c035 | |||
| 387d4d874c | |||
| 7261f2c689 | |||
| 2263c3f598 | |||
| 7252fa3855 | |||
| a747f86daa | |||
| f5e242950d | |||
| dfab5e12ab | |||
| a08f2676f5 | |||
| 3255fe76dc | |||
| 0d44944192 | |||
| 8cc244e8be | |||
| e841e9b76b | |||
| a4717dfab6 | |||
| 3718975d99 | |||
| 40ffdcf671 | |||
| 81ecb91835 | |||
| e196ad27f5 | |||
| bd628b062e | |||
| 1abae0de22 | |||
| 64472c7850 | |||
| 907e7efd68 | |||
| 0cfe9c9d4b | |||
| 563dba5a0c | |||
| b7164ae167 | |||
| adff3d0124 | |||
| f0ffeef731 | |||
| 2473a3d20c | |||
| 9aad04bb02 | |||
| 45da9ee431 | |||
| 8c2b0f7809 | |||
| 58345dc7c0 | |||
| 941eb836b0 | |||
| c757038535 | |||
| 8a86915347 | |||
| bde8bc5805 | |||
| e4360f9124 | |||
| a88226778e | |||
| 94dbbc694f | |||
| 2653b5a0ee | |||
| 56db4080de | |||
| 87ed2f1dfb | |||
| e6ef69b126 | |||
| 676160be8c | |||
| 8d0e16ac63 | |||
| a1bda9a979 | |||
| b02bf124fb | |||
| 326ae25ab2 | |||
| 268de89f6d | |||
| 80b77b0218 | |||
| b19e7b937c | |||
| 316ef0e620 | |||
| 18550005dd | |||
| 65e19fb7ff | |||
| b25814f756 | |||
| 2b4221e444 | |||
| a5c5410b04 | |||
| 213aa254fb | |||
| f51a0d6fe0 | |||
| d054ff2725 | |||
| 7b4b0ff7ee | |||
| d3deb58469 | |||
| a6b88467a8 | |||
| f7d7e74cb9 |
3
.gitignore
vendored
Normal file
3
.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
__pycache__/
|
||||
.pytest_cache/
|
||||
.worktrees/
|
||||
413
README.md
413
README.md
@@ -1,121 +1,336 @@
|
||||
# 🎛️ AI Settings Manager for ComfyUI
|
||||
<p align="center">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="480" height="100" viewBox="0 0 480 100">
|
||||
<defs>
|
||||
<linearGradient id="bg" x1="0%" y1="0%" x2="100%" y2="100%">
|
||||
<stop offset="0%" style="stop-color:#1a1a2e;stop-opacity:1" />
|
||||
<stop offset="100%" style="stop-color:#16213e;stop-opacity:1" />
|
||||
</linearGradient>
|
||||
<linearGradient id="accent" x1="0%" y1="0%" x2="100%" y2="0%">
|
||||
<stop offset="0%" style="stop-color:#e94560" />
|
||||
<stop offset="100%" style="stop-color:#0f3460" />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect width="480" height="100" rx="16" fill="url(#bg)" />
|
||||
<rect x="20" y="72" width="440" height="3" rx="1.5" fill="url(#accent)" opacity="0.6" />
|
||||
<text x="240" y="36" text-anchor="middle" fill="#e94560" font-family="monospace" font-size="13" font-weight="bold">{ JSON }</text>
|
||||
<text x="240" y="60" text-anchor="middle" fill="#eee" font-family="sans-serif" font-size="22" font-weight="bold">ComfyUI JSON Manager</text>
|
||||
<text x="240" y="90" text-anchor="middle" fill="#888" font-family="sans-serif" font-size="11">Visual dashboard & dynamic nodes for AI video workflows</text>
|
||||
</svg>
|
||||
</p>
|
||||
|
||||
A 100% vibecoded, visual dashboard for managing, versioning, and batch-processing JSON configuration files used in AI video generation workflows (I2V, VACE).
|
||||
<p align="center">
|
||||
<img src="https://img.shields.io/badge/License-Apache_2.0-blue.svg" alt="License" />
|
||||
<img src="https://img.shields.io/badge/Python-3.10%2B-green" alt="Python" />
|
||||
<img src="https://img.shields.io/badge/Built%20with-NiceGUI-FF4B4B" alt="NiceGUI" />
|
||||
<img src="https://img.shields.io/badge/ComfyUI-Custom%20Nodes-purple" alt="ComfyUI" />
|
||||
</p>
|
||||
|
||||
This tool consists of two parts:
|
||||
1. **Streamlit Web Interface:** A Dockerized editor to manage prompts, LoRAs, settings, and **branching history**.
|
||||
2. **ComfyUI Custom Nodes:** A set of nodes to read these JSON files (including custom keys) directly into your workflows.
|
||||
A visual dashboard for managing, versioning, and batch-processing JSON configuration files used in AI video generation workflows (I2V, VACE). Two parts:
|
||||
|
||||
  
|
||||
---
|
||||
|
||||
## ✨ Features
|
||||
|
||||
### 📝 Single File Editor
|
||||
* **Visual Interface:** Edit Prompts, Negative Prompts, Seeds, LoRAs, and advanced settings (Camera, FLF, VACE params) without touching raw JSON.
|
||||
* **🔧 Custom Parameters:** Add arbitrary key-value pairs (e.g., `controlnet_strength`, `my_custom_value`) that persist and can be read by ComfyUI.
|
||||
* **Conflict Protection:** Prevents accidental overwrites if the file is modified by another tab or process.
|
||||
* **Snippet Library:** Save reusable prompt fragments (e.g., "Cinematic Lighting", "Anime Style") and append them with one click.
|
||||
|
||||
### 🚀 Batch Processor
|
||||
* **Sequence Management:** Create unlimited sequences within a single JSON file.
|
||||
* **Smart Import:** Copy settings from **any other file** or **history entry** into your current batch sequence.
|
||||
* **Custom Keys per Shot:** Define unique parameters for specific shots in a batch (e.g., Shot 1 has `fog: 0.5`, Shot 2 has `fog: 0.0`).
|
||||
* **Promote to Single:** One-click convert a specific batch sequence back into a standalone Single File.
|
||||
|
||||
### 🕒 Visual Timeline (New!)
|
||||
* **Git-Style Branching:** A dedicated tab visualizes your edit history as a **horizontal node graph**.
|
||||
* **Non-Destructive:** If you jump back to an old version and make changes, the system automatically **forks a new branch** so you never lose history.
|
||||
* **Visual Diff:** Inspect any past version and see a "Delta View" highlighting exactly what changed (e.g., `Seed: 100 -> 555`) compared to your current state.
|
||||
* **Interactive Mode (WIP):** A zoomed-out, interactive canvas to explore complex history trees.
|
||||
1. **NiceGUI Web Interface** — Dockerized editor for prompts, LoRAs, settings, and branching history
|
||||
2. **ComfyUI Custom Nodes** — Read JSON files directly into workflows, including a dynamic node that auto-discovers keys
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ Installation
|
||||
## Features
|
||||
|
||||
### 1. Unraid / Docker Setup (The Manager)
|
||||
This tool is designed to run as a lightweight container on Unraid.
|
||||
<table>
|
||||
<tr>
|
||||
<td width="50%">
|
||||
|
||||
1. **Prepare a Folder:** Create a folder on your server (e.g., `/mnt/user/appdata/ai-manager/`) and place the following files inside:
|
||||
* `app.py`
|
||||
* `utils.py`
|
||||
* `history_tree.py` (New logic engine)
|
||||
* `tab_single.py`
|
||||
* `tab_batch.py`
|
||||
* `tab_timeline.py`
|
||||
* `tab_timeline_wip.py`
|
||||
2. **Add Container in Unraid:**
|
||||
* **Repository:** `python:3.12-slim`
|
||||
* **Network:** `Bridge`
|
||||
* **WebUI:** `http://[IP]:[PORT:8501]`
|
||||
3. **Path Mappings:**
|
||||
* **App Location:** Container `/app` ↔ Host `/mnt/user/appdata/ai-manager/`
|
||||
* **Project Data:** Container `/mnt/user/` ↔ Host `/mnt/user/` (Your media/JSON location)
|
||||
4. **Post Arguments (Crucial):**
|
||||
Enable "Advanced View" and paste this command to install the required graph engines:
|
||||
```bash
|
||||
/bin/sh -c "apt-get update && apt-get install -y graphviz && pip install streamlit opencv-python-headless graphviz streamlit-agraph && cd /app && streamlit run app.py --server.headless true --server.port 8501"
|
||||
```
|
||||
<h3>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 20 20"><rect width="20" height="20" rx="4" fill="#e94560"/><text x="10" y="14" text-anchor="middle" fill="#fff" font-size="11">B</text></svg>
|
||||
Batch Processor
|
||||
</h3>
|
||||
|
||||
### 2. ComfyUI Setup (The Nodes)
|
||||
1. Navigate to your ComfyUI installation: `ComfyUI/custom_nodes/`
|
||||
2. Create a folder named `ComfyUI-JSON-Loader`.
|
||||
3. Place the `json_loader.py` file inside.
|
||||
4. Restart ComfyUI.
|
||||
- Unlimited sequences within a single JSON file
|
||||
- Import settings from any file or history entry
|
||||
- Per-shot custom keys (e.g. Shot 1: `fog: 0.5`, Shot 2: `fog: 0.0`)
|
||||
- Clone, reorder, and manage sequences visually
|
||||
- Conflict protection against external file modifications
|
||||
- Snippet library for reusable prompt fragments
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td width="50%">
|
||||
|
||||
<h3>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 20 20"><rect width="20" height="20" rx="4" fill="#533483"/><text x="10" y="14" text-anchor="middle" fill="#fff" font-size="11">T</text></svg>
|
||||
Visual Timeline
|
||||
</h3>
|
||||
|
||||
- Git-style branching with horizontal node graph
|
||||
- Non-destructive: forking on old-version edits preserves all history
|
||||
- Visual diff highlighting changes between any two versions
|
||||
- Restore any past state with one click
|
||||
|
||||
</td>
|
||||
<td width="50%">
|
||||
|
||||
<h3>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 20 20"><rect width="20" height="20" rx="4" fill="#2b9348"/><text x="10" y="14" text-anchor="middle" fill="#fff" font-size="11">D</text></svg>
|
||||
Dynamic Node (New)
|
||||
</h3>
|
||||
|
||||
- Auto-discovers all JSON keys and exposes them as outputs
|
||||
- No code changes needed when JSON structure evolves
|
||||
- Preserves connections when keys are added on refresh
|
||||
- Native type handling: `int`, `float`, `string`
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
---
|
||||
|
||||
## 🖥️ Usage Guide
|
||||
## Installation
|
||||
|
||||
### The Web Interface
|
||||
Navigate to your container's IP (e.g., `http://192.168.1.100:8501`).
|
||||
### 1. Unraid / Docker (NiceGUI Manager)
|
||||
|
||||
* **Custom Parameters:** Scroll to the bottom of the editor (Single or Batch) to find the "🔧 Custom Parameters" section. Type a Key (e.g., `strength`) and Value (e.g., `0.8`) and click "Add".
|
||||
* **Timeline:** Switch to the **Timeline Tab** to see your version history.
|
||||
* **Restore:** Select a node from the list or click on the graph (WIP tab) to view details. Click "Restore" to revert settings to that point.
|
||||
* **Branching:** If you restore an old node and click "Save/Snap", a new branch is created automatically.
|
||||
```bash
|
||||
# Repository: python:3.12-slim
|
||||
# Network: Bridge
|
||||
# WebUI: http://[IP]:[PORT:8080]
|
||||
```
|
||||
|
||||
### ComfyUI Workflow
|
||||
Search for "JSON" in ComfyUI to find the new nodes.
|
||||
**Path Mappings:**
|
||||
| Container | Host | Purpose |
|
||||
|:---|:---|:---|
|
||||
| `/app` | `/mnt/user/appdata/ai-manager/` | App files |
|
||||
| `/mnt/user/` | `/mnt/user/` | Project data / JSON location |
|
||||
|
||||
<img width="1251" height="921" alt="image" src="https://github.com/user-attachments/assets/06d567f8-15ee-4011-9b86-d0b43ce1ba74" />
|
||||
**Post Arguments:**
|
||||
```bash
|
||||
/bin/sh -c "apt-get update && apt-get install -y graphviz && \
|
||||
pip install nicegui graphviz requests && \
|
||||
cd /app && python main.py"
|
||||
```
|
||||
|
||||
#### Standard Nodes
|
||||
| Node Name | Description |
|
||||
| :--- | :--- |
|
||||
| **JSON Loader (Standard/I2V)** | Outputs prompts, FLF, Seed, and paths for I2V. |
|
||||
| **JSON Loader (VACE Full)** | Outputs everything above plus VACE integers (frames to skip, schedule, etc.). |
|
||||
| **JSON Loader (LoRAs Only)** | Outputs the 6 LoRA strings. |
|
||||
### 2. ComfyUI (Custom Nodes)
|
||||
|
||||
#### Universal Custom Nodes (New!)
|
||||
These nodes read *any* key you added in the "Custom Parameters" section. They work for both Single files (ignores sequence input) and Batch files (reads specific sequence).
|
||||
|
||||
| Node Name | Description |
|
||||
| :--- | :--- |
|
||||
| **JSON Loader (Custom 1)** | Reads 1 custom key. Input the key name (e.g., "strength"), outputs the value string. |
|
||||
| **JSON Loader (Custom 3)** | Reads 3 custom keys. |
|
||||
| **JSON Loader (Custom 6)** | Reads 6 custom keys. |
|
||||
|
||||
#### Batch Nodes
|
||||
These nodes require an integer input (Primitive or Batch Indexer) for `sequence_number`.
|
||||
|
||||
| Node Name | Description |
|
||||
| :--- | :--- |
|
||||
| **JSON Batch Loader (I2V)** | Loads specific sequence data for I2V. |
|
||||
| **JSON Batch Loader (VACE)** | Loads specific sequence data for VACE. |
|
||||
| **JSON Batch Loader (LoRAs)** | Loads specific LoRAs for that sequence. |
|
||||
```bash
|
||||
cd ComfyUI/custom_nodes/
|
||||
git clone <this-repo> ComfyUI-JSON-Manager
|
||||
# Restart ComfyUI
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 📂 File Structure
|
||||
## ComfyUI Nodes
|
||||
|
||||
```text
|
||||
/ai-manager
|
||||
├── app.py # Main entry point & Tab controller
|
||||
├── utils.py # I/O logic, Config, and Defaults
|
||||
├── history_tree.py # Graph logic, Branching engine, Graphviz generator
|
||||
├── tab_single.py # Single Editor UI
|
||||
├── tab_batch.py # Batch Processor UI
|
||||
├── tab_timeline.py # Stable Timeline UI (Compact Graphviz + Diff Inspector)
|
||||
├── tab_timeline_wip.py # Interactive Timeline UI (Streamlit Agraph)
|
||||
└── json_loader.py # ComfyUI Custom Node script
|
||||
### Node Overview
|
||||
|
||||
<!--
|
||||
Diagram: shows JSON file flowing into different node types
|
||||
-->
|
||||
<p align="center">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="720" height="280" viewBox="0 0 720 280">
|
||||
<defs>
|
||||
<linearGradient id="nodeBg" x1="0%" y1="0%" x2="0%" y2="100%">
|
||||
<stop offset="0%" style="stop-color:#2d2d3d" />
|
||||
<stop offset="100%" style="stop-color:#1e1e2e" />
|
||||
</linearGradient>
|
||||
<filter id="shadow">
|
||||
<feDropShadow dx="1" dy="2" stdDeviation="3" flood-opacity="0.3"/>
|
||||
</filter>
|
||||
</defs>
|
||||
|
||||
<!-- JSON File -->
|
||||
<rect x="10" y="100" width="120" height="60" rx="8" fill="#0f3460" filter="url(#shadow)" />
|
||||
<text x="70" y="125" text-anchor="middle" fill="#aaa" font-family="monospace" font-size="10">batch_prompt</text>
|
||||
<text x="70" y="142" text-anchor="middle" fill="#fff" font-family="monospace" font-size="13" font-weight="bold">.json</text>
|
||||
|
||||
<!-- Arrow -->
|
||||
<line x1="130" y1="130" x2="170" y2="130" stroke="#555" stroke-width="2" marker-end="url(#arrowhead)"/>
|
||||
<defs><marker id="arrowhead" markerWidth="8" markerHeight="6" refX="8" refY="3" orient="auto"><polygon points="0 0, 8 3, 0 6" fill="#555"/></marker></defs>
|
||||
|
||||
<!-- Dynamic Node -->
|
||||
<rect x="180" y="20" width="200" height="70" rx="10" fill="url(#nodeBg)" stroke="#2b9348" stroke-width="2" filter="url(#shadow)" />
|
||||
<text x="280" y="44" text-anchor="middle" fill="#2b9348" font-family="sans-serif" font-size="12" font-weight="bold">JSON Loader (Dynamic)</text>
|
||||
<text x="280" y="62" text-anchor="middle" fill="#888" font-family="monospace" font-size="10">auto-discovers keys</text>
|
||||
<text x="280" y="78" text-anchor="middle" fill="#666" font-family="monospace" font-size="9">click Refresh to populate</text>
|
||||
|
||||
<!-- Batch I2V Node -->
|
||||
<rect x="180" y="105" width="200" height="50" rx="10" fill="url(#nodeBg)" stroke="#e94560" stroke-width="2" filter="url(#shadow)" />
|
||||
<text x="280" y="127" text-anchor="middle" fill="#e94560" font-family="sans-serif" font-size="12" font-weight="bold">JSON Batch Loader (I2V)</text>
|
||||
<text x="280" y="144" text-anchor="middle" fill="#888" font-family="monospace" font-size="10">prompts, flf, seed, paths</text>
|
||||
|
||||
<!-- Batch VACE Node -->
|
||||
<rect x="180" y="170" width="200" height="50" rx="10" fill="url(#nodeBg)" stroke="#533483" stroke-width="2" filter="url(#shadow)" />
|
||||
<text x="280" y="192" text-anchor="middle" fill="#533483" font-family="sans-serif" font-size="12" font-weight="bold">JSON Batch Loader (VACE)</text>
|
||||
<text x="280" y="209" text-anchor="middle" fill="#888" font-family="monospace" font-size="10">+ vace frames, schedule</text>
|
||||
|
||||
<!-- Custom Nodes -->
|
||||
<rect x="180" y="235" width="200" height="40" rx="10" fill="url(#nodeBg)" stroke="#0f3460" stroke-width="2" filter="url(#shadow)" />
|
||||
<text x="280" y="260" text-anchor="middle" fill="#0f3460" font-family="sans-serif" font-size="12" font-weight="bold">JSON Loader (Custom 1/3/6)</text>
|
||||
|
||||
<!-- Output labels -->
|
||||
<line x1="380" y1="55" x2="420" y2="55" stroke="#2b9348" stroke-width="1.5"/>
|
||||
<text x="430" y="47" fill="#aaa" font-family="monospace" font-size="9">general_prompt</text>
|
||||
<text x="430" y="59" fill="#aaa" font-family="monospace" font-size="9">seed (int)</text>
|
||||
<text x="430" y="71" fill="#aaa" font-family="monospace" font-size="9">my_custom_key ...</text>
|
||||
|
||||
<line x1="380" y1="130" x2="420" y2="130" stroke="#e94560" stroke-width="1.5"/>
|
||||
<text x="430" y="127" fill="#aaa" font-family="monospace" font-size="9">general_prompt, camera,</text>
|
||||
<text x="430" y="139" fill="#aaa" font-family="monospace" font-size="9">flf, seed, paths ...</text>
|
||||
|
||||
<line x1="380" y1="195" x2="420" y2="195" stroke="#533483" stroke-width="1.5"/>
|
||||
<text x="430" y="192" fill="#aaa" font-family="monospace" font-size="9">+ frame_to_skip, vace_schedule,</text>
|
||||
<text x="430" y="204" fill="#aaa" font-family="monospace" font-size="9">input_a_frames ...</text>
|
||||
|
||||
<line x1="380" y1="255" x2="420" y2="255" stroke="#0f3460" stroke-width="1.5"/>
|
||||
<text x="430" y="259" fill="#aaa" font-family="monospace" font-size="9">manual key lookup (1-6 slots)</text>
|
||||
</svg>
|
||||
</p>
|
||||
|
||||
### Dynamic Node
|
||||
|
||||
The **JSON Loader (Dynamic)** node reads your JSON file and automatically creates output slots for every key it finds. No code changes needed when your JSON structure evolves.
|
||||
|
||||
**How it works:**
|
||||
1. Enter a `json_path` and `sequence_number`
|
||||
2. Click **Refresh Outputs**
|
||||
3. Outputs appear named after JSON keys, with native types preserved
|
||||
|
||||
<p align="center">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="500" height="240" viewBox="0 0 500 240">
|
||||
<defs>
|
||||
<linearGradient id="dynBg" x1="0%" y1="0%" x2="0%" y2="100%">
|
||||
<stop offset="0%" style="stop-color:#353545" />
|
||||
<stop offset="100%" style="stop-color:#252535" />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
|
||||
<!-- Node body -->
|
||||
<rect x="20" y="10" width="240" height="220" rx="10" fill="url(#dynBg)" stroke="#2b9348" stroke-width="2" />
|
||||
<rect x="20" y="10" width="240" height="28" rx="10" fill="#2b9348" />
|
||||
<rect x="20" y="28" width="240" height="10" fill="#2b9348" />
|
||||
<text x="140" y="31" text-anchor="middle" fill="#fff" font-family="sans-serif" font-size="13" font-weight="bold">JSON Loader (Dynamic)</text>
|
||||
|
||||
<!-- Inputs -->
|
||||
<text x="35" y="60" fill="#ccc" font-family="monospace" font-size="10">json_path: /data/prompt.json</text>
|
||||
<text x="35" y="78" fill="#ccc" font-family="monospace" font-size="10">sequence_number: 1</text>
|
||||
|
||||
<!-- Refresh button -->
|
||||
<rect x="45" y="88" width="190" height="24" rx="5" fill="#2b9348" opacity="0.3" stroke="#2b9348" stroke-width="1"/>
|
||||
<text x="140" y="104" text-anchor="middle" fill="#2b9348" font-family="sans-serif" font-size="11" font-weight="bold">Refresh Outputs</text>
|
||||
|
||||
<!-- Output slots -->
|
||||
<circle cx="260" cy="130" r="5" fill="#6bcb77"/>
|
||||
<text x="245" y="134" text-anchor="end" fill="#ccc" font-family="monospace" font-size="10">general_prompt</text>
|
||||
|
||||
<circle cx="260" cy="150" r="5" fill="#6bcb77"/>
|
||||
<text x="245" y="154" text-anchor="end" fill="#ccc" font-family="monospace" font-size="10">negative</text>
|
||||
|
||||
<circle cx="260" cy="170" r="5" fill="#4d96ff"/>
|
||||
<text x="245" y="174" text-anchor="end" fill="#ccc" font-family="monospace" font-size="10">seed</text>
|
||||
|
||||
<circle cx="260" cy="190" r="5" fill="#ff6b6b"/>
|
||||
<text x="245" y="194" text-anchor="end" fill="#ccc" font-family="monospace" font-size="10">flf</text>
|
||||
|
||||
<circle cx="260" cy="210" r="5" fill="#6bcb77"/>
|
||||
<text x="245" y="214" text-anchor="end" fill="#ccc" font-family="monospace" font-size="10">camera</text>
|
||||
|
||||
<!-- Connection lines to downstream -->
|
||||
<line x1="265" y1="130" x2="340" y2="130" stroke="#6bcb77" stroke-width="1.5"/>
|
||||
<line x1="265" y1="170" x2="340" y2="165" stroke="#4d96ff" stroke-width="1.5"/>
|
||||
|
||||
<!-- Downstream node -->
|
||||
<rect x="340" y="115" width="140" height="65" rx="8" fill="url(#dynBg)" stroke="#555" stroke-width="1.5" />
|
||||
<text x="410" y="137" text-anchor="middle" fill="#aaa" font-family="sans-serif" font-size="11">KSampler</text>
|
||||
<circle cx="340" cy="130" r="4" fill="#6bcb77"/>
|
||||
<text x="350" y="150" fill="#777" font-family="monospace" font-size="9">positive</text>
|
||||
<circle cx="340" cy="165" r="4" fill="#4d96ff"/>
|
||||
<text x="350" y="170" fill="#777" font-family="monospace" font-size="9">seed</text>
|
||||
|
||||
<!-- Legend -->
|
||||
<circle cx="30" y="248" r="4" fill="#6bcb77"/>
|
||||
<text x="40" y="252" fill="#888" font-family="monospace" font-size="9">STRING</text>
|
||||
<circle cx="100" y="248" r="4" fill="#4d96ff"/>
|
||||
<text x="110" y="252" fill="#888" font-family="monospace" font-size="9">INT</text>
|
||||
<circle cx="155" y="248" r="4" fill="#ff6b6b"/>
|
||||
<text x="165" y="252" fill="#888" font-family="monospace" font-size="9">FLOAT</text>
|
||||
</svg>
|
||||
</p>
|
||||
|
||||
**Type handling:** Values keep their native Python type — `int` stays `int`, `float` stays `float`, booleans become `"true"`/`"false"` strings, everything else becomes `string`. The `*` (any) output type allows connecting to any input.
|
||||
|
||||
**Refreshing is safe:** Clicking Refresh after adding new keys to your JSON preserves all existing connections. Only removed keys get disconnected.
|
||||
|
||||
### Standard & Batch Nodes
|
||||
|
||||
| Node | Outputs | Use Case |
|
||||
|:---|:---|:---|
|
||||
| **JSON Loader (Standard/I2V)** | prompts, flf, seed, paths | Single-file I2V workflows |
|
||||
| **JSON Loader (VACE Full)** | above + VACE integers | Single-file VACE workflows |
|
||||
| **JSON Loader (LoRAs Only)** | 6 LoRA strings | Single-file LoRA loading |
|
||||
| **JSON Batch Loader (I2V)** | prompts, flf, seed, paths | Batch I2V with sequence_number |
|
||||
| **JSON Batch Loader (VACE)** | above + VACE integers | Batch VACE with sequence_number |
|
||||
| **JSON Batch Loader (LoRAs)** | 6 LoRA strings | Batch LoRA loading |
|
||||
| **JSON Loader (Custom 1/3/6)** | 1, 3, or 6 string values | Manual key lookup by name |
|
||||
|
||||
---
|
||||
|
||||
## Web Interface Usage
|
||||
|
||||
Navigate to your container's IP (e.g., `http://192.168.1.100:8080`).
|
||||
|
||||
**Path navigation** supports case-insensitive matching — typing `/media/P5/myFolder` will resolve to `/media/p5/MyFolder` automatically.
|
||||
|
||||
- **Custom Parameters:** Scroll to "Custom Parameters" in any editor tab. Type a key and value, click Add.
|
||||
- **Timeline:** Switch to the Timeline tab to see version history as a graph. Restore any version, and new edits fork a branch automatically.
|
||||
- **Snippets:** Save reusable prompt fragments and append them with one click.
|
||||
|
||||
---
|
||||
|
||||
## JSON Format
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"batch_data": [
|
||||
{
|
||||
"sequence_number": 1,
|
||||
"general_prompt": "A cinematic scene...",
|
||||
"negative": "blurry, low quality",
|
||||
"seed": 42,
|
||||
"flf": 0.5,
|
||||
"camera": "pan_left",
|
||||
"video file path": "/data/input.mp4",
|
||||
"reference image path": "/data/ref.png",
|
||||
"my_custom_key": "any value"
|
||||
// ... any additional keys are auto-discovered by the Dynamic node
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## File Structure
|
||||
|
||||
```
|
||||
ComfyUI-JSON-Manager/
|
||||
├── __init__.py # ComfyUI entry point, exports nodes + WEB_DIRECTORY
|
||||
├── json_loader.py # All ComfyUI node classes + /json_manager/get_keys API
|
||||
├── web/
|
||||
│ └── json_dynamic.js # Frontend extension for Dynamic node (refresh, show/hide)
|
||||
├── main.py # NiceGUI web UI entry point & navigator
|
||||
├── state.py # Application state management
|
||||
├── utils.py # I/O, config, defaults, case-insensitive path resolver
|
||||
├── history_tree.py # Git-style branching engine
|
||||
├── tab_batch_ng.py # Batch processor UI (NiceGUI)
|
||||
├── tab_timeline_ng.py # Visual timeline UI (NiceGUI)
|
||||
├── tab_comfy_ng.py # ComfyUI server monitor (NiceGUI)
|
||||
├── tab_raw_ng.py # Raw JSON editor (NiceGUI)
|
||||
└── tests/
|
||||
├── test_json_loader.py
|
||||
├── test_utils.py
|
||||
└── test_history_tree.py
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## License
|
||||
|
||||
[Apache 2.0](LICENSE)
|
||||
|
||||
@@ -1,3 +1,9 @@
|
||||
from .json_loader import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS
|
||||
from .project_loader import PROJECT_NODE_CLASS_MAPPINGS, PROJECT_NODE_DISPLAY_NAME_MAPPINGS
|
||||
|
||||
__all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS']
|
||||
NODE_CLASS_MAPPINGS.update(PROJECT_NODE_CLASS_MAPPINGS)
|
||||
NODE_DISPLAY_NAME_MAPPINGS.update(PROJECT_NODE_DISPLAY_NAME_MAPPINGS)
|
||||
|
||||
WEB_DIRECTORY = "./web"
|
||||
|
||||
__all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS', 'WEB_DIRECTORY']
|
||||
|
||||
80
api_routes.py
Normal file
80
api_routes.py
Normal file
@@ -0,0 +1,80 @@
|
||||
"""REST API endpoints for ComfyUI to query project data from SQLite.
|
||||
|
||||
All endpoints are read-only. Mounted on the NiceGUI/FastAPI server.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from fastapi import HTTPException, Query
|
||||
from nicegui import app
|
||||
|
||||
from db import ProjectDB
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# The DB instance is set by register_api_routes()
|
||||
_db: ProjectDB | None = None
|
||||
|
||||
|
||||
def register_api_routes(db: ProjectDB) -> None:
|
||||
"""Register all REST API routes with the NiceGUI/FastAPI app."""
|
||||
global _db
|
||||
_db = db
|
||||
|
||||
app.add_api_route("/api/projects", _list_projects, methods=["GET"])
|
||||
app.add_api_route("/api/projects/{name}/files", _list_files, methods=["GET"])
|
||||
app.add_api_route("/api/projects/{name}/files/{file_name}/sequences", _list_sequences, methods=["GET"])
|
||||
app.add_api_route("/api/projects/{name}/files/{file_name}/data", _get_data, methods=["GET"])
|
||||
app.add_api_route("/api/projects/{name}/files/{file_name}/keys", _get_keys, methods=["GET"])
|
||||
|
||||
|
||||
def _get_db() -> ProjectDB:
|
||||
if _db is None:
|
||||
raise HTTPException(status_code=503, detail="Database not initialized")
|
||||
return _db
|
||||
|
||||
|
||||
def _list_projects() -> dict[str, Any]:
|
||||
db = _get_db()
|
||||
projects = db.list_projects()
|
||||
return {"projects": [p["name"] for p in projects]}
|
||||
|
||||
|
||||
def _list_files(name: str) -> dict[str, Any]:
|
||||
db = _get_db()
|
||||
files = db.list_project_files(name)
|
||||
return {"files": [{"name": f["name"], "data_type": f["data_type"]} for f in files]}
|
||||
|
||||
|
||||
def _list_sequences(name: str, file_name: str) -> dict[str, Any]:
|
||||
db = _get_db()
|
||||
seqs = db.list_project_sequences(name, file_name)
|
||||
return {"sequences": seqs}
|
||||
|
||||
|
||||
def _get_data(name: str, file_name: str, seq: int = Query(default=1)) -> dict[str, Any]:
|
||||
db = _get_db()
|
||||
proj = db.get_project(name)
|
||||
if not proj:
|
||||
raise HTTPException(status_code=404, detail=f"Project '{name}' not found")
|
||||
df = db.get_data_file_by_names(name, file_name)
|
||||
if not df:
|
||||
raise HTTPException(status_code=404, detail=f"File '{file_name}' not found in project '{name}'")
|
||||
data = db.get_sequence(df["id"], seq)
|
||||
if data is None:
|
||||
raise HTTPException(status_code=404, detail=f"Sequence {seq} not found")
|
||||
return data
|
||||
|
||||
|
||||
def _get_keys(name: str, file_name: str, seq: int = Query(default=1)) -> dict[str, Any]:
|
||||
db = _get_db()
|
||||
proj = db.get_project(name)
|
||||
if not proj:
|
||||
raise HTTPException(status_code=404, detail=f"Project '{name}' not found")
|
||||
df = db.get_data_file_by_names(name, file_name)
|
||||
if not df:
|
||||
raise HTTPException(status_code=404, detail=f"File '{file_name}' not found in project '{name}'")
|
||||
keys, types = db.get_sequence_keys(df["id"], seq)
|
||||
total = db.count_sequences(df["id"])
|
||||
return {"keys": keys, "types": types, "total_sequences": total}
|
||||
214
app.py
214
app.py
@@ -1,214 +0,0 @@
|
||||
import streamlit as st
|
||||
import random
|
||||
from pathlib import Path
|
||||
|
||||
# --- Import Custom Modules ---
|
||||
from utils import (
|
||||
load_config, save_config, load_snippets, save_snippets,
|
||||
load_json, save_json, generate_templates, DEFAULTS
|
||||
)
|
||||
from tab_single import render_single_editor
|
||||
from tab_batch import render_batch_processor
|
||||
from tab_timeline import render_timeline_tab
|
||||
from tab_timeline_wip import render_timeline_wip
|
||||
from tab_comfy import render_comfy_monitor
|
||||
|
||||
# ==========================================
|
||||
# 1. PAGE CONFIGURATION
|
||||
# ==========================================
|
||||
st.set_page_config(layout="wide", page_title="AI Settings Manager")
|
||||
|
||||
# ==========================================
|
||||
# 2. SESSION STATE INITIALIZATION
|
||||
# ==========================================
|
||||
if 'config' not in st.session_state:
|
||||
st.session_state.config = load_config()
|
||||
st.session_state.current_dir = Path(st.session_state.config.get("last_dir", Path.cwd()))
|
||||
|
||||
if 'snippets' not in st.session_state:
|
||||
st.session_state.snippets = load_snippets()
|
||||
|
||||
if 'loaded_file' not in st.session_state:
|
||||
st.session_state.loaded_file = None
|
||||
|
||||
if 'last_mtime' not in st.session_state:
|
||||
st.session_state.last_mtime = 0
|
||||
|
||||
if 'edit_history_idx' not in st.session_state:
|
||||
st.session_state.edit_history_idx = None
|
||||
|
||||
if 'single_editor_cache' not in st.session_state:
|
||||
st.session_state.single_editor_cache = DEFAULTS.copy()
|
||||
|
||||
if 'ui_reset_token' not in st.session_state:
|
||||
st.session_state.ui_reset_token = 0
|
||||
|
||||
# Track the active tab state for programmatic switching
|
||||
if 'active_tab_name' not in st.session_state:
|
||||
st.session_state.active_tab_name = "📝 Single Editor"
|
||||
|
||||
# ==========================================
|
||||
# 3. SIDEBAR (NAVIGATOR & TOOLS)
|
||||
# ==========================================
|
||||
with st.sidebar:
|
||||
st.header("📂 Navigator")
|
||||
|
||||
# --- Path Navigator ---
|
||||
new_path = st.text_input("Current Path", value=str(st.session_state.current_dir))
|
||||
if new_path != str(st.session_state.current_dir):
|
||||
p = Path(new_path)
|
||||
if p.exists() and p.is_dir():
|
||||
st.session_state.current_dir = p
|
||||
st.session_state.config['last_dir'] = str(p)
|
||||
save_config(st.session_state.current_dir, st.session_state.config['favorites'])
|
||||
st.rerun()
|
||||
|
||||
# --- Favorites System ---
|
||||
if st.button("📌 Pin Current Folder"):
|
||||
if str(st.session_state.current_dir) not in st.session_state.config['favorites']:
|
||||
st.session_state.config['favorites'].append(str(st.session_state.current_dir))
|
||||
save_config(st.session_state.current_dir, st.session_state.config['favorites'])
|
||||
st.rerun()
|
||||
|
||||
fav_selection = st.radio(
|
||||
"Jump to:",
|
||||
["Select..."] + st.session_state.config['favorites'],
|
||||
index=0,
|
||||
label_visibility="collapsed"
|
||||
)
|
||||
if fav_selection != "Select..." and fav_selection != str(st.session_state.current_dir):
|
||||
st.session_state.current_dir = Path(fav_selection)
|
||||
st.rerun()
|
||||
|
||||
st.markdown("---")
|
||||
|
||||
# --- Snippet Library ---
|
||||
st.subheader("🧩 Snippet Library")
|
||||
with st.expander("Add New Snippet"):
|
||||
snip_name = st.text_input("Name", placeholder="e.g. Cinematic")
|
||||
snip_content = st.text_area("Content", placeholder="4k, high quality...")
|
||||
if st.button("Save Snippet"):
|
||||
if snip_name and snip_content:
|
||||
st.session_state.snippets[snip_name] = snip_content
|
||||
save_snippets(st.session_state.snippets)
|
||||
st.success(f"Saved '{snip_name}'")
|
||||
st.rerun()
|
||||
|
||||
if st.session_state.snippets:
|
||||
st.caption("Click to Append to Prompt:")
|
||||
for name, content in st.session_state.snippets.items():
|
||||
col_s1, col_s2 = st.columns([4, 1])
|
||||
if col_s1.button(f"➕ {name}", use_container_width=True):
|
||||
st.session_state.append_prompt = content
|
||||
st.rerun()
|
||||
if col_s2.button("🗑️", key=f"del_snip_{name}"):
|
||||
del st.session_state.snippets[name]
|
||||
save_snippets(st.session_state.snippets)
|
||||
st.rerun()
|
||||
|
||||
st.markdown("---")
|
||||
|
||||
# --- File List & Creation ---
|
||||
json_files = sorted(list(st.session_state.current_dir.glob("*.json")))
|
||||
json_files = [f for f in json_files if f.name != ".editor_config.json" and f.name != ".editor_snippets.json"]
|
||||
|
||||
if not json_files:
|
||||
if st.button("Generate Templates"):
|
||||
generate_templates(st.session_state.current_dir)
|
||||
st.rerun()
|
||||
|
||||
with st.expander("Create New JSON"):
|
||||
new_filename = st.text_input("Filename", placeholder="my_prompt_vace")
|
||||
is_batch = st.checkbox("Is Batch File?")
|
||||
if st.button("Create"):
|
||||
if not new_filename.endswith(".json"): new_filename += ".json"
|
||||
path = st.session_state.current_dir / new_filename
|
||||
if is_batch:
|
||||
data = {"batch_data": []}
|
||||
else:
|
||||
data = DEFAULTS.copy()
|
||||
if "vace" in new_filename: data.update({"frame_to_skip": 81, "vace schedule": 1, "video file path": ""})
|
||||
elif "i2v" in new_filename: data.update({"reference image path": "", "flf image path": ""})
|
||||
save_json(path, data)
|
||||
st.rerun()
|
||||
|
||||
# --- File Selector ---
|
||||
if 'file_selector' not in st.session_state:
|
||||
st.session_state.file_selector = json_files[0].name if json_files else None
|
||||
if st.session_state.file_selector not in [f.name for f in json_files] and json_files:
|
||||
st.session_state.file_selector = json_files[0].name
|
||||
|
||||
selected_file_name = st.radio("Select File", [f.name for f in json_files], key="file_selector")
|
||||
|
||||
# ==========================================
|
||||
# 4. MAIN APP LOGIC
|
||||
# ==========================================
|
||||
if selected_file_name:
|
||||
file_path = st.session_state.current_dir / selected_file_name
|
||||
|
||||
# --- FILE LOADING & AUTO-SWITCH LOGIC ---
|
||||
if st.session_state.loaded_file != str(file_path):
|
||||
data, mtime = load_json(file_path)
|
||||
st.session_state.data_cache = data
|
||||
st.session_state.last_mtime = mtime
|
||||
st.session_state.loaded_file = str(file_path)
|
||||
|
||||
# Clear transient states
|
||||
if 'append_prompt' in st.session_state: del st.session_state.append_prompt
|
||||
if 'rand_seed' in st.session_state: del st.session_state.rand_seed
|
||||
if 'restored_indicator' in st.session_state: del st.session_state.restored_indicator
|
||||
st.session_state.edit_history_idx = None
|
||||
|
||||
# --- AUTO-SWITCH TAB LOGIC ---
|
||||
# If the file has 'batch_data' or is a list, force Batch tab.
|
||||
# Otherwise, force Single tab.
|
||||
is_batch = "batch_data" in data or isinstance(data, list)
|
||||
if is_batch:
|
||||
st.session_state.active_tab_name = "🚀 Batch Processor"
|
||||
else:
|
||||
st.session_state.active_tab_name = "📝 Single Editor"
|
||||
|
||||
else:
|
||||
data = st.session_state.data_cache
|
||||
|
||||
st.title(f"Editing: {selected_file_name}")
|
||||
|
||||
# --- CONTROLLED NAVIGATION (REPLACES ST.TABS) ---
|
||||
# Using radio buttons allows us to change 'active_tab_name' programmatically above.
|
||||
tabs_list = [
|
||||
"📝 Single Editor",
|
||||
"🚀 Batch Processor",
|
||||
"🕒 Timeline",
|
||||
"🧪 Interactive Timeline",
|
||||
"🔌 Comfy Monitor"
|
||||
]
|
||||
|
||||
# Ensure active tab is valid (safety check)
|
||||
if st.session_state.active_tab_name not in tabs_list:
|
||||
st.session_state.active_tab_name = tabs_list[0]
|
||||
|
||||
current_tab = st.radio(
|
||||
"Navigation",
|
||||
tabs_list,
|
||||
key="active_tab_name", # Binds to session state
|
||||
horizontal=True,
|
||||
label_visibility="collapsed"
|
||||
)
|
||||
|
||||
st.markdown("---")
|
||||
|
||||
# --- RENDER SELECTED TAB ---
|
||||
if current_tab == "📝 Single Editor":
|
||||
render_single_editor(data, file_path)
|
||||
|
||||
elif current_tab == "🚀 Batch Processor":
|
||||
render_batch_processor(data, file_path, json_files, st.session_state.current_dir, selected_file_name)
|
||||
|
||||
elif current_tab == "🕒 Timeline":
|
||||
render_timeline_tab(data, file_path)
|
||||
|
||||
elif current_tab == "🧪 Interactive Timeline":
|
||||
render_timeline_wip(data, file_path)
|
||||
|
||||
elif current_tab == "🔌 Comfy Monitor":
|
||||
render_comfy_monitor()
|
||||
349
db.py
Normal file
349
db.py
Normal file
@@ -0,0 +1,349 @@
|
||||
import json
|
||||
import logging
|
||||
import sqlite3
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from utils import load_json, KEY_BATCH_DATA, KEY_HISTORY_TREE
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_DB_PATH = Path.home() / ".comfyui_json_manager" / "projects.db"
|
||||
|
||||
SCHEMA_SQL = """
|
||||
CREATE TABLE IF NOT EXISTS projects (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT NOT NULL UNIQUE,
|
||||
folder_path TEXT NOT NULL,
|
||||
description TEXT NOT NULL DEFAULT '',
|
||||
created_at REAL NOT NULL,
|
||||
updated_at REAL NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS data_files (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
project_id INTEGER NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
|
||||
name TEXT NOT NULL,
|
||||
data_type TEXT NOT NULL DEFAULT 'generic',
|
||||
top_level TEXT NOT NULL DEFAULT '{}',
|
||||
created_at REAL NOT NULL,
|
||||
updated_at REAL NOT NULL,
|
||||
UNIQUE(project_id, name)
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS sequences (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
data_file_id INTEGER NOT NULL REFERENCES data_files(id) ON DELETE CASCADE,
|
||||
sequence_number INTEGER NOT NULL,
|
||||
data TEXT NOT NULL DEFAULT '{}',
|
||||
updated_at REAL NOT NULL,
|
||||
UNIQUE(data_file_id, sequence_number)
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS history_trees (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
data_file_id INTEGER NOT NULL UNIQUE REFERENCES data_files(id) ON DELETE CASCADE,
|
||||
tree_data TEXT NOT NULL DEFAULT '{}',
|
||||
updated_at REAL NOT NULL
|
||||
);
|
||||
"""
|
||||
|
||||
|
||||
class ProjectDB:
    """SQLite database for project-based data management."""

    def __init__(self, db_path: str | Path | None = None):
        """Open (creating if needed) the database file and ensure the schema.

        Defaults to DEFAULT_DB_PATH when no path is supplied; parent
        directories are created on demand.
        """
        self.db_path = Path(db_path) if db_path else DEFAULT_DB_PATH
        self.db_path.parent.mkdir(parents=True, exist_ok=True)
        self.conn = sqlite3.connect(
            str(self.db_path),
            check_same_thread=False,
            isolation_level=None,  # autocommit — explicit BEGIN/COMMIT only
        )
        self.conn.row_factory = sqlite3.Row
        self.conn.execute("PRAGMA journal_mode=WAL")
        self.conn.execute("PRAGMA foreign_keys=ON")
        self.conn.executescript(SCHEMA_SQL)

    def close(self):
        """Close the underlying SQLite connection."""
        self.conn.close()
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Projects CRUD
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def create_project(self, name: str, folder_path: str, description: str = "") -> int:
    """Insert a new project row and return its rowid.

    Raises sqlite3.IntegrityError when `name` already exists (UNIQUE).
    """
    ts = time.time()
    cursor = self.conn.execute(
        "INSERT INTO projects (name, folder_path, description, created_at, updated_at) "
        "VALUES (?, ?, ?, ?, ?)",
        (name, folder_path, description, ts, ts),
    )
    self.conn.commit()
    return cursor.lastrowid
|
||||
|
||||
def list_projects(self) -> list[dict]:
    """Return every project as a plain dict, ordered by name."""
    query = (
        "SELECT id, name, folder_path, description, created_at, updated_at "
        "FROM projects ORDER BY name"
    )
    return [dict(row) for row in self.conn.execute(query).fetchall()]
|
||||
|
||||
def get_project(self, name: str) -> dict | None:
    """Look up a single project by its unique name; None when absent."""
    query = (
        "SELECT id, name, folder_path, description, created_at, updated_at "
        "FROM projects WHERE name = ?"
    )
    row = self.conn.execute(query, (name,)).fetchone()
    if row is None:
        return None
    return dict(row)
|
||||
|
||||
def delete_project(self, name: str) -> bool:
    """Delete a project by name; files/sequences cascade via foreign keys.

    Returns True when a row was actually removed.
    """
    cursor = self.conn.execute("DELETE FROM projects WHERE name = ?", (name,))
    self.conn.commit()
    return cursor.rowcount > 0
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Data files
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def create_data_file(
    self, project_id: int, name: str, data_type: str = "generic", top_level: dict | None = None
) -> int:
    """Insert a data_files row under `project_id` and return its rowid.

    `top_level` is JSON-serialized; None becomes an empty object.
    Raises sqlite3.IntegrityError on duplicate (project_id, name).
    """
    ts = time.time()
    serialized = json.dumps(top_level or {})
    cursor = self.conn.execute(
        "INSERT INTO data_files (project_id, name, data_type, top_level, created_at, updated_at) "
        "VALUES (?, ?, ?, ?, ?, ?)",
        (project_id, name, data_type, serialized, ts, ts),
    )
    self.conn.commit()
    return cursor.lastrowid
|
||||
|
||||
def list_data_files(self, project_id: int) -> list[dict]:
    """List file metadata for one project, ordered by name.

    The (potentially large) top_level payload is deliberately omitted.
    """
    cursor = self.conn.execute(
        "SELECT id, project_id, name, data_type, created_at, updated_at "
        "FROM data_files WHERE project_id = ? ORDER BY name",
        (project_id,),
    )
    return [dict(record) for record in cursor.fetchall()]
|
||||
|
||||
def get_data_file(self, project_id: int, name: str) -> dict | None:
    """Fetch one data file (with decoded top_level dict) or None."""
    row = self.conn.execute(
        "SELECT id, project_id, name, data_type, top_level, created_at, updated_at "
        "FROM data_files WHERE project_id = ? AND name = ?",
        (project_id, name),
    ).fetchone()
    if row is None:
        return None
    record = dict(row)
    record["top_level"] = json.loads(record["top_level"])
    return record
|
||||
|
||||
def get_data_file_by_names(self, project_name: str, file_name: str) -> dict | None:
    """Resolve a data file by (project name, file name) via a join.

    Returns the file dict with top_level decoded, or None when either
    the project or the file does not exist.
    """
    row = self.conn.execute(
        "SELECT df.id, df.project_id, df.name, df.data_type, df.top_level, "
        "df.created_at, df.updated_at "
        "FROM data_files df JOIN projects p ON df.project_id = p.id "
        "WHERE p.name = ? AND df.name = ?",
        (project_name, file_name),
    ).fetchone()
    if row is None:
        return None
    record = dict(row)
    record["top_level"] = json.loads(record["top_level"])
    return record
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Sequences
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def upsert_sequence(self, data_file_id: int, sequence_number: int, data: dict) -> None:
    """Insert or overwrite the JSON payload for one sequence number."""
    sql = (
        "INSERT INTO sequences (data_file_id, sequence_number, data, updated_at) "
        "VALUES (?, ?, ?, ?) "
        "ON CONFLICT(data_file_id, sequence_number) DO UPDATE SET data=excluded.data, updated_at=excluded.updated_at"
    )
    self.conn.execute(sql, (data_file_id, sequence_number, json.dumps(data), time.time()))
    self.conn.commit()
|
||||
|
||||
def get_sequence(self, data_file_id: int, sequence_number: int) -> dict | None:
    """Return the decoded data dict for one sequence, or None if absent."""
    row = self.conn.execute(
        "SELECT data FROM sequences WHERE data_file_id = ? AND sequence_number = ?",
        (data_file_id, sequence_number),
    ).fetchone()
    if row is None:
        return None
    return json.loads(row["data"])
|
||||
|
||||
def list_sequences(self, data_file_id: int) -> list[int]:
    """Return the ascending sequence numbers stored for a data file."""
    cursor = self.conn.execute(
        "SELECT sequence_number FROM sequences WHERE data_file_id = ? ORDER BY sequence_number",
        (data_file_id,),
    )
    return [record["sequence_number"] for record in cursor.fetchall()]
|
||||
|
||||
def count_sequences(self, data_file_id: int) -> int:
    """Return the number of sequences for a data file."""
    result = self.conn.execute(
        "SELECT COUNT(*) AS cnt FROM sequences WHERE data_file_id = ?",
        (data_file_id,),
    ).fetchone()
    return result["cnt"]
|
||||
|
||||
def query_total_sequences(self, project_name: str, file_name: str) -> int:
    """Return total sequence count by project and file names (0 if unknown)."""
    record = self.get_data_file_by_names(project_name, file_name)
    return self.count_sequences(record["id"]) if record else 0
|
||||
|
||||
def get_sequence_keys(self, data_file_id: int, sequence_number: int) -> tuple[list[str], list[str]]:
    """Returns (keys, types) for a sequence's data dict.

    Type tags mirror ComfyUI socket types: INT, FLOAT, or STRING for
    everything else (including bool). Missing sequence -> ([], []).
    """
    data = self.get_sequence(data_file_id, sequence_number)
    if not data:
        return [], []

    def type_tag(value):
        # bool must be tested before int: bool subclasses int in Python.
        if isinstance(value, bool):
            return "STRING"
        if isinstance(value, int):
            return "INT"
        if isinstance(value, float):
            return "FLOAT"
        return "STRING"

    keys = list(data.keys())
    types = [type_tag(v) for v in data.values()]
    return keys, types
|
||||
|
||||
def delete_sequences_for_file(self, data_file_id: int) -> None:
    """Remove every stored sequence belonging to one data file."""
    self.conn.execute("DELETE FROM sequences WHERE data_file_id = ?", (data_file_id,))
    self.conn.commit()
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# History trees
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def save_history_tree(self, data_file_id: int, tree_data: dict) -> None:
    """Upsert the serialized history tree for one data file (1:1 mapping)."""
    sql = (
        "INSERT INTO history_trees (data_file_id, tree_data, updated_at) "
        "VALUES (?, ?, ?) "
        "ON CONFLICT(data_file_id) DO UPDATE SET tree_data=excluded.tree_data, updated_at=excluded.updated_at"
    )
    self.conn.execute(sql, (data_file_id, json.dumps(tree_data), time.time()))
    self.conn.commit()
|
||||
|
||||
def get_history_tree(self, data_file_id: int) -> dict | None:
    """Return the decoded history tree for a data file, or None when unset."""
    row = self.conn.execute(
        "SELECT tree_data FROM history_trees WHERE data_file_id = ?",
        (data_file_id,),
    ).fetchone()
    if row is None:
        return None
    return json.loads(row["tree_data"])
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Import
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def import_json_file(self, project_id: int, json_path: str | Path, data_type: str = "generic") -> int:
    """Import a JSON file into the database, splitting batch_data into sequences.

    Safe to call repeatedly — existing data_file is updated, sequences are
    replaced, and history_tree is upserted. Atomic: all-or-nothing.

    Returns the data_file id; re-raises any error after rolling back.
    """
    json_path = Path(json_path)
    data, _ = load_json(json_path)
    file_name = json_path.stem

    # Everything except the batch payload and history tree stays as-is.
    top_level = {k: v for k, v in data.items() if k not in (KEY_BATCH_DATA, KEY_HISTORY_TREE)}

    self.conn.execute("BEGIN IMMEDIATE")
    try:
        row = self.conn.execute(
            "SELECT id FROM data_files WHERE project_id = ? AND name = ?",
            (project_id, file_name),
        ).fetchone()

        ts = time.time()
        if row:
            # Re-import: refresh metadata and wipe stale sequences.
            df_id = row["id"]
            self.conn.execute(
                "UPDATE data_files SET data_type = ?, top_level = ?, updated_at = ? WHERE id = ?",
                (data_type, json.dumps(top_level), ts, df_id),
            )
            self.conn.execute("DELETE FROM sequences WHERE data_file_id = ?", (df_id,))
        else:
            cursor = self.conn.execute(
                "INSERT INTO data_files (project_id, name, data_type, top_level, created_at, updated_at) "
                "VALUES (?, ?, ?, ?, ?, ?)",
                (project_id, file_name, data_type, json.dumps(top_level), ts, ts),
            )
            df_id = cursor.lastrowid

        # Import sequences from batch_data
        batch_data = data.get(KEY_BATCH_DATA, [])
        if isinstance(batch_data, list):
            for item in batch_data:
                if not isinstance(item, dict):
                    continue  # skip malformed entries rather than abort
                seq_num = int(item.get("sequence_number", 0))
                self.conn.execute(
                    "INSERT INTO sequences (data_file_id, sequence_number, data, updated_at) "
                    "VALUES (?, ?, ?, ?) "
                    "ON CONFLICT(data_file_id, sequence_number) DO UPDATE SET data=excluded.data, updated_at=excluded.updated_at",
                    (df_id, seq_num, json.dumps(item), time.time()),
                )

        # Import history tree
        history_tree = data.get(KEY_HISTORY_TREE)
        if history_tree and isinstance(history_tree, dict):
            self.conn.execute(
                "INSERT INTO history_trees (data_file_id, tree_data, updated_at) "
                "VALUES (?, ?, ?) "
                "ON CONFLICT(data_file_id) DO UPDATE SET tree_data=excluded.tree_data, updated_at=excluded.updated_at",
                (df_id, json.dumps(history_tree), time.time()),
            )

        self.conn.execute("COMMIT")
        return df_id
    except Exception:
        try:
            self.conn.execute("ROLLBACK")
        except Exception:
            # Connection may already be out of the transaction; original error wins.
            pass
        raise
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Query helpers (for REST API)
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def query_sequence_data(self, project_name: str, file_name: str, sequence_number: int) -> dict | None:
    """Query a single sequence by project name, file name, and sequence number."""
    record = self.get_data_file_by_names(project_name, file_name)
    return self.get_sequence(record["id"], sequence_number) if record else None
|
||||
|
||||
def query_sequence_keys(self, project_name: str, file_name: str, sequence_number: int) -> tuple[list[str], list[str]]:
    """Query keys and types for a sequence; ([], []) when file is unknown."""
    record = self.get_data_file_by_names(project_name, file_name)
    if not record:
        return [], []
    return self.get_sequence_keys(record["id"], sequence_number)
|
||||
|
||||
def list_project_files(self, project_name: str) -> list[dict]:
    """List data files for a project by name; [] for an unknown project."""
    project = self.get_project(project_name)
    return self.list_data_files(project["id"]) if project else []
|
||||
|
||||
def list_project_sequences(self, project_name: str, file_name: str) -> list[int]:
    """List sequence numbers for a file in a project; [] when file unknown."""
    record = self.get_data_file_by_names(project_name, file_name)
    return self.list_sequences(record["id"]) if record else []
|
||||
168
history_tree.py
168
history_tree.py
@@ -1,16 +1,20 @@
|
||||
import time
|
||||
import uuid
|
||||
from typing import Any
|
||||
|
||||
KEY_PROMPT_HISTORY = "prompt_history"
|
||||
|
||||
|
||||
class HistoryTree:
    """Git-like snapshot tree: nodes keyed by short id, named branch tips, HEAD."""

    def __init__(self, raw_data: dict[str, Any]) -> None:
        """Rebuild the tree from its serialized dict form (see to_dict).

        NOTE(review): this span in the diff interleaved the old and new
        __init__ — this is the post-change version (typed attributes,
        KEY_PROMPT_HISTORY constant instead of the bare string).
        A flat legacy `prompt_history` list is migrated into nodes only
        when no node data exists yet.
        """
        self.nodes: dict[str, dict[str, Any]] = raw_data.get("nodes", {})
        self.branches: dict[str, str | None] = raw_data.get("branches", {"main": None})
        self.head_id: str | None = raw_data.get("head_id", None)

        if KEY_PROMPT_HISTORY in raw_data and isinstance(raw_data[KEY_PROMPT_HISTORY], list) and not self.nodes:
            self._migrate_legacy(raw_data[KEY_PROMPT_HISTORY])
|
||||
|
||||
def _migrate_legacy(self, old_list: list[dict[str, Any]]) -> None:
|
||||
parent = None
|
||||
for item in reversed(old_list):
|
||||
node_id = str(uuid.uuid4())[:8]
|
||||
@@ -22,9 +26,20 @@ class HistoryTree:
|
||||
self.branches["main"] = parent
|
||||
self.head_id = parent
|
||||
|
||||
def commit(self, data, note="Snapshot"):
|
||||
def commit(self, data: dict[str, Any], note: str = "Snapshot") -> str:
|
||||
new_id = str(uuid.uuid4())[:8]
|
||||
|
||||
|
||||
# Cycle detection: walk parent chain from head to verify no cycle
|
||||
if self.head_id:
|
||||
visited = set()
|
||||
current = self.head_id
|
||||
while current:
|
||||
if current in visited:
|
||||
raise ValueError(f"Cycle detected in history tree at node {current}")
|
||||
visited.add(current)
|
||||
node = self.nodes.get(current)
|
||||
current = node["parent"] if node else None
|
||||
|
||||
active_branch = None
|
||||
for b_name, tip_id in self.branches.items():
|
||||
if tip_id == self.head_id:
|
||||
@@ -45,70 +60,141 @@ class HistoryTree:
|
||||
self.head_id = new_id
|
||||
return new_id
|
||||
|
||||
def checkout(self, node_id: str) -> dict[str, Any] | None:
    """Move HEAD to `node_id` and return that node's stored data.

    Returns None — and leaves HEAD unchanged — when the id is unknown.
    (The diff showed old and new signatures fused; this is the annotated
    post-change version with identical behavior.)
    """
    if node_id not in self.nodes:
        return None
    self.head_id = node_id
    return self.nodes[node_id]["data"]
|
||||
|
||||
def to_dict(self) -> dict[str, Any]:
    """Serialize the tree to the raw dict form accepted by __init__."""
    return {"nodes": self.nodes, "branches": self.branches, "head_id": self.head_id}
|
||||
|
||||
# --- UPDATED GRAPH GENERATOR ---
|
||||
def generate_graph(self, direction="LR"):
|
||||
def generate_graph(self, direction: str = "LR") -> str:
|
||||
"""
|
||||
Generates Graphviz source.
|
||||
direction: "LR" (Horizontal) or "TB" (Vertical)
|
||||
"""
|
||||
node_count = len(self.nodes)
|
||||
is_vertical = direction == "TB"
|
||||
|
||||
# Vertical mode uses much tighter spacing
|
||||
if is_vertical:
|
||||
if node_count <= 5:
|
||||
nodesep, ranksep = 0.3, 0.2
|
||||
elif node_count <= 15:
|
||||
nodesep, ranksep = 0.2, 0.15
|
||||
else:
|
||||
nodesep, ranksep = 0.1, 0.1
|
||||
else:
|
||||
if node_count <= 5:
|
||||
nodesep, ranksep = 0.5, 0.6
|
||||
elif node_count <= 15:
|
||||
nodesep, ranksep = 0.3, 0.4
|
||||
else:
|
||||
nodesep, ranksep = 0.15, 0.25
|
||||
|
||||
# Build reverse lookup: branch tip -> branch name(s)
|
||||
tip_to_branches: dict[str, list[str]] = {}
|
||||
for b_name, tip_id in self.branches.items():
|
||||
if tip_id:
|
||||
tip_to_branches.setdefault(tip_id, []).append(b_name)
|
||||
|
||||
dot = [
|
||||
'digraph History {',
|
||||
f' rankdir={direction};', # Dynamic Direction
|
||||
' bgcolor="white";',
|
||||
' splines=ortho;',
|
||||
|
||||
# TIGHT SPACING
|
||||
' nodesep=0.2;',
|
||||
' ranksep=0.3;',
|
||||
|
||||
# GLOBAL STYLES
|
||||
' node [shape=plain, fontname="Arial"];',
|
||||
f' rankdir={direction};',
|
||||
' bgcolor="white";',
|
||||
' splines=polyline;',
|
||||
f' nodesep={nodesep};',
|
||||
f' ranksep={ranksep};',
|
||||
' node [shape=plain, fontname="Arial"];',
|
||||
' edge [color="#888888", arrowsize=0.6, penwidth=1.0];'
|
||||
]
|
||||
|
||||
|
||||
# Build reverse lookup: node_id -> branch name (walk each branch ancestry)
|
||||
node_to_branch: dict[str, str] = {}
|
||||
for b_name, tip_id in self.branches.items():
|
||||
current = tip_id
|
||||
while current and current in self.nodes:
|
||||
if current not in node_to_branch:
|
||||
node_to_branch[current] = b_name
|
||||
current = self.nodes[current].get('parent')
|
||||
|
||||
# Per-branch color palette (bg, border) — cycles for many branches
|
||||
_branch_palette = [
|
||||
('#f9f9f9', '#999999'), # grey (default/main)
|
||||
('#eef4ff', '#6699cc'), # blue
|
||||
('#f5eeff', '#9977cc'), # purple
|
||||
('#fff0ee', '#cc7766'), # coral
|
||||
('#eefff5', '#66aa88'), # teal
|
||||
('#fff8ee', '#ccaa55'), # sand
|
||||
]
|
||||
branch_names = list(self.branches.keys())
|
||||
branch_colors = {
|
||||
b: _branch_palette[i % len(_branch_palette)]
|
||||
for i, b in enumerate(branch_names)
|
||||
}
|
||||
|
||||
sorted_nodes = sorted(self.nodes.values(), key=lambda x: x["timestamp"])
|
||||
|
||||
|
||||
# Font sizes and padding - smaller for vertical
|
||||
if is_vertical:
|
||||
note_font_size = 8
|
||||
meta_font_size = 7
|
||||
cell_padding = 2
|
||||
max_note_len = 18
|
||||
else:
|
||||
note_font_size = 10
|
||||
meta_font_size = 8
|
||||
cell_padding = 4
|
||||
max_note_len = 25
|
||||
|
||||
for n in sorted_nodes:
|
||||
nid = n["id"]
|
||||
full_note = n.get('note', 'Step')
|
||||
|
||||
display_note = (full_note[:15] + '..') if len(full_note) > 15 else full_note
|
||||
|
||||
# COLORS
|
||||
bg_color = "#f9f9f9"
|
||||
border_color = "#999999"
|
||||
|
||||
display_note = (full_note[:max_note_len] + '..') if len(full_note) > max_note_len else full_note
|
||||
|
||||
ts = time.strftime('%b %d %H:%M', time.localtime(n['timestamp']))
|
||||
|
||||
# Branch label for tip nodes
|
||||
branch_label = ""
|
||||
if nid in tip_to_branches:
|
||||
branch_label = ", ".join(tip_to_branches[nid])
|
||||
|
||||
# COLORS — per-branch tint, overridden for HEAD and tips
|
||||
b_name = node_to_branch.get(nid)
|
||||
bg_color, border_color = branch_colors.get(
|
||||
b_name, _branch_palette[0])
|
||||
border_width = "1"
|
||||
|
||||
|
||||
if nid == self.head_id:
|
||||
bg_color = "#fff6cd" # Yellow for Current
|
||||
bg_color = "#fff6cd"
|
||||
border_color = "#eebb00"
|
||||
border_width = "2"
|
||||
elif nid in self.branches.values():
|
||||
bg_color = "#e6ffe6" # Green for Tips
|
||||
bg_color = "#e6ffe6"
|
||||
border_color = "#66aa66"
|
||||
|
||||
# HTML LABEL
|
||||
rows = [
|
||||
f'<TR><TD><B><FONT POINT-SIZE="{note_font_size}">{display_note}</FONT></B></TD></TR>',
|
||||
f'<TR><TD><FONT POINT-SIZE="{meta_font_size}" COLOR="#555555">{ts} • {nid[:4]}</FONT></TD></TR>',
|
||||
]
|
||||
if branch_label:
|
||||
rows.append(f'<TR><TD><FONT POINT-SIZE="{meta_font_size}" COLOR="#4488cc"><I>{branch_label}</I></FONT></TD></TR>')
|
||||
|
||||
label = (
|
||||
f'<<TABLE BORDER="{border_width}" CELLBORDER="0" CELLSPACING="0" CELLPADDING="4" BGCOLOR="{bg_color}" COLOR="{border_color}">'
|
||||
f'<TR><TD><B><FONT POINT-SIZE="10">{display_note}</FONT></B></TD></TR>'
|
||||
f'<TR><TD><FONT POINT-SIZE="8" COLOR="#555555">{nid[:4]}</FONT></TD></TR>'
|
||||
f'</TABLE>>'
|
||||
f'<<TABLE BORDER="{border_width}" CELLBORDER="0" CELLSPACING="0" CELLPADDING="{cell_padding}" BGCOLOR="{bg_color}" COLOR="{border_color}">'
|
||||
+ "".join(rows)
|
||||
+ '</TABLE>>'
|
||||
)
|
||||
|
||||
|
||||
safe_tooltip = full_note.replace('"', "'")
|
||||
dot.append(f' "{nid}" [label={label}, tooltip="{safe_tooltip}"];')
|
||||
|
||||
|
||||
if n["parent"] and n["parent"] in self.nodes:
|
||||
dot.append(f' "{n["parent"]}" -> "{nid}";')
|
||||
|
||||
|
||||
dot.append("}")
|
||||
return "\n".join(dot)
|
||||
|
||||
206
json_loader.py
206
json_loader.py
@@ -1,17 +1,147 @@
|
||||
import json
|
||||
import os
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
KEY_BATCH_DATA = "batch_data"
|
||||
MAX_DYNAMIC_OUTPUTS = 32
|
||||
|
||||
|
||||
class AnyType(str):
    """Universal connector type that matches any ComfyUI type.

    ComfyUI validates link compatibility with `!=` between type strings;
    a string subtype that is never unequal is therefore accepted by every
    socket.
    """

    def __ne__(self, __value: object) -> bool:
        # Never report inequality -> compatible with any declared type.
        return False
|
||||
|
||||
any_type = AnyType("*")
|
||||
|
||||
|
||||
try:
|
||||
from server import PromptServer
|
||||
from aiohttp import web
|
||||
except ImportError:
|
||||
PromptServer = None
|
||||
|
||||
|
||||
def to_float(val: Any) -> float:
    """Coerce `val` to float, mapping unparseable or None input to 0.0."""
    try:
        result = float(val)
    except (ValueError, TypeError):
        result = 0.0
    return result
|
||||
|
||||
def to_int(val: Any) -> int:
    """Coerce `val` to int via float (so "3.7" -> 3); bad input maps to 0."""
    try:
        result = int(float(val))
    except (ValueError, TypeError):
        result = 0
    return result
|
||||
|
||||
def get_batch_item(data: dict[str, Any], sequence_number: int) -> dict[str, Any]:
    """Resolve batch item by sequence_number field, falling back to array index."""
    batch = data.get(KEY_BATCH_DATA)
    if not isinstance(batch, list) or len(batch) == 0:
        # Not a batch file: the whole document is the item.
        return data
    # Prefer an explicit sequence_number match.
    for entry in batch:
        if int(entry.get("sequence_number", 0)) == sequence_number:
            return entry
    # Otherwise clamp (sequence_number - 1) into the valid index range.
    idx = max(0, min(sequence_number - 1, len(batch) - 1))
    logger.warning(f"No item with sequence_number={sequence_number}, falling back to index {idx}")
    return batch[idx]
|
||||
|
||||
# --- Shared Helper ---
def read_json_data(json_path: str) -> dict[str, Any]:
    """Read a JSON file and return its top-level dict; {} on any failure.

    Failures (missing file, unreadable/invalid JSON, non-dict top level)
    are logged as warnings instead of raised so node execution degrades
    gracefully. (The diff interleaved the old print-based and the new
    logger-based versions; this is the reconstructed post-change version.)
    """
    if not os.path.exists(json_path):
        logger.warning(f"File not found at {json_path}")
        return {}
    try:
        with open(json_path, 'r') as f:
            data = json.load(f)
    except (json.JSONDecodeError, IOError) as e:
        logger.warning(f"Error reading {json_path}: {e}")
        return {}
    if not isinstance(data, dict):
        logger.warning(f"Expected dict from {json_path}, got {type(data).__name__}")
        return {}
    return data
|
||||
|
||||
# --- API Route ---
|
||||
if PromptServer is not None:
|
||||
@PromptServer.instance.routes.get("/json_manager/get_keys")
|
||||
async def get_keys_route(request):
|
||||
json_path = request.query.get("path", "")
|
||||
try:
|
||||
seq = int(request.query.get("sequence_number", "1"))
|
||||
except (ValueError, TypeError):
|
||||
seq = 1
|
||||
data = read_json_data(json_path)
|
||||
if not data:
|
||||
return web.json_response({"keys": [], "types": [], "error": "file_not_found"})
|
||||
target = get_batch_item(data, seq)
|
||||
keys = []
|
||||
types = []
|
||||
if isinstance(target, dict):
|
||||
for k, v in target.items():
|
||||
keys.append(k)
|
||||
if isinstance(v, bool):
|
||||
types.append("STRING")
|
||||
elif isinstance(v, int):
|
||||
types.append("INT")
|
||||
elif isinstance(v, float):
|
||||
types.append("FLOAT")
|
||||
else:
|
||||
types.append("STRING")
|
||||
return web.json_response({"keys": keys, "types": types})
|
||||
|
||||
|
||||
# ==========================================
|
||||
# 0. DYNAMIC NODE
|
||||
# ==========================================
|
||||
|
||||
class JSONLoaderDynamic:
|
||||
@classmethod
|
||||
def INPUT_TYPES(s):
|
||||
return {
|
||||
"required": {
|
||||
"json_path": ("STRING", {"default": "", "multiline": False}),
|
||||
"sequence_number": ("INT", {"default": 1, "min": 1, "max": 9999}),
|
||||
},
|
||||
"optional": {
|
||||
"output_keys": ("STRING", {"default": ""}),
|
||||
"output_types": ("STRING", {"default": ""}),
|
||||
},
|
||||
}
|
||||
|
||||
RETURN_TYPES = tuple(any_type for _ in range(MAX_DYNAMIC_OUTPUTS))
|
||||
RETURN_NAMES = tuple(f"output_{i}" for i in range(MAX_DYNAMIC_OUTPUTS))
|
||||
FUNCTION = "load_dynamic"
|
||||
CATEGORY = "utils/json"
|
||||
OUTPUT_NODE = False
|
||||
|
||||
def load_dynamic(self, json_path, sequence_number, output_keys="", output_types=""):
|
||||
data = read_json_data(json_path)
|
||||
target = get_batch_item(data, sequence_number)
|
||||
|
||||
keys = [k.strip() for k in output_keys.split(",") if k.strip()] if output_keys else []
|
||||
|
||||
results = []
|
||||
for key in keys:
|
||||
val = target.get(key, "")
|
||||
if isinstance(val, bool):
|
||||
results.append(str(val).lower())
|
||||
elif isinstance(val, int):
|
||||
results.append(val)
|
||||
elif isinstance(val, float):
|
||||
results.append(val)
|
||||
else:
|
||||
results.append(str(val))
|
||||
|
||||
# Pad to MAX_DYNAMIC_OUTPUTS
|
||||
while len(results) < MAX_DYNAMIC_OUTPUTS:
|
||||
results.append("")
|
||||
|
||||
return tuple(results)
|
||||
|
||||
|
||||
# ==========================================
|
||||
# 1. STANDARD NODES (Single File)
|
||||
@@ -47,13 +177,6 @@ class JSONLoaderStandard:
|
||||
|
||||
def load_standard(self, json_path):
|
||||
data = read_json_data(json_path)
|
||||
def to_float(val):
|
||||
try: return float(val)
|
||||
except: return 0.0
|
||||
def to_int(val):
|
||||
try: return int(float(val))
|
||||
except: return 0
|
||||
|
||||
return (
|
||||
str(data.get("general_prompt", "")), str(data.get("general_negative", "")),
|
||||
str(data.get("current_prompt", "")), str(data.get("negative", "")),
|
||||
@@ -74,20 +197,13 @@ class JSONLoaderVACE:
|
||||
|
||||
def load_vace(self, json_path):
|
||||
data = read_json_data(json_path)
|
||||
def to_float(val):
|
||||
try: return float(val)
|
||||
except: return 0.0
|
||||
def to_int(val):
|
||||
try: return int(float(val))
|
||||
except: return 0
|
||||
|
||||
return (
|
||||
str(data.get("general_prompt", "")), str(data.get("general_negative", "")),
|
||||
str(data.get("current_prompt", "")), str(data.get("negative", "")),
|
||||
str(data.get("camera", "")), to_float(data.get("flf", 0.0)),
|
||||
to_int(data.get("seed", 0)),
|
||||
to_int(data.get("frame_to_skip", 81)), to_int(data.get("input_a_frames", 0)),
|
||||
to_int(data.get("input_b_frames", 0)), str(data.get("reference path", "")),
|
||||
to_int(data.get("frame_to_skip", 81)), to_int(data.get("input_a_frames", 16)),
|
||||
to_int(data.get("input_b_frames", 16)), str(data.get("reference path", "")),
|
||||
to_int(data.get("reference switch", 1)), to_int(data.get("vace schedule", 1)),
|
||||
str(data.get("video file path", "")), str(data.get("reference image path", ""))
|
||||
)
|
||||
@@ -107,10 +223,7 @@ class JSONLoaderBatchLoRA:
|
||||
|
||||
def load_batch_loras(self, json_path, sequence_number):
|
||||
data = read_json_data(json_path)
|
||||
target_data = data
|
||||
if "batch_data" in data and isinstance(data["batch_data"], list) and len(data["batch_data"]) > 0:
|
||||
idx = (sequence_number - 1) % len(data["batch_data"])
|
||||
target_data = data["batch_data"][idx]
|
||||
target_data = get_batch_item(data, sequence_number)
|
||||
return (
|
||||
str(target_data.get("lora 1 high", "")), str(target_data.get("lora 1 low", "")),
|
||||
str(target_data.get("lora 2 high", "")), str(target_data.get("lora 2 low", "")),
|
||||
@@ -128,16 +241,8 @@ class JSONLoaderBatchI2V:
|
||||
|
||||
def load_batch_i2v(self, json_path, sequence_number):
|
||||
data = read_json_data(json_path)
|
||||
target_data = data
|
||||
if "batch_data" in data and isinstance(data["batch_data"], list) and len(data["batch_data"]) > 0:
|
||||
idx = (sequence_number - 1) % len(data["batch_data"])
|
||||
target_data = data["batch_data"][idx]
|
||||
def to_float(val):
|
||||
try: return float(val)
|
||||
except: return 0.0
|
||||
def to_int(val):
|
||||
try: return int(float(val))
|
||||
except: return 0
|
||||
target_data = get_batch_item(data, sequence_number)
|
||||
|
||||
return (
|
||||
str(target_data.get("general_prompt", "")), str(target_data.get("general_negative", "")),
|
||||
str(target_data.get("current_prompt", "")), str(target_data.get("negative", "")),
|
||||
@@ -157,22 +262,14 @@ class JSONLoaderBatchVACE:
|
||||
|
||||
def load_batch_vace(self, json_path, sequence_number):
|
||||
data = read_json_data(json_path)
|
||||
target_data = data
|
||||
if "batch_data" in data and isinstance(data["batch_data"], list) and len(data["batch_data"]) > 0:
|
||||
idx = (sequence_number - 1) % len(data["batch_data"])
|
||||
target_data = data["batch_data"][idx]
|
||||
def to_float(val):
|
||||
try: return float(val)
|
||||
except: return 0.0
|
||||
def to_int(val):
|
||||
try: return int(float(val))
|
||||
except: return 0
|
||||
target_data = get_batch_item(data, sequence_number)
|
||||
|
||||
return (
|
||||
str(target_data.get("general_prompt", "")), str(target_data.get("general_negative", "")),
|
||||
str(target_data.get("current_prompt", "")), str(target_data.get("negative", "")),
|
||||
str(target_data.get("camera", "")), to_float(target_data.get("flf", 0.0)),
|
||||
to_int(target_data.get("seed", 0)), to_int(target_data.get("frame_to_skip", 81)),
|
||||
to_int(target_data.get("input_a_frames", 0)), to_int(target_data.get("input_b_frames", 0)),
|
||||
to_int(target_data.get("input_a_frames", 16)), to_int(target_data.get("input_b_frames", 16)),
|
||||
str(target_data.get("reference path", "")), to_int(target_data.get("reference switch", 1)),
|
||||
to_int(target_data.get("vace schedule", 1)), str(target_data.get("video file path", "")),
|
||||
str(target_data.get("reference image path", ""))
|
||||
@@ -199,10 +296,7 @@ class JSONLoaderCustom1:
|
||||
|
||||
def load_custom(self, json_path, sequence_number, key_1=""):
|
||||
data = read_json_data(json_path)
|
||||
target_data = data
|
||||
if "batch_data" in data and isinstance(data["batch_data"], list) and len(data["batch_data"]) > 0:
|
||||
idx = (sequence_number - 1) % len(data["batch_data"])
|
||||
target_data = data["batch_data"][idx]
|
||||
target_data = get_batch_item(data, sequence_number)
|
||||
return (str(target_data.get(key_1, "")),)
|
||||
|
||||
class JSONLoaderCustom3:
|
||||
@@ -226,10 +320,7 @@ class JSONLoaderCustom3:
|
||||
|
||||
def load_custom(self, json_path, sequence_number, key_1="", key_2="", key_3=""):
|
||||
data = read_json_data(json_path)
|
||||
target_data = data
|
||||
if "batch_data" in data and isinstance(data["batch_data"], list) and len(data["batch_data"]) > 0:
|
||||
idx = (sequence_number - 1) % len(data["batch_data"])
|
||||
target_data = data["batch_data"][idx]
|
||||
target_data = get_batch_item(data, sequence_number)
|
||||
return (
|
||||
str(target_data.get(key_1, "")),
|
||||
str(target_data.get(key_2, "")),
|
||||
@@ -260,10 +351,7 @@ class JSONLoaderCustom6:
|
||||
|
||||
def load_custom(self, json_path, sequence_number, key_1="", key_2="", key_3="", key_4="", key_5="", key_6=""):
|
||||
data = read_json_data(json_path)
|
||||
target_data = data
|
||||
if "batch_data" in data and isinstance(data["batch_data"], list) and len(data["batch_data"]) > 0:
|
||||
idx = (sequence_number - 1) % len(data["batch_data"])
|
||||
target_data = data["batch_data"][idx]
|
||||
target_data = get_batch_item(data, sequence_number)
|
||||
return (
|
||||
str(target_data.get(key_1, "")), str(target_data.get(key_2, "")),
|
||||
str(target_data.get(key_3, "")), str(target_data.get(key_4, "")),
|
||||
@@ -272,6 +360,7 @@ class JSONLoaderCustom6:
|
||||
|
||||
# --- Mappings ---
|
||||
NODE_CLASS_MAPPINGS = {
|
||||
"JSONLoaderDynamic": JSONLoaderDynamic,
|
||||
"JSONLoaderLoRA": JSONLoaderLoRA,
|
||||
"JSONLoaderStandard": JSONLoaderStandard,
|
||||
"JSONLoaderVACE": JSONLoaderVACE,
|
||||
@@ -284,6 +373,7 @@ NODE_CLASS_MAPPINGS = {
|
||||
}
|
||||
|
||||
NODE_DISPLAY_NAME_MAPPINGS = {
|
||||
"JSONLoaderDynamic": "JSON Loader (Dynamic)",
|
||||
"JSONLoaderLoRA": "JSON Loader (LoRAs Only)",
|
||||
"JSONLoaderStandard": "JSON Loader (Standard/I2V)",
|
||||
"JSONLoaderVACE": "JSON Loader (VACE Full)",
|
||||
|
||||
510
main.py
Normal file
510
main.py
Normal file
@@ -0,0 +1,510 @@
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
from nicegui import ui
|
||||
|
||||
from state import AppState
|
||||
from utils import (
|
||||
load_config, save_config, load_snippets, save_snippets,
|
||||
load_json, save_json, generate_templates, DEFAULTS,
|
||||
KEY_BATCH_DATA, KEY_SEQUENCE_NUMBER,
|
||||
resolve_path_case_insensitive,
|
||||
)
|
||||
from tab_batch_ng import render_batch_processor
|
||||
from tab_timeline_ng import render_timeline_tab
|
||||
from tab_raw_ng import render_raw_editor
|
||||
from tab_comfy_ng import render_comfy_monitor
|
||||
from tab_projects_ng import render_projects_tab
|
||||
from db import ProjectDB
|
||||
from api_routes import register_api_routes
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Single shared DB instance for both the UI and API routes
|
||||
_shared_db: ProjectDB | None = None
|
||||
try:
|
||||
_shared_db = ProjectDB()
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to initialize ProjectDB: {e}")
|
||||
|
||||
|
||||
@ui.page('/')
|
||||
def index():
|
||||
ui.dark_mode(True)
|
||||
ui.colors(primary='#F59E0B')
|
||||
ui.add_head_html(
|
||||
'<link rel="stylesheet" href="https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&display=swap">'
|
||||
)
|
||||
ui.add_css('''
|
||||
/* === Dark Theme with Depth Palette === */
|
||||
:root {
|
||||
--bg-page: #0B0E14;
|
||||
--bg-surface-1: #13161E;
|
||||
--bg-surface-2: #1A1E2A;
|
||||
--bg-surface-3: #242836;
|
||||
--border: rgba(255,255,255,0.08);
|
||||
--text-primary: #EAECF0;
|
||||
--text-secondary: rgba(234,236,240,0.55);
|
||||
--accent: #F59E0B;
|
||||
--accent-subtle: rgba(245,158,11,0.12);
|
||||
--negative: #EF4444;
|
||||
}
|
||||
|
||||
/* Backgrounds */
|
||||
body.body--dark,
|
||||
.q-page.body--dark,
|
||||
.body--dark .q-page { background: var(--bg-page) !important; }
|
||||
.body--dark .q-drawer { background: var(--bg-surface-1) !important; }
|
||||
.body--dark .q-card {
|
||||
background: var(--bg-surface-2) !important;
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 0.75rem;
|
||||
}
|
||||
.body--dark .q-tab-panels { background: transparent !important; }
|
||||
.body--dark .q-tab-panel { background: transparent !important; }
|
||||
.body--dark .q-expansion-item { background: transparent !important; }
|
||||
|
||||
/* Text */
|
||||
.body--dark { color: var(--text-primary) !important; }
|
||||
.body--dark .q-field__label { color: var(--text-secondary) !important; }
|
||||
.body--dark .text-caption { color: var(--text-secondary) !important; }
|
||||
.body--dark .text-subtitle1,
|
||||
.body--dark .text-subtitle2 { color: var(--text-primary) !important; }
|
||||
|
||||
/* Inputs & textareas */
|
||||
.body--dark .q-field--outlined .q-field__control {
|
||||
background: var(--bg-surface-3) !important;
|
||||
border-radius: 0.5rem !important;
|
||||
}
|
||||
.body--dark .q-field--outlined .q-field__control:before {
|
||||
border-color: var(--border) !important;
|
||||
border-radius: 0.5rem !important;
|
||||
}
|
||||
.body--dark .q-field--outlined.q-field--focused .q-field__control:after {
|
||||
border-color: var(--accent) !important;
|
||||
}
|
||||
.body--dark .q-field__native,
|
||||
.body--dark .q-field__input { color: var(--text-primary) !important; }
|
||||
|
||||
/* Sidebar inputs get page bg */
|
||||
.body--dark .q-drawer .q-field--outlined .q-field__control {
|
||||
background: var(--bg-page) !important;
|
||||
}
|
||||
|
||||
/* Buttons */
|
||||
.body--dark .q-btn--standard { border-radius: 0.5rem !important; }
|
||||
.body--dark .q-btn--outline {
|
||||
transition: background 0.15s ease;
|
||||
}
|
||||
.body--dark .q-btn--outline:hover {
|
||||
background: var(--accent-subtle) !important;
|
||||
}
|
||||
|
||||
/* Tabs */
|
||||
.body--dark .q-tab--active { color: var(--accent) !important; }
|
||||
.body--dark .q-tab__indicator { background: var(--accent) !important; }
|
||||
|
||||
/* Separators */
|
||||
.body--dark .q-separator { background: var(--border) !important; }
|
||||
|
||||
/* Expansion items */
|
||||
.body--dark .q-expansion-item__content { padding: 12px 16px; }
|
||||
.body--dark .q-item { border-radius: 0.5rem; }
|
||||
|
||||
/* Splitter */
|
||||
.body--dark .q-splitter__separator { background: var(--border) !important; }
|
||||
.body--dark .q-splitter__before,
|
||||
.body--dark .q-splitter__after { padding: 0 8px; }
|
||||
|
||||
/* Action row wrap */
|
||||
.action-row { flex-wrap: wrap !important; gap: 8px !important; }
|
||||
|
||||
/* Notifications */
|
||||
.body--dark .q-notification { border-radius: 0.5rem; }
|
||||
|
||||
/* Font */
|
||||
body { font-family: "Inter", "Source Sans Pro", "Source Sans 3", sans-serif !important; }
|
||||
|
||||
/* Surface utility classes (need .body--dark to beat .body--dark .q-card specificity) */
|
||||
.body--dark .surface-1 { background: var(--bg-surface-1) !important; }
|
||||
.body--dark .surface-2 { background: var(--bg-surface-2) !important; }
|
||||
.body--dark .surface-3 { background: var(--bg-surface-3) !important; }
|
||||
|
||||
/* Typography utility classes */
|
||||
.section-header {
|
||||
font-size: 0.8rem;
|
||||
font-weight: 600;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.05em;
|
||||
color: var(--text-secondary) !important;
|
||||
}
|
||||
.subsection-header {
|
||||
font-size: 0.85rem;
|
||||
font-weight: 500;
|
||||
color: var(--text-primary) !important;
|
||||
}
|
||||
|
||||
/* Scrollbar */
|
||||
::-webkit-scrollbar { width: 6px; height: 6px; }
|
||||
::-webkit-scrollbar-track { background: transparent; }
|
||||
::-webkit-scrollbar-thumb {
|
||||
background: rgba(255,255,255,0.12);
|
||||
border-radius: 3px;
|
||||
}
|
||||
::-webkit-scrollbar-thumb:hover {
|
||||
background: rgba(255,255,255,0.2);
|
||||
}
|
||||
|
||||
/* Secondary pane teal accent */
|
||||
.pane-secondary .q-field--outlined.q-field--focused .q-field__control:after {
|
||||
border-color: #06B6D4 !important;
|
||||
}
|
||||
.pane-secondary .q-btn.bg-primary { background-color: #06B6D4 !important; }
|
||||
.pane-secondary .section-header { color: rgba(6,182,212,0.7) !important; }
|
||||
''')
|
||||
|
||||
config = load_config()
|
||||
state = AppState(
|
||||
config=config,
|
||||
current_dir=Path(config.get('last_dir', str(Path.cwd()))),
|
||||
snippets=load_snippets(),
|
||||
db_enabled=config.get('db_enabled', False),
|
||||
current_project=config.get('current_project', ''),
|
||||
)
|
||||
|
||||
# Use the shared DB instance
|
||||
state.db = _shared_db
|
||||
|
||||
dual_pane = {'active': False, 'state': None}
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Define helpers FIRST (before sidebar, which needs them)
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
@ui.refreshable
|
||||
def render_main_content():
|
||||
max_w = '2400px' if dual_pane['active'] else '1200px'
|
||||
with ui.column().classes('w-full q-pa-md').style(f'max-width: {max_w}; margin: 0 auto'):
|
||||
if not state.file_path or not state.file_path.exists():
|
||||
ui.label('Select a file from the sidebar to begin.').classes(
|
||||
'text-subtitle1 q-pa-lg')
|
||||
return
|
||||
|
||||
ui.label(f'Editing: {state.file_path.name}').classes('text-h5 q-mb-lg').style('font-weight: 600')
|
||||
|
||||
with ui.tabs().classes('w-full').style('border-bottom: 1px solid var(--border)') as tabs:
|
||||
ui.tab('batch', label='Batch Processor')
|
||||
ui.tab('timeline', label='Timeline')
|
||||
ui.tab('raw', label='Raw Editor')
|
||||
ui.tab('projects', label='Projects')
|
||||
|
||||
with ui.tab_panels(tabs, value='batch').classes('w-full'):
|
||||
with ui.tab_panel('batch'):
|
||||
_render_batch_tab_content()
|
||||
with ui.tab_panel('timeline'):
|
||||
render_timeline_tab(state)
|
||||
with ui.tab_panel('raw'):
|
||||
render_raw_editor(state)
|
||||
with ui.tab_panel('projects'):
|
||||
render_projects_tab(state)
|
||||
|
||||
if state.show_comfy_monitor:
|
||||
ui.separator()
|
||||
with ui.expansion('ComfyUI Monitor', icon='dns').classes('w-full'):
|
||||
render_comfy_monitor(state)
|
||||
|
||||
@ui.refreshable
|
||||
def _render_batch_tab_content():
|
||||
def on_toggle(e):
|
||||
dual_pane['active'] = e.value
|
||||
if e.value and dual_pane['state'] is None:
|
||||
s2 = state.create_secondary()
|
||||
s2._render_main = _render_batch_tab_content
|
||||
dual_pane['state'] = s2
|
||||
render_main_content.refresh()
|
||||
|
||||
ui.switch('Dual Pane', value=dual_pane['active'], on_change=on_toggle)
|
||||
|
||||
if not dual_pane['active']:
|
||||
render_batch_processor(state)
|
||||
else:
|
||||
s2 = dual_pane['state']
|
||||
with ui.row().classes('w-full gap-4'):
|
||||
with ui.column().classes('col'):
|
||||
ui.label('Pane A').classes('section-header q-mb-sm')
|
||||
_render_pane_file_selector(state)
|
||||
render_batch_processor(state)
|
||||
with ui.column().classes('col pane-secondary'):
|
||||
ui.label('Pane B').classes('section-header q-mb-sm')
|
||||
_render_pane_file_selector(s2)
|
||||
if s2.file_path and s2.file_path.exists():
|
||||
render_batch_processor(s2)
|
||||
else:
|
||||
ui.label('Select a file above to begin.').classes(
|
||||
'text-caption q-pa-md')
|
||||
|
||||
def _render_pane_file_selector(pane_state: AppState):
|
||||
if not pane_state.current_dir.exists():
|
||||
ui.label('Directory not found.').classes('text-warning')
|
||||
return
|
||||
json_files = sorted(pane_state.current_dir.glob('*.json'))
|
||||
json_files = [f for f in json_files if f.name not in (
|
||||
'.editor_config.json', '.editor_snippets.json')]
|
||||
file_names = [f.name for f in json_files]
|
||||
|
||||
current_val = pane_state.file_path.name if pane_state.file_path else None
|
||||
|
||||
def on_select(e):
|
||||
if not e.value:
|
||||
return
|
||||
fp = pane_state.current_dir / e.value
|
||||
data, mtime = load_json(fp)
|
||||
pane_state.data_cache = data
|
||||
pane_state.last_mtime = mtime
|
||||
pane_state.loaded_file = str(fp)
|
||||
pane_state.file_path = fp
|
||||
pane_state.restored_indicator = None
|
||||
_render_batch_tab_content.refresh()
|
||||
|
||||
ui.select(
|
||||
file_names,
|
||||
value=current_val,
|
||||
label='File',
|
||||
on_change=on_select,
|
||||
).classes('w-full')
|
||||
|
||||
def load_file(file_name: str):
|
||||
"""Load a JSON file and refresh the main content."""
|
||||
fp = state.current_dir / file_name
|
||||
if state.loaded_file == str(fp):
|
||||
return
|
||||
data, mtime = load_json(fp)
|
||||
state.data_cache = data
|
||||
state.last_mtime = mtime
|
||||
state.loaded_file = str(fp)
|
||||
state.file_path = fp
|
||||
state.restored_indicator = None
|
||||
if state._main_rendered:
|
||||
render_main_content.refresh()
|
||||
|
||||
# Attach helpers to state so sidebar can call them
|
||||
state._load_file = load_file
|
||||
state._render_main = render_main_content
|
||||
state._main_rendered = False
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Sidebar (rendered AFTER helpers are attached)
|
||||
# ------------------------------------------------------------------
|
||||
with ui.left_drawer(value=True).classes('q-pa-md').style('width: 320px'):
|
||||
render_sidebar(state, dual_pane)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Main content area
|
||||
# ------------------------------------------------------------------
|
||||
render_main_content()
|
||||
state._main_rendered = True
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Sidebar
|
||||
# ======================================================================
|
||||
|
||||
def render_sidebar(state: AppState, dual_pane: dict):
|
||||
ui.label('Navigator').classes('text-h6')
|
||||
|
||||
# --- Path input + Pin ---
|
||||
with ui.card().classes('w-full q-pa-md q-mb-md'):
|
||||
path_input = ui.input(
|
||||
'Current Path',
|
||||
value=str(state.current_dir),
|
||||
).classes('w-full')
|
||||
|
||||
def on_path_enter():
|
||||
p = resolve_path_case_insensitive(path_input.value)
|
||||
if p is not None and p.is_dir():
|
||||
state.current_dir = p
|
||||
if dual_pane['state']:
|
||||
dual_pane['state'].current_dir = state.current_dir
|
||||
dual_pane['state'].file_path = None
|
||||
dual_pane['state'].loaded_file = None
|
||||
dual_pane['state'].data_cache = {}
|
||||
state.config['last_dir'] = str(p)
|
||||
save_config(state.current_dir, state.config['favorites'], state.config)
|
||||
state.loaded_file = None
|
||||
state.file_path = None
|
||||
path_input.set_value(str(p))
|
||||
render_file_list.refresh()
|
||||
# Auto-load inside render_file_list already refreshed main content
|
||||
# if files exist; only refresh here for the empty-directory case.
|
||||
if not state.loaded_file:
|
||||
state._render_main.refresh()
|
||||
|
||||
path_input.on('keydown.enter', lambda _: on_path_enter())
|
||||
|
||||
def pin_folder():
|
||||
d = str(state.current_dir)
|
||||
if d not in state.config['favorites']:
|
||||
state.config['favorites'].append(d)
|
||||
save_config(state.current_dir, state.config['favorites'], state.config)
|
||||
render_favorites.refresh()
|
||||
|
||||
ui.button('Pin Folder', icon='push_pin', on_click=pin_folder).classes('w-full')
|
||||
|
||||
# --- Favorites ---
|
||||
with ui.card().classes('w-full q-pa-md q-mb-md'):
|
||||
ui.label('Favorites').classes('section-header')
|
||||
|
||||
@ui.refreshable
|
||||
def render_favorites():
|
||||
for fav in list(state.config['favorites']):
|
||||
with ui.row().classes('w-full items-center'):
|
||||
ui.button(
|
||||
fav,
|
||||
on_click=lambda f=fav: _jump_to(f),
|
||||
).props('flat dense').classes('col')
|
||||
ui.button(
|
||||
icon='close',
|
||||
on_click=lambda f=fav: _unpin(f),
|
||||
).props('flat dense color=negative')
|
||||
|
||||
def _jump_to(fav: str):
|
||||
state.current_dir = Path(fav)
|
||||
if dual_pane['state']:
|
||||
dual_pane['state'].current_dir = state.current_dir
|
||||
dual_pane['state'].file_path = None
|
||||
dual_pane['state'].loaded_file = None
|
||||
dual_pane['state'].data_cache = {}
|
||||
state.config['last_dir'] = fav
|
||||
save_config(state.current_dir, state.config['favorites'], state.config)
|
||||
state.loaded_file = None
|
||||
state.file_path = None
|
||||
path_input.set_value(fav)
|
||||
render_file_list.refresh()
|
||||
if not state.loaded_file:
|
||||
state._render_main.refresh()
|
||||
|
||||
def _unpin(fav: str):
|
||||
if fav in state.config['favorites']:
|
||||
state.config['favorites'].remove(fav)
|
||||
save_config(state.current_dir, state.config['favorites'], state.config)
|
||||
render_favorites.refresh()
|
||||
|
||||
render_favorites()
|
||||
|
||||
# --- Snippet Library ---
|
||||
with ui.card().classes('w-full q-pa-md q-mb-md'):
|
||||
ui.label('Snippet Library').classes('section-header')
|
||||
|
||||
with ui.expansion('Add New Snippet'):
|
||||
snip_name_input = ui.input('Name', placeholder='e.g. Cinematic').classes('w-full')
|
||||
snip_content_input = ui.textarea('Content', placeholder='4k, high quality...').classes('w-full')
|
||||
|
||||
def save_snippet():
|
||||
name = snip_name_input.value
|
||||
content = snip_content_input.value
|
||||
if name and content:
|
||||
state.snippets[name] = content
|
||||
save_snippets(state.snippets)
|
||||
snip_name_input.set_value('')
|
||||
snip_content_input.set_value('')
|
||||
ui.notify(f"Saved '{name}'")
|
||||
render_snippet_list.refresh()
|
||||
|
||||
ui.button('Save Snippet', on_click=save_snippet).classes('w-full')
|
||||
|
||||
@ui.refreshable
|
||||
def render_snippet_list():
|
||||
if not state.snippets:
|
||||
return
|
||||
ui.label('Click to copy snippet text:').classes('text-caption')
|
||||
for name, content in list(state.snippets.items()):
|
||||
with ui.row().classes('w-full items-center'):
|
||||
async def copy_snippet(c=content):
|
||||
await ui.run_javascript(
|
||||
f'navigator.clipboard.writeText({json.dumps(c)})', timeout=3.0)
|
||||
ui.notify('Copied to clipboard')
|
||||
|
||||
ui.button(
|
||||
f'{name}',
|
||||
on_click=copy_snippet,
|
||||
).props('flat dense').classes('col')
|
||||
ui.button(
|
||||
icon='delete',
|
||||
on_click=lambda n=name: _del_snippet(n),
|
||||
).props('flat dense color=negative')
|
||||
|
||||
def _del_snippet(name: str):
|
||||
if name in state.snippets:
|
||||
del state.snippets[name]
|
||||
save_snippets(state.snippets)
|
||||
render_snippet_list.refresh()
|
||||
|
||||
render_snippet_list()
|
||||
|
||||
# --- File List ---
|
||||
with ui.card().classes('w-full q-pa-md q-mb-md'):
|
||||
@ui.refreshable
|
||||
def render_file_list():
|
||||
if not state.current_dir.exists():
|
||||
ui.label('Directory not found.').classes('text-warning')
|
||||
return
|
||||
json_files = sorted(state.current_dir.glob('*.json'))
|
||||
json_files = [f for f in json_files if f.name not in ('.editor_config.json', '.editor_snippets.json')]
|
||||
|
||||
if not json_files:
|
||||
ui.label('No JSON files in this folder.').classes('text-caption')
|
||||
ui.button('Generate Templates', on_click=lambda: _gen_templates()).classes('w-full')
|
||||
return
|
||||
|
||||
with ui.expansion('Create New JSON'):
|
||||
new_fn_input = ui.input('Filename', placeholder='my_prompt_vace').classes('w-full')
|
||||
|
||||
def create_new():
|
||||
fn = new_fn_input.value
|
||||
if not fn:
|
||||
return
|
||||
if not fn.endswith('.json'):
|
||||
fn += '.json'
|
||||
path = state.current_dir / fn
|
||||
first_item = DEFAULTS.copy()
|
||||
first_item[KEY_SEQUENCE_NUMBER] = 1
|
||||
save_json(path, {KEY_BATCH_DATA: [first_item]})
|
||||
new_fn_input.set_value('')
|
||||
render_file_list.refresh()
|
||||
|
||||
ui.button('Create', on_click=create_new).classes('w-full')
|
||||
|
||||
ui.label('Select File').classes('subsection-header q-mt-sm')
|
||||
file_names = [f.name for f in json_files]
|
||||
current = Path(state.loaded_file).name if state.loaded_file else None
|
||||
selected = current if current in file_names else (file_names[0] if file_names else None)
|
||||
ui.radio(
|
||||
file_names,
|
||||
value=selected,
|
||||
on_change=lambda e: state._load_file(e.value) if e.value else None,
|
||||
).classes('w-full')
|
||||
|
||||
# Auto-load first file if nothing loaded yet
|
||||
if file_names and not state.loaded_file:
|
||||
state._load_file(file_names[0])
|
||||
|
||||
def _gen_templates():
|
||||
generate_templates(state.current_dir)
|
||||
render_file_list.refresh()
|
||||
|
||||
render_file_list()
|
||||
|
||||
# --- Comfy Monitor toggle ---
|
||||
def on_monitor_toggle(e):
|
||||
state.show_comfy_monitor = e.value
|
||||
state._render_main.refresh()
|
||||
|
||||
ui.checkbox('Show Comfy Monitor', value=True, on_change=on_monitor_toggle)
|
||||
|
||||
|
||||
# Register REST API routes for ComfyUI connectivity (uses the shared DB instance)
|
||||
if _shared_db is not None:
|
||||
register_api_routes(_shared_db)
|
||||
|
||||
ui.run(title='AI Settings Manager', port=8080, reload=True)
|
||||
215
project_loader.py
Normal file
215
project_loader.py
Normal file
@@ -0,0 +1,215 @@
|
||||
import json
|
||||
import logging
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
import urllib.error
|
||||
from typing import Any
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
MAX_DYNAMIC_OUTPUTS = 32
|
||||
|
||||
|
||||
class AnyType(str):
|
||||
"""Universal connector type that matches any ComfyUI type."""
|
||||
def __ne__(self, __value: object) -> bool:
|
||||
return False
|
||||
|
||||
any_type = AnyType("*")
|
||||
|
||||
|
||||
try:
|
||||
from server import PromptServer
|
||||
from aiohttp import web
|
||||
except ImportError:
|
||||
PromptServer = None
|
||||
|
||||
|
||||
def to_float(val: Any) -> float:
|
||||
try:
|
||||
return float(val)
|
||||
except (ValueError, TypeError):
|
||||
return 0.0
|
||||
|
||||
def to_int(val: Any) -> int:
|
||||
try:
|
||||
return int(float(val))
|
||||
except (ValueError, TypeError):
|
||||
return 0
|
||||
|
||||
|
||||
def _fetch_json(url: str) -> dict:
|
||||
"""Fetch JSON from a URL using stdlib urllib.
|
||||
|
||||
On error, returns a dict with an "error" key describing the failure.
|
||||
"""
|
||||
try:
|
||||
with urllib.request.urlopen(url, timeout=5) as resp:
|
||||
return json.loads(resp.read())
|
||||
except urllib.error.HTTPError as e:
|
||||
# HTTPError is a subclass of URLError — must be caught first
|
||||
body = ""
|
||||
try:
|
||||
raw = e.read()
|
||||
detail = json.loads(raw)
|
||||
body = detail.get("detail", str(raw, "utf-8", errors="replace"))
|
||||
except Exception:
|
||||
body = str(e)
|
||||
logger.warning(f"HTTP {e.code} from {url}: {body}")
|
||||
return {"error": "http_error", "status": e.code, "message": body}
|
||||
except (urllib.error.URLError, OSError) as e:
|
||||
reason = str(e.reason) if hasattr(e, "reason") else str(e)
|
||||
logger.warning(f"Network error fetching {url}: {reason}")
|
||||
return {"error": "network_error", "message": reason}
|
||||
except json.JSONDecodeError as e:
|
||||
logger.warning(f"Invalid JSON from {url}: {e}")
|
||||
return {"error": "parse_error", "message": str(e)}
|
||||
|
||||
|
||||
def _fetch_data(manager_url: str, project: str, file: str, seq: int) -> dict:
|
||||
"""Fetch sequence data from the NiceGUI REST API."""
|
||||
p = urllib.parse.quote(project, safe='')
|
||||
f = urllib.parse.quote(file, safe='')
|
||||
url = f"{manager_url.rstrip('/')}/api/projects/{p}/files/{f}/data?seq={seq}"
|
||||
return _fetch_json(url)
|
||||
|
||||
|
||||
def _fetch_keys(manager_url: str, project: str, file: str, seq: int) -> dict:
|
||||
"""Fetch keys/types from the NiceGUI REST API."""
|
||||
p = urllib.parse.quote(project, safe='')
|
||||
f = urllib.parse.quote(file, safe='')
|
||||
url = f"{manager_url.rstrip('/')}/api/projects/{p}/files/{f}/keys?seq={seq}"
|
||||
return _fetch_json(url)
|
||||
|
||||
|
||||
# --- ComfyUI-side proxy endpoints (for frontend JS) ---
# Registered only when running inside ComfyUI (PromptServer was importable).
# The browser-side node JS calls these ComfyUI routes, and ComfyUI fetches
# from the external JSON manager server-side — presumably to avoid
# cross-origin restrictions in the frontend (NOTE(review): confirm intent).
if PromptServer is not None:
    @PromptServer.instance.routes.get("/json_manager/list_projects")
    async def list_projects_proxy(request):
        # "url" query param selects the manager instance; defaults to local.
        manager_url = request.query.get("url", "http://localhost:8080")
        url = f"{manager_url.rstrip('/')}/api/projects"
        data = _fetch_json(url)
        # Errors from _fetch_json are passed through in the body with HTTP 200.
        return web.json_response(data)

    @PromptServer.instance.routes.get("/json_manager/list_project_files")
    async def list_project_files_proxy(request):
        manager_url = request.query.get("url", "http://localhost:8080")
        # Percent-encode the project name so it is safe inside the URL path.
        project = urllib.parse.quote(request.query.get("project", ""), safe='')
        url = f"{manager_url.rstrip('/')}/api/projects/{project}/files"
        data = _fetch_json(url)
        return web.json_response(data)

    @PromptServer.instance.routes.get("/json_manager/list_project_sequences")
    async def list_project_sequences_proxy(request):
        manager_url = request.query.get("url", "http://localhost:8080")
        project = urllib.parse.quote(request.query.get("project", ""), safe='')
        file_name = urllib.parse.quote(request.query.get("file", ""), safe='')
        url = f"{manager_url.rstrip('/')}/api/projects/{project}/files/{file_name}/sequences"
        data = _fetch_json(url)
        return web.json_response(data)

    @PromptServer.instance.routes.get("/json_manager/get_project_keys")
    async def get_project_keys_proxy(request):
        manager_url = request.query.get("url", "http://localhost:8080")
        # No quoting here: _fetch_keys percent-encodes project/file itself.
        project = request.query.get("project", "")
        file_name = request.query.get("file", "")
        try:
            seq = int(request.query.get("seq", "1"))
        except (ValueError, TypeError):
            # Non-numeric seq falls back to the first sequence.
            seq = 1
        data = _fetch_keys(manager_url, project, file_name, seq)
        # Unlike the listing proxies above, this route propagates upstream
        # failures as an HTTP error status (502 when none was supplied).
        if data.get("error") in ("http_error", "network_error", "parse_error"):
            status = data.get("status", 502)
            return web.json_response(data, status=status)
        return web.json_response(data)
|
||||
|
||||
|
||||
|
||||
# ==========================================
|
||||
# 0. DYNAMIC NODE (Project-based)
|
||||
# ==========================================
|
||||
|
||||
class ProjectLoaderDynamic:
    """ComfyUI node that pulls one sequence from the external JSON manager
    and fans selected values out over a fixed bank of dynamic outputs.

    Output 0 is always the total sequence count; the remaining
    MAX_DYNAMIC_OUTPUTS slots carry the values named in ``output_keys``,
    coerced per ``output_types`` and padded with "" when fewer keys are given.
    """

    @classmethod
    def INPUT_TYPES(s):
        required = {
            "manager_url": ("STRING", {"default": "http://localhost:8080", "multiline": False}),
            "project_name": ("STRING", {"default": "", "multiline": False}),
            "file_name": ("STRING", {"default": "", "multiline": False}),
            "sequence_number": ("INT", {"default": 1, "min": 1, "max": 9999}),
        }
        optional = {
            "output_keys": ("STRING", {"default": ""}),
            "output_types": ("STRING", {"default": ""}),
        }
        return {"required": required, "optional": optional}

    RETURN_TYPES = ("INT",) + (any_type,) * MAX_DYNAMIC_OUTPUTS
    RETURN_NAMES = ("total_sequences",) + tuple(f"output_{i}" for i in range(MAX_DYNAMIC_OUTPUTS))
    FUNCTION = "load_dynamic"
    CATEGORY = "utils/json/project"
    OUTPUT_NODE = False

    @staticmethod
    def _parse_spec(raw, drop_empty):
        """Parse a JSON-array spec string, falling back to comma-split text.

        ``drop_empty`` controls whether blank comma-separated entries are
        discarded (keys) or kept as "" placeholders (types).
        """
        if not raw:
            return []
        try:
            return json.loads(raw)
        except (json.JSONDecodeError, TypeError):
            parts = [p.strip() for p in raw.split(",")]
            if drop_empty:
                parts = [p for p in parts if p]
            return parts

    def load_dynamic(self, manager_url, project_name, file_name, sequence_number,
                     output_keys="", output_types=""):
        """Fetch one sequence and return (total_sequences, output_0, ...).

        Raises RuntimeError when either the keys-metadata or the data
        fetch reports an error, so the workflow fails loudly.
        """
        error_kinds = ("http_error", "network_error", "parse_error")

        # Keys metadata also carries the total_sequences count.
        keys_meta = _fetch_keys(manager_url, project_name, file_name, sequence_number)
        if keys_meta.get("error") in error_kinds:
            msg = keys_meta.get("message", "Unknown error")
            raise RuntimeError(f"Failed to fetch project keys: {msg}")
        total_sequences = keys_meta.get("total_sequences", 0)

        data = _fetch_data(manager_url, project_name, file_name, sequence_number)
        if data.get("error") in error_kinds:
            msg = data.get("message", "Unknown error")
            raise RuntimeError(f"Failed to fetch sequence data: {msg}")

        # Keys drop blank entries for compat; types keep positional blanks.
        keys = self._parse_spec(output_keys, drop_empty=True)
        types = self._parse_spec(output_types, drop_empty=False)

        results = []
        for idx, key in enumerate(keys):
            value = data.get(key, "")
            declared = types[idx] if idx < len(types) else ""
            if declared == "INT":
                results.append(to_int(value))
            elif declared == "FLOAT":
                results.append(to_float(value))
            elif isinstance(value, bool):
                # bool is checked before int (bool subclasses int in Python);
                # emitted as "true"/"false" strings.
                results.append(str(value).lower())
            elif isinstance(value, (int, float)):
                results.append(value)
            else:
                results.append(str(value))

        # Pad unused slots with empty strings (no-op when already full).
        results.extend([""] * (MAX_DYNAMIC_OUTPUTS - len(results)))
        return (total_sequences,) + tuple(results)
|
||||
|
||||
|
||||
# --- Mappings ---
# Node registration tables; presumably merged into ComfyUI's global
# NODE_CLASS_MAPPINGS / NODE_DISPLAY_NAME_MAPPINGS by the package
# __init__ — NOTE(review): merge site not visible here, confirm.
PROJECT_NODE_CLASS_MAPPINGS = {
    "ProjectLoaderDynamic": ProjectLoaderDynamic,
}

PROJECT_NODE_DISPLAY_NAME_MAPPINGS = {
    "ProjectLoaderDynamic": "Project Loader (Dynamic)",
}
|
||||
40
state.py
Normal file
40
state.py
Normal file
@@ -0,0 +1,40 @@
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
from typing import Any, Callable
|
||||
|
||||
|
||||
@dataclass
class AppState:
    """Mutable per-session state shared across the NiceGUI editor tabs."""

    # -- Core editor state --
    config: dict                       # editor configuration loaded at startup
    current_dir: Path                  # directory whose *.json files are edited
    loaded_file: str | None = None     # name of the currently open file, if any
    last_mtime: float = 0              # last seen file mtime — presumably for
                                       # external-change detection; TODO confirm
    data_cache: dict = field(default_factory=dict)   # parsed contents of the loaded file
    snippets: dict = field(default_factory=dict)     # shared snippet store
    file_path: Path | None = None      # full path of the loaded file
    restored_indicator: str | None = None  # label shown while editing a restored snapshot
    timeline_selected_nodes: set = field(default_factory=set)
    live_toggles: dict = field(default_factory=dict)
    show_comfy_monitor: bool = True

    # Project DB fields
    db: Any = None                     # database handle (opaque at this layer)
    current_project: str = ""
    db_enabled: bool = False

    # Set at runtime by main.py / tab_comfy_ng.py
    _render_main: Any = None
    _load_file: Callable | None = None
    _main_rendered: bool = False
    _live_checkboxes: dict = field(default_factory=dict)
    _live_refreshables: dict = field(default_factory=dict)

    def create_secondary(self) -> 'AppState':
        """Build an AppState for an additional view/session.

        Shares config, working directory, snippets and DB identity with
        this instance (same underlying objects, not copies); every other
        field starts from its default so per-view state is fresh.
        """
        return AppState(
            config=self.config,
            current_dir=self.current_dir,
            snippets=self.snippets,
            db=self.db,
            current_project=self.current_project,
            db_enabled=self.db_enabled,
        )
|
||||
304
tab_batch.py
304
tab_batch.py
@@ -1,304 +0,0 @@
|
||||
import streamlit as st
|
||||
import random
|
||||
from utils import DEFAULTS, save_json, load_json
|
||||
from history_tree import HistoryTree
|
||||
|
||||
def create_batch_callback(original_filename, current_data, current_dir):
    """Create a batch-mode copy of a single-mode JSON file.

    Seeds a new ``batch_<name>`` file next to the original with the
    current data as sequence #1 (history bookkeeping stripped), then
    points the file selector at it. Refuses to overwrite an existing file.
    """
    new_name = f"batch_{original_filename}"
    new_path = current_dir / new_name

    if new_path.exists():
        st.toast(f"File {new_name} already exists!", icon="⚠️")
        return

    # Shallow copy is enough: only top-level keys are touched.
    first_item = dict(current_data)
    first_item.pop("prompt_history", None)
    first_item.pop("history_tree", None)
    first_item["sequence_number"] = 1

    save_json(new_path, {
        "batch_data": [first_item],
        "history_tree": {},
        "prompt_history": [],
    })
    st.toast(f"Created {new_name}", icon="✨")
    st.session_state.file_selector = new_name
|
||||
|
||||
|
||||
def render_batch_processor(data, file_path, json_files, current_dir, selected_file_name):
    """Render the Streamlit batch editor for a batch-mode JSON file.

    For single-mode files it only offers a "Create Batch Copy" button.
    Otherwise it renders: an add-sequence area (empty / from another file /
    from legacy prompt history), one expander per sequence with clone /
    promote / delete actions and editable fields, and a Save & Snap row
    that commits a snapshot into the file's history tree.

    Note: most edits write into `data` in place and are persisted either
    immediately (add/clone/delete) or on Save & Snap (field edits).
    """
    # Batch files either have a "batch_data" key or are a bare list (legacy).
    is_batch_file = "batch_data" in data or isinstance(data, list)

    if not is_batch_file:
        st.warning("This is a Single file. To use Batch mode, create a copy.")
        st.button("✨ Create Batch Copy", on_click=create_batch_callback, args=(selected_file_name, data, current_dir))
        return

    if 'restored_indicator' in st.session_state and st.session_state.restored_indicator:
        st.info(f"📍 Editing Restored Version: **{st.session_state.restored_indicator}**")

    batch_list = data.get("batch_data", [])

    # --- ADD NEW SEQUENCE AREA ---
    st.subheader("Add New Sequence")
    ac1, ac2 = st.columns(2)

    with ac1:
        file_options = [f.name for f in json_files]
        d_idx = file_options.index(selected_file_name) if selected_file_name in file_options else 0
        src_name = st.selectbox("Source File:", file_options, index=d_idx, key="batch_src_file")
        # Source file is loaded eagerly so both import paths below can use it.
        src_data, _ = load_json(current_dir / src_name)

    with ac2:
        src_hist = src_data.get("prompt_history", [])
        h_opts = [f"#{i+1}: {h.get('note', 'No Note')} ({h.get('prompt', '')[:15]}...)" for i, h in enumerate(src_hist)] if src_hist else []
        sel_hist = st.selectbox("History Entry (Legacy):", h_opts, key="batch_src_hist")

    bc1, bc2, bc3 = st.columns(3)

    def add_sequence(new_item):
        # Assign the next free sequence number, strip per-file bookkeeping,
        # persist, then bump ui_reset_token so widget keys regenerate.
        max_seq = 0
        for s in batch_list:
            if "sequence_number" in s: max_seq = max(max_seq, int(s["sequence_number"]))
        new_item["sequence_number"] = max_seq + 1

        for k in ["prompt_history", "history_tree", "note", "loras"]:
            if k in new_item: del new_item[k]

        batch_list.append(new_item)
        data["batch_data"] = batch_list
        save_json(file_path, data)
        st.session_state.ui_reset_token += 1
        st.rerun()

    if bc1.button("➕ Add Empty", use_container_width=True):
        add_sequence(DEFAULTS.copy())

    if bc2.button("➕ From File", use_container_width=True, help=f"Copy {src_name}"):
        item = DEFAULTS.copy()
        # Use the source's first batch entry when it is a batch file,
        # otherwise the whole single-mode dict.
        flat = src_data["batch_data"][0] if "batch_data" in src_data and src_data["batch_data"] else src_data
        item.update(flat)
        add_sequence(item)

    if bc3.button("➕ From History", use_container_width=True, disabled=not src_hist):
        if sel_hist:
            # Recover the list index from the "#N: ..." option label.
            idx = int(sel_hist.split(":")[0].replace("#", "")) - 1
            item = DEFAULTS.copy()
            h_item = src_hist[idx]
            item.update(h_item)
            if "loras" in h_item and isinstance(h_item["loras"], dict):
                item.update(h_item["loras"])
            add_sequence(item)

    # --- RENDER LIST ---
    st.markdown("---")
    st.info(f"Batch contains {len(batch_list)} sequences.")

    # Keys handled by dedicated widgets below; everything else in a
    # sequence dict is shown in the "Custom Parameters" section.
    lora_keys = ["lora 1 high", "lora 1 low", "lora 2 high", "lora 2 low", "lora 3 high", "lora 3 low"]
    standard_keys = {
        "general_prompt", "general_negative", "current_prompt", "negative", "prompt", "seed",
        "camera", "flf", "sequence_number"
    }
    standard_keys.update(lora_keys)
    standard_keys.update([
        "frame_to_skip", "input_a_frames", "input_b_frames", "reference switch", "vace schedule",
        "reference path", "video file path", "reference image path", "flf image path"
    ])

    for i, seq in enumerate(batch_list):
        seq_num = seq.get("sequence_number", i+1)
        # ui_reset_token in the key forces fresh widgets after list mutations.
        prefix = f"{selected_file_name}_seq{i}_v{st.session_state.ui_reset_token}"

        with st.expander(f"🎬 Sequence #{seq_num}", expanded=False):
            # --- NEW: ACTION ROW WITH CLONING ---
            act_c1, act_c2, act_c3, act_c4 = st.columns([1.2, 1.8, 1.2, 0.5])

            # 1. Copy Source
            with act_c1:
                if st.button(f"📥 Copy {src_name}", key=f"{prefix}_copy", use_container_width=True):
                    # Replace this sequence with the source file's content,
                    # keeping the existing sequence number.
                    item = DEFAULTS.copy()
                    flat = src_data["batch_data"][0] if "batch_data" in src_data and src_data["batch_data"] else src_data
                    item.update(flat)
                    item["sequence_number"] = seq_num
                    for k in ["prompt_history", "history_tree"]:
                        if k in item: del item[k]
                    batch_list[i] = item
                    data["batch_data"] = batch_list
                    save_json(file_path, data)
                    st.session_state.ui_reset_token += 1
                    st.toast("Copied!", icon="📥")
                    st.rerun()

            # 2. Cloning Tools (Next / End)
            with act_c2:
                cl_1, cl_2 = st.columns(2)

                # Clone Next
                if cl_1.button("👯 Next", key=f"{prefix}_c_next", help="Clone and insert below", use_container_width=True):
                    # Shallow copy: nested values stay shared with the original.
                    new_seq = seq.copy()
                    # Calculate new max sequence number
                    max_sn = 0
                    for s in batch_list: max_sn = max(max_sn, int(s.get("sequence_number", 0)))
                    new_seq["sequence_number"] = max_sn + 1

                    batch_list.insert(i + 1, new_seq)
                    data["batch_data"] = batch_list
                    save_json(file_path, data)
                    st.session_state.ui_reset_token += 1
                    st.toast("Cloned to Next!", icon="👯")
                    st.rerun()

                # Clone End
                if cl_2.button("⏬ End", key=f"{prefix}_c_end", help="Clone and add to bottom", use_container_width=True):
                    new_seq = seq.copy()
                    max_sn = 0
                    for s in batch_list: max_sn = max(max_sn, int(s.get("sequence_number", 0)))
                    new_seq["sequence_number"] = max_sn + 1

                    batch_list.append(new_seq)
                    data["batch_data"] = batch_list
                    save_json(file_path, data)
                    st.session_state.ui_reset_token += 1
                    st.toast("Cloned to End!", icon="⏬")
                    st.rerun()

            # 3. Promote
            with act_c3:
                if st.button("↖️ Promote", key=f"{prefix}_prom", help="Save as Single File", use_container_width=True):
                    # Overwrites the WHOLE batch file with this one sequence,
                    # carrying the file's history along.
                    single_data = seq.copy()
                    single_data["prompt_history"] = data.get("prompt_history", [])
                    single_data["history_tree"] = data.get("history_tree", {})
                    if "sequence_number" in single_data: del single_data["sequence_number"]
                    save_json(file_path, single_data)
                    st.toast("Converted to Single!", icon="✅")
                    st.rerun()

            # 4. Remove
            with act_c4:
                if st.button("🗑️", key=f"{prefix}_del", use_container_width=True):
                    batch_list.pop(i)
                    data["batch_data"] = batch_list
                    save_json(file_path, data)
                    st.rerun()

            st.markdown("---")
            c1, c2 = st.columns([2, 1])
            with c1:
                seq["general_prompt"] = st.text_area("General Prompt", value=seq.get("general_prompt", ""), height=60, key=f"{prefix}_gp")
                seq["general_negative"] = st.text_area("General Negative", value=seq.get("general_negative", ""), height=60, key=f"{prefix}_gn")
                seq["current_prompt"] = st.text_area("Specific Prompt", value=seq.get("current_prompt", ""), height=100, key=f"{prefix}_sp")
                seq["negative"] = st.text_area("Specific Negative", value=seq.get("negative", ""), height=60, key=f"{prefix}_sn")

            with c2:
                seq["sequence_number"] = st.number_input("Seq Num", value=int(seq_num), key=f"{prefix}_sn_val")

                s_row1, s_row2 = st.columns([3, 1])
                seed_key = f"{prefix}_seed"
                with s_row2:
                    st.write("")
                    st.write("")
                    if st.button("🎲", key=f"{prefix}_rand"):
                        # Push the random seed through session_state so the
                        # number_input below picks it up on rerun.
                        st.session_state[seed_key] = random.randint(0, 999999999999)
                        st.rerun()
                with s_row1:
                    current_seed = st.session_state.get(seed_key, int(seq.get("seed", 0)))
                    val = st.number_input("Seed", value=current_seed, key=seed_key)
                    seq["seed"] = val

                seq["camera"] = st.text_input("Camera", value=seq.get("camera", ""), key=f"{prefix}_cam")
                seq["flf"] = st.text_input("FLF", value=str(seq.get("flf", DEFAULTS["flf"])), key=f"{prefix}_flf")

            # VACE-style files get extra per-sequence video controls.
            if "video file path" in seq or "vace" in selected_file_name:
                seq["video file path"] = st.text_input("Video Path", value=seq.get("video file path", ""), key=f"{prefix}_vid")
                with st.expander("VACE Settings"):
                    seq["frame_to_skip"] = st.number_input("Skip", value=int(seq.get("frame_to_skip", 81)), key=f"{prefix}_fts")
                    seq["input_a_frames"] = st.number_input("In A", value=int(seq.get("input_a_frames", 0)), key=f"{prefix}_ia")
                    seq["input_b_frames"] = st.number_input("In B", value=int(seq.get("input_b_frames", 0)), key=f"{prefix}_ib")
                    seq["reference switch"] = st.number_input("Switch", value=int(seq.get("reference switch", 1)), key=f"{prefix}_rsw")
                    seq["vace schedule"] = st.number_input("Sched", value=int(seq.get("vace schedule", 1)), key=f"{prefix}_vsc")
                    seq["reference path"] = st.text_input("Ref Path", value=seq.get("reference path", ""), key=f"{prefix}_rp")
                    seq["reference image path"] = st.text_input("Ref Img", value=seq.get("reference image path", ""), key=f"{prefix}_rip")

            if "i2v" in selected_file_name and "vace" not in selected_file_name:
                seq["reference image path"] = st.text_input("Ref Img", value=seq.get("reference image path", ""), key=f"{prefix}_ri2")
                seq["flf image path"] = st.text_input("FLF Img", value=seq.get("flf image path", ""), key=f"{prefix}_flfi")

            # --- LoRA Settings (Reverted to plain text) ---
            with st.expander("💊 LoRA Settings"):
                lc1, lc2, lc3 = st.columns(3)
                with lc1:
                    seq["lora 1 high"] = st.text_input("LoRA 1 Name", value=seq.get("lora 1 high", ""), key=f"{prefix}_l1h")
                    seq["lora 1 low"] = st.text_input("LoRA 1 Strength", value=str(seq.get("lora 1 low", "")), key=f"{prefix}_l1l")
                with lc2:
                    seq["lora 2 high"] = st.text_input("LoRA 2 Name", value=seq.get("lora 2 high", ""), key=f"{prefix}_l2h")
                    seq["lora 2 low"] = st.text_input("LoRA 2 Strength", value=str(seq.get("lora 2 low", "")), key=f"{prefix}_l2l")
                with lc3:
                    seq["lora 3 high"] = st.text_input("LoRA 3 Name", value=seq.get("lora 3 high", ""), key=f"{prefix}_l3h")
                    seq["lora 3 low"] = st.text_input("LoRA 3 Strength", value=str(seq.get("lora 3 low", "")), key=f"{prefix}_l3l")

            # --- CUSTOM PARAMETERS ---
            st.markdown("---")
            st.caption("🔧 Custom Parameters")

            custom_keys = [k for k in seq.keys() if k not in standard_keys]
            keys_to_remove = []

            if custom_keys:
                for k in custom_keys:
                    ck1, ck2, ck3 = st.columns([1, 2, 0.5])
                    ck1.text_input("Key", value=k, disabled=True, key=f"{prefix}_ck_lbl_{k}", label_visibility="collapsed")
                    val = ck2.text_input("Value", value=str(seq[k]), key=f"{prefix}_cv_{k}", label_visibility="collapsed")
                    seq[k] = val

                    # Deletions are collected and applied after the loop so
                    # the dict is not mutated while iterating.
                    if ck3.button("🗑️", key=f"{prefix}_cdel_{k}"):
                        keys_to_remove.append(k)

            with st.expander("➕ Add Parameter"):
                nk_col, nv_col = st.columns(2)
                new_k = nk_col.text_input("Key", key=f"{prefix}_new_k")
                new_v = nv_col.text_input("Value", key=f"{prefix}_new_v")

                if st.button("Add", key=f"{prefix}_add_cust"):
                    if new_k and new_k not in seq:
                        seq[new_k] = new_v
                        save_json(file_path, data)
                        st.session_state.ui_reset_token += 1
                        st.rerun()

            if keys_to_remove:
                for k in keys_to_remove:
                    del seq[k]
                save_json(file_path, data)
                st.session_state.ui_reset_token += 1
                st.rerun()

    st.markdown("---")

    # --- SAVE ACTIONS WITH HISTORY COMMIT ---
    col_save, col_note = st.columns([1, 2])

    with col_note:
        commit_msg = st.text_input("Change Note (Optional)", placeholder="e.g. Added sequence 3")

    with col_save:
        if st.button("💾 Save & Snap", use_container_width=True):
            data["batch_data"] = batch_list

            tree_data = data.get("history_tree", {})
            htree = HistoryTree(tree_data)

            # Snapshot everything except the tree itself (no recursion).
            snapshot_payload = data.copy()
            if "history_tree" in snapshot_payload: del snapshot_payload["history_tree"]

            htree.commit(snapshot_payload, note=commit_msg if commit_msg else "Batch Update")

            data["history_tree"] = htree.to_dict()
            save_json(file_path, data)

            if 'restored_indicator' in st.session_state:
                del st.session_state.restored_indicator

            st.toast("Batch Saved & Snapshot Created!", icon="🚀")
            st.rerun()
|
||||
734
tab_batch_ng.py
Normal file
734
tab_batch_ng.py
Normal file
@@ -0,0 +1,734 @@
|
||||
import copy
|
||||
import random
|
||||
from pathlib import Path
|
||||
|
||||
from nicegui import ui
|
||||
|
||||
from state import AppState
|
||||
from utils import (
|
||||
DEFAULTS, save_json, load_json, sync_to_db,
|
||||
KEY_BATCH_DATA, KEY_HISTORY_TREE, KEY_PROMPT_HISTORY, KEY_SEQUENCE_NUMBER,
|
||||
)
|
||||
from history_tree import HistoryTree
|
||||
|
||||
# File extensions treated as images by this tab.
IMAGE_EXTENSIONS = {'.png', '.jpg', '.jpeg', '.webp', '.bmp', '.gif'}
# Sub-segment numbering scheme: a sub-segment of parent P with index I is
# stored as P * SUB_SEGMENT_MULTIPLIER + I (see is_subsegment/parent_of).
SUB_SEGMENT_MULTIPLIER = 1000
FRAME_TO_SKIP_DEFAULT = DEFAULTS['frame_to_skip']

# VACE mode names; VACE_FORMULAS is index-aligned with VACE_MODES
# (formula i describes mode i), so repeated 'base + A + B' entries are
# deliberate. Keep the two lists the same length and in the same order.
VACE_MODES = [
    'End Extend', 'Pre Extend', 'Middle Extend', 'Edge Extend',
    'Join Extend', 'Bidirectional Extend', 'Frame Interpolation',
    'Replace/Inpaint', 'Video Inpaint', 'Keyframe',
]
VACE_FORMULAS = [
    'base + A', 'base + B', 'base + A + B', 'base + A + B',
    'base + A + B', 'base + A + B', '(B-1) * step',
    'snap(source)', 'snap(source)', 'base + A + B',
]
|
||||
|
||||
|
||||
# --- Sub-segment helpers (same as original) ---
|
||||
|
||||
def is_subsegment(seq_num):
    """True when the number encodes a sub-segment (parent*1000 + index)."""
    number = int(seq_num)
    return number >= SUB_SEGMENT_MULTIPLIER
|
||||
|
||||
def parent_of(seq_num):
    """Main-sequence number owning seq_num; identity for non-subsegments."""
    number = int(seq_num)
    if not is_subsegment(number):
        return number
    return number // SUB_SEGMENT_MULTIPLIER
|
||||
|
||||
def sub_index_of(seq_num):
    """Sub-segment index within its parent; 0 for main sequences."""
    number = int(seq_num)
    if not is_subsegment(number):
        return 0
    return number % SUB_SEGMENT_MULTIPLIER
|
||||
|
||||
def format_seq_label(seq_num):
    """Human-readable label: 'Sub #P.I' for sub-segments, else 'Sequence #N'."""
    number = int(seq_num)
    if not is_subsegment(number):
        return f'Sequence #{number}'
    return f'Sub #{parent_of(number)}.{sub_index_of(number)}'
|
||||
|
||||
def next_sub_segment_number(batch_list, parent_seq_num):
    """Next free encoded sub-segment number under the given parent."""
    parent = int(parent_seq_num)
    own_sub_indices = (
        sub_index_of(sn)
        for sn in (int(s.get(KEY_SEQUENCE_NUMBER, 0)) for s in batch_list)
        if is_subsegment(sn) and parent_of(sn) == parent
    )
    highest = max(own_sub_indices, default=0)
    return parent * SUB_SEGMENT_MULTIPLIER + highest + 1
|
||||
|
||||
def max_main_seq_number(batch_list):
    """Highest non-subsegment sequence number in the batch (0 when empty)."""
    highest = 0
    for entry in batch_list:
        number = int(entry.get(KEY_SEQUENCE_NUMBER, 0))
        if not is_subsegment(number) and number > highest:
            highest = number
    return highest
|
||||
|
||||
|
||||
def find_insert_position(batch_list, parent_index, parent_seq_num):
    """Index just past the parent's trailing sub-segments (insertion point)."""
    parent = int(parent_seq_num)
    pos = parent_index + 1
    for entry in batch_list[parent_index + 1:]:
        number = int(entry.get(KEY_SEQUENCE_NUMBER, 0))
        if not (is_subsegment(number) and parent_of(number) == parent):
            break
        pos += 1
    return pos
|
||||
|
||||
|
||||
# --- Helper for repetitive dict-bound inputs ---
|
||||
|
||||
def dict_input(element_fn, label, seq, key, **kwargs):
    """Build an input widget whose value is mirrored back into seq[key].

    Numeric values are stringified for text-style widgets; ui.number keeps
    them numeric. Writes happen on blur and on every model-value update.
    """
    initial = seq.get(key, '')
    if isinstance(initial, (int, float)) and element_fn != ui.number:
        initial = str(initial)
    widget = element_fn(label, value=initial, **kwargs)

    def _write_back(k=key):
        seq[k] = widget.value

    widget.on('blur', lambda _: _write_back())
    widget.on('update:model-value', lambda _: _write_back())
    return widget
|
||||
|
||||
|
||||
def dict_number(label, seq, key, default=0, **kwargs):
    """Number input that writes back into seq[key] on blur / model updates.

    Incoming values (possibly numeric strings) are normalized to int when
    whole, float otherwise; unparsable values fall back to ``default``.
    """
    def _normalize(raw, fallback):
        # float() first so "1.5" strings parse; collapse whole floats to int.
        # nan/inf trip ValueError/OverflowError in int() and fall back.
        try:
            as_float = float(raw)
            return int(as_float) if as_float == int(as_float) else as_float
        except (ValueError, TypeError, OverflowError):
            return fallback

    widget = ui.number(label, value=_normalize(seq.get(key, default), default), **kwargs)

    def _write_back(k=key, d=default):
        current = widget.value
        if current is None:
            seq[k] = d
        elif isinstance(current, float):
            try:
                seq[k] = int(current) if current == int(current) else current
            except (OverflowError, ValueError):
                seq[k] = d
        else:
            seq[k] = current

    widget.on('blur', lambda _: _write_back())
    widget.on('update:model-value', lambda _: _write_back())
    return widget
|
||||
|
||||
|
||||
def dict_textarea(label, seq, key, **kwargs):
    """Multi-line text widget that mirrors its content into seq[key]."""
    widget = ui.textarea(label, value=seq.get(key, ''), **kwargs)

    def _write_back(k=key):
        seq[k] = widget.value

    widget.on('blur', lambda _: _write_back())
    widget.on('update:model-value', lambda _: _write_back())
    return widget
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Main render function
|
||||
# ======================================================================
|
||||
|
||||
def render_batch_processor(state: AppState):
    """Render the NiceGUI batch editor for the file loaded in ``state``.

    Single-mode files only get a "Create Batch Copy" button. Batch files
    get: an add-sequence panel (empty or imported from another JSON file),
    a refreshable list of per-sequence cards, and a Save & Snap card that
    commits a snapshot into the file's history tree. All persistent writes
    go through save_json and, when a project DB is active, sync_to_db.
    """
    data = state.data_cache
    file_path = state.file_path
    # Legacy format: a bare list is wrapped into the dict form in place.
    if isinstance(data, list):
        data = {KEY_BATCH_DATA: data}
        state.data_cache = data
    is_batch_file = KEY_BATCH_DATA in data

    if not is_batch_file:
        ui.label('This is a Single file. To use Batch mode, create a copy.').classes(
            'text-warning')

        def create_batch():
            # Seed a new batch_<name> file from this single-mode file,
            # stripping history bookkeeping from the first entry.
            new_name = f'batch_{file_path.name}'
            new_path = file_path.parent / new_name
            if new_path.exists():
                ui.notify(f'File {new_name} already exists!', type='warning')
                return
            first_item = copy.deepcopy(data)
            first_item.pop(KEY_PROMPT_HISTORY, None)
            first_item.pop(KEY_HISTORY_TREE, None)
            first_item[KEY_SEQUENCE_NUMBER] = 1
            new_data = {KEY_BATCH_DATA: [first_item], KEY_HISTORY_TREE: {},
                        KEY_PROMPT_HISTORY: []}
            save_json(new_path, new_data)
            if state.db_enabled and state.current_project and state.db:
                sync_to_db(state.db, state.current_project, new_path, new_data)
            ui.notify(f'Created {new_name}', type='positive')

        ui.button('Create Batch Copy', icon='content_copy', on_click=create_batch)
        return

    if state.restored_indicator:
        ui.label(f'Editing Restored Version: {state.restored_indicator}').classes(
            'text-info q-pa-sm')

    batch_list = data.get(KEY_BATCH_DATA, [])

    # Source file data for importing
    with ui.card().classes('w-full q-pa-md q-mb-lg'):
        with ui.expansion('Add New Sequence from Source File', icon='playlist_add').classes('w-full'):
            json_files = sorted(state.current_dir.glob('*.json'))
            # Editor-internal config/snippet files are not import sources.
            json_files = [f for f in json_files if f.name not in (
                '.editor_config.json', '.editor_snippets.json')]
            file_options = {f.name: f.name for f in json_files}

            src_file_select = ui.select(
                file_options,
                value=file_path.name,
                label='Source File:',
            ).classes('w-64')

            src_seq_select = ui.select([], label='Source Sequence:').classes('w-64')

            # Track loaded source data
            _src_cache = {'data': None, 'batch': [], 'name': None}

            def _update_src():
                # Reload only when the selection actually changed.
                name = src_file_select.value
                if name and name != _src_cache['name']:
                    src_data, _ = load_json(state.current_dir / name)
                    _src_cache['data'] = src_data
                    _src_cache['batch'] = src_data.get(KEY_BATCH_DATA, [])
                    _src_cache['name'] = name
                    if _src_cache['batch']:
                        # Options map list index -> human label.
                        opts = {i: format_seq_label(s.get(KEY_SEQUENCE_NUMBER, i+1))
                                for i, s in enumerate(_src_cache['batch'])}
                        src_seq_select.set_options(opts, value=0)
                    else:
                        src_seq_select.set_options({})

            src_file_select.on_value_change(lambda _: _update_src())
            _update_src()

            def _add_sequence(new_item):
                # Next free main sequence number; strip per-file bookkeeping;
                # persist and redraw the list.
                new_item[KEY_SEQUENCE_NUMBER] = max_main_seq_number(batch_list) + 1
                for k in [KEY_PROMPT_HISTORY, KEY_HISTORY_TREE, 'note', 'loras']:
                    new_item.pop(k, None)
                batch_list.append(new_item)
                data[KEY_BATCH_DATA] = batch_list
                save_json(file_path, data)
                if state.db_enabled and state.current_project and state.db:
                    sync_to_db(state.db, state.current_project, file_path, data)
                render_sequence_list.refresh()

            with ui.row().classes('q-mt-sm'):
                def add_empty():
                    _add_sequence(DEFAULTS.copy())

                def add_from_source():
                    # Prefer the selected batch entry; fall back to the whole
                    # single-mode dict when the source has no batch list.
                    item = copy.deepcopy(DEFAULTS)
                    src_batch = _src_cache['batch']
                    sel_idx = src_seq_select.value
                    if src_batch and sel_idx is not None:
                        item.update(copy.deepcopy(src_batch[int(sel_idx)]))
                    elif _src_cache['data']:
                        item.update(copy.deepcopy(_src_cache['data']))
                    _add_sequence(item)

                ui.button('Add Empty', icon='add', on_click=add_empty)
                ui.button('From Source', icon='file_download', on_click=add_from_source)

    # --- Standard / LoRA / VACE key sets ---
    # Keys handled by dedicated widgets; everything else in a sequence dict
    # is treated as a custom parameter by the card renderer.
    lora_keys = ['lora 1 high', 'lora 1 low', 'lora 2 high', 'lora 2 low',
                 'lora 3 high', 'lora 3 low']
    standard_keys = {
        'general_prompt', 'general_negative', 'current_prompt', 'negative', 'prompt',
        'seed', 'cfg', 'camera', 'flf', KEY_SEQUENCE_NUMBER,
        'frame_to_skip', 'end_frame', 'transition', 'vace_length',
        'input_a_frames', 'input_b_frames', 'reference switch', 'vace schedule',
        'reference path', 'video file path', 'reference image path', 'flf image path',
    }
    standard_keys.update(lora_keys)

    def sort_by_number():
        batch_list.sort(key=lambda s: int(s.get(KEY_SEQUENCE_NUMBER, 0)))
        data[KEY_BATCH_DATA] = batch_list
        save_json(file_path, data)
        if state.db_enabled and state.current_project and state.db:
            sync_to_db(state.db, state.current_project, file_path, data)
        ui.notify('Sorted by sequence number!', type='positive')
        render_sequence_list.refresh()

    # --- Sequence list + mass update (inside refreshable so they stay in sync) ---
    @ui.refreshable
    def render_sequence_list():
        # Mass update (rebuilt on refresh so checkboxes match current sequences)
        _render_mass_update(batch_list, data, file_path, state, render_sequence_list)

        with ui.row().classes('w-full items-center'):
            ui.label(f'Batch contains {len(batch_list)} sequences.')
            ui.button('Sort by Number', icon='sort', on_click=sort_by_number).props('flat')

        for i, seq in enumerate(batch_list):
            with ui.card().classes('w-full q-mb-sm'):
                _render_sequence_card(
                    i, seq, batch_list, data, file_path, state,
                    _src_cache, src_seq_select,
                    standard_keys, render_sequence_list,
                )

    render_sequence_list()

    # --- Save & Snap ---
    with ui.card().classes('w-full q-pa-md q-mt-lg'):
        with ui.row().classes('w-full items-end q-gutter-md'):
            commit_input = ui.input('Change Note (Optional)',
                                    placeholder='e.g. Added sequence 3').classes('col')

            def save_and_snap():
                # Commit a deep-copied snapshot (minus the tree itself, to
                # avoid recursive growth) into the file's history tree.
                data[KEY_BATCH_DATA] = batch_list
                tree_data = data.get(KEY_HISTORY_TREE, {})
                htree = HistoryTree(tree_data)
                snapshot_payload = copy.deepcopy(data)
                snapshot_payload.pop(KEY_HISTORY_TREE, None)
                note = commit_input.value if commit_input.value else 'Batch Update'
                htree.commit(snapshot_payload, note=note)
                data[KEY_HISTORY_TREE] = htree.to_dict()
                save_json(file_path, data)
                if state.db_enabled and state.current_project and state.db:
                    sync_to_db(state.db, state.current_project, file_path, data)
                state.restored_indicator = None
                commit_input.set_value('')
                ui.notify('Batch Saved & Snapshot Created!', type='positive')

            ui.button('Save & Snap', icon='save', on_click=save_and_snap).props('color=primary')
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Single sequence card
|
||||
# ======================================================================
|
||||
|
||||
def _render_sequence_card(i, seq, batch_list, data, file_path, state,
                          src_cache, src_seq_select, standard_keys,
                          refresh_list):
    """Render one editable, expandable card for a single sequence.

    Parameters (as used below):
        i: index of this sequence within ``batch_list``.
        seq: mutable dict for this sequence; the dict_* widget helpers write
            edits straight into it.
        batch_list: full list of sequence dicts; clone/delete actions mutate it.
        data / file_path / state: persistence context handed to ``commit``.
        src_cache: dict with 'batch' and 'data' entries describing a source
            file to copy values from.
        src_seq_select: select widget choosing which source sequence to copy.
        standard_keys: keys considered built-in; anything else on ``seq`` is
            shown as a custom parameter.
        refresh_list: ui.refreshable that re-renders the whole sequence list.
    """
    def commit(message=None):
        # Persist the whole batch, optionally mirror to the DB, then re-render.
        data[KEY_BATCH_DATA] = batch_list
        save_json(file_path, data)
        if state.db_enabled and state.current_project and state.db:
            sync_to_db(state.db, state.current_project, file_path, data)
        if message:
            ui.notify(message, type='positive')
        refresh_list.refresh()

    seq_num = seq.get(KEY_SEQUENCE_NUMBER, i + 1)

    # Sub-segments get a "Sub #parent.index" label; main sequences a plain one.
    if is_subsegment(seq_num):
        label = f'Sub #{parent_of(seq_num)}.{sub_index_of(seq_num)} ({int(seq_num)})'
    else:
        label = f'Sequence #{seq_num}'

    with ui.expansion(label, icon='movie').classes('w-full'):
        # --- Action row ---
        with ui.row().classes('w-full q-gutter-sm action-row'):
            # Copy from source
            def copy_source(idx=i, sn=seq_num):
                # Start from defaults, overlay the selected source sequence
                # (or the whole source data), but keep this card's number.
                item = copy.deepcopy(DEFAULTS)
                src_batch = src_cache['batch']
                sel_idx = src_seq_select.value
                if src_batch and sel_idx is not None:
                    item.update(copy.deepcopy(src_batch[int(sel_idx)]))
                elif src_cache['data']:
                    item.update(copy.deepcopy(src_cache['data']))
                item[KEY_SEQUENCE_NUMBER] = sn
                # History never travels with a copy.
                item.pop(KEY_PROMPT_HISTORY, None)
                item.pop(KEY_HISTORY_TREE, None)
                batch_list[idx] = item
                commit('Copied!')

            ui.button('Copy Src', icon='file_download', on_click=copy_source).props('outline')

            # Clone Next
            def clone_next(idx=i, sn=seq_num, s=seq):
                new_seq = copy.deepcopy(s)
                new_seq[KEY_SEQUENCE_NUMBER] = max_main_seq_number(batch_list) + 1
                # Main sequences insert after their sub-segment group; subs
                # insert immediately after themselves.
                if not is_subsegment(sn):
                    pos = find_insert_position(batch_list, idx, int(sn))
                else:
                    pos = idx + 1
                batch_list.insert(pos, new_seq)
                commit('Cloned to Next!')

            ui.button('Clone Next', icon='content_copy', on_click=clone_next).props('outline')

            # Clone End
            def clone_end(s=seq):
                new_seq = copy.deepcopy(s)
                new_seq[KEY_SEQUENCE_NUMBER] = max_main_seq_number(batch_list) + 1
                batch_list.append(new_seq)
                commit('Cloned to End!')

            ui.button('Clone End', icon='vertical_align_bottom', on_click=clone_end).props('outline')

            # Clone Sub
            def clone_sub(idx=i, sn=seq_num, s=seq):
                # Clone this card as a new sub-segment of its parent sequence.
                new_seq = copy.deepcopy(s)
                p_seq = parent_of(sn)
                p_idx = idx
                if is_subsegment(sn):
                    # Locate the parent's index so insertion lands in its group.
                    for pi, ps in enumerate(batch_list):
                        if int(ps.get(KEY_SEQUENCE_NUMBER, 0)) == p_seq:
                            p_idx = pi
                            break
                new_seq[KEY_SEQUENCE_NUMBER] = next_sub_segment_number(batch_list, p_seq)
                pos = find_insert_position(batch_list, p_idx, p_seq)
                batch_list.insert(pos, new_seq)
                commit(f'Created {format_seq_label(new_seq[KEY_SEQUENCE_NUMBER])}!')

            ui.button('Clone Sub', icon='link', on_click=clone_sub).props('outline')

            # Flexible spacer pushes the Delete button to the right edge.
            ui.element('div').classes('col')

            # Delete
            def delete(idx=i):
                batch_list.pop(idx)
                commit()

            ui.button(icon='delete', on_click=delete).props('color=negative')

        ui.separator()

        # --- Prompts + Settings (2-column) ---
        with ui.splitter(value=66).classes('w-full') as splitter:
            with splitter.before:
                dict_textarea('General Prompt', seq, 'general_prompt').classes(
                    'w-full q-mt-sm').props('outlined rows=2')
                dict_textarea('General Negative', seq, 'general_negative').classes(
                    'w-full q-mt-sm').props('outlined rows=2')
                dict_textarea('Specific Prompt', seq, 'current_prompt').classes(
                    'w-full q-mt-sm').props('outlined rows=10')
                dict_textarea('Specific Negative', seq, 'negative').classes(
                    'w-full q-mt-sm').props('outlined rows=2')

            with splitter.after:
                # Sequence number
                sn_label = (
                    f'Seq Number (Sub #{parent_of(seq_num)}.{sub_index_of(seq_num)})'
                    if is_subsegment(seq_num) else 'Sequence Number'
                )
                sn_input = dict_number(sn_label, seq, KEY_SEQUENCE_NUMBER)
                sn_input.props('outlined').classes('w-full')

                # Seed + randomize
                with ui.row().classes('w-full items-end'):
                    seed_input = dict_number('Seed', seq, 'seed').classes('col').props('outlined')

                    def randomize_seed(si=seed_input, s=seq):
                        # Write the new seed to both the widget and the dict.
                        new_seed = random.randint(0, 999999999999)
                        si.set_value(new_seed)
                        s['seed'] = new_seed

                    ui.button(icon='casino', on_click=randomize_seed).props('flat')

                # CFG
                dict_number('CFG', seq, 'cfg', default=DEFAULTS['cfg'],
                            step=0.5, format='%.1f').props('outlined').classes('w-full')

                dict_input(ui.input, 'Camera', seq, 'camera').props('outlined').classes('w-full')
                dict_input(ui.input, 'FLF', seq, 'flf').props('outlined').classes('w-full')
                dict_number('End Frame', seq, 'end_frame').props('outlined').classes('w-full')
                dict_input(ui.input, 'Video File Path', seq, 'video file path').props(
                    'outlined input-style="direction: rtl"').classes('w-full')

                # Image paths with preview
                for img_label, img_key in [
                    ('Reference Image Path', 'reference image path'),
                    ('Reference Path', 'reference path'),
                    ('FLF Image Path', 'flf image path'),
                ]:
                    with ui.row().classes('w-full items-center'):
                        inp = dict_input(ui.input, img_label, seq, img_key).classes(
                            'col').props('outlined input-style="direction: rtl"')
                        img_path = Path(seq.get(img_key, '')) if seq.get(img_key) else None
                        # Only offer a preview button for existing image files.
                        if (img_path and img_path.exists() and
                                img_path.suffix.lower() in IMAGE_EXTENSIONS):
                            with ui.dialog() as dlg, ui.card():
                                ui.image(str(img_path)).classes('w-full')
                            ui.button(icon='visibility', on_click=dlg.open).props('flat dense')

        # --- VACE Settings (full width) ---
        with ui.expansion('VACE Settings', icon='settings').classes('w-full'):
            _render_vace_settings(i, seq, batch_list, data, file_path, state, refresh_list)

        # --- LoRA Settings ---
        with ui.expansion('LoRA Settings', icon='style').classes('w-full'):
            for lora_idx in range(1, 4):
                for tier, tier_label in [('high', 'High'), ('low', 'Low')]:
                    k = f'lora {lora_idx} {tier}'
                    # Stored form is '<lora:name:strength>'; strip the wrapper.
                    raw = str(seq.get(k, ''))
                    inner = raw.replace('<lora:', '').replace('>', '')
                    # Split "name:strength" or just "name"
                    if ':' in inner:
                        parts = inner.rsplit(':', 1)
                        lora_name = parts[0]
                        try:
                            lora_strength = float(parts[1])
                        except ValueError:
                            # Tail was not numeric: treat the whole as a name.
                            lora_name = inner
                            lora_strength = 1.0
                    else:
                        lora_name = inner
                        lora_strength = 1.0

                    with ui.row().classes('w-full items-center q-gutter-sm'):
                        ui.label(f'L{lora_idx} {tier_label}').classes(
                            'text-caption').style('min-width: 55px')
                        name_input = ui.input(
                            'Name',
                            value=lora_name,
                        ).classes('col').props('outlined dense')
                        strength_input = ui.number(
                            'Str',
                            value=lora_strength,
                            min=0, max=10, step=0.1,
                            format='%.1f',
                        ).props('outlined dense').style('max-width: 80px')

                    # Default args bind this iteration's key/widgets into the
                    # closure (avoids the classic late-binding loop bug).
                    def _lora_sync(key=k, n_inp=name_input, s_inp=strength_input):
                        name = n_inp.value or ''
                        strength = s_inp.value if s_inp.value is not None else 1.0
                        seq[key] = f'<lora:{name}:{strength:.1f}>' if name else ''

                    name_input.on('blur', lambda _, s=_lora_sync: s())
                    name_input.on('update:model-value', lambda _, s=_lora_sync: s())
                    strength_input.on('blur', lambda _, s=_lora_sync: s())
                    strength_input.on('update:model-value', lambda _, s=_lora_sync: s())

        # --- Custom Parameters ---
        ui.label('Custom Parameters').classes('section-header q-mt-md')

        # Anything on the sequence dict that is not a built-in key.
        custom_keys = [k for k in seq.keys() if k not in standard_keys]
        if custom_keys:
            for k in custom_keys:
                with ui.row().classes('w-full items-center'):
                    ui.input('Key', value=k).props('readonly outlined dense').classes('w-32')
                    dict_input(ui.input, 'Value', seq, k).props('outlined dense').classes('col')

                    def del_custom(key=k):
                        del seq[key]
                        commit()

                    ui.button(icon='delete', on_click=del_custom).props('flat dense color=negative')

        with ui.expansion('Add Parameter', icon='add').classes('w-full'):
            new_k_input = ui.input('Key').props('outlined dense')
            new_v_input = ui.input('Value').props('outlined dense')

            def add_param():
                k = new_k_input.value
                v = new_v_input.value
                # Ignore empty keys and keys that already exist on the dict.
                if k and k not in seq:
                    seq[k] = v
                    new_k_input.set_value('')
                    new_v_input.set_value('')
                    commit()

            ui.button('Add', on_click=add_param).props('flat')
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# VACE Settings sub-section
|
||||
# ======================================================================
|
||||
|
||||
def _render_vace_settings(i, seq, batch_list, data, file_path, state, refresh_list):
    """Render the VACE settings panel for one sequence.

    Edits are written straight into ``seq``; the Shift action additionally
    rewrites ``frame_to_skip`` for every later sequence and persists the file.
    """
    # VACE Schedule (needed early for both columns)
    # Clamp the stored mode index into the valid VACE_MODES range.
    sched_val = max(0, min(int(seq.get('vace schedule', 1)), len(VACE_MODES) - 1))

    # Mode reference dialog
    with ui.dialog() as ref_dlg, ui.card():
        table_md = (
            '| # | Mode | Formula |\n|:--|:-----|:--------|\n'
            + '\n'.join(
                f'| **{j}** | {VACE_MODES[j]} | `{VACE_FORMULAS[j]}` |'
                for j in range(len(VACE_MODES)))
            + '\n\n*All totals snapped to 4n+1 (1,5,9,...,49,...,81,...)*'
        )
        ui.markdown(table_md)

    with ui.row().classes('w-full q-gutter-md'):
        # --- Left column ---
        with ui.column().classes('col'):
            # Frame to Skip + shift
            with ui.row().classes('w-full items-end'):
                fts_input = dict_number('Frame to Skip', seq, 'frame_to_skip').classes(
                    'col').props('outlined')

                # Value at render time, so Shift can compute a delta later.
                _original_fts = int(seq.get('frame_to_skip', FRAME_TO_SKIP_DEFAULT))

                def shift_fts(idx=i, orig=_original_fts):
                    # Propagate the change in this card's frame_to_skip to
                    # every later sequence, then persist immediately.
                    new_fts = int(fts_input.value) if fts_input.value is not None else orig
                    delta = new_fts - orig
                    if delta == 0:
                        ui.notify('No change to shift', type='info')
                        return
                    shifted = 0
                    for j in range(idx + 1, len(batch_list)):
                        batch_list[j]['frame_to_skip'] = int(
                            batch_list[j].get('frame_to_skip', FRAME_TO_SKIP_DEFAULT)) + delta
                        shifted += 1
                    data[KEY_BATCH_DATA] = batch_list
                    save_json(file_path, data)
                    if state.db_enabled and state.current_project and state.db:
                        sync_to_db(state.db, state.current_project, file_path, data)
                    ui.notify(f'Shifted {shifted} sequences by {delta:+d}', type='positive')
                    refresh_list.refresh()

                ui.button('Shift', icon='arrow_downward', on_click=shift_fts).props(
                    'outline').style('height: 40px')

            dict_input(ui.input, 'Transition', seq, 'transition').props('outlined').classes(
                'w-full q-mt-sm')

            # VACE Schedule
            with ui.row().classes('w-full items-center q-mt-sm'):
                vs_input = dict_number('VACE Schedule', seq, 'vace schedule', default=1,
                                       min=0, max=len(VACE_MODES) - 1).classes('col').props(
                    'outlined')
                mode_label = ui.label(VACE_MODES[sched_val]).classes('text-caption')
                ui.button(icon='help', on_click=ref_dlg.open).props('flat dense round')

                def update_mode_label(e):
                    # Keep the human-readable mode name in sync with the number.
                    idx = int(e.sender.value) if e.sender.value is not None else 0
                    idx = max(0, min(idx, len(VACE_MODES) - 1))
                    mode_label.set_text(VACE_MODES[idx])

                vs_input.on('update:model-value', update_mode_label)

        # --- Right column ---
        with ui.column().classes('col'):
            ia_input = dict_number('Input A Frames', seq, 'input_a_frames').props(
                'outlined').classes('w-full')
            ib_input = dict_number('Input B Frames', seq, 'input_b_frames').props(
                'outlined').classes('w-full q-mt-sm')

            # VACE Length + output calculation
            input_a = int(seq.get('input_a_frames', 16))
            input_b = int(seq.get('input_b_frames', 16))
            stored_total = int(seq.get('vace_length', 49))
            mode_idx = int(seq.get('vace schedule', 1))

            # Derive the editable base length from the stored total by
            # removing whichever inputs the current mode consumes.
            if mode_idx == 0:
                base_length = max(stored_total - input_a, 1)
            elif mode_idx == 1:
                base_length = max(stored_total - input_b, 1)
            else:
                base_length = max(stored_total - input_a - input_b, 1)

            with ui.row().classes('w-full items-center q-mt-sm'):
                vl_input = ui.number('VACE Length', value=base_length, min=1).classes(
                    'col').props('outlined')
                output_label = ui.label(f'Output: {stored_total}').classes('text-bold')

            dict_number('Reference Switch', seq, 'reference switch').props(
                'outlined').classes('w-full q-mt-sm')

            # Recalculate VACE output when any input changes
            def recalc_vace(*_args):
                mi = int(vs_input.value) if vs_input.value is not None else 0
                ia = int(ia_input.value) if ia_input.value is not None else 16
                ib = int(ib_input.value) if ib_input.value is not None else 16
                nb = int(vl_input.value) if vl_input.value is not None else 1

                # Mode 0 adds input A, mode 1 adds input B, others add both
                # (mirrors the base_length derivation above).
                if mi == 0:
                    raw = nb + ia
                elif mi == 1:
                    raw = nb + ib
                else:
                    raw = nb + ia + ib

                # Snap the total to the nearest 4n+1 frame count.
                snapped = ((raw + 2) // 4) * 4 + 1
                seq['vace_length'] = snapped
                output_label.set_text(f'Output: {snapped}')

            for inp in (vs_input, ia_input, ib_input, vl_input):
                inp.on('update:model-value', recalc_vace)
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Mass Update
|
||||
# ======================================================================
|
||||
|
||||
def _render_mass_update(batch_list, data, file_path, state: AppState, refresh_list=None):
    """Render the 'Mass Update' panel.

    Lets the user pick one source sequence, a set of its fields, and any
    number of target sequences; applies the copy, commits a history snapshot,
    persists to disk, and optionally syncs to the DB.

    Args:
        batch_list: list of sequence dicts (mutated in place).
        data: full file payload; receives the updated batch and history tree.
        file_path: JSON file to persist to.
        state: application state (DB sync flags/handles).
        refresh_list: optional ui.refreshable to re-render after applying.
    """
    with ui.expansion('Mass Update', icon='sync').classes('w-full'):
        if len(batch_list) < 2:
            ui.label('Need at least 2 sequences for mass update.').classes('text-caption')
            return

        source_options = {i: format_seq_label(s.get(KEY_SEQUENCE_NUMBER, i+1))
                          for i, s in enumerate(batch_list)}
        source_select = ui.select(source_options, value=0,
                                  label='Copy from sequence:').classes('w-full')

        field_select = ui.select([], multiple=True,
                                 label='Fields to copy:').classes('w-full')

        def update_fields(_=None):
            # Re-populate the field list whenever the source sequence changes.
            idx = source_select.value
            if idx is not None and 0 <= idx < len(batch_list):
                src = batch_list[idx]
                # Exclude the sequence-number key via the shared constant —
                # the rest of the file reads/writes it as KEY_SEQUENCE_NUMBER,
                # so a hard-coded 'sequence_number' string would not match.
                keys = [k for k in src.keys() if k != KEY_SEQUENCE_NUMBER]
                field_select.set_options(keys)

        source_select.on_value_change(update_fields)
        update_fields()

        ui.label('Apply to:').classes('subsection-header q-mt-md')
        select_all_cb = ui.checkbox('Select All')
        target_checks = {}
        with ui.scroll_area().style('max-height: 250px'):
            for idx, s in enumerate(batch_list):
                sn = s.get(KEY_SEQUENCE_NUMBER, idx + 1)
                cb = ui.checkbox(format_seq_label(sn))
                target_checks[idx] = cb

        def on_select_all(e):
            # Mirror the master checkbox onto every per-sequence checkbox.
            for cb in target_checks.values():
                cb.set_value(e.value)

        select_all_cb.on_value_change(on_select_all)

        def apply_mass_update():
            # Validate source, fields, and targets before touching anything;
            # each failure notifies and aborts without side effects.
            src_idx = source_select.value
            if src_idx is None or src_idx >= len(batch_list):
                ui.notify('Source sequence no longer exists', type='warning')
                return
            selected_keys = field_select.value or []
            if not selected_keys:
                ui.notify('No fields selected', type='warning')
                return

            source_seq = batch_list[src_idx]
            # Never copy a sequence onto itself; skip stale indexes.
            targets = [idx for idx, cb in target_checks.items()
                       if cb.value and idx != src_idx and idx < len(batch_list)]
            if not targets:
                ui.notify('No target sequences selected', type='warning')
                return

            # Deep-copy each value so targets never share nested structures.
            for idx in targets:
                for key in selected_keys:
                    batch_list[idx][key] = copy.deepcopy(source_seq.get(key))

            # Snapshot (without the history tree itself) and persist.
            data[KEY_BATCH_DATA] = batch_list
            htree = HistoryTree(data.get(KEY_HISTORY_TREE, {}))
            snapshot = copy.deepcopy(data)
            snapshot.pop(KEY_HISTORY_TREE, None)
            htree.commit(snapshot, f"Mass update: {', '.join(selected_keys)}")
            data[KEY_HISTORY_TREE] = htree.to_dict()
            save_json(file_path, data)
            if state.db_enabled and state.current_project and state.db:
                sync_to_db(state.db, state.current_project, file_path, data)
            ui.notify(f'Updated {len(targets)} sequences', type='positive')
            if refresh_list:
                refresh_list.refresh()

        ui.button('Apply Changes', icon='check', on_click=apply_mass_update).props(
            'color=primary')
|
||||
165
tab_comfy.py
165
tab_comfy.py
@@ -1,165 +0,0 @@
|
||||
import streamlit as st
|
||||
import requests
|
||||
from PIL import Image
|
||||
from io import BytesIO
|
||||
from utils import save_config
|
||||
|
||||
def render_single_instance(instance_config, index, all_instances):
    """Render the Streamlit dashboard for one ComfyUI server inside its tab.

    Args:
        instance_config: dict with 'name' and 'url' for this server.
        index: position of this server in ``all_instances`` (used for widget keys).
        all_instances: full server list; Update/Remove mutate it and persist.
    """
    url = instance_config.get("url", "http://127.0.0.1:8188")
    name = instance_config.get("name", f"Server {index+1}")

    COMFY_URL = url.rstrip("/")

    c_head, c_set = st.columns([3, 1])
    c_head.markdown(f"### 🔌 {name}")

    with c_set.popover("⚙️ Settings"):
        st.caption("Press Update to apply changes!")
        new_name = st.text_input("Name", value=name, key=f"name_{index}")
        new_url = st.text_input("URL", value=url, key=f"url_{index}")

        if new_url != url:
            st.warning("⚠️ Unsaved URL! Click Update below.")

        if st.button("💾 Update & Save", key=f"save_{index}", type="primary"):
            all_instances[index]["name"] = new_name
            all_instances[index]["url"] = new_url
            st.session_state.config["comfy_instances"] = all_instances

            save_config(
                st.session_state.current_dir,
                st.session_state.config['favorites'],
                {"comfy_instances": all_instances}
            )
            st.toast("Server config saved!", icon="💾")
            st.rerun()

        st.divider()
        if st.button("🗑️ Remove Server", key=f"del_{index}"):
            all_instances.pop(index)
            st.session_state.config["comfy_instances"] = all_instances
            save_config(
                st.session_state.current_dir,
                st.session_state.config['favorites'],
                {"comfy_instances": all_instances}
            )
            st.rerun()

    # --- 1. STATUS DASHBOARD ---
    with st.expander("📊 Server Status", expanded=True):
        col1, col2, col3, col4 = st.columns([1, 1, 1, 1])
        try:
            res = requests.get(f"{COMFY_URL}/queue", timeout=1.5)
            queue_data = res.json()
            running_cnt = len(queue_data.get("queue_running", []))
            pending_cnt = len(queue_data.get("queue_pending", []))

            col1.metric("Status", "🟢 Online" if running_cnt > 0 else "💤 Idle")
            col2.metric("Pending", pending_cnt)
            col3.metric("Running", running_cnt)

            # One-shot flag: makes the Latest Output section below fetch.
            if col4.button("🔄 Check Img", key=f"refresh_{index}", use_container_width=True):
                st.session_state[f"force_img_refresh_{index}"] = True
        except Exception:
            # Any network/JSON failure marks the server offline and stops
            # rendering the rest of this tab.
            col1.metric("Status", "🔴 Offline")
            col2.metric("Pending", "-")
            col3.metric("Running", "-")
            st.error(f"Could not connect to {COMFY_URL}")
            return

    # --- 2. LIVE VIEW (WITH TOGGLE) ---
    st.write("")
    c_label, c_ctrl = st.columns([1, 2])
    c_label.subheader("📺 Live View")

    # LIVE PREVIEW TOGGLE
    enable_preview = c_ctrl.checkbox("Enable Live Preview", value=True, key=f"live_toggle_{index}")

    if enable_preview:
        # Height Slider
        iframe_h = st.slider(
            "Height (px)",
            min_value=600, max_value=2500, value=1000, step=50,
            key=f"h_slider_{index}"
        )

        # Embed the ComfyUI frontend directly in an iframe.
        st.markdown(
            f"""
            <iframe src="{COMFY_URL}" width="100%" height="{iframe_h}px"
            style="border: 1px solid #444; border-radius: 8px; box-shadow: 0 4px 6px rgba(0,0,0,0.3);">
            </iframe>
            """,
            unsafe_allow_html=True
        )
    else:
        st.info("Live Preview is disabled. Enable it above to see the interface.")

    st.markdown("---")

    # --- 3. LATEST OUTPUT ---
    if st.session_state.get(f"force_img_refresh_{index}", False):
        st.caption("🖼️ Most Recent Output")
        try:
            hist_res = requests.get(f"{COMFY_URL}/history", timeout=2)
            history = hist_res.json()
            if history:
                last_prompt_id = list(history.keys())[-1]
                outputs = history[last_prompt_id].get("outputs", {})
                found_img = None
                # Take the first 'output'-type image from any node's outputs.
                for node_id, node_output in outputs.items():
                    if "images" in node_output:
                        for img_info in node_output["images"]:
                            if img_info["type"] == "output":
                                found_img = img_info
                                break
                    if found_img: break

                if found_img:
                    img_name = found_img['filename']
                    folder = found_img['subfolder']
                    img_type = found_img['type']
                    img_url = f"{COMFY_URL}/view?filename={img_name}&subfolder={folder}&type={img_type}"
                    # NOTE(review): this GET has no timeout — a slow server
                    # blocks the rerun; confirm whether that is intended.
                    img_res = requests.get(img_url)
                    image = Image.open(BytesIO(img_res.content))
                    st.image(image, caption=f"Last Output: {img_name}")
                else:
                    st.warning("Last run had no image output.")
            else:
                st.info("No history found.")
            # Clear the one-shot flag so the next rerun skips the fetch.
            st.session_state[f"force_img_refresh_{index}"] = False
        except Exception as e:
            st.error(f"Error fetching image: {e}")
|
||||
|
||||
def render_comfy_monitor():
    """Top-level ComfyUI monitor page.

    Shows one tab per configured server plus a trailing 'Add Server' tab
    with a form for registering new instances.
    """
    config = st.session_state.config

    # Seed a default server entry the first time the page is opened.
    if "comfy_instances" not in config:
        config["comfy_instances"] = [
            {"name": "Main Server", "url": "http://192.168.1.100:8188"}
        ]

    instances = config["comfy_instances"]
    labels = [inst["name"] for inst in instances]
    labels.append("➕ Add Server")
    tabs = st.tabs(labels)

    # One dashboard per server; the last tab is reserved for the add form.
    for idx, server_tab in enumerate(tabs[:-1]):
        with server_tab:
            render_single_instance(instances[idx], idx, instances)

    with tabs[-1]:
        st.header("Add New ComfyUI Instance")
        with st.form("add_server_form"):
            new_name = st.text_input("Server Name", placeholder="e.g. Render Node 2")
            new_url = st.text_input("URL", placeholder="http://192.168.1.50:8188")
            submitted = st.form_submit_button("Add Instance")
            if submitted:
                if not (new_name and new_url):
                    st.error("Please fill in both Name and URL.")
                else:
                    instances.append({"name": new_name, "url": new_url})
                    config["comfy_instances"] = instances
                    save_config(
                        st.session_state.current_dir,
                        config['favorites'],
                        {"comfy_instances": instances}
                    )
                    st.success("Server Added!")
                    st.rerun()
|
||||
278
tab_comfy_ng.py
Normal file
278
tab_comfy_ng.py
Normal file
@@ -0,0 +1,278 @@
|
||||
import asyncio
|
||||
import html
|
||||
import time
|
||||
import urllib.parse
|
||||
|
||||
import requests
|
||||
from nicegui import ui
|
||||
|
||||
from state import AppState
|
||||
from utils import save_config
|
||||
|
||||
|
||||
def render_comfy_monitor(state: AppState):
    """NiceGUI ComfyUI monitor page.

    Renders global monitor settings, a refreshable list of per-server panels,
    an add-server form, and a background timer that auto-disables live
    previews whose timeout has elapsed.
    """
    config = state.config

    # --- Global Monitor Settings ---
    with ui.expansion('Monitor Settings', icon='settings').classes('w-full'):
        with ui.row().classes('w-full items-end'):
            viewer_input = ui.input(
                'Remote Browser URL',
                value=config.get('viewer_url', ''),
                placeholder='e.g., http://localhost:5800',
            ).classes('col')
            timeout_slider = ui.slider(
                min=0, max=60, step=1,
                value=config.get('monitor_timeout', 0),
            ).classes('col')
            # Live caption for the slider value.
            ui.label().bind_text_from(timeout_slider, 'value',
                                      backward=lambda v: f'Timeout: {v} min')

        def save_monitor_settings():
            config['viewer_url'] = viewer_input.value
            config['monitor_timeout'] = int(timeout_slider.value)
            save_config(state.current_dir, config['favorites'], config)
            ui.notify('Monitor settings saved!', type='positive')

        ui.button('Save Monitor Settings', icon='save', on_click=save_monitor_settings)

    # --- Instance Management ---
    if 'comfy_instances' not in config:
        # First run: seed a default server entry.
        config['comfy_instances'] = [
            {'name': 'Main Server', 'url': 'http://192.168.1.100:8188'}
        ]

    instances = config['comfy_instances']

    @ui.refreshable
    def render_instance_tabs():
        if not instances:
            ui.label('No servers configured. Add one below.')

        for idx, inst in enumerate(instances):
            with ui.expansion(inst.get('name', f'Server {idx+1}'), icon='dns').classes('w-full'):
                _render_single_instance(state, inst, idx, instances, render_instance_tabs)

        # Add server section
        ui.separator()
        ui.label('Add New Server').classes('section-header')
        with ui.row().classes('w-full items-end'):
            new_name = ui.input('Server Name', placeholder='e.g. Render Node 2').classes('col')
            new_url = ui.input('URL', placeholder='http://192.168.1.50:8188').classes('col')

        def add_instance():
            if new_name.value and new_url.value:
                instances.append({'name': new_name.value, 'url': new_url.value})
                config['comfy_instances'] = instances
                save_config(state.current_dir, config['favorites'], config)
                ui.notify('Server Added!', type='positive')
                new_name.set_value('')
                new_url.set_value('')
                render_instance_tabs.refresh()
            else:
                ui.notify('Please fill in both Name and URL.', type='warning')

        ui.button('Add Instance', icon='add', on_click=add_instance)

    render_instance_tabs()

    # --- Auto-poll timer (every 300s) ---
    # Store live_checkbox references so the timer can update them
    _live_checkboxes = state._live_checkboxes
    _live_refreshables = state._live_refreshables

    def poll_all():
        # Auto-disable any live preview whose timeout has elapsed; a timeout
        # of 0 disables the feature entirely.
        timeout_val = config.get('monitor_timeout', 0)
        if timeout_val > 0:
            for key, start_time in list(state.live_toggles.items()):
                if start_time and (time.time() - start_time) > (timeout_val * 60):
                    state.live_toggles[key] = None
                    if key in _live_checkboxes:
                        _live_checkboxes[key].set_value(False)
                    if key in _live_refreshables:
                        _live_refreshables[key].refresh()

    ui.timer(300, poll_all)
|
||||
|
||||
|
||||
def _fetch_blocking(url, timeout=1.5):
    """Perform a synchronous HTTP GET.

    Returns a ``(response, error)`` pair: exactly one element is non-None.
    Never raises — any exception from the request is handed back to the caller.
    """
    response, error = None, None
    try:
        response = requests.get(url, timeout=timeout)
    except Exception as exc:
        error = exc
    return response, error
|
||||
|
||||
|
||||
def _render_single_instance(state: AppState, instance_config: dict, index: int,
|
||||
all_instances: list, refresh_fn):
|
||||
config = state.config
|
||||
url = instance_config.get('url', 'http://127.0.0.1:8188')
|
||||
name = instance_config.get('name', f'Server {index+1}')
|
||||
comfy_url = url.rstrip('/')
|
||||
|
||||
# --- Settings popover ---
|
||||
with ui.expansion('Settings', icon='settings'):
|
||||
name_input = ui.input('Name', value=name).classes('w-full')
|
||||
url_input = ui.input('URL', value=url).classes('w-full')
|
||||
|
||||
def update_server():
|
||||
all_instances[index]['name'] = name_input.value
|
||||
all_instances[index]['url'] = url_input.value
|
||||
config['comfy_instances'] = all_instances
|
||||
save_config(state.current_dir, config['favorites'], config)
|
||||
ui.notify('Server config saved!', type='positive')
|
||||
refresh_fn.refresh()
|
||||
|
||||
def remove_server():
|
||||
all_instances.pop(index)
|
||||
config['comfy_instances'] = all_instances
|
||||
save_config(state.current_dir, config['favorites'], config)
|
||||
ui.notify('Server removed', type='info')
|
||||
refresh_fn.refresh()
|
||||
|
||||
ui.button('Update & Save', icon='save', on_click=update_server).props('color=primary')
|
||||
ui.button('Remove Server', icon='delete', on_click=remove_server).props('color=negative')
|
||||
|
||||
# --- Status Dashboard ---
|
||||
status_container = ui.row().classes('w-full items-center q-gutter-md')
|
||||
|
||||
async def refresh_status():
|
||||
status_container.clear()
|
||||
loop = asyncio.get_event_loop()
|
||||
res, err = await loop.run_in_executor(
|
||||
None, lambda: _fetch_blocking(f'{comfy_url}/queue'))
|
||||
with status_container:
|
||||
if res is not None:
|
||||
try:
|
||||
queue_data = res.json()
|
||||
except (ValueError, Exception):
|
||||
ui.label('Invalid response from server').classes('text-negative')
|
||||
return
|
||||
running_cnt = len(queue_data.get('queue_running', []))
|
||||
pending_cnt = len(queue_data.get('queue_pending', []))
|
||||
|
||||
with ui.card().classes('q-pa-md text-center').style('min-width: 100px'):
|
||||
ui.label('Status')
|
||||
ui.label('Online' if running_cnt > 0 else 'Idle').classes(
|
||||
'text-positive' if running_cnt > 0 else 'text-grey')
|
||||
with ui.card().classes('q-pa-md text-center').style('min-width: 100px'):
|
||||
ui.label('Pending')
|
||||
ui.label(str(pending_cnt))
|
||||
with ui.card().classes('q-pa-md text-center').style('min-width: 100px'):
|
||||
ui.label('Running')
|
||||
ui.label(str(running_cnt))
|
||||
else:
|
||||
with ui.card().classes('q-pa-md text-center').style('min-width: 100px'):
|
||||
ui.label('Status')
|
||||
ui.label('Offline').classes('text-negative')
|
||||
ui.label(f'Could not connect to {comfy_url}').classes('text-negative')
|
||||
|
||||
# Initial status fetch (non-blocking via button click handler pattern)
|
||||
ui.timer(0.1, refresh_status, once=True)
|
||||
ui.button('Refresh Status', icon='refresh', on_click=refresh_status).props('flat dense')
|
||||
|
||||
# --- Live View ---
|
||||
with ui.card().classes('w-full q-pa-md q-mt-md'):
|
||||
ui.label('Live View').classes('section-header')
|
||||
toggle_key = f'live_toggle_{index}'
|
||||
|
||||
live_checkbox = ui.checkbox('Enable Live Preview', value=False)
|
||||
# Store reference so poll_all timer can disable it on timeout
|
||||
state._live_checkboxes[toggle_key] = live_checkbox
|
||||
|
||||
@ui.refreshable
|
||||
def render_live_view():
|
||||
if not live_checkbox.value:
|
||||
ui.label('Live Preview is disabled.').classes('text-caption')
|
||||
return
|
||||
|
||||
# Record start time
|
||||
if toggle_key not in state.live_toggles or state.live_toggles.get(toggle_key) is None:
|
||||
state.live_toggles[toggle_key] = time.time()
|
||||
|
||||
timeout_val = config.get('monitor_timeout', 0)
|
||||
if timeout_val > 0:
|
||||
start = state.live_toggles.get(toggle_key, time.time())
|
||||
remaining = (timeout_val * 60) - (time.time() - start)
|
||||
if remaining <= 0:
|
||||
live_checkbox.set_value(False)
|
||||
state.live_toggles[toggle_key] = None
|
||||
ui.label('Preview timed out.').classes('text-caption')
|
||||
return
|
||||
ui.label(f'Auto-off in: {int(remaining)}s').classes('text-caption')
|
||||
|
||||
iframe_h = ui.slider(min=600, max=2500, step=50, value=1000).classes('w-full')
|
||||
ui.label().bind_text_from(iframe_h, 'value', backward=lambda v: f'Height: {v}px')
|
||||
|
||||
viewer_base = config.get('viewer_url', '').strip()
|
||||
parsed = urllib.parse.urlparse(viewer_base)
|
||||
if viewer_base and parsed.scheme in ('http', 'https'):
|
||||
safe_src = html.escape(viewer_base, quote=True)
|
||||
ui.label(f'Viewing: {viewer_base}').classes('text-caption')
|
||||
|
||||
iframe_container = ui.column().classes('w-full')
|
||||
|
||||
def update_iframe():
|
||||
iframe_container.clear()
|
||||
with iframe_container:
|
||||
ui.html(
|
||||
f'<iframe src="{safe_src}" width="100%" height="{int(iframe_h.value)}px"'
|
||||
f' style="border: 2px solid #666; border-radius: 8px;"></iframe>'
|
||||
)
|
||||
|
||||
iframe_h.on_value_change(lambda _: update_iframe())
|
||||
update_iframe()
|
||||
else:
|
||||
ui.label('No valid viewer URL configured.').classes('text-warning')
|
||||
|
||||
state._live_refreshables[toggle_key] = render_live_view
|
||||
live_checkbox.on_value_change(lambda _: render_live_view.refresh())
|
||||
render_live_view()
|
||||
|
||||
# --- Latest Output ---
|
||||
with ui.card().classes('w-full q-pa-md q-mt-md'):
|
||||
ui.label('Latest Output').classes('section-header')
|
||||
img_container = ui.column().classes('w-full')
|
||||
|
||||
async def check_image():
|
||||
img_container.clear()
|
||||
loop = asyncio.get_event_loop()
|
||||
res, err = await loop.run_in_executor(
|
||||
None, lambda: _fetch_blocking(f'{comfy_url}/history', timeout=2))
|
||||
with img_container:
|
||||
if err is not None:
|
||||
ui.label(f'Error fetching image: {err}').classes('text-negative')
|
||||
return
|
||||
try:
|
||||
history = res.json()
|
||||
except (ValueError, Exception):
|
||||
ui.label('Invalid response from server').classes('text-negative')
|
||||
return
|
||||
if not history:
|
||||
ui.label('No history found.').classes('text-caption')
|
||||
return
|
||||
last_prompt_id = list(history.keys())[-1]
|
||||
outputs = history[last_prompt_id].get('outputs', {})
|
||||
found_img = None
|
||||
for node_output in outputs.values():
|
||||
if 'images' in node_output:
|
||||
for img_info in node_output['images']:
|
||||
if img_info['type'] == 'output':
|
||||
found_img = img_info
|
||||
break
|
||||
if found_img:
|
||||
break
|
||||
if found_img:
|
||||
params = urllib.parse.urlencode({
|
||||
'filename': found_img['filename'],
|
||||
'subfolder': found_img['subfolder'],
|
||||
'type': found_img['type'],
|
||||
})
|
||||
img_url = f'{comfy_url}/view?{params}'
|
||||
ui.image(img_url).classes('w-full').style('max-width: 600px')
|
||||
ui.label(f'Last Output: {found_img["filename"]}').classes('text-caption')
|
||||
else:
|
||||
ui.label('Last run had no image output.').classes('text-caption')
|
||||
|
||||
ui.button('Check Latest Image', icon='image', on_click=check_image).props('flat')
|
||||
165
tab_projects_ng.py
Normal file
165
tab_projects_ng.py
Normal file
@@ -0,0 +1,165 @@
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
from nicegui import ui
|
||||
|
||||
from state import AppState
|
||||
from db import ProjectDB
|
||||
from utils import save_config, sync_to_db, KEY_BATCH_DATA
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def render_projects_tab(state: AppState):
    """Render the Projects management tab.

    Layout: a master switch for the project database, followed by a
    refreshable content area containing a create-project form and the
    list of existing projects (activate / deactivate / import / delete).
    All persistence goes through ``state.db`` and ``save_config``.
    """

    # --- DB toggle ---
    def on_db_toggle(e):
        # Persist the toggle both on the live state and in the saved config,
        # then re-render the whole content area (it hides itself when off).
        state.db_enabled = e.value
        state.config['db_enabled'] = e.value
        save_config(state.current_dir, state.config.get('favorites', []), state.config)
        render_project_content.refresh()

    ui.switch('Enable Project Database', value=state.db_enabled,
              on_change=on_db_toggle).classes('q-mb-md')

    @ui.refreshable
    def render_project_content():
        # Guard clauses: feature disabled, or DB object missing.
        if not state.db_enabled:
            ui.label('Project database is disabled. Enable it above to manage projects.').classes(
                'text-caption q-pa-md')
            return

        if not state.db:
            ui.label('Database not initialized.').classes('text-warning q-pa-md')
            return

        # --- Create project form ---
        with ui.card().classes('w-full q-pa-md q-mb-md'):
            ui.label('Create New Project').classes('section-header')
            name_input = ui.input('Project Name', placeholder='my_project').classes('w-full')
            desc_input = ui.input('Description (optional)', placeholder='A short description').classes('w-full')

            def create_project():
                name = name_input.value.strip()
                if not name:
                    ui.notify('Please enter a project name', type='warning')
                    return
                try:
                    # The project is rooted at the currently browsed directory.
                    state.db.create_project(name, str(state.current_dir), desc_input.value.strip())
                    name_input.set_value('')
                    desc_input.set_value('')
                    ui.notify(f'Created project "{name}"', type='positive')
                    render_project_list.refresh()
                except Exception as e:
                    # Surface DB errors (e.g. duplicate name) to the user.
                    ui.notify(f'Error: {e}', type='negative')

            ui.button('Create Project', icon='add', on_click=create_project).classes('w-full')

        # --- Active project indicator ---
        if state.current_project:
            ui.label(f'Active Project: {state.current_project}').classes(
                'text-bold text-primary q-pa-sm')

        # --- Project list ---
        @ui.refreshable
        def render_project_list():
            projects = state.db.list_projects()
            if not projects:
                ui.label('No projects yet. Create one above.').classes('text-caption q-pa-md')
                return

            for proj in projects:
                is_active = proj['name'] == state.current_project
                card_style = 'border-left: 3px solid var(--accent);' if is_active else ''

                with ui.card().classes('w-full q-pa-sm q-mb-sm').style(card_style):
                    with ui.row().classes('w-full items-center'):
                        with ui.column().classes('col'):
                            ui.label(proj['name']).classes('text-bold')
                            if proj['description']:
                                ui.label(proj['description']).classes('text-caption')
                            ui.label(f'Path: {proj["folder_path"]}').classes('text-caption')
                            files = state.db.list_data_files(proj['id'])
                            ui.label(f'{len(files)} data file(s)').classes('text-caption')

                        with ui.row().classes('q-gutter-xs'):
                            # NOTE: the default-argument trick (name=proj['name'])
                            # deliberately binds the per-iteration value — a plain
                            # closure over `proj` would see only the last row.
                            if not is_active:
                                def activate(name=proj['name']):
                                    state.current_project = name
                                    state.config['current_project'] = name
                                    save_config(state.current_dir,
                                                state.config.get('favorites', []),
                                                state.config)
                                    ui.notify(f'Activated project "{name}"', type='positive')
                                    render_project_list.refresh()

                                ui.button('Activate', icon='check_circle',
                                          on_click=activate).props('flat dense color=primary')
                            else:
                                def deactivate():
                                    state.current_project = ''
                                    state.config['current_project'] = ''
                                    save_config(state.current_dir,
                                                state.config.get('favorites', []),
                                                state.config)
                                    ui.notify('Deactivated project', type='info')
                                    render_project_list.refresh()

                                ui.button('Deactivate', icon='cancel',
                                          on_click=deactivate).props('flat dense')

                            def import_folder(pid=proj['id'], pname=proj['name']):
                                # Delegates to the module-level bulk importer.
                                _import_folder(state, pid, pname, render_project_list)

                            ui.button('Import Folder', icon='folder_open',
                                      on_click=import_folder).props('flat dense')

                            def delete_proj(name=proj['name']):
                                state.db.delete_project(name)
                                # Deleting the active project also clears the
                                # active-project marker in config.
                                if state.current_project == name:
                                    state.current_project = ''
                                    state.config['current_project'] = ''
                                    save_config(state.current_dir,
                                                state.config.get('favorites', []),
                                                state.config)
                                ui.notify(f'Deleted project "{name}"', type='positive')
                                render_project_list.refresh()

                            ui.button(icon='delete',
                                      on_click=delete_proj).props('flat dense color=negative')

        render_project_list()

    render_project_content()
|
||||
|
||||
|
||||
def _import_folder(state: AppState, project_id: int, project_name: str, refresh_fn):
    """Bulk import all .json files from the current directory into a project.

    Files whose stem already exists in the project are skipped; files that
    raise during import are counted, logged, and reported — they do not
    abort the batch.

    Args:
        state: Application state providing ``current_dir`` and ``db``.
        project_id: Target project's database id.
        project_name: Target project's display name (unused here, kept for
            signature compatibility with callers).
        refresh_fn: A ``ui.refreshable`` whose ``.refresh()`` re-renders the
            project list after the import.
    """
    json_files = sorted(state.current_dir.glob('*.json'))
    # Exclude the editor's own config/snippet files from the import set.
    json_files = [f for f in json_files if f.name not in (
        '.editor_config.json', '.editor_snippets.json')]

    if not json_files:
        ui.notify('No JSON files in current directory', type='warning')
        return

    imported = 0
    skipped = 0
    failed = 0
    for jf in json_files:
        file_name = jf.stem
        existing = state.db.get_data_file(project_id, file_name)
        if existing:
            # Never overwrite a file already tracked by the project.
            skipped += 1
            continue
        try:
            state.db.import_json_file(project_id, jf)
            imported += 1
        except Exception as e:
            # Count and log the failure, but keep importing the rest.
            failed += 1
            logger.warning(f"Failed to import {jf}: {e}")

    # Summarize every outcome — previously failures were silently omitted
    # and a success toast appeared even when nothing was imported.
    msg = f'Imported {imported} file(s)'
    if skipped:
        msg += f', skipped {skipped} existing'
    if failed:
        msg += f', {failed} failed'
    ui.notify(msg, type='positive' if imported else 'warning')
    refresh_fn.refresh()
|
||||
75
tab_raw_ng.py
Normal file
75
tab_raw_ng.py
Normal file
@@ -0,0 +1,75 @@
|
||||
import copy
|
||||
import json
|
||||
|
||||
from nicegui import ui
|
||||
|
||||
from state import AppState
|
||||
from utils import save_json, sync_to_db, get_file_mtime, KEY_HISTORY_TREE, KEY_PROMPT_HISTORY
|
||||
|
||||
|
||||
def render_raw_editor(state: AppState):
    """Render the raw JSON editor tab for the currently loaded file.

    Shows the file's JSON in a textarea.  In "Safe Mode" (default) the
    history keys are stripped from the displayed text and transparently
    merged back in on save, so the user cannot accidentally wipe history.
    Saving validates the JSON, writes it to disk, optionally syncs to the
    project DB, and updates the in-memory cache and mtime.
    """
    data = state.data_cache
    file_path = state.file_path

    with ui.card().classes('w-full q-pa-md'):
        ui.label(f'Raw Editor: {file_path.name}').classes('text-h6 q-mb-md')

        hide_history = ui.checkbox(
            'Hide History (Safe Mode)',
            value=True,
        )

        @ui.refreshable
        def render_editor():
            # Prepare display data: in safe mode work on a deep copy so the
            # live cache keeps its history keys.
            if hide_history.value:
                display_data = copy.deepcopy(data)
                display_data.pop(KEY_HISTORY_TREE, None)
                display_data.pop(KEY_PROMPT_HISTORY, None)
            else:
                display_data = data

            try:
                json_str = json.dumps(display_data, indent=4, ensure_ascii=False)
            except Exception as e:
                # Unserializable content: show an empty object rather than crash.
                ui.notify(f'Error serializing JSON: {e}', type='negative')
                json_str = '{}'

            text_area = ui.textarea(
                'JSON Content',
                value=json_str,
            ).classes('w-full font-mono').props('outlined rows=30')

            def do_save():
                try:
                    input_data = json.loads(text_area.value)

                    # Merge hidden history back in if safe mode, since the
                    # user never saw (and could not have edited) those keys.
                    if hide_history.value:
                        if KEY_HISTORY_TREE in data:
                            input_data[KEY_HISTORY_TREE] = data[KEY_HISTORY_TREE]
                        if KEY_PROMPT_HISTORY in data:
                            input_data[KEY_PROMPT_HISTORY] = data[KEY_PROMPT_HISTORY]

                    save_json(file_path, input_data)
                    if state.db_enabled and state.current_project and state.db:
                        sync_to_db(state.db, state.current_project, file_path, input_data)

                    # Replace the shared cache in place so every other tab
                    # holding a reference to `data` sees the new content.
                    data.clear()
                    data.update(input_data)
                    state.last_mtime = get_file_mtime(file_path)

                    ui.notify('Raw JSON Saved Successfully!', type='positive')
                    render_editor.refresh()

                except json.JSONDecodeError as e:
                    ui.notify(f'Invalid JSON Syntax: {e}', type='negative')
                except Exception as e:
                    ui.notify(f'Unexpected Error: {e}', type='negative')

            ui.button('Save Raw Changes', icon='save', on_click=do_save).props(
                'color=primary'
            ).classes('w-full q-mt-md')

        # Toggling safe mode re-renders the textarea with/without history.
        hide_history.on_value_change(lambda _: render_editor.refresh())
        render_editor()
|
||||
250
tab_single.py
250
tab_single.py
@@ -1,250 +0,0 @@
|
||||
import streamlit as st
|
||||
import random
|
||||
from utils import DEFAULTS, save_json, get_file_mtime
|
||||
|
||||
def render_single_editor(data, file_path):
|
||||
is_batch_file = "batch_data" in data or isinstance(data, list)
|
||||
|
||||
if is_batch_file:
|
||||
st.info("This is a batch file. Switch to the 'Batch Processor' tab.")
|
||||
return
|
||||
|
||||
col1, col2 = st.columns([2, 1])
|
||||
|
||||
# Unique prefix for this file's widgets + Version Token (Fixes Restore bug)
|
||||
fk = f"{file_path.name}_v{st.session_state.ui_reset_token}"
|
||||
|
||||
# --- FORM ---
|
||||
with col1:
|
||||
with st.expander("🌍 General Prompts (Global Layer)", expanded=False):
|
||||
gen_prompt = st.text_area("General Prompt", value=data.get("general_prompt", ""), height=100, key=f"{fk}_gp")
|
||||
gen_negative = st.text_area("General Negative", value=data.get("general_negative", DEFAULTS["general_negative"]), height=100, key=f"{fk}_gn")
|
||||
|
||||
st.write("📝 **Specific Prompts**")
|
||||
current_prompt_val = data.get("current_prompt", "")
|
||||
if 'append_prompt' in st.session_state:
|
||||
current_prompt_val = (current_prompt_val.strip() + ", " + st.session_state.append_prompt).strip(', ')
|
||||
del st.session_state.append_prompt
|
||||
|
||||
new_prompt = st.text_area("Specific Prompt", value=current_prompt_val, height=150, key=f"{fk}_sp")
|
||||
new_negative = st.text_area("Specific Negative", value=data.get("negative", ""), height=100, key=f"{fk}_sn")
|
||||
|
||||
# Seed
|
||||
col_seed_val, col_seed_btn = st.columns([4, 1])
|
||||
seed_key = f"{fk}_seed"
|
||||
|
||||
with col_seed_btn:
|
||||
st.write("")
|
||||
st.write("")
|
||||
if st.button("🎲 Randomize", key=f"{fk}_rand"):
|
||||
st.session_state[seed_key] = random.randint(0, 999999999999)
|
||||
st.rerun()
|
||||
|
||||
with col_seed_val:
|
||||
seed_val = st.session_state.get('rand_seed', int(data.get("seed", 0)))
|
||||
new_seed = st.number_input("Seed", value=seed_val, step=1, min_value=0, format="%d", key=seed_key)
|
||||
data["seed"] = new_seed
|
||||
|
||||
# LoRAs
|
||||
st.subheader("LoRAs")
|
||||
l_col1, l_col2 = st.columns(2)
|
||||
loras = {}
|
||||
lora_keys = ["lora 1 high", "lora 1 low", "lora 2 high", "lora 2 low", "lora 3 high", "lora 3 low"]
|
||||
for i, k in enumerate(lora_keys):
|
||||
with (l_col1 if i % 2 == 0 else l_col2):
|
||||
loras[k] = st.text_input(k.title(), value=data.get(k, ""), key=f"{fk}_{k}")
|
||||
|
||||
# Settings
|
||||
st.subheader("Settings")
|
||||
spec_fields = {}
|
||||
spec_fields["camera"] = st.text_input("Camera", value=str(data.get("camera", DEFAULTS["camera"])), key=f"{fk}_cam")
|
||||
spec_fields["flf"] = st.text_input("FLF", value=str(data.get("flf", DEFAULTS["flf"])), key=f"{fk}_flf")
|
||||
|
||||
# Explicitly track standard setting keys to exclude them from custom list
|
||||
standard_keys = {
|
||||
"general_prompt", "general_negative", "current_prompt", "negative", "prompt", "seed",
|
||||
"camera", "flf", "batch_data", "prompt_history", "sequence_number", "ui_reset_token",
|
||||
"model_name", "vae_name", "steps", "cfg", "denoise", "sampler_name", "scheduler"
|
||||
}
|
||||
standard_keys.update(lora_keys)
|
||||
|
||||
if "vace" in file_path.name:
|
||||
vace_keys = ["frame_to_skip", "input_a_frames", "input_b_frames", "reference switch", "vace schedule", "reference path", "video file path", "reference image path"]
|
||||
standard_keys.update(vace_keys)
|
||||
|
||||
spec_fields["frame_to_skip"] = st.number_input("Frame to Skip", value=int(data.get("frame_to_skip", 81)), key=f"{fk}_fts")
|
||||
spec_fields["input_a_frames"] = st.number_input("Input A Frames", value=int(data.get("input_a_frames", 0)), key=f"{fk}_ia")
|
||||
spec_fields["input_b_frames"] = st.number_input("Input B Frames", value=int(data.get("input_b_frames", 0)), key=f"{fk}_ib")
|
||||
spec_fields["reference switch"] = st.number_input("Reference Switch", value=int(data.get("reference switch", 1)), key=f"{fk}_rsw")
|
||||
spec_fields["vace schedule"] = st.number_input("VACE Schedule", value=int(data.get("vace schedule", 1)), key=f"{fk}_vsc")
|
||||
for f in ["reference path", "video file path", "reference image path"]:
|
||||
spec_fields[f] = st.text_input(f.title(), value=str(data.get(f, "")), key=f"{fk}_{f}")
|
||||
elif "i2v" in file_path.name:
|
||||
i2v_keys = ["reference image path", "flf image path", "video file path"]
|
||||
standard_keys.update(i2v_keys)
|
||||
|
||||
for f in i2v_keys:
|
||||
spec_fields[f] = st.text_input(f.title(), value=str(data.get(f, "")), key=f"{fk}_{f}")
|
||||
|
||||
# --- CUSTOM PARAMETERS LOGIC ---
|
||||
st.markdown("---")
|
||||
st.subheader("🔧 Custom Parameters")
|
||||
|
||||
# Filter keys: Only those NOT in the standard set
|
||||
custom_keys = [k for k in data.keys() if k not in standard_keys]
|
||||
|
||||
keys_to_remove = []
|
||||
|
||||
if custom_keys:
|
||||
for k in custom_keys:
|
||||
c1, c2, c3 = st.columns([1, 2, 0.5])
|
||||
c1.text_input("Key", value=k, disabled=True, key=f"{fk}_ck_lbl_{k}", label_visibility="collapsed")
|
||||
val = c2.text_input("Value", value=str(data[k]), key=f"{fk}_cv_{k}", label_visibility="collapsed")
|
||||
data[k] = val
|
||||
|
||||
if c3.button("🗑️", key=f"{fk}_cdel_{k}"):
|
||||
keys_to_remove.append(k)
|
||||
else:
|
||||
st.caption("No custom keys added.")
|
||||
|
||||
# Add New Key Interface
|
||||
with st.expander("➕ Add New Parameter"):
|
||||
nk_col, nv_col = st.columns(2)
|
||||
new_k = nk_col.text_input("Key Name", key=f"{fk}_new_k")
|
||||
new_v = nv_col.text_input("Value", key=f"{fk}_new_v")
|
||||
|
||||
if st.button("Add Parameter", key=f"{fk}_add_cust"):
|
||||
if new_k and new_k not in data:
|
||||
data[new_k] = new_v
|
||||
st.rerun()
|
||||
elif new_k in data:
|
||||
st.error(f"Key '{new_k}' already exists!")
|
||||
|
||||
# Apply Removals
|
||||
if keys_to_remove:
|
||||
for k in keys_to_remove:
|
||||
del data[k]
|
||||
st.rerun()
|
||||
|
||||
# --- ACTIONS & HISTORY ---
|
||||
with col2:
|
||||
current_state = {
|
||||
"general_prompt": gen_prompt, "general_negative": gen_negative,
|
||||
"current_prompt": new_prompt, "negative": new_negative,
|
||||
"seed": new_seed, **loras, **spec_fields
|
||||
}
|
||||
|
||||
# MERGE CUSTOM KEYS
|
||||
for k in custom_keys:
|
||||
if k not in keys_to_remove:
|
||||
current_state[k] = data[k]
|
||||
|
||||
st.session_state.single_editor_cache = current_state
|
||||
|
||||
st.subheader("Actions")
|
||||
current_disk_mtime = get_file_mtime(file_path)
|
||||
is_conflict = current_disk_mtime > st.session_state.last_mtime
|
||||
|
||||
if is_conflict:
|
||||
st.error("⚠️ CONFLICT: Disk changed!")
|
||||
if st.button("Force Save"):
|
||||
data.update(current_state)
|
||||
save_json(file_path, data) # No return val in new utils
|
||||
st.session_state.last_mtime = get_file_mtime(file_path) # Manual Update
|
||||
st.session_state.data_cache = data
|
||||
st.toast("Saved!", icon="⚠️")
|
||||
st.rerun()
|
||||
if st.button("Reload File"):
|
||||
st.session_state.loaded_file = None
|
||||
st.rerun()
|
||||
else:
|
||||
if st.button("💾 Update File", use_container_width=True):
|
||||
data.update(current_state)
|
||||
save_json(file_path, data)
|
||||
st.session_state.last_mtime = get_file_mtime(file_path)
|
||||
st.session_state.data_cache = data
|
||||
st.toast("Updated!", icon="✅")
|
||||
|
||||
st.markdown("---")
|
||||
archive_note = st.text_input("Archive Note")
|
||||
if st.button("📦 Snapshot to History", use_container_width=True):
|
||||
entry = {"note": archive_note if archive_note else "Snapshot", **current_state}
|
||||
if "prompt_history" not in data: data["prompt_history"] = []
|
||||
data["prompt_history"].insert(0, entry)
|
||||
data.update(entry)
|
||||
save_json(file_path, data)
|
||||
st.session_state.last_mtime = get_file_mtime(file_path)
|
||||
st.session_state.data_cache = data
|
||||
st.toast("Archived!", icon="📦")
|
||||
st.rerun()
|
||||
|
||||
# --- FULL HISTORY PANEL ---
|
||||
st.markdown("---")
|
||||
st.subheader("History")
|
||||
history = data.get("prompt_history", [])
|
||||
|
||||
if not history:
|
||||
st.caption("No history yet.")
|
||||
|
||||
for idx, h in enumerate(history):
|
||||
note = h.get('note', 'No Note')
|
||||
|
||||
with st.container():
|
||||
if st.session_state.edit_history_idx == idx:
|
||||
with st.expander(f"📝 Editing: {note}", expanded=True):
|
||||
edit_note = st.text_input("Note", value=note, key=f"h_en_{idx}")
|
||||
edit_seed = st.number_input("Seed", value=int(h.get('seed', 0)), key=f"h_es_{idx}")
|
||||
edit_gp = st.text_area("General P", value=h.get('general_prompt', ''), height=60, key=f"h_egp_{idx}")
|
||||
edit_gn = st.text_area("General N", value=h.get('general_negative', ''), height=60, key=f"h_egn_{idx}")
|
||||
edit_sp = st.text_area("Specific P", value=h.get('prompt', ''), height=100, key=f"h_esp_{idx}")
|
||||
edit_sn = st.text_area("Specific N", value=h.get('negative', ''), height=60, key=f"h_esn_{idx}")
|
||||
|
||||
hc1, hc2 = st.columns([1, 4])
|
||||
if hc1.button("💾 Save", key=f"h_save_{idx}"):
|
||||
h.update({
|
||||
'note': edit_note, 'seed': edit_seed,
|
||||
'general_prompt': edit_gp, 'general_negative': edit_gn,
|
||||
'prompt': edit_sp, 'negative': edit_sn
|
||||
})
|
||||
save_json(file_path, data)
|
||||
st.session_state.last_mtime = get_file_mtime(file_path)
|
||||
st.session_state.data_cache = data
|
||||
st.session_state.edit_history_idx = None
|
||||
st.rerun()
|
||||
if hc2.button("Cancel", key=f"h_can_{idx}"):
|
||||
st.session_state.edit_history_idx = None
|
||||
st.rerun()
|
||||
|
||||
else:
|
||||
with st.expander(f"#{idx+1}: {note}"):
|
||||
st.caption(f"Seed: {h.get('seed', 0)}")
|
||||
st.text(f"SPEC: {h.get('prompt', '')[:40]}...")
|
||||
|
||||
view_data = {k:v for k,v in h.items() if k not in ['prompt', 'negative', 'general_prompt', 'general_negative', 'note']}
|
||||
st.json(view_data, expanded=False)
|
||||
|
||||
bh1, bh2, bh3 = st.columns([2, 1, 1])
|
||||
|
||||
if bh1.button("Restore", key=f"h_rest_{idx}", use_container_width=True):
|
||||
data.update(h)
|
||||
if 'prompt' in h: data['current_prompt'] = h['prompt']
|
||||
save_json(file_path, data)
|
||||
st.session_state.last_mtime = get_file_mtime(file_path)
|
||||
st.session_state.data_cache = data
|
||||
|
||||
# Refresh UI
|
||||
st.session_state.ui_reset_token += 1
|
||||
|
||||
st.toast("Restored!", icon="⏪")
|
||||
st.rerun()
|
||||
|
||||
if bh2.button("✏️", key=f"h_edit_{idx}"):
|
||||
st.session_state.edit_history_idx = idx
|
||||
st.rerun()
|
||||
|
||||
if bh3.button("🗑️", key=f"h_del_{idx}"):
|
||||
history.pop(idx)
|
||||
save_json(file_path, data)
|
||||
st.session_state.last_mtime = get_file_mtime(file_path)
|
||||
st.session_state.data_cache = data
|
||||
st.rerun()
|
||||
143
tab_timeline.py
143
tab_timeline.py
@@ -1,143 +0,0 @@
|
||||
import streamlit as st
|
||||
import json
|
||||
import graphviz
|
||||
import time
|
||||
from history_tree import HistoryTree
|
||||
from utils import save_json
|
||||
|
||||
def render_timeline_tab(data, file_path):
|
||||
tree_data = data.get("history_tree", {})
|
||||
if not tree_data:
|
||||
st.info("No history timeline exists. Make some changes in the Editor first!")
|
||||
return
|
||||
|
||||
htree = HistoryTree(tree_data)
|
||||
|
||||
if 'restored_indicator' in st.session_state and st.session_state.restored_indicator:
|
||||
st.info(f"📍 Editing Restored Version: **{st.session_state.restored_indicator}**")
|
||||
|
||||
# --- VIEW SWITCHER ---
|
||||
c_title, c_view = st.columns([2, 1])
|
||||
c_title.subheader("🕰️ Version History")
|
||||
|
||||
view_mode = c_view.radio(
|
||||
"View Mode",
|
||||
["🌳 Horizontal", "🌲 Vertical", "📜 Linear Log"],
|
||||
horizontal=True,
|
||||
label_visibility="collapsed"
|
||||
)
|
||||
|
||||
# --- RENDER GRAPH VIEWS ---
|
||||
if view_mode in ["🌳 Horizontal", "🌲 Vertical"]:
|
||||
direction = "LR" if view_mode == "🌳 Horizontal" else "TB"
|
||||
try:
|
||||
graph_dot = htree.generate_graph(direction=direction)
|
||||
st.graphviz_chart(graph_dot, use_container_width=True)
|
||||
except Exception as e:
|
||||
st.error(f"Graph Error: {e}")
|
||||
|
||||
# --- RENDER LINEAR LOG VIEW ---
|
||||
elif view_mode == "📜 Linear Log":
|
||||
st.caption("A simple chronological list of all snapshots.")
|
||||
all_nodes = list(htree.nodes.values())
|
||||
all_nodes.sort(key=lambda x: x["timestamp"], reverse=True)
|
||||
|
||||
for n in all_nodes:
|
||||
is_head = (n["id"] == htree.head_id)
|
||||
with st.container():
|
||||
c1, c2, c3 = st.columns([0.5, 4, 1])
|
||||
with c1:
|
||||
st.markdown("### 📍" if is_head else "### ⚫")
|
||||
with c2:
|
||||
note_txt = n.get('note', 'Step')
|
||||
ts = time.strftime('%H:%M:%S', time.localtime(n['timestamp']))
|
||||
if is_head:
|
||||
st.markdown(f"**{note_txt}** (Current)")
|
||||
else:
|
||||
st.write(f"**{note_txt}**")
|
||||
st.caption(f"ID: {n['id'][:6]} • Time: {ts}")
|
||||
with c3:
|
||||
if not is_head:
|
||||
if st.button("⏪", key=f"log_rst_{n['id']}", help="Restore this version"):
|
||||
data.update(n["data"])
|
||||
htree.head_id = n['id']
|
||||
data["history_tree"] = htree.to_dict()
|
||||
save_json(file_path, data)
|
||||
st.session_state.ui_reset_token += 1
|
||||
label = f"{n.get('note')} ({n['id'][:4]})"
|
||||
st.session_state.restored_indicator = label
|
||||
st.toast(f"Restored!", icon="🔄")
|
||||
st.rerun()
|
||||
st.divider()
|
||||
|
||||
st.markdown("---")
|
||||
|
||||
# --- ACTIONS & SELECTION ---
|
||||
col_sel, col_act = st.columns([3, 1])
|
||||
|
||||
all_nodes = list(htree.nodes.values())
|
||||
all_nodes.sort(key=lambda x: x["timestamp"], reverse=True)
|
||||
|
||||
def fmt_node(n):
|
||||
return f"{n.get('note', 'Step')} ({n['id']})"
|
||||
|
||||
with col_sel:
|
||||
current_idx = 0
|
||||
for i, n in enumerate(all_nodes):
|
||||
if n["id"] == htree.head_id:
|
||||
current_idx = i
|
||||
break
|
||||
|
||||
selected_node = st.selectbox(
|
||||
"Select Version to Manage:",
|
||||
all_nodes,
|
||||
format_func=fmt_node,
|
||||
index=current_idx
|
||||
)
|
||||
|
||||
if selected_node:
|
||||
node_data = selected_node["data"]
|
||||
|
||||
# --- ACTIONS ---
|
||||
with col_act:
|
||||
st.write(""); st.write("")
|
||||
if st.button("⏪ Restore Version", type="primary", use_container_width=True):
|
||||
data.update(node_data)
|
||||
htree.head_id = selected_node['id']
|
||||
data["history_tree"] = htree.to_dict()
|
||||
save_json(file_path, data)
|
||||
st.session_state.ui_reset_token += 1
|
||||
label = f"{selected_node.get('note')} ({selected_node['id'][:4]})"
|
||||
st.session_state.restored_indicator = label
|
||||
st.toast(f"Restored!", icon="🔄")
|
||||
st.rerun()
|
||||
|
||||
# --- RENAME ---
|
||||
rn_col1, rn_col2 = st.columns([3, 1])
|
||||
new_label = rn_col1.text_input("Rename Label", value=selected_node.get("note", ""))
|
||||
if rn_col2.button("Update Label"):
|
||||
selected_node["note"] = new_label
|
||||
data["history_tree"] = htree.to_dict()
|
||||
save_json(file_path, data)
|
||||
st.rerun()
|
||||
|
||||
# --- DANGER ZONE ---
|
||||
st.markdown("---")
|
||||
with st.expander("⚠️ Danger Zone (Delete)"):
|
||||
st.warning("Deleting a node cannot be undone.")
|
||||
if st.button("🗑️ Delete This Node", type="primary"):
|
||||
if selected_node['id'] in htree.nodes:
|
||||
del htree.nodes[selected_node['id']]
|
||||
for b, tip in list(htree.branches.items()):
|
||||
if tip == selected_node['id']:
|
||||
del htree.branches[b]
|
||||
if htree.head_id == selected_node['id']:
|
||||
if htree.nodes:
|
||||
fallback = sorted(htree.nodes.values(), key=lambda x: x["timestamp"])[-1]
|
||||
htree.head_id = fallback["id"]
|
||||
else:
|
||||
htree.head_id = None
|
||||
data["history_tree"] = htree.to_dict()
|
||||
save_json(file_path, data)
|
||||
st.toast("Node Deleted", icon="🗑️")
|
||||
st.rerun()
|
||||
565
tab_timeline_ng.py
Normal file
565
tab_timeline_ng.py
Normal file
@@ -0,0 +1,565 @@
|
||||
import copy
|
||||
import time
|
||||
|
||||
from nicegui import ui
|
||||
|
||||
from state import AppState
|
||||
from history_tree import HistoryTree
|
||||
from utils import save_json, sync_to_db, KEY_BATCH_DATA, KEY_HISTORY_TREE
|
||||
|
||||
|
||||
def _delete_nodes(htree, data, file_path, node_ids):
    """Delete nodes with backup, branch cleanup, and head fallback.

    A deep-copied snapshot of the tree is appended to
    ``data['history_tree_backup']`` before anything is removed, branches
    whose tip was deleted are dropped, and HEAD falls back to the newest
    surviving node (or ``None`` when the tree is emptied).  The updated
    tree is written back into ``data`` and saved to disk.
    """
    # Snapshot the tree first so a bad delete can be recovered manually.
    data.setdefault('history_tree_backup', []).append(copy.deepcopy(htree.to_dict()))

    # Remove the nodes themselves (missing ids are ignored).
    for node_id in node_ids:
        htree.nodes.pop(node_id, None)

    # Drop any branch whose tip no longer exists.
    doomed_branches = [name for name, tip in htree.branches.items() if tip in node_ids]
    for name in doomed_branches:
        del htree.branches[name]

    # If HEAD was deleted, re-point it at the newest surviving node.
    if htree.head_id in node_ids:
        if htree.nodes:
            survivors = sorted(htree.nodes.values(), key=lambda node: node['timestamp'])
            htree.head_id = survivors[-1]['id']
        else:
            htree.head_id = None

    data[KEY_HISTORY_TREE] = htree.to_dict()
    save_json(file_path, data)
|
||||
|
||||
|
||||
def _render_selection_picker(all_nodes, htree, state, refresh_fn):
    """Multi-select picker for batch-deleting timeline nodes.

    Renders a multi-select populated with every node (labelled with note,
    timestamp, short id, and a HEAD marker) plus Select All / Deselect All
    buttons.  The chosen ids are stored in ``state.timeline_selected_nodes``.
    """
    all_ids = [n['id'] for n in all_nodes]

    def fmt_option(nid):
        # Human-readable option label: "note - Mon DD HH:MM (abc123) (HEAD)".
        n = htree.nodes[nid]
        ts = time.strftime('%b %d %H:%M', time.localtime(n['timestamp']))
        note = n.get('note', 'Step')
        head = ' (HEAD)' if nid == htree.head_id else ''
        return f'{note} - {ts} ({nid[:6]}){head}'

    options = {nid: fmt_option(nid) for nid in all_ids}

    def on_selection_change(e):
        # e.value is None/empty when the selection is cleared.
        state.timeline_selected_nodes = set(e.value) if e.value else set()

    ui.select(
        options,
        value=list(state.timeline_selected_nodes),
        multiple=True,
        label='Select nodes to delete:',
        on_change=on_selection_change,
    ).classes('w-full')

    with ui.row():
        # Both helpers mutate the shared state then trigger a re-render so
        # the select widget reflects the new selection.
        def select_all():
            state.timeline_selected_nodes = set(all_ids)
            refresh_fn()

        def deselect_all():
            state.timeline_selected_nodes = set()
            refresh_fn()

        ui.button('Select All', on_click=select_all).props('flat dense')
        ui.button('Deselect All', on_click=deselect_all).props('flat dense')
|
||||
|
||||
|
||||
def _render_graph_or_log(mode, all_nodes, htree, selected_nodes,
                         selection_mode_on, toggle_select_fn, restore_fn,
                         selected=None):
    """Render graph visualization or linear log view.

    Args:
        mode: 'Horizontal' / 'Vertical' (graphviz graph) or 'Linear Log'
            (chronological card list).
        all_nodes: Node dicts to show in the log view.
        htree: History tree providing ``head_id`` and ``generate_graph``.
        selected_nodes: Set of node ids currently marked for deletion.
        selection_mode_on: When True, show per-node checkboxes instead of
            Restore buttons.
        toggle_select_fn: Callback ``(node_id, checked)`` for checkbox changes.
        restore_fn: Callback taking a node dict to restore that version.
        selected: Optional dict with a 'node_id' key used to highlight a
            node in the graph view.
    """
    if mode in ('Horizontal', 'Vertical'):
        direction = 'LR' if mode == 'Horizontal' else 'TB'
        with ui.card().classes('w-full q-pa-md'):
            try:
                graph_dot = htree.generate_graph(direction=direction)
                sel_id = selected.get('node_id') if selected else None
                _render_graphviz(graph_dot, selected_node_id=sel_id)
            except Exception as e:
                # Graph generation/rendering can fail; show the error inline.
                ui.label(f'Graph Error: {e}').classes('text-negative')

    elif mode == 'Linear Log':
        ui.label('Chronological list of all snapshots.').classes('text-caption')
        for n in all_nodes:
            is_head = n['id'] == htree.head_id
            is_selected = n['id'] in selected_nodes

            # Visual priority: deletion-selected (red) beats HEAD (accent).
            card_style = ''
            if is_selected:
                card_style = 'background: rgba(239, 68, 68, 0.1) !important; border-left: 3px solid var(--negative);'
            elif is_head:
                card_style = 'background: var(--accent-subtle) !important; border-left: 3px solid var(--accent);'
            with ui.card().classes('w-full q-mb-sm').style(card_style):
                with ui.row().classes('w-full items-center'):
                    if selection_mode_on:
                        # nid default-arg binds this iteration's id for the lambda.
                        ui.checkbox(
                            '',
                            value=is_selected,
                            on_change=lambda e, nid=n['id']: toggle_select_fn(
                                nid, e.value),
                        )

                    icon = 'location_on' if is_head else 'circle'
                    ui.icon(icon).classes(
                        'text-primary' if is_head else 'text-grey')

                    with ui.column().classes('col'):
                        note = n.get('note', 'Step')
                        ts = time.strftime('%b %d %H:%M',
                                           time.localtime(n['timestamp']))
                        label = f'{note} (Current)' if is_head else note
                        ui.label(label).classes('text-bold')
                        ui.label(
                            f'ID: {n["id"][:6]} - {ts}').classes('text-caption')

                    # Restore only makes sense for non-HEAD nodes, and is
                    # hidden while selecting nodes for deletion.
                    if not is_head and not selection_mode_on:
                        ui.button(
                            'Restore',
                            icon='restore',
                            on_click=lambda node=n: restore_fn(node),
                        ).props('flat dense color=primary')
|
||||
|
||||
|
||||
def _render_batch_delete(htree, data, file_path, state, refresh_fn):
    """Render batch delete controls for selected timeline nodes.

    Prunes stale ids from the selection, then — if anything remains —
    shows a warning label and a delete button that removes the nodes via
    ``_delete_nodes``, syncs to the project DB when enabled, clears the
    selection, and re-renders.
    """
    # Drop selected ids that no longer exist in the tree.
    valid = state.timeline_selected_nodes & set(htree.nodes.keys())
    state.timeline_selected_nodes = valid
    count = len(valid)
    if count == 0:
        return

    ui.label(
        f'{count} node{"s" if count != 1 else ""} selected for deletion.'
    ).classes('text-warning q-mt-md')

    def do_batch_delete():
        # Re-validate at click time — the tree may have changed since render.
        current_valid = state.timeline_selected_nodes & set(htree.nodes.keys())
        _delete_nodes(htree, data, file_path, current_valid)
        if state.db_enabled and state.current_project and state.db:
            sync_to_db(state.db, state.current_project, file_path, data)
        state.timeline_selected_nodes = set()
        ui.notify(
            f'Deleted {len(current_valid)} node{"s" if len(current_valid) != 1 else ""}!',
            type='positive')
        refresh_fn()

    ui.button(
        f'Delete {count} Node{"s" if count != 1 else ""}',
        icon='delete',
        on_click=do_batch_delete,
    ).props('color=negative')
|
||||
|
||||
|
||||
def _walk_branch_nodes(htree, tip_id):
|
||||
"""Walk parent pointers from tip, returning nodes newest-first."""
|
||||
nodes = []
|
||||
current = tip_id
|
||||
while current and current in htree.nodes:
|
||||
nodes.append(htree.nodes[current])
|
||||
current = htree.nodes[current].get('parent')
|
||||
return nodes
|
||||
|
||||
|
||||
def _find_active_branch(htree):
|
||||
"""Return branch name whose tip == head_id, or None if detached."""
|
||||
if not htree.head_id:
|
||||
return None
|
||||
for b_name, tip_id in htree.branches.items():
|
||||
if tip_id == htree.head_id:
|
||||
return b_name
|
||||
return None
|
||||
|
||||
|
||||
def _find_branch_for_node(htree, node_id):
|
||||
"""Return the branch name whose ancestry contains node_id, or None."""
|
||||
for b_name, tip_id in htree.branches.items():
|
||||
current = tip_id
|
||||
while current and current in htree.nodes:
|
||||
if current == node_id:
|
||||
return b_name
|
||||
current = htree.nodes[current].get('parent')
|
||||
return None
|
||||
|
||||
|
||||
def _render_node_manager(all_nodes, htree, data, file_path, restore_fn, refresh_fn,
|
||||
selected, state=None):
|
||||
"""Render branch-grouped node manager with restore, rename, delete, and preview."""
|
||||
ui.label('Manage Version').classes('section-header')
|
||||
|
||||
active_branch = _find_active_branch(htree)
|
||||
|
||||
# --- (a) Branch selector ---
|
||||
def fmt_branch(b_name):
|
||||
count = len(_walk_branch_nodes(htree, htree.branches.get(b_name)))
|
||||
suffix = ' (active)' if b_name == active_branch else ''
|
||||
return f'{b_name} ({count} nodes){suffix}'
|
||||
|
||||
branch_options = {b: fmt_branch(b) for b in htree.branches}
|
||||
|
||||
def on_branch_change(e):
|
||||
selected['branch'] = e.value
|
||||
tip = htree.branches.get(e.value)
|
||||
if tip:
|
||||
selected['node_id'] = tip
|
||||
render_branch_nodes.refresh()
|
||||
|
||||
ui.select(
|
||||
branch_options,
|
||||
value=selected['branch'],
|
||||
label='Branch:',
|
||||
on_change=on_branch_change,
|
||||
).classes('w-full')
|
||||
|
||||
# --- (b) Node list + (c) Actions panel ---
|
||||
@ui.refreshable
|
||||
def render_branch_nodes():
|
||||
branch_name = selected['branch']
|
||||
tip_id = htree.branches.get(branch_name)
|
||||
nodes = _walk_branch_nodes(htree, tip_id) if tip_id else []
|
||||
|
||||
if not nodes:
|
||||
ui.label('No nodes on this branch.').classes('text-caption q-pa-sm')
|
||||
return
|
||||
|
||||
with ui.scroll_area().classes('w-full').style('max-height: 350px'):
|
||||
for n in nodes:
|
||||
nid = n['id']
|
||||
is_head = nid == htree.head_id
|
||||
is_tip = nid == tip_id
|
||||
is_selected = nid == selected['node_id']
|
||||
|
||||
card_style = ''
|
||||
if is_selected:
|
||||
card_style = 'border-left: 3px solid var(--primary);'
|
||||
elif is_head:
|
||||
card_style = 'border-left: 3px solid var(--accent);'
|
||||
|
||||
with ui.card().classes('w-full q-mb-xs q-pa-xs').style(card_style):
|
||||
with ui.row().classes('w-full items-center no-wrap'):
|
||||
icon = 'location_on' if is_head else 'circle'
|
||||
icon_size = 'sm' if is_head else 'xs'
|
||||
ui.icon(icon, size=icon_size).classes(
|
||||
'text-primary' if is_head else 'text-grey')
|
||||
|
||||
with ui.column().classes('col q-ml-xs').style('min-width: 0'):
|
||||
note = n.get('note', 'Step')
|
||||
ts = time.strftime('%b %d %H:%M',
|
||||
time.localtime(n['timestamp']))
|
||||
label_text = note
|
||||
lbl = ui.label(label_text).classes('text-body2 ellipsis')
|
||||
if is_head:
|
||||
lbl.classes('text-bold')
|
||||
ui.label(f'{ts} \u2022 {nid[:6]}').classes(
|
||||
'text-caption text-grey')
|
||||
|
||||
if is_head:
|
||||
ui.badge('HEAD', color='amber').props('dense')
|
||||
if is_tip and not is_head:
|
||||
ui.badge('tip', color='green', outline=True).props('dense')
|
||||
|
||||
def select_node(node_id=nid):
|
||||
selected['node_id'] = node_id
|
||||
render_branch_nodes.refresh()
|
||||
|
||||
ui.button(icon='check_circle', on_click=select_node).props(
|
||||
'flat dense round size=sm'
|
||||
).tooltip('Select this node')
|
||||
|
||||
# --- (c) Actions panel ---
|
||||
sel_id = selected['node_id']
|
||||
if not sel_id or sel_id not in htree.nodes:
|
||||
return
|
||||
|
||||
sel_node = htree.nodes[sel_id]
|
||||
sel_note = sel_node.get('note', 'Step')
|
||||
is_head = sel_id == htree.head_id
|
||||
|
||||
ui.separator().classes('q-my-sm')
|
||||
ui.label(f'Selected: {sel_note} ({sel_id[:6]})').classes(
|
||||
'text-caption text-bold')
|
||||
|
||||
with ui.row().classes('w-full items-end q-gutter-sm'):
|
||||
if not is_head:
|
||||
def restore_selected():
|
||||
if sel_id in htree.nodes:
|
||||
restore_fn(htree.nodes[sel_id])
|
||||
ui.button('Restore', icon='restore',
|
||||
on_click=restore_selected).props('color=primary dense')
|
||||
|
||||
# Rename
|
||||
rename_input = ui.input('Rename Label').classes('col').props('dense')
|
||||
|
||||
def rename_node():
|
||||
if sel_id in htree.nodes and rename_input.value:
|
||||
htree.nodes[sel_id]['note'] = rename_input.value
|
||||
data[KEY_HISTORY_TREE] = htree.to_dict()
|
||||
save_json(file_path, data)
|
||||
if state and state.db_enabled and state.current_project and state.db:
|
||||
sync_to_db(state.db, state.current_project, file_path, data)
|
||||
ui.notify('Label updated', type='positive')
|
||||
refresh_fn()
|
||||
|
||||
ui.button('Update Label', on_click=rename_node).props('flat dense')
|
||||
|
||||
# Danger zone
|
||||
with ui.expansion('Danger Zone', icon='warning').classes(
|
||||
'w-full q-mt-sm').style('border-left: 3px solid var(--negative)'):
|
||||
ui.label('Deleting a node cannot be undone.').classes('text-warning')
|
||||
|
||||
def delete_selected():
|
||||
if sel_id in htree.nodes:
|
||||
_delete_nodes(htree, data, file_path, {sel_id})
|
||||
if state and state.db_enabled and state.current_project and state.db:
|
||||
sync_to_db(state.db, state.current_project, file_path, data)
|
||||
ui.notify('Node Deleted', type='positive')
|
||||
refresh_fn()
|
||||
|
||||
ui.button('Delete This Node', icon='delete',
|
||||
on_click=delete_selected).props('color=negative dense')
|
||||
|
||||
# Data preview
|
||||
with ui.expansion('Data Preview', icon='preview').classes('w-full q-mt-sm'):
|
||||
_render_data_preview(sel_id, htree)
|
||||
|
||||
render_branch_nodes()
|
||||
|
||||
|
||||
def render_timeline_tab(state: AppState):
|
||||
data = state.data_cache
|
||||
file_path = state.file_path
|
||||
|
||||
tree_data = data.get(KEY_HISTORY_TREE, {})
|
||||
if not tree_data:
|
||||
ui.label('No history timeline exists. Make some changes in the Editor first!').classes(
|
||||
'text-subtitle1 q-pa-md')
|
||||
return
|
||||
|
||||
htree = HistoryTree(tree_data)
|
||||
|
||||
# --- Shared selected-node state (survives refreshes, shared by graph + manager) ---
|
||||
active_branch = _find_active_branch(htree)
|
||||
default_branch = active_branch
|
||||
if not default_branch and htree.head_id:
|
||||
for b_name, tip_id in htree.branches.items():
|
||||
for n in _walk_branch_nodes(htree, tip_id):
|
||||
if n['id'] == htree.head_id:
|
||||
default_branch = b_name
|
||||
break
|
||||
if default_branch:
|
||||
break
|
||||
if not default_branch and htree.branches:
|
||||
default_branch = next(iter(htree.branches))
|
||||
selected = {'node_id': htree.head_id, 'branch': default_branch}
|
||||
|
||||
if state.restored_indicator:
|
||||
ui.label(f'Editing Restored Version: {state.restored_indicator}').classes(
|
||||
'text-info q-pa-sm')
|
||||
|
||||
# --- View mode + Selection toggle ---
|
||||
with ui.row().classes('w-full items-center q-gutter-md q-mb-md'):
|
||||
ui.label('Version History').classes('text-h6 col')
|
||||
view_mode = ui.toggle(
|
||||
['Horizontal', 'Vertical', 'Linear Log'],
|
||||
value='Horizontal',
|
||||
)
|
||||
selection_mode = ui.switch('Select to Delete')
|
||||
|
||||
@ui.refreshable
|
||||
def render_timeline():
|
||||
all_nodes = sorted(htree.nodes.values(), key=lambda x: x['timestamp'], reverse=True)
|
||||
selected_nodes = state.timeline_selected_nodes if selection_mode.value else set()
|
||||
|
||||
if selection_mode.value:
|
||||
_render_selection_picker(all_nodes, htree, state, render_timeline.refresh)
|
||||
|
||||
_render_graph_or_log(
|
||||
view_mode.value, all_nodes, htree, selected_nodes,
|
||||
selection_mode.value, _toggle_select, _restore_and_refresh,
|
||||
selected=selected)
|
||||
|
||||
if selection_mode.value and state.timeline_selected_nodes:
|
||||
_render_batch_delete(htree, data, file_path, state, render_timeline.refresh)
|
||||
|
||||
with ui.card().classes('w-full q-pa-md q-mt-md'):
|
||||
_render_node_manager(
|
||||
all_nodes, htree, data, file_path,
|
||||
_restore_and_refresh, render_timeline.refresh,
|
||||
selected, state=state)
|
||||
|
||||
def _toggle_select(nid, checked):
|
||||
if checked:
|
||||
state.timeline_selected_nodes.add(nid)
|
||||
else:
|
||||
state.timeline_selected_nodes.discard(nid)
|
||||
render_timeline.refresh()
|
||||
|
||||
def _restore_and_refresh(node):
|
||||
_restore_node(data, node, htree, file_path, state)
|
||||
# Refresh all tabs (batch, raw, timeline) so they pick up the restored data
|
||||
state._render_main.refresh()
|
||||
|
||||
view_mode.on_value_change(lambda _: render_timeline.refresh())
|
||||
selection_mode.on_value_change(lambda _: render_timeline.refresh())
|
||||
render_timeline()
|
||||
|
||||
# --- Poll for graph node clicks (JS → Python bridge) ---
|
||||
async def _poll_graph_click():
|
||||
if view_mode.value == 'Linear Log':
|
||||
return
|
||||
try:
|
||||
result = await ui.run_javascript(
|
||||
'const v = window.graphSelectedNode;'
|
||||
'window.graphSelectedNode = null; v;'
|
||||
)
|
||||
except Exception:
|
||||
return
|
||||
if not result:
|
||||
return
|
||||
node_id = str(result)
|
||||
if node_id not in htree.nodes:
|
||||
return
|
||||
branch = _find_branch_for_node(htree, node_id)
|
||||
if branch:
|
||||
selected['branch'] = branch
|
||||
selected['node_id'] = node_id
|
||||
render_timeline.refresh()
|
||||
|
||||
ui.timer(0.2, _poll_graph_click)
|
||||
|
||||
|
||||
def _render_graphviz(dot_source: str, selected_node_id: str | None = None):
|
||||
"""Render graphviz DOT source as interactive SVG with click-to-select."""
|
||||
try:
|
||||
import graphviz
|
||||
src = graphviz.Source(dot_source)
|
||||
svg = src.pipe(format='svg').decode('utf-8')
|
||||
|
||||
sel_escaped = selected_node_id.replace("'", "\\'") if selected_node_id else ''
|
||||
|
||||
# CSS inline (allowed), JS via run_javascript (script tags blocked)
|
||||
css = '''<style>
|
||||
.timeline-graph g.node { cursor: pointer; }
|
||||
.timeline-graph g.node:hover { filter: brightness(1.3); }
|
||||
.timeline-graph g.node.selected ellipse,
|
||||
.timeline-graph g.node.selected polygon[stroke]:not([stroke="none"]) {
|
||||
stroke: #f59e0b !important;
|
||||
stroke-width: 3px !important;
|
||||
}
|
||||
</style>'''
|
||||
|
||||
ui.html(
|
||||
f'{css}<div class="timeline-graph"'
|
||||
f' style="overflow: auto; max-height: 500px; width: 100%;">'
|
||||
f'{svg}</div>'
|
||||
)
|
||||
|
||||
# Find container by class with retry for Vue async render
|
||||
ui.run_javascript(f'''
|
||||
(function attempt(tries) {{
|
||||
var container = document.querySelector('.timeline-graph');
|
||||
if (!container || !container.querySelector('g.node')) {{
|
||||
if (tries < 20) setTimeout(function() {{ attempt(tries + 1); }}, 100);
|
||||
return;
|
||||
}}
|
||||
container.querySelectorAll('g.node').forEach(function(g) {{
|
||||
g.addEventListener('click', function() {{
|
||||
var title = g.querySelector('title');
|
||||
if (title) {{
|
||||
window.graphSelectedNode = title.textContent.trim();
|
||||
container.querySelectorAll('g.node.selected').forEach(
|
||||
function(el) {{ el.classList.remove('selected'); }});
|
||||
g.classList.add('selected');
|
||||
}}
|
||||
}});
|
||||
}});
|
||||
var selId = '{sel_escaped}';
|
||||
if (selId) {{
|
||||
container.querySelectorAll('g.node').forEach(function(g) {{
|
||||
var title = g.querySelector('title');
|
||||
if (title && title.textContent.trim() === selId) {{
|
||||
g.classList.add('selected');
|
||||
}}
|
||||
}});
|
||||
}}
|
||||
}})(0);
|
||||
''')
|
||||
except ImportError:
|
||||
ui.label('Install graphviz Python package for graph rendering.').classes('text-warning')
|
||||
ui.code(dot_source).classes('w-full')
|
||||
except Exception as e:
|
||||
ui.label(f'Graph rendering error: {e}').classes('text-negative')
|
||||
|
||||
|
||||
def _restore_node(data, node, htree, file_path, state: AppState):
|
||||
"""Restore a history node as the current version."""
|
||||
node_data = copy.deepcopy(node['data'])
|
||||
if KEY_BATCH_DATA not in node_data and KEY_BATCH_DATA in data:
|
||||
del data[KEY_BATCH_DATA]
|
||||
data.update(node_data)
|
||||
htree.head_id = node['id']
|
||||
data[KEY_HISTORY_TREE] = htree.to_dict()
|
||||
save_json(file_path, data)
|
||||
if state.db_enabled and state.current_project and state.db:
|
||||
sync_to_db(state.db, state.current_project, file_path, data)
|
||||
label = f"{node.get('note', 'Step')} ({node['id'][:4]})"
|
||||
state.restored_indicator = label
|
||||
ui.notify('Restored!', type='positive')
|
||||
|
||||
|
||||
def _render_data_preview(nid, htree):
|
||||
"""Render a read-only preview of the selected node's data."""
|
||||
if not nid or nid not in htree.nodes:
|
||||
ui.label('No node selected.').classes('text-caption')
|
||||
return
|
||||
|
||||
node_data = htree.nodes[nid]['data']
|
||||
batch_list = node_data.get(KEY_BATCH_DATA, [])
|
||||
|
||||
if batch_list and isinstance(batch_list, list) and len(batch_list) > 0:
|
||||
ui.label(f'This snapshot contains {len(batch_list)} sequences.').classes('text-caption')
|
||||
for i, seq_data in enumerate(batch_list):
|
||||
seq_num = seq_data.get('sequence_number', i + 1)
|
||||
with ui.expansion(f'Sequence #{seq_num}', value=(i == 0)):
|
||||
_render_preview_fields(seq_data)
|
||||
else:
|
||||
_render_preview_fields(node_data)
|
||||
|
||||
|
||||
def _render_preview_fields(item_data: dict):
|
||||
"""Render read-only preview of prompts, settings, LoRAs."""
|
||||
with ui.grid(columns=2).classes('w-full'):
|
||||
ui.textarea('General Positive',
|
||||
value=item_data.get('general_prompt', '')).props('readonly outlined rows=3')
|
||||
ui.textarea('General Negative',
|
||||
value=item_data.get('general_negative', '')).props('readonly outlined rows=3')
|
||||
val_sp = item_data.get('current_prompt', '') or item_data.get('prompt', '')
|
||||
ui.textarea('Specific Positive',
|
||||
value=val_sp).props('readonly outlined rows=3')
|
||||
ui.textarea('Specific Negative',
|
||||
value=item_data.get('negative', '')).props('readonly outlined rows=3')
|
||||
|
||||
with ui.row().classes('w-full q-gutter-md'):
|
||||
ui.input('Camera', value=str(item_data.get('camera', 'static'))).props('readonly outlined')
|
||||
ui.input('FLF', value=str(item_data.get('flf', '0.0'))).props('readonly outlined')
|
||||
ui.input('Seed', value=str(item_data.get('seed', '-1'))).props('readonly outlined')
|
||||
|
||||
with ui.expansion('LoRA Configuration'):
|
||||
with ui.row().classes('w-full q-gutter-md'):
|
||||
for lora_idx in range(1, 4):
|
||||
with ui.column():
|
||||
ui.input(f'L{lora_idx} Name',
|
||||
value=item_data.get(f'lora {lora_idx} high', '')).props(
|
||||
'readonly outlined dense')
|
||||
ui.input(f'L{lora_idx} Str',
|
||||
value=str(item_data.get(f'lora {lora_idx} low', ''))).props(
|
||||
'readonly outlined dense')
|
||||
|
||||
vace_keys = ['frame_to_skip', 'vace schedule', 'video file path']
|
||||
if any(k in item_data for k in vace_keys):
|
||||
with ui.expansion('VACE / I2V Settings'):
|
||||
with ui.row().classes('w-full q-gutter-md'):
|
||||
ui.input('Skip Frames',
|
||||
value=str(item_data.get('frame_to_skip', ''))).props('readonly outlined')
|
||||
ui.input('Schedule',
|
||||
value=str(item_data.get('vace schedule', ''))).props('readonly outlined')
|
||||
ui.input('Video Path',
|
||||
value=str(item_data.get('video file path', ''))).props('readonly outlined')
|
||||
@@ -1,175 +0,0 @@
|
||||
import streamlit as st
|
||||
import json
|
||||
from history_tree import HistoryTree
|
||||
from utils import save_json
|
||||
from streamlit_agraph import agraph, Node, Edge, Config
|
||||
|
||||
def render_timeline_wip(data, file_path):
|
||||
tree_data = data.get("history_tree", {})
|
||||
if not tree_data:
|
||||
st.info("No history timeline exists.")
|
||||
return
|
||||
|
||||
htree = HistoryTree(tree_data)
|
||||
|
||||
# --- 1. BUILD GRAPH ---
|
||||
nodes = []
|
||||
edges = []
|
||||
|
||||
sorted_nodes = sorted(htree.nodes.values(), key=lambda x: x["timestamp"])
|
||||
|
||||
for n in sorted_nodes:
|
||||
nid = n["id"]
|
||||
note = n.get('note', 'Step')
|
||||
short_note = (note[:15] + '..') if len(note) > 15 else note
|
||||
|
||||
color = "#ffffff"
|
||||
border = "#666666"
|
||||
|
||||
if nid == htree.head_id:
|
||||
color = "#fff6cd"
|
||||
border = "#eebb00"
|
||||
|
||||
if nid in htree.branches.values():
|
||||
if color == "#ffffff":
|
||||
color = "#e6ffe6"
|
||||
border = "#44aa44"
|
||||
|
||||
nodes.append(Node(
|
||||
id=nid,
|
||||
label=f"{short_note}\n({nid[:4]})",
|
||||
size=25,
|
||||
shape="box",
|
||||
color=color,
|
||||
borderWidth=1,
|
||||
borderColor=border,
|
||||
font={'color': 'black', 'face': 'Arial', 'size': 14}
|
||||
))
|
||||
|
||||
if n["parent"] and n["parent"] in htree.nodes:
|
||||
edges.append(Edge(
|
||||
source=n["parent"],
|
||||
target=nid,
|
||||
color="#aaaaaa",
|
||||
type="STRAIGHT"
|
||||
))
|
||||
|
||||
config = Config(
|
||||
width="100%",
|
||||
height="400px",
|
||||
directed=True,
|
||||
physics=False,
|
||||
hierarchical=True,
|
||||
layout={
|
||||
"hierarchical": {
|
||||
"enabled": True,
|
||||
"levelSeparation": 150,
|
||||
"nodeSpacing": 100,
|
||||
"treeSpacing": 100,
|
||||
"direction": "LR",
|
||||
"sortMethod": "directed"
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
st.subheader("✨ Interactive Timeline")
|
||||
st.caption("Click a node to view its settings below.")
|
||||
|
||||
# --- FIX: REMOVED 'key' ARGUMENT ---
|
||||
selected_id = agraph(nodes=nodes, edges=edges, config=config)
|
||||
|
||||
st.markdown("---")
|
||||
|
||||
# --- 2. DETERMINE TARGET ---
|
||||
target_node_id = selected_id if selected_id else htree.head_id
|
||||
|
||||
if target_node_id and target_node_id in htree.nodes:
|
||||
selected_node = htree.nodes[target_node_id]
|
||||
node_data = selected_node["data"]
|
||||
|
||||
# Header
|
||||
c_h1, c_h2 = st.columns([3, 1])
|
||||
c_h1.markdown(f"### 📄 Previewing: {selected_node.get('note', 'Step')}")
|
||||
c_h1.caption(f"ID: {target_node_id}")
|
||||
|
||||
# Restore Button
|
||||
with c_h2:
|
||||
st.write(""); st.write("")
|
||||
if st.button("⏪ Restore This Version", type="primary", use_container_width=True, key=f"rst_{target_node_id}"):
|
||||
data.update(node_data)
|
||||
htree.head_id = target_node_id
|
||||
|
||||
data["history_tree"] = htree.to_dict()
|
||||
save_json(file_path, data)
|
||||
|
||||
st.session_state.ui_reset_token += 1
|
||||
label = f"{selected_node.get('note')} ({target_node_id[:4]})"
|
||||
st.session_state.restored_indicator = label
|
||||
|
||||
st.toast(f"Restored {target_node_id}!", icon="🔄")
|
||||
st.rerun()
|
||||
|
||||
# --- 3. PREVIEW LOGIC (BATCH VS SINGLE) ---
|
||||
|
||||
# Helper to render one set of inputs
|
||||
def render_preview_fields(item_data, prefix):
|
||||
# A. Prompts
|
||||
p_col1, p_col2 = st.columns(2)
|
||||
with p_col1:
|
||||
val_gp = item_data.get("general_prompt", "")
|
||||
st.text_area("General Positive", value=val_gp, height=80, disabled=True, key=f"{prefix}_gp")
|
||||
|
||||
val_sp = item_data.get("current_prompt", "") or item_data.get("prompt", "")
|
||||
st.text_area("Specific Positive", value=val_sp, height=80, disabled=True, key=f"{prefix}_sp")
|
||||
with p_col2:
|
||||
val_gn = item_data.get("general_negative", "")
|
||||
st.text_area("General Negative", value=val_gn, height=80, disabled=True, key=f"{prefix}_gn")
|
||||
|
||||
val_sn = item_data.get("negative", "")
|
||||
st.text_area("Specific Negative", value=val_sn, height=80, disabled=True, key=f"{prefix}_sn")
|
||||
|
||||
# B. Settings
|
||||
s_col1, s_col2, s_col3 = st.columns(3)
|
||||
s_col1.text_input("Camera", value=str(item_data.get("camera", "static")), disabled=True, key=f"{prefix}_cam")
|
||||
s_col2.text_input("FLF", value=str(item_data.get("flf", "0.0")), disabled=True, key=f"{prefix}_flf")
|
||||
s_col3.text_input("Seed", value=str(item_data.get("seed", "-1")), disabled=True, key=f"{prefix}_seed")
|
||||
|
||||
# C. LoRAs
|
||||
with st.expander("💊 LoRA Configuration", expanded=False):
|
||||
l1, l2, l3 = st.columns(3)
|
||||
with l1:
|
||||
st.text_input("L1 Name", value=item_data.get("lora 1 high", ""), disabled=True, key=f"{prefix}_l1h")
|
||||
st.text_input("L1 Str", value=str(item_data.get("lora 1 low", "")), disabled=True, key=f"{prefix}_l1l")
|
||||
with l2:
|
||||
st.text_input("L2 Name", value=item_data.get("lora 2 high", ""), disabled=True, key=f"{prefix}_l2h")
|
||||
st.text_input("L2 Str", value=str(item_data.get("lora 2 low", "")), disabled=True, key=f"{prefix}_l2l")
|
||||
with l3:
|
||||
st.text_input("L3 Name", value=item_data.get("lora 3 high", ""), disabled=True, key=f"{prefix}_l3h")
|
||||
st.text_input("L3 Str", value=str(item_data.get("lora 3 low", "")), disabled=True, key=f"{prefix}_l3l")
|
||||
|
||||
# D. VACE
|
||||
vace_keys = ["frame_to_skip", "vace schedule", "video file path"]
|
||||
has_vace = any(k in item_data for k in vace_keys)
|
||||
if has_vace:
|
||||
with st.expander("🎞️ VACE / I2V Settings", expanded=False):
|
||||
v1, v2, v3 = st.columns(3)
|
||||
v1.text_input("Skip Frames", value=str(item_data.get("frame_to_skip", "")), disabled=True, key=f"{prefix}_fts")
|
||||
v2.text_input("Schedule", value=str(item_data.get("vace schedule", "")), disabled=True, key=f"{prefix}_vsc")
|
||||
v3.text_input("Video Path", value=str(item_data.get("video file path", "")), disabled=True, key=f"{prefix}_vid")
|
||||
|
||||
# --- DETECT BATCH VS SINGLE ---
|
||||
batch_list = node_data.get("batch_data", [])
|
||||
|
||||
if batch_list and isinstance(batch_list, list) and len(batch_list) > 0:
|
||||
st.info(f"📚 This snapshot contains {len(batch_list)} sequences.")
|
||||
|
||||
for i, seq_data in enumerate(batch_list):
|
||||
seq_num = seq_data.get("sequence_number", i+1)
|
||||
with st.expander(f"🎬 Sequence #{seq_num}", expanded=(i==0)):
|
||||
# Unique prefix for every sequence in every node
|
||||
prefix = f"p_{target_node_id}_s{i}"
|
||||
render_preview_fields(seq_data, prefix)
|
||||
else:
|
||||
# Single File Preview
|
||||
prefix = f"p_{target_node_id}_single"
|
||||
render_preview_fields(node_data, prefix)
|
||||
0
tests/__init__.py
Normal file
0
tests/__init__.py
Normal file
5
tests/conftest.py
Normal file
5
tests/conftest.py
Normal file
@@ -0,0 +1,5 @@
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
# Add project root to sys.path so tests can import project modules
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
|
||||
1
tests/pytest.ini
Normal file
1
tests/pytest.ini
Normal file
@@ -0,0 +1 @@
|
||||
[pytest]
|
||||
369
tests/test_db.py
Normal file
369
tests/test_db.py
Normal file
@@ -0,0 +1,369 @@
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from db import ProjectDB
|
||||
from utils import KEY_BATCH_DATA, KEY_HISTORY_TREE
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def db(tmp_path):
|
||||
"""Create a fresh ProjectDB in a temp directory."""
|
||||
db_path = tmp_path / "test.db"
|
||||
pdb = ProjectDB(db_path)
|
||||
yield pdb
|
||||
pdb.close()
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Projects CRUD
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
class TestProjects:
|
||||
def test_create_and_get(self, db):
|
||||
pid = db.create_project("proj1", "/some/path", "A test project")
|
||||
assert pid > 0
|
||||
proj = db.get_project("proj1")
|
||||
assert proj is not None
|
||||
assert proj["name"] == "proj1"
|
||||
assert proj["folder_path"] == "/some/path"
|
||||
assert proj["description"] == "A test project"
|
||||
|
||||
def test_list_projects(self, db):
|
||||
db.create_project("beta", "/b")
|
||||
db.create_project("alpha", "/a")
|
||||
projects = db.list_projects()
|
||||
assert len(projects) == 2
|
||||
assert projects[0]["name"] == "alpha"
|
||||
assert projects[1]["name"] == "beta"
|
||||
|
||||
def test_get_nonexistent(self, db):
|
||||
assert db.get_project("nope") is None
|
||||
|
||||
def test_delete_project(self, db):
|
||||
db.create_project("to_delete", "/x")
|
||||
assert db.delete_project("to_delete") is True
|
||||
assert db.get_project("to_delete") is None
|
||||
|
||||
def test_delete_nonexistent(self, db):
|
||||
assert db.delete_project("nope") is False
|
||||
|
||||
def test_unique_name_constraint(self, db):
|
||||
db.create_project("dup", "/a")
|
||||
with pytest.raises(Exception):
|
||||
db.create_project("dup", "/b")
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Data files
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
class TestDataFiles:
|
||||
def test_create_and_list(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
df_id = db.create_data_file(pid, "batch_i2v", "i2v", {"extra": "meta"})
|
||||
assert df_id > 0
|
||||
files = db.list_data_files(pid)
|
||||
assert len(files) == 1
|
||||
assert files[0]["name"] == "batch_i2v"
|
||||
assert files[0]["data_type"] == "i2v"
|
||||
|
||||
def test_get_data_file(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
db.create_data_file(pid, "batch_i2v", "i2v", {"key": "value"})
|
||||
df = db.get_data_file(pid, "batch_i2v")
|
||||
assert df is not None
|
||||
assert df["top_level"] == {"key": "value"}
|
||||
|
||||
def test_get_data_file_by_names(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
db.create_data_file(pid, "batch_i2v", "i2v")
|
||||
df = db.get_data_file_by_names("p1", "batch_i2v")
|
||||
assert df is not None
|
||||
assert df["name"] == "batch_i2v"
|
||||
|
||||
def test_get_nonexistent_data_file(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
assert db.get_data_file(pid, "nope") is None
|
||||
|
||||
def test_unique_constraint(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
db.create_data_file(pid, "batch_i2v", "i2v")
|
||||
with pytest.raises(Exception):
|
||||
db.create_data_file(pid, "batch_i2v", "vace")
|
||||
|
||||
def test_cascade_delete(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
df_id = db.create_data_file(pid, "batch_i2v", "i2v")
|
||||
db.upsert_sequence(df_id, 1, {"prompt": "hello"})
|
||||
db.save_history_tree(df_id, {"nodes": {}})
|
||||
db.delete_project("p1")
|
||||
assert db.get_data_file(pid, "batch_i2v") is None
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Sequences
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
class TestSequences:
|
||||
def test_upsert_and_get(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
df_id = db.create_data_file(pid, "batch", "generic")
|
||||
db.upsert_sequence(df_id, 1, {"prompt": "hello", "seed": 42})
|
||||
data = db.get_sequence(df_id, 1)
|
||||
assert data == {"prompt": "hello", "seed": 42}
|
||||
|
||||
def test_upsert_updates_existing(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
df_id = db.create_data_file(pid, "batch", "generic")
|
||||
db.upsert_sequence(df_id, 1, {"prompt": "v1"})
|
||||
db.upsert_sequence(df_id, 1, {"prompt": "v2"})
|
||||
data = db.get_sequence(df_id, 1)
|
||||
assert data["prompt"] == "v2"
|
||||
|
||||
def test_list_sequences(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
df_id = db.create_data_file(pid, "batch", "generic")
|
||||
db.upsert_sequence(df_id, 3, {"a": 1})
|
||||
db.upsert_sequence(df_id, 1, {"b": 2})
|
||||
db.upsert_sequence(df_id, 2, {"c": 3})
|
||||
seqs = db.list_sequences(df_id)
|
||||
assert seqs == [1, 2, 3]
|
||||
|
||||
def test_get_nonexistent_sequence(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
df_id = db.create_data_file(pid, "batch", "generic")
|
||||
assert db.get_sequence(df_id, 99) is None
|
||||
|
||||
def test_get_sequence_keys(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
df_id = db.create_data_file(pid, "batch", "generic")
|
||||
db.upsert_sequence(df_id, 1, {
|
||||
"prompt": "hello",
|
||||
"seed": 42,
|
||||
"cfg": 1.5,
|
||||
"flag": True,
|
||||
})
|
||||
keys, types = db.get_sequence_keys(df_id, 1)
|
||||
assert "prompt" in keys
|
||||
assert "seed" in keys
|
||||
idx_prompt = keys.index("prompt")
|
||||
idx_seed = keys.index("seed")
|
||||
idx_cfg = keys.index("cfg")
|
||||
idx_flag = keys.index("flag")
|
||||
assert types[idx_prompt] == "STRING"
|
||||
assert types[idx_seed] == "INT"
|
||||
assert types[idx_cfg] == "FLOAT"
|
||||
assert types[idx_flag] == "STRING" # bools -> STRING
|
||||
|
||||
def test_get_sequence_keys_nonexistent(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
df_id = db.create_data_file(pid, "batch", "generic")
|
||||
keys, types = db.get_sequence_keys(df_id, 99)
|
||||
assert keys == []
|
||||
assert types == []
|
||||
|
||||
def test_delete_sequences_for_file(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
df_id = db.create_data_file(pid, "batch", "generic")
|
||||
db.upsert_sequence(df_id, 1, {"a": 1})
|
||||
db.upsert_sequence(df_id, 2, {"b": 2})
|
||||
db.delete_sequences_for_file(df_id)
|
||||
assert db.list_sequences(df_id) == []
|
||||
|
||||
def test_count_sequences(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
df_id = db.create_data_file(pid, "batch", "generic")
|
||||
assert db.count_sequences(df_id) == 0
|
||||
db.upsert_sequence(df_id, 1, {"a": 1})
|
||||
db.upsert_sequence(df_id, 2, {"b": 2})
|
||||
db.upsert_sequence(df_id, 3, {"c": 3})
|
||||
assert db.count_sequences(df_id) == 3
|
||||
|
||||
def test_query_total_sequences(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
df_id = db.create_data_file(pid, "batch", "generic")
|
||||
db.upsert_sequence(df_id, 1, {"a": 1})
|
||||
db.upsert_sequence(df_id, 2, {"b": 2})
|
||||
assert db.query_total_sequences("p1", "batch") == 2
|
||||
|
||||
def test_query_total_sequences_nonexistent(self, db):
|
||||
assert db.query_total_sequences("nope", "nope") == 0
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# History trees
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
class TestHistoryTrees:
|
||||
def test_save_and_get(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
df_id = db.create_data_file(pid, "batch", "generic")
|
||||
tree = {"nodes": {"abc": {"id": "abc"}}, "head_id": "abc"}
|
||||
db.save_history_tree(df_id, tree)
|
||||
result = db.get_history_tree(df_id)
|
||||
assert result == tree
|
||||
|
||||
def test_upsert_updates(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
df_id = db.create_data_file(pid, "batch", "generic")
|
||||
db.save_history_tree(df_id, {"v": 1})
|
||||
db.save_history_tree(df_id, {"v": 2})
|
||||
result = db.get_history_tree(df_id)
|
||||
assert result == {"v": 2}
|
||||
|
||||
def test_get_nonexistent(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
df_id = db.create_data_file(pid, "batch", "generic")
|
||||
assert db.get_history_tree(df_id) is None
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Import
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
class TestImport:
|
||||
def test_import_json_file(self, db, tmp_path):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
json_path = tmp_path / "batch_prompt_i2v.json"
|
||||
data = {
|
||||
KEY_BATCH_DATA: [
|
||||
{"sequence_number": 1, "prompt": "hello", "seed": 42},
|
||||
{"sequence_number": 2, "prompt": "world", "seed": 99},
|
||||
],
|
||||
KEY_HISTORY_TREE: {"nodes": {}, "head_id": None},
|
||||
}
|
||||
json_path.write_text(json.dumps(data))
|
||||
|
||||
df_id = db.import_json_file(pid, json_path, "i2v")
|
||||
assert df_id > 0
|
||||
|
||||
seqs = db.list_sequences(df_id)
|
||||
assert seqs == [1, 2]
|
||||
|
||||
s1 = db.get_sequence(df_id, 1)
|
||||
assert s1["prompt"] == "hello"
|
||||
assert s1["seed"] == 42
|
||||
|
||||
tree = db.get_history_tree(df_id)
|
||||
assert tree == {"nodes": {}, "head_id": None}
|
||||
|
||||
def test_import_file_name_from_stem(self, db, tmp_path):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
json_path = tmp_path / "my_batch.json"
|
||||
json_path.write_text(json.dumps({KEY_BATCH_DATA: [{"sequence_number": 1}]}))
|
||||
db.import_json_file(pid, json_path)
|
||||
df = db.get_data_file(pid, "my_batch")
|
||||
assert df is not None
|
||||
|
||||
def test_import_no_batch_data(self, db, tmp_path):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
json_path = tmp_path / "simple.json"
|
||||
json_path.write_text(json.dumps({"prompt": "flat file"}))
|
||||
df_id = db.import_json_file(pid, json_path)
|
||||
seqs = db.list_sequences(df_id)
|
||||
assert seqs == []
|
||||
|
||||
def test_reimport_updates_existing(self, db, tmp_path):
|
||||
"""Re-importing the same file should update data, not crash."""
|
||||
pid = db.create_project("p1", "/p1")
|
||||
json_path = tmp_path / "batch.json"
|
||||
|
||||
# First import
|
||||
data_v1 = {KEY_BATCH_DATA: [{"sequence_number": 1, "prompt": "v1"}]}
|
||||
json_path.write_text(json.dumps(data_v1))
|
||||
df_id_1 = db.import_json_file(pid, json_path, "i2v")
|
||||
|
||||
# Second import (same file, updated data)
|
||||
data_v2 = {KEY_BATCH_DATA: [{"sequence_number": 1, "prompt": "v2"}, {"sequence_number": 2, "prompt": "new"}]}
|
||||
json_path.write_text(json.dumps(data_v2))
|
||||
df_id_2 = db.import_json_file(pid, json_path, "vace")
|
||||
|
||||
# Should reuse the same data_file row
|
||||
assert df_id_1 == df_id_2
|
||||
# Data type should be updated
|
||||
df = db.get_data_file(pid, "batch")
|
||||
assert df["data_type"] == "vace"
|
||||
# Sequences should reflect v2
|
||||
seqs = db.list_sequences(df_id_2)
|
||||
assert seqs == [1, 2]
|
||||
s1 = db.get_sequence(df_id_2, 1)
|
||||
assert s1["prompt"] == "v2"
|
||||
|
||||
def test_import_skips_non_dict_batch_items(self, db, tmp_path):
|
||||
"""Non-dict elements in batch_data should be silently skipped, not crash."""
|
||||
pid = db.create_project("p1", "/p1")
|
||||
json_path = tmp_path / "mixed.json"
|
||||
data = {KEY_BATCH_DATA: [
|
||||
{"sequence_number": 1, "prompt": "valid"},
|
||||
"not a dict",
|
||||
42,
|
||||
None,
|
||||
{"sequence_number": 3, "prompt": "also valid"},
|
||||
]}
|
||||
json_path.write_text(json.dumps(data))
|
||||
df_id = db.import_json_file(pid, json_path)
|
||||
|
||||
seqs = db.list_sequences(df_id)
|
||||
assert seqs == [1, 3]
|
||||
|
||||
def test_import_atomic_on_error(self, db, tmp_path):
|
||||
"""If import fails partway, no partial data should be committed."""
|
||||
pid = db.create_project("p1", "/p1")
|
||||
json_path = tmp_path / "batch.json"
|
||||
data = {KEY_BATCH_DATA: [{"sequence_number": 1, "prompt": "hello"}]}
|
||||
json_path.write_text(json.dumps(data))
|
||||
db.import_json_file(pid, json_path)
|
||||
|
||||
# Now try to import with bad data that will cause an error
|
||||
# (overwrite the file with invalid sequence_number that causes int() to fail)
|
||||
bad_data = {KEY_BATCH_DATA: [{"sequence_number": "not_a_number", "prompt": "bad"}]}
|
||||
json_path.write_text(json.dumps(bad_data))
|
||||
with pytest.raises(ValueError):
|
||||
db.import_json_file(pid, json_path)
|
||||
|
||||
# Original data should still be intact (rollback worked)
|
||||
df = db.get_data_file(pid, "batch")
|
||||
assert df is not None
|
||||
s1 = db.get_sequence(df["id"], 1)
|
||||
assert s1["prompt"] == "hello"
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Query helpers
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
class TestQueryHelpers:
|
||||
def test_query_sequence_data(self, db):
|
||||
pid = db.create_project("myproject", "/mp")
|
||||
df_id = db.create_data_file(pid, "batch_i2v", "i2v")
|
||||
db.upsert_sequence(df_id, 1, {"prompt": "test", "seed": 7})
|
||||
result = db.query_sequence_data("myproject", "batch_i2v", 1)
|
||||
assert result == {"prompt": "test", "seed": 7}
|
||||
|
||||
def test_query_sequence_data_not_found(self, db):
|
||||
assert db.query_sequence_data("nope", "nope", 1) is None
|
||||
|
||||
def test_query_sequence_keys(self, db):
|
||||
pid = db.create_project("myproject", "/mp")
|
||||
df_id = db.create_data_file(pid, "batch_i2v", "i2v")
|
||||
db.upsert_sequence(df_id, 1, {"prompt": "test", "seed": 7})
|
||||
keys, types = db.query_sequence_keys("myproject", "batch_i2v", 1)
|
||||
assert "prompt" in keys
|
||||
assert "seed" in keys
|
||||
|
||||
def test_list_project_files(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
db.create_data_file(pid, "file_a", "i2v")
|
||||
db.create_data_file(pid, "file_b", "vace")
|
||||
files = db.list_project_files("p1")
|
||||
assert len(files) == 2
|
||||
|
||||
def test_list_project_sequences(self, db):
|
||||
pid = db.create_project("p1", "/p1")
|
||||
df_id = db.create_data_file(pid, "batch", "generic")
|
||||
db.upsert_sequence(df_id, 1, {})
|
||||
db.upsert_sequence(df_id, 2, {})
|
||||
seqs = db.list_project_sequences("p1", "batch")
|
||||
assert seqs == [1, 2]
|
||||
67
tests/test_history_tree.py
Normal file
67
tests/test_history_tree.py
Normal file
@@ -0,0 +1,67 @@
|
||||
import pytest
|
||||
from history_tree import HistoryTree
|
||||
|
||||
|
||||
def test_commit_creates_node_with_correct_parent():
|
||||
tree = HistoryTree({})
|
||||
id1 = tree.commit({"a": 1}, note="first")
|
||||
id2 = tree.commit({"b": 2}, note="second")
|
||||
|
||||
assert tree.nodes[id1]["parent"] is None
|
||||
assert tree.nodes[id2]["parent"] == id1
|
||||
|
||||
|
||||
def test_checkout_returns_correct_data():
|
||||
tree = HistoryTree({})
|
||||
id1 = tree.commit({"val": 42}, note="snap")
|
||||
result = tree.checkout(id1)
|
||||
assert result == {"val": 42}
|
||||
|
||||
|
||||
def test_checkout_nonexistent_returns_none():
|
||||
tree = HistoryTree({})
|
||||
assert tree.checkout("nonexistent") is None
|
||||
|
||||
|
||||
def test_cycle_detection_raises():
|
||||
tree = HistoryTree({})
|
||||
id1 = tree.commit({"a": 1})
|
||||
# Manually introduce a cycle
|
||||
tree.nodes[id1]["parent"] = id1
|
||||
with pytest.raises(ValueError, match="Cycle detected"):
|
||||
tree.commit({"b": 2})
|
||||
|
||||
|
||||
def test_branch_creation_on_detached_head():
|
||||
tree = HistoryTree({})
|
||||
id1 = tree.commit({"a": 1})
|
||||
id2 = tree.commit({"b": 2})
|
||||
# Detach head by checking out a non-tip node
|
||||
tree.checkout(id1)
|
||||
# head_id is now id1, which is no longer a branch tip (main points to id2)
|
||||
id3 = tree.commit({"c": 3})
|
||||
# A new branch should have been created
|
||||
assert len(tree.branches) == 2
|
||||
assert tree.nodes[id3]["parent"] == id1
|
||||
|
||||
|
||||
def test_legacy_migration():
|
||||
legacy = {
|
||||
"prompt_history": [
|
||||
{"note": "Entry A", "seed": 1},
|
||||
{"note": "Entry B", "seed": 2},
|
||||
]
|
||||
}
|
||||
tree = HistoryTree(legacy)
|
||||
assert len(tree.nodes) == 2
|
||||
assert tree.head_id is not None
|
||||
assert tree.branches["main"] == tree.head_id
|
||||
|
||||
|
||||
def test_to_dict_roundtrip():
|
||||
tree = HistoryTree({})
|
||||
tree.commit({"x": 1}, note="test")
|
||||
d = tree.to_dict()
|
||||
tree2 = HistoryTree(d)
|
||||
assert tree2.head_id == tree.head_id
|
||||
assert tree2.nodes == tree.nodes
|
||||
165
tests/test_json_loader.py
Normal file
165
tests/test_json_loader.py
Normal file
@@ -0,0 +1,165 @@
|
||||
import json
|
||||
import os
|
||||
|
||||
import pytest
|
||||
|
||||
from json_loader import (
|
||||
to_float, to_int, get_batch_item, read_json_data,
|
||||
JSONLoaderDynamic, MAX_DYNAMIC_OUTPUTS,
|
||||
)
|
||||
|
||||
|
||||
class TestToFloat:
|
||||
def test_valid(self):
|
||||
assert to_float("3.14") == 3.14
|
||||
assert to_float(5) == 5.0
|
||||
|
||||
def test_invalid(self):
|
||||
assert to_float("abc") == 0.0
|
||||
|
||||
def test_none(self):
|
||||
assert to_float(None) == 0.0
|
||||
|
||||
|
||||
class TestToInt:
|
||||
def test_valid(self):
|
||||
assert to_int("7") == 7
|
||||
assert to_int(3.9) == 3
|
||||
|
||||
def test_invalid(self):
|
||||
assert to_int("xyz") == 0
|
||||
|
||||
def test_none(self):
|
||||
assert to_int(None) == 0
|
||||
|
||||
|
||||
class TestGetBatchItem:
|
||||
def test_lookup_by_sequence_number_field(self):
|
||||
data = {"batch_data": [
|
||||
{"sequence_number": 1, "a": "first"},
|
||||
{"sequence_number": 5, "a": "fifth"},
|
||||
{"sequence_number": 3, "a": "third"},
|
||||
]}
|
||||
assert get_batch_item(data, 5) == {"sequence_number": 5, "a": "fifth"}
|
||||
assert get_batch_item(data, 3) == {"sequence_number": 3, "a": "third"}
|
||||
|
||||
def test_fallback_to_index(self):
|
||||
data = {"batch_data": [{"a": 1}, {"a": 2}, {"a": 3}]}
|
||||
assert get_batch_item(data, 2) == {"a": 2}
|
||||
|
||||
def test_clamp_high(self):
|
||||
data = {"batch_data": [{"a": 1}, {"a": 2}]}
|
||||
assert get_batch_item(data, 99) == {"a": 2}
|
||||
|
||||
def test_clamp_low(self):
|
||||
data = {"batch_data": [{"a": 1}, {"a": 2}]}
|
||||
assert get_batch_item(data, 0) == {"a": 1}
|
||||
|
||||
def test_no_batch_data(self):
|
||||
data = {"key": "val"}
|
||||
assert get_batch_item(data, 1) == data
|
||||
|
||||
|
||||
class TestReadJsonData:
|
||||
def test_missing_file(self, tmp_path):
|
||||
assert read_json_data(str(tmp_path / "nope.json")) == {}
|
||||
|
||||
def test_invalid_json(self, tmp_path):
|
||||
p = tmp_path / "bad.json"
|
||||
p.write_text("{broken")
|
||||
assert read_json_data(str(p)) == {}
|
||||
|
||||
def test_non_dict_json(self, tmp_path):
|
||||
p = tmp_path / "list.json"
|
||||
p.write_text(json.dumps([1, 2, 3]))
|
||||
assert read_json_data(str(p)) == {}
|
||||
|
||||
def test_valid(self, tmp_path):
|
||||
p = tmp_path / "ok.json"
|
||||
p.write_text(json.dumps({"key": "val"}))
|
||||
assert read_json_data(str(p)) == {"key": "val"}
|
||||
|
||||
|
||||
class TestJSONLoaderDynamic:
|
||||
def _make_json(self, tmp_path, data):
|
||||
p = tmp_path / "test.json"
|
||||
p.write_text(json.dumps(data))
|
||||
return str(p)
|
||||
|
||||
def test_known_keys(self, tmp_path):
|
||||
path = self._make_json(tmp_path, {"name": "alice", "age": 30, "score": 9.5})
|
||||
loader = JSONLoaderDynamic()
|
||||
result = loader.load_dynamic(path, 1, output_keys="name,age,score")
|
||||
assert result[0] == "alice"
|
||||
assert result[1] == 30
|
||||
assert result[2] == 9.5
|
||||
|
||||
def test_empty_output_keys(self, tmp_path):
|
||||
path = self._make_json(tmp_path, {"name": "alice"})
|
||||
loader = JSONLoaderDynamic()
|
||||
result = loader.load_dynamic(path, 1, output_keys="")
|
||||
assert len(result) == MAX_DYNAMIC_OUTPUTS
|
||||
assert all(v == "" for v in result)
|
||||
|
||||
def test_pads_to_max(self, tmp_path):
|
||||
path = self._make_json(tmp_path, {"a": "1", "b": "2"})
|
||||
loader = JSONLoaderDynamic()
|
||||
result = loader.load_dynamic(path, 1, output_keys="a,b")
|
||||
assert len(result) == MAX_DYNAMIC_OUTPUTS
|
||||
assert result[0] == "1"
|
||||
assert result[1] == "2"
|
||||
assert all(v == "" for v in result[2:])
|
||||
|
||||
def test_type_preservation_int(self, tmp_path):
|
||||
path = self._make_json(tmp_path, {"count": 42})
|
||||
loader = JSONLoaderDynamic()
|
||||
result = loader.load_dynamic(path, 1, output_keys="count")
|
||||
assert result[0] == 42
|
||||
assert isinstance(result[0], int)
|
||||
|
||||
def test_type_preservation_float(self, tmp_path):
|
||||
path = self._make_json(tmp_path, {"rate": 3.14})
|
||||
loader = JSONLoaderDynamic()
|
||||
result = loader.load_dynamic(path, 1, output_keys="rate")
|
||||
assert result[0] == 3.14
|
||||
assert isinstance(result[0], float)
|
||||
|
||||
def test_type_preservation_str(self, tmp_path):
|
||||
path = self._make_json(tmp_path, {"label": "hello"})
|
||||
loader = JSONLoaderDynamic()
|
||||
result = loader.load_dynamic(path, 1, output_keys="label")
|
||||
assert result[0] == "hello"
|
||||
assert isinstance(result[0], str)
|
||||
|
||||
def test_bool_becomes_string(self, tmp_path):
|
||||
path = self._make_json(tmp_path, {"flag": True, "off": False})
|
||||
loader = JSONLoaderDynamic()
|
||||
result = loader.load_dynamic(path, 1, output_keys="flag,off")
|
||||
assert result[0] == "true"
|
||||
assert result[1] == "false"
|
||||
assert isinstance(result[0], str)
|
||||
|
||||
def test_missing_key_returns_empty_string(self, tmp_path):
|
||||
path = self._make_json(tmp_path, {"a": "1"})
|
||||
loader = JSONLoaderDynamic()
|
||||
result = loader.load_dynamic(path, 1, output_keys="a,nonexistent")
|
||||
assert result[0] == "1"
|
||||
assert result[1] == ""
|
||||
|
||||
def test_missing_file_returns_all_empty(self, tmp_path):
|
||||
loader = JSONLoaderDynamic()
|
||||
result = loader.load_dynamic(str(tmp_path / "nope.json"), 1, output_keys="a,b")
|
||||
assert len(result) == MAX_DYNAMIC_OUTPUTS
|
||||
assert result[0] == ""
|
||||
assert result[1] == ""
|
||||
|
||||
def test_batch_data(self, tmp_path):
|
||||
path = self._make_json(tmp_path, {
|
||||
"batch_data": [
|
||||
{"sequence_number": 1, "x": "first"},
|
||||
{"sequence_number": 2, "x": "second"},
|
||||
]
|
||||
})
|
||||
loader = JSONLoaderDynamic()
|
||||
result = loader.load_dynamic(path, 2, output_keys="x")
|
||||
assert result[0] == "second"
|
||||
211
tests/test_project_loader.py
Normal file
211
tests/test_project_loader.py
Normal file
@@ -0,0 +1,211 @@
|
||||
import json
|
||||
from unittest.mock import patch, MagicMock
|
||||
from io import BytesIO
|
||||
|
||||
import pytest
|
||||
|
||||
from project_loader import (
|
||||
ProjectLoaderDynamic,
|
||||
_fetch_json,
|
||||
_fetch_data,
|
||||
_fetch_keys,
|
||||
MAX_DYNAMIC_OUTPUTS,
|
||||
)
|
||||
|
||||
|
||||
def _mock_urlopen(data: dict):
|
||||
"""Create a mock context manager for urllib.request.urlopen."""
|
||||
response = MagicMock()
|
||||
response.read.return_value = json.dumps(data).encode()
|
||||
response.__enter__ = lambda s: s
|
||||
response.__exit__ = MagicMock(return_value=False)
|
||||
return response
|
||||
|
||||
|
||||
class TestFetchHelpers:
|
||||
def test_fetch_json_success(self):
|
||||
data = {"key": "value"}
|
||||
with patch("project_loader.urllib.request.urlopen", return_value=_mock_urlopen(data)):
|
||||
result = _fetch_json("http://example.com/api")
|
||||
assert result == data
|
||||
|
||||
def test_fetch_json_network_error(self):
|
||||
with patch("project_loader.urllib.request.urlopen", side_effect=OSError("connection refused")):
|
||||
result = _fetch_json("http://example.com/api")
|
||||
assert result["error"] == "network_error"
|
||||
assert "connection refused" in result["message"]
|
||||
|
||||
def test_fetch_json_http_error(self):
|
||||
import urllib.error
|
||||
err = urllib.error.HTTPError(
|
||||
"http://example.com/api", 404, "Not Found", {},
|
||||
BytesIO(json.dumps({"detail": "Project 'x' not found"}).encode())
|
||||
)
|
||||
with patch("project_loader.urllib.request.urlopen", side_effect=err):
|
||||
result = _fetch_json("http://example.com/api")
|
||||
assert result["error"] == "http_error"
|
||||
assert result["status"] == 404
|
||||
assert "not found" in result["message"].lower()
|
||||
|
||||
def test_fetch_data_builds_url(self):
|
||||
data = {"prompt": "hello"}
|
||||
with patch("project_loader.urllib.request.urlopen", return_value=_mock_urlopen(data)) as mock:
|
||||
result = _fetch_data("http://localhost:8080", "proj1", "batch_i2v", 1)
|
||||
assert result == data
|
||||
called_url = mock.call_args[0][0]
|
||||
assert "/api/projects/proj1/files/batch_i2v/data?seq=1" in called_url
|
||||
|
||||
def test_fetch_keys_builds_url(self):
|
||||
data = {"keys": ["prompt"], "types": ["STRING"]}
|
||||
with patch("project_loader.urllib.request.urlopen", return_value=_mock_urlopen(data)) as mock:
|
||||
result = _fetch_keys("http://localhost:8080", "proj1", "batch_i2v", 1)
|
||||
assert result == data
|
||||
called_url = mock.call_args[0][0]
|
||||
assert "/api/projects/proj1/files/batch_i2v/keys?seq=1" in called_url
|
||||
|
||||
def test_fetch_data_strips_trailing_slash(self):
|
||||
data = {"prompt": "hello"}
|
||||
with patch("project_loader.urllib.request.urlopen", return_value=_mock_urlopen(data)) as mock:
|
||||
_fetch_data("http://localhost:8080/", "proj1", "file1", 1)
|
||||
called_url = mock.call_args[0][0]
|
||||
assert "//api" not in called_url
|
||||
|
||||
def test_fetch_data_encodes_special_chars(self):
|
||||
"""Project/file names with spaces or special chars should be percent-encoded."""
|
||||
data = {"prompt": "hello"}
|
||||
with patch("project_loader.urllib.request.urlopen", return_value=_mock_urlopen(data)) as mock:
|
||||
_fetch_data("http://localhost:8080", "my project", "batch file", 1)
|
||||
called_url = mock.call_args[0][0]
|
||||
assert "my%20project" in called_url
|
||||
assert "batch%20file" in called_url
|
||||
assert " " not in called_url.split("?")[0] # no raw spaces in path
|
||||
|
||||
|
||||
class TestProjectLoaderDynamic:
|
||||
def _keys_meta(self, total=5):
|
||||
return {"keys": [], "types": [], "total_sequences": total}
|
||||
|
||||
def test_load_dynamic_with_keys(self):
|
||||
data = {"prompt": "hello", "seed": 42, "cfg": 1.5}
|
||||
node = ProjectLoaderDynamic()
|
||||
with patch("project_loader._fetch_keys", return_value=self._keys_meta()):
|
||||
with patch("project_loader._fetch_data", return_value=data):
|
||||
result = node.load_dynamic(
|
||||
"http://localhost:8080", "proj1", "batch_i2v", 1,
|
||||
output_keys="prompt,seed,cfg"
|
||||
)
|
||||
assert result[0] == 5 # total_sequences
|
||||
assert result[1] == "hello"
|
||||
assert result[2] == 42
|
||||
assert result[3] == 1.5
|
||||
assert len(result) == MAX_DYNAMIC_OUTPUTS + 1
|
||||
|
||||
def test_load_dynamic_with_json_encoded_keys(self):
|
||||
"""JSON-encoded output_keys should be parsed correctly."""
|
||||
import json as _json
|
||||
data = {"my,key": "comma_val", "normal": "ok"}
|
||||
node = ProjectLoaderDynamic()
|
||||
keys_json = _json.dumps(["my,key", "normal"])
|
||||
with patch("project_loader._fetch_keys", return_value=self._keys_meta()):
|
||||
with patch("project_loader._fetch_data", return_value=data):
|
||||
result = node.load_dynamic(
|
||||
"http://localhost:8080", "proj1", "batch_i2v", 1,
|
||||
output_keys=keys_json
|
||||
)
|
||||
assert result[1] == "comma_val"
|
||||
assert result[2] == "ok"
|
||||
|
||||
def test_load_dynamic_type_coercion(self):
|
||||
"""output_types should coerce values to declared types."""
|
||||
import json as _json
|
||||
data = {"seed": "42", "cfg": "1.5", "prompt": "hello"}
|
||||
node = ProjectLoaderDynamic()
|
||||
keys_json = _json.dumps(["seed", "cfg", "prompt"])
|
||||
types_json = _json.dumps(["INT", "FLOAT", "STRING"])
|
||||
with patch("project_loader._fetch_keys", return_value=self._keys_meta()):
|
||||
with patch("project_loader._fetch_data", return_value=data):
|
||||
result = node.load_dynamic(
|
||||
"http://localhost:8080", "proj1", "batch_i2v", 1,
|
||||
output_keys=keys_json, output_types=types_json
|
||||
)
|
||||
assert result[1] == 42 # string "42" coerced to int
|
||||
assert result[2] == 1.5 # string "1.5" coerced to float
|
||||
assert result[3] == "hello" # string stays string
|
||||
|
||||
def test_load_dynamic_empty_keys(self):
|
||||
node = ProjectLoaderDynamic()
|
||||
with patch("project_loader._fetch_keys", return_value=self._keys_meta()):
|
||||
with patch("project_loader._fetch_data", return_value={"prompt": "hello"}):
|
||||
result = node.load_dynamic(
|
||||
"http://localhost:8080", "proj1", "batch_i2v", 1,
|
||||
output_keys=""
|
||||
)
|
||||
# Slot 0 is total_sequences (INT), rest are empty strings
|
||||
assert result[0] == 5
|
||||
assert all(v == "" for v in result[1:])
|
||||
|
||||
def test_load_dynamic_missing_key(self):
|
||||
node = ProjectLoaderDynamic()
|
||||
with patch("project_loader._fetch_keys", return_value=self._keys_meta()):
|
||||
with patch("project_loader._fetch_data", return_value={"prompt": "hello"}):
|
||||
result = node.load_dynamic(
|
||||
"http://localhost:8080", "proj1", "batch_i2v", 1,
|
||||
output_keys="nonexistent"
|
||||
)
|
||||
assert result[1] == ""
|
||||
|
||||
def test_load_dynamic_bool_becomes_string(self):
|
||||
node = ProjectLoaderDynamic()
|
||||
with patch("project_loader._fetch_keys", return_value=self._keys_meta()):
|
||||
with patch("project_loader._fetch_data", return_value={"flag": True}):
|
||||
result = node.load_dynamic(
|
||||
"http://localhost:8080", "proj1", "batch_i2v", 1,
|
||||
output_keys="flag"
|
||||
)
|
||||
assert result[1] == "true"
|
||||
|
||||
def test_load_dynamic_returns_total_sequences(self):
|
||||
"""total_sequences should be the first output from keys metadata."""
|
||||
node = ProjectLoaderDynamic()
|
||||
with patch("project_loader._fetch_keys", return_value={"keys": [], "types": [], "total_sequences": 42}):
|
||||
with patch("project_loader._fetch_data", return_value={}):
|
||||
result = node.load_dynamic(
|
||||
"http://localhost:8080", "proj1", "batch_i2v", 1,
|
||||
output_keys=""
|
||||
)
|
||||
assert result[0] == 42
|
||||
|
||||
def test_load_dynamic_raises_on_network_error(self):
|
||||
"""Network errors from _fetch_keys should raise RuntimeError."""
|
||||
node = ProjectLoaderDynamic()
|
||||
error_resp = {"error": "network_error", "message": "Connection refused"}
|
||||
with patch("project_loader._fetch_keys", return_value=error_resp):
|
||||
with pytest.raises(RuntimeError, match="Failed to fetch project keys"):
|
||||
node.load_dynamic("http://localhost:8080", "proj1", "batch", 1)
|
||||
|
||||
def test_load_dynamic_raises_on_data_fetch_error(self):
|
||||
"""Network errors from _fetch_data should raise RuntimeError."""
|
||||
node = ProjectLoaderDynamic()
|
||||
error_resp = {"error": "http_error", "status": 404, "message": "Sequence not found"}
|
||||
with patch("project_loader._fetch_keys", return_value=self._keys_meta()):
|
||||
with patch("project_loader._fetch_data", return_value=error_resp):
|
||||
with pytest.raises(RuntimeError, match="Failed to fetch sequence data"):
|
||||
node.load_dynamic("http://localhost:8080", "proj1", "batch", 1)
|
||||
|
||||
def test_input_types_has_manager_url(self):
|
||||
inputs = ProjectLoaderDynamic.INPUT_TYPES()
|
||||
assert "manager_url" in inputs["required"]
|
||||
assert "project_name" in inputs["required"]
|
||||
assert "file_name" in inputs["required"]
|
||||
assert "sequence_number" in inputs["required"]
|
||||
|
||||
def test_category(self):
|
||||
assert ProjectLoaderDynamic.CATEGORY == "utils/json/project"
|
||||
|
||||
|
||||
class TestNodeMappings:
|
||||
def test_mappings_exist(self):
|
||||
from project_loader import PROJECT_NODE_CLASS_MAPPINGS, PROJECT_NODE_DISPLAY_NAME_MAPPINGS
|
||||
assert "ProjectLoaderDynamic" in PROJECT_NODE_CLASS_MAPPINGS
|
||||
assert len(PROJECT_NODE_CLASS_MAPPINGS) == 1
|
||||
assert len(PROJECT_NODE_DISPLAY_NAME_MAPPINGS) == 1
|
||||
94
tests/test_utils.py
Normal file
94
tests/test_utils.py
Normal file
@@ -0,0 +1,94 @@
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from utils import load_json, save_json, get_file_mtime, ALLOWED_BASE_DIR, DEFAULTS, resolve_path_case_insensitive
|
||||
|
||||
|
||||
def test_load_json_valid(tmp_path):
|
||||
p = tmp_path / "test.json"
|
||||
data = {"key": "value"}
|
||||
p.write_text(json.dumps(data))
|
||||
result, mtime = load_json(p)
|
||||
assert result == data
|
||||
assert mtime > 0
|
||||
|
||||
|
||||
def test_load_json_missing(tmp_path):
|
||||
p = tmp_path / "nope.json"
|
||||
result, mtime = load_json(p)
|
||||
assert result == DEFAULTS.copy()
|
||||
assert mtime == 0
|
||||
|
||||
|
||||
def test_load_json_invalid(tmp_path):
|
||||
p = tmp_path / "bad.json"
|
||||
p.write_text("{not valid json")
|
||||
result, mtime = load_json(p)
|
||||
assert result == DEFAULTS.copy()
|
||||
assert mtime == 0
|
||||
|
||||
|
||||
def test_save_json_atomic(tmp_path):
|
||||
p = tmp_path / "out.json"
|
||||
data = {"hello": "world"}
|
||||
save_json(p, data)
|
||||
assert p.exists()
|
||||
assert not p.with_suffix(".json.tmp").exists()
|
||||
assert json.loads(p.read_text()) == data
|
||||
|
||||
|
||||
def test_save_json_overwrites(tmp_path):
|
||||
p = tmp_path / "out.json"
|
||||
save_json(p, {"a": 1})
|
||||
save_json(p, {"b": 2})
|
||||
assert json.loads(p.read_text()) == {"b": 2}
|
||||
|
||||
|
||||
def test_get_file_mtime_existing(tmp_path):
|
||||
p = tmp_path / "f.txt"
|
||||
p.write_text("x")
|
||||
assert get_file_mtime(p) > 0
|
||||
|
||||
|
||||
def test_get_file_mtime_missing(tmp_path):
|
||||
assert get_file_mtime(tmp_path / "missing.txt") == 0
|
||||
|
||||
|
||||
def test_allowed_base_dir_is_set():
|
||||
assert ALLOWED_BASE_DIR is not None
|
||||
assert isinstance(ALLOWED_BASE_DIR, Path)
|
||||
|
||||
|
||||
class TestResolvePathCaseInsensitive:
|
||||
def test_exact_match(self, tmp_path):
|
||||
d = tmp_path / "MyFolder"
|
||||
d.mkdir()
|
||||
result = resolve_path_case_insensitive(str(d))
|
||||
assert result == d.resolve()
|
||||
|
||||
def test_wrong_case_single_component(self, tmp_path):
|
||||
d = tmp_path / "MyFolder"
|
||||
d.mkdir()
|
||||
wrong = tmp_path / "myfolder"
|
||||
result = resolve_path_case_insensitive(str(wrong))
|
||||
assert result == d.resolve()
|
||||
|
||||
def test_wrong_case_nested(self, tmp_path):
|
||||
d = tmp_path / "Parent" / "Child"
|
||||
d.mkdir(parents=True)
|
||||
wrong = tmp_path / "parent" / "CHILD"
|
||||
result = resolve_path_case_insensitive(str(wrong))
|
||||
assert result == d.resolve()
|
||||
|
||||
def test_no_match_returns_none(self, tmp_path):
|
||||
result = resolve_path_case_insensitive(str(tmp_path / "nonexistent"))
|
||||
assert result is None
|
||||
|
||||
def test_file_path(self, tmp_path):
|
||||
f = tmp_path / "Data.json"
|
||||
f.write_text("{}")
|
||||
wrong = tmp_path / "data.JSON"
|
||||
result = resolve_path_case_insensitive(str(wrong))
|
||||
assert result == f.resolve()
|
||||
194
utils.py
194
utils.py
@@ -1,39 +1,52 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
from pathlib import Path
|
||||
import streamlit as st
|
||||
from typing import Any
|
||||
|
||||
# --- Magic String Keys ---
|
||||
KEY_BATCH_DATA = "batch_data"
|
||||
KEY_HISTORY_TREE = "history_tree"
|
||||
KEY_PROMPT_HISTORY = "prompt_history"
|
||||
KEY_SEQUENCE_NUMBER = "sequence_number"
|
||||
|
||||
# Configure logging for the application
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format="%(asctime)s [%(name)s] %(levelname)s: %(message)s",
|
||||
datefmt="%H:%M:%S",
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Default structure for new files
|
||||
DEFAULTS = {
|
||||
# --- Standard Keys for your Restored Single Tab ---
|
||||
"general_prompt": "", # Global positive
|
||||
"general_negative": "", # Global negative
|
||||
"current_prompt": "", # Specific positive
|
||||
"negative": "", # Specific negative
|
||||
# --- Prompts ---
|
||||
"general_prompt": "",
|
||||
"general_negative": "Vivid tones, overexposed, static, blurry details, subtitles, style, artwork, painting, picture, still image, overall gray, worst quality, low quality, JPEG compression artifacts, ugly, deformed, extra fingers, poorly drawn hands, poorly drawn face, distorted, disfigured, malformed limbs, fused fingers, unmoving frame, cluttered background, three legs",
|
||||
"current_prompt": "",
|
||||
"negative": "",
|
||||
"seed": -1,
|
||||
|
||||
"cfg": 1.5,
|
||||
|
||||
# --- Settings ---
|
||||
"camera": "static",
|
||||
"flf": 0.0,
|
||||
"steps": 20,
|
||||
"cfg": 7.0,
|
||||
"sampler_name": "euler",
|
||||
"scheduler": "normal",
|
||||
"denoise": 1.0,
|
||||
"model_name": "v1-5-pruned-emaonly.ckpt",
|
||||
"vae_name": "vae-ft-mse-840000-ema-pruned.ckpt",
|
||||
|
||||
# --- I2V / VACE Specifics ---
|
||||
"frame_to_skip": 81,
|
||||
"end_frame": 0,
|
||||
"transition": "1-2",
|
||||
"vace_length": 49,
|
||||
"vace schedule": 1,
|
||||
"input_a_frames": 0,
|
||||
"input_b_frames": 0,
|
||||
"input_a_frames": 16,
|
||||
"input_b_frames": 16,
|
||||
"reference switch": 1,
|
||||
"video file path": "",
|
||||
"reference image path": "",
|
||||
"reference path": "",
|
||||
"flf image path": "",
|
||||
|
||||
|
||||
# --- LoRAs ---
|
||||
"lora 1 high": "", "lora 1 low": "",
|
||||
"lora 2 high": "", "lora 2 low": "",
|
||||
@@ -43,14 +56,51 @@ DEFAULTS = {
|
||||
CONFIG_FILE = Path(".editor_config.json")
|
||||
SNIPPETS_FILE = Path(".editor_snippets.json")
|
||||
|
||||
# No restriction on directory navigation
|
||||
ALLOWED_BASE_DIR = Path("/").resolve()
|
||||
|
||||
def resolve_path_case_insensitive(path: str | Path) -> Path | None:
|
||||
"""Resolve a path with case-insensitive component matching on Linux.
|
||||
|
||||
Walks each component of the path and matches against actual directory
|
||||
entries when an exact match fails. Returns the corrected Path, or None
|
||||
if no match is found.
|
||||
"""
|
||||
p = Path(path)
|
||||
if p.exists():
|
||||
return p.resolve()
|
||||
|
||||
# Start from the root / anchor
|
||||
parts = p.resolve().parts # resolve to get absolute parts
|
||||
built = Path(parts[0]) # root "/"
|
||||
for component in parts[1:]:
|
||||
candidate = built / component
|
||||
if candidate.exists():
|
||||
built = candidate
|
||||
continue
|
||||
# Case-insensitive scan of the parent directory
|
||||
try:
|
||||
lower = component.lower()
|
||||
match = next(
|
||||
(entry for entry in built.iterdir() if entry.name.lower() == lower),
|
||||
None,
|
||||
)
|
||||
except PermissionError:
|
||||
return None
|
||||
if match is None:
|
||||
return None
|
||||
built = match
|
||||
return built.resolve()
|
||||
|
||||
|
||||
def load_config():
|
||||
"""Loads the main editor configuration (Favorites, Last Dir, Servers)."""
|
||||
if CONFIG_FILE.exists():
|
||||
try:
|
||||
with open(CONFIG_FILE, 'r') as f:
|
||||
return json.load(f)
|
||||
except:
|
||||
pass
|
||||
except (json.JSONDecodeError, IOError) as e:
|
||||
logger.warning(f"Failed to load config: {e}")
|
||||
return {"favorites": [], "last_dir": str(Path.cwd()), "comfy_instances": []}
|
||||
|
||||
def save_config(current_dir, favorites, extra_data=None):
|
||||
@@ -76,15 +126,15 @@ def load_snippets():
|
||||
try:
|
||||
with open(SNIPPETS_FILE, 'r') as f:
|
||||
return json.load(f)
|
||||
except:
|
||||
pass
|
||||
except (json.JSONDecodeError, IOError) as e:
|
||||
logger.warning(f"Failed to load snippets: {e}")
|
||||
return {}
|
||||
|
||||
def save_snippets(snippets):
    """Write the snippets mapping to SNIPPETS_FILE as indented JSON.

    Uses a temp-file + os.replace so a crash mid-write cannot truncate the
    existing snippets file (same atomic pattern as save_json).

    Args:
        snippets: JSON-serializable dict of snippet name -> text.
    """
    p = Path(SNIPPETS_FILE)
    tmp = p.with_name(p.name + '.tmp')
    with open(tmp, 'w') as f:
        json.dump(snippets, f, indent=4)
    os.replace(tmp, p)
|
||||
|
||||
def load_json(path):
|
||||
def load_json(path: str | Path) -> tuple[dict[str, Any], float]:
|
||||
path = Path(path)
|
||||
if not path.exists():
|
||||
return DEFAULTS.copy(), 0
|
||||
@@ -93,23 +143,103 @@ def load_json(path):
|
||||
data = json.load(f)
|
||||
return data, path.stat().st_mtime
|
||||
except Exception as e:
|
||||
st.error(f"Error loading JSON: {e}")
|
||||
logger.error(f"Error loading JSON: {e}")
|
||||
return DEFAULTS.copy(), 0
|
||||
|
||||
def save_json(path, data):
|
||||
with open(path, 'w') as f:
|
||||
def save_json(path: str | Path, data: dict[str, Any]) -> None:
|
||||
path = Path(path)
|
||||
tmp = path.with_suffix('.json.tmp')
|
||||
with open(tmp, 'w') as f:
|
||||
json.dump(data, f, indent=4)
|
||||
os.replace(tmp, path)
|
||||
|
||||
def get_file_mtime(path):
|
||||
def get_file_mtime(path: str | Path) -> float:
|
||||
"""Returns the modification time of a file, or 0 if it doesn't exist."""
|
||||
path = Path(path)
|
||||
if path.exists():
|
||||
return path.stat().st_mtime
|
||||
return 0
|
||||
|
||||
def generate_templates(current_dir):
    """Creates dummy template files if folder is empty."""
    # Single-item template with the default field set.
    save_json(current_dir / "template_i2v.json", DEFAULTS)

    # Two-entry batch template, each entry an independent copy of DEFAULTS.
    save_json(
        current_dir / "template_batch.json",
        {"batch_data": [DEFAULTS.copy(), DEFAULTS.copy()]},
    )
|
||||
def sync_to_db(db, project_name: str, file_path: Path, data: dict) -> None:
    """Dual-write helper: sync JSON data to the project database.

    Resolves (or creates) the data_file, upserts all sequences from batch_data,
    and saves the history_tree. All writes happen in a single transaction.

    Args:
        db: Project database wrapper exposing get_project, get_data_file and
            a raw sqlite `conn`. A falsy db makes this a no-op.
        project_name: Name of the project to sync into; falsy is a no-op.
        file_path: Source JSON file; only its stem is used as the data_file name.
        data: Full JSON payload; KEY_BATCH_DATA / KEY_HISTORY_TREE entries are
            split out, everything else is stored as top_level metadata.

    Any failure is logged as a warning and never raised to the caller
    (best-effort dual write).
    """
    import time  # local import: keeps the module's import surface unchanged

    if not db or not project_name:
        return
    try:
        proj = db.get_project(project_name)
        if not proj:
            return
        file_name = Path(file_path).stem

        # Use a single transaction for atomicity.
        db.conn.execute("BEGIN IMMEDIATE")
        try:
            df = db.get_data_file(proj["id"], file_name)
            top_level = {k: v for k, v in data.items()
                         if k not in (KEY_BATCH_DATA, KEY_HISTORY_TREE)}
            now = time.time()
            if not df:
                cur = db.conn.execute(
                    "INSERT INTO data_files (project_id, name, data_type, top_level, created_at, updated_at) "
                    "VALUES (?, ?, ?, ?, ?, ?)",
                    (proj["id"], file_name, "generic", json.dumps(top_level), now, now),
                )
                df_id = cur.lastrowid
            else:
                df_id = df["id"]
                # Update top_level metadata on the existing data_file.
                db.conn.execute(
                    "UPDATE data_files SET top_level = ?, updated_at = ? WHERE id = ?",
                    (json.dumps(top_level), now, df_id),
                )

            # Sync sequences: full replace (delete + re-insert) keeps the DB
            # an exact mirror of the JSON batch list.
            batch_data = data.get(KEY_BATCH_DATA, [])
            if isinstance(batch_data, list):
                db.conn.execute("DELETE FROM sequences WHERE data_file_id = ?", (df_id,))
                for item in batch_data:
                    if not isinstance(item, dict):
                        continue
                    seq_num = int(item.get(KEY_SEQUENCE_NUMBER, 0))
                    db.conn.execute(
                        "INSERT INTO sequences (data_file_id, sequence_number, data, updated_at) "
                        "VALUES (?, ?, ?, ?)",
                        (df_id, seq_num, json.dumps(item), time.time()),
                    )

            # Sync history tree (one row per data_file, upserted).
            history_tree = data.get(KEY_HISTORY_TREE)
            if history_tree and isinstance(history_tree, dict):
                db.conn.execute(
                    "INSERT INTO history_trees (data_file_id, tree_data, updated_at) "
                    "VALUES (?, ?, ?) "
                    "ON CONFLICT(data_file_id) DO UPDATE SET tree_data=excluded.tree_data, updated_at=excluded.updated_at",
                    (df_id, json.dumps(history_tree), time.time()),
                )

            db.conn.execute("COMMIT")
        except Exception:
            # Roll back on any failure; the rollback itself is best-effort.
            try:
                db.conn.execute("ROLLBACK")
            except Exception:
                pass
            raise
    except Exception as e:
        logger.warning(f"sync_to_db failed: {e}")
|
||||
|
||||
|
||||
def generate_templates(current_dir: Path) -> None:
    """Creates batch template files if folder is empty."""
    # Each template holds a single default entry at sequence_number 1,
    # wrapped in the batch_data envelope. Written in a fixed order.
    for stem in ("batch_prompt_i2v", "batch_prompt_vace_extend"):
        entry = DEFAULTS.copy()
        entry[KEY_SEQUENCE_NUMBER] = 1
        save_json(current_dir / f"{stem}.json", {KEY_BATCH_DATA: [entry]})
|
||||
|
||||
168
web/json_dynamic.js
Normal file
168
web/json_dynamic.js
Normal file
@@ -0,0 +1,168 @@
|
||||
import { app } from "../../scripts/app.js";
|
||||
import { api } from "../../scripts/api.js";
|
||||
|
||||
// ComfyUI web extension for JSONLoaderDynamic: replaces the node's Python
// default outputs with one output per key found in the referenced JSON file.
app.registerExtension({
  name: "json.manager.dynamic",

  async beforeRegisterNodeDef(nodeType, nodeData, app) {
    if (nodeData.name !== "JSONLoaderDynamic") return;

    const origOnNodeCreated = nodeType.prototype.onNodeCreated;
    nodeType.prototype.onNodeCreated = function () {
      origOnNodeCreated?.apply(this, arguments);

      // Hide internal widgets (managed by JS)
      for (const name of ["output_keys", "output_types"]) {
        const w = this.widgets?.find(w => w.name === name);
        if (w) { w.type = "hidden"; w.computeSize = () => [0, -4]; }
      }

      // Do NOT remove default outputs synchronously here.
      // During graph loading, ComfyUI creates all nodes (firing onNodeCreated)
      // before configuring them. Other nodes (e.g. Kijai Set/Get) may resolve
      // links to our outputs during their configure step. If we remove outputs
      // here, those nodes find no output slot and error out.
      //
      // Instead, defer cleanup: for loaded workflows onConfigure sets _configured
      // before this runs; for new nodes the defaults are cleaned up.
      this._configured = false;

      // Add Refresh button
      this.addWidget("button", "Refresh Outputs", null, () => {
        this.refreshDynamicOutputs();
      });

      queueMicrotask(() => {
        if (!this._configured) {
          // New node (not loading) — remove the 32 Python default outputs
          while (this.outputs.length > 0) {
            this.removeOutput(0);
          }
          this.setSize(this.computeSize());
          app.graph?.setDirtyCanvas(true, true);
        }
      });
    };

    /**
     * Fetch the key list for the current json_path/sequence_number and
     * rebuild the node's outputs, reusing slots by name to preserve links.
     */
    nodeType.prototype.refreshDynamicOutputs = async function () {
      const pathWidget = this.widgets?.find(w => w.name === "json_path");
      const seqWidget = this.widgets?.find(w => w.name === "sequence_number");
      if (!pathWidget?.value) return;

      try {
        const resp = await api.fetchApi(
          `/json_manager/get_keys?path=${encodeURIComponent(pathWidget.value)}&sequence_number=${seqWidget?.value || 1}`
        );
        const data = await resp.json();
        const { keys, types } = data;

        // If the file wasn't found, keep existing outputs and links intact
        if (data.error === "file_not_found") {
          console.warn("[JSONLoaderDynamic] File not found, keeping existing outputs:", pathWidget.value);
          return;
        }

        // FIX: any other error (or a malformed response without keys/types
        // arrays) must also keep existing outputs — previously keys.join()
        // below would throw on undefined. Mirrors ProjectLoaderDynamic.
        if (data.error || !Array.isArray(keys) || !Array.isArray(types)) {
          console.warn("[JSONLoaderDynamic] Invalid response, keeping existing outputs:", data.error ?? data);
          return;
        }

        // Store keys and types in hidden widgets for persistence
        const okWidget = this.widgets?.find(w => w.name === "output_keys");
        if (okWidget) okWidget.value = keys.join(",");
        const otWidget = this.widgets?.find(w => w.name === "output_types");
        if (otWidget) otWidget.value = types.join(",");

        // Build a map of current output names to slot indices
        const oldSlots = {};
        for (let i = 0; i < this.outputs.length; i++) {
          oldSlots[this.outputs[i].name] = i;
        }

        // Build new outputs, reusing existing slots to preserve links
        const newOutputs = [];
        for (let k = 0; k < keys.length; k++) {
          const key = keys[k];
          const type = types[k] || "*";
          if (key in oldSlots) {
            // Reuse existing slot object (keeps links intact)
            const slot = this.outputs[oldSlots[key]];
            slot.type = type;
            newOutputs.push(slot);
            delete oldSlots[key];
          } else {
            // New key — create a fresh slot
            newOutputs.push({ name: key, type: type, links: null });
          }
        }

        // Disconnect links on slots that are being removed
        for (const name in oldSlots) {
          const idx = oldSlots[name];
          if (this.outputs[idx]?.links?.length) {
            for (const linkId of [...this.outputs[idx].links]) {
              this.graph?.removeLink(linkId);
            }
          }
        }

        // Reassign the outputs array and fix link slot indices
        this.outputs = newOutputs;
        if (this.graph) {
          for (let i = 0; i < this.outputs.length; i++) {
            const links = this.outputs[i].links;
            if (!links) continue;
            for (const linkId of links) {
              const link = this.graph.links[linkId];
              if (link) link.origin_slot = i;
            }
          }
        }

        this.setSize(this.computeSize());
        // FIX: optional-chain like every other call site, so a node outside
        // a graph doesn't throw here.
        app.graph?.setDirtyCanvas(true, true);
      } catch (e) {
        console.error("[JSONLoaderDynamic] Refresh failed:", e);
      }
    };

    // Restore state on workflow load
    const origOnConfigure = nodeType.prototype.onConfigure;
    nodeType.prototype.onConfigure = function (info) {
      origOnConfigure?.apply(this, arguments);
      this._configured = true;

      // Hide internal widgets
      for (const name of ["output_keys", "output_types"]) {
        const w = this.widgets?.find(w => w.name === name);
        if (w) { w.type = "hidden"; w.computeSize = () => [0, -4]; }
      }

      const okWidget = this.widgets?.find(w => w.name === "output_keys");
      const otWidget = this.widgets?.find(w => w.name === "output_types");

      const keys = okWidget?.value
        ? okWidget.value.split(",").filter(k => k.trim())
        : [];
      const types = otWidget?.value
        ? otWidget.value.split(",")
        : [];

      if (keys.length > 0) {
        // On load, LiteGraph already restored serialized outputs with links.
        // Rename and set types to match stored state (preserves links).
        for (let i = 0; i < this.outputs.length && i < keys.length; i++) {
          this.outputs[i].name = keys[i].trim();
          if (types[i]) this.outputs[i].type = types[i];
        }

        // Remove any extra outputs beyond the key count
        while (this.outputs.length > keys.length) {
          this.removeOutput(this.outputs.length - 1);
        }
      } else if (this.outputs.length > 0) {
        // Widget values empty but serialized outputs exist — sync widgets
        // from the outputs LiteGraph already restored (fallback).
        if (okWidget) okWidget.value = this.outputs.map(o => o.name).join(",");
        if (otWidget) otWidget.value = this.outputs.map(o => o.type).join(",");
      }

      this.setSize(this.computeSize());
    };
  },
});
|
||||
272
web/project_dynamic.js
Normal file
272
web/project_dynamic.js
Normal file
@@ -0,0 +1,272 @@
|
||||
import { app } from "../../scripts/app.js";
|
||||
import { api } from "../../scripts/api.js";
|
||||
|
||||
// ComfyUI web extension for ProjectLoaderDynamic: slot 0 is a fixed
// total_sequences INT output; slots 1..n are rebuilt from the project
// manager API. Link-preservation logic depends on exact statement order.
app.registerExtension({
  name: "json.manager.project.dynamic",

  async beforeRegisterNodeDef(nodeType, nodeData, app) {
    if (nodeData.name !== "ProjectLoaderDynamic") return;

    const origOnNodeCreated = nodeType.prototype.onNodeCreated;
    nodeType.prototype.onNodeCreated = function () {
      origOnNodeCreated?.apply(this, arguments);

      // Hide internal widgets (managed by JS)
      for (const name of ["output_keys", "output_types"]) {
        const w = this.widgets?.find(w => w.name === name);
        if (w) { w.type = "hidden"; w.computeSize = () => [0, -4]; }
      }

      // Do NOT remove default outputs synchronously here.
      // During graph loading, ComfyUI creates all nodes (firing onNodeCreated)
      // before configuring them. Other nodes (e.g. Kijai Set/Get) may resolve
      // links to our outputs during their configure step. If we remove outputs
      // here, those nodes find no output slot and error out.
      //
      // Instead, defer cleanup: for loaded workflows onConfigure sets _configured
      // before this runs; for new nodes the defaults are cleaned up.
      this._configured = false;

      // Add Refresh button
      this.addWidget("button", "Refresh Outputs", null, () => {
        this.refreshDynamicOutputs();
      });

      // Auto-refresh with 500ms debounce on widget changes
      this._refreshTimer = null;
      const autoRefreshWidgets = ["project_name", "file_name", "sequence_number"];
      for (const widgetName of autoRefreshWidgets) {
        const w = this.widgets?.find(w => w.name === widgetName);
        if (w) {
          const origCallback = w.callback;
          const node = this;
          // Wrap the widget's own callback; `this` inside is the widget,
          // so the node is captured via closure.
          w.callback = function (...args) {
            origCallback?.apply(this, args);
            clearTimeout(node._refreshTimer);
            node._refreshTimer = setTimeout(() => {
              node.refreshDynamicOutputs();
            }, 500);
          };
        }
      }

      queueMicrotask(() => {
        if (!this._configured) {
          // New node (not loading) — remove the Python default outputs
          // and add only the fixed total_sequences slot
          while (this.outputs.length > 0) {
            this.removeOutput(0);
          }
          this.addOutput("total_sequences", "INT");
          this.setSize(this.computeSize());
          app.graph?.setDirtyCanvas(true, true);
        }
      });
    };

    // Reflect fetch status in the node title/colors ("ok" | "error" | "loading").
    nodeType.prototype._setStatus = function (status, message) {
      const baseTitle = "Project Loader (Dynamic)";
      if (status === "ok") {
        this.title = baseTitle;
        this.color = undefined;
        this.bgcolor = undefined;
      } else if (status === "error") {
        this.title = baseTitle + " - ERROR";
        this.color = "#ff4444";
        this.bgcolor = "#331111";
        if (message) this.title = baseTitle + ": " + message;
      } else if (status === "loading") {
        this.title = baseTitle + " - Loading...";
      }
      app.graph?.setDirtyCanvas(true, true);
    };

    // Fetch keys/types for the selected project file and rebuild dynamic
    // outputs (slots 1..n), reusing slots by name to preserve links.
    nodeType.prototype.refreshDynamicOutputs = async function () {
      const urlWidget = this.widgets?.find(w => w.name === "manager_url");
      const projectWidget = this.widgets?.find(w => w.name === "project_name");
      const fileWidget = this.widgets?.find(w => w.name === "file_name");
      const seqWidget = this.widgets?.find(w => w.name === "sequence_number");

      if (!urlWidget?.value || !projectWidget?.value || !fileWidget?.value) return;

      this._setStatus("loading");

      try {
        const resp = await api.fetchApi(
          `/json_manager/get_project_keys?url=${encodeURIComponent(urlWidget.value)}&project=${encodeURIComponent(projectWidget.value)}&file=${encodeURIComponent(fileWidget.value)}&seq=${seqWidget?.value || 1}`
        );

        if (!resp.ok) {
          let errorMsg = `HTTP ${resp.status}`;
          try {
            const errData = await resp.json();
            if (errData.message) errorMsg = errData.message;
          } catch (_) {}
          this._setStatus("error", errorMsg);
          return;
        }

        const data = await resp.json();
        const keys = data.keys;
        const types = data.types;

        // If the API returned an error or missing data, keep existing outputs and links intact
        if (data.error || !Array.isArray(keys) || !Array.isArray(types)) {
          const errMsg = data.error ? data.message || data.error : "Missing keys/types";
          this._setStatus("error", errMsg);
          return;
        }

        // Store keys and types in hidden widgets for persistence (JSON-encoded)
        const okWidget = this.widgets?.find(w => w.name === "output_keys");
        if (okWidget) okWidget.value = JSON.stringify(keys);
        const otWidget = this.widgets?.find(w => w.name === "output_types");
        if (otWidget) otWidget.value = JSON.stringify(types);

        // Slot 0 is always total_sequences (INT) — ensure it exists
        if (this.outputs.length === 0 || this.outputs[0].name !== "total_sequences") {
          this.outputs.unshift({ name: "total_sequences", type: "INT", links: null });
        }
        this.outputs[0].type = "INT";

        // Build a map of current dynamic output names to slot indices (skip slot 0)
        const oldSlots = {};
        for (let i = 1; i < this.outputs.length; i++) {
          oldSlots[this.outputs[i].name] = i;
        }

        // Build new dynamic outputs, reusing existing slots to preserve links
        const newOutputs = [this.outputs[0]]; // Keep total_sequences at slot 0
        for (let k = 0; k < keys.length; k++) {
          const key = keys[k];
          const type = types[k] || "*";
          if (key in oldSlots) {
            // Reuse the existing slot object so its links survive.
            const slot = this.outputs[oldSlots[key]];
            slot.type = type;
            newOutputs.push(slot);
            delete oldSlots[key];
          } else {
            newOutputs.push({ name: key, type: type, links: null });
          }
        }

        // Disconnect links on slots that are being removed
        for (const name in oldSlots) {
          const idx = oldSlots[name];
          if (this.outputs[idx]?.links?.length) {
            // Copy before iterating: removeLink mutates the links array.
            for (const linkId of [...this.outputs[idx].links]) {
              this.graph?.removeLink(linkId);
            }
          }
        }

        // Reassign the outputs array and fix link slot indices
        this.outputs = newOutputs;
        if (this.graph) {
          for (let i = 0; i < this.outputs.length; i++) {
            const links = this.outputs[i].links;
            if (!links) continue;
            for (const linkId of links) {
              const link = this.graph.links[linkId];
              if (link) link.origin_slot = i;
            }
          }
        }

        this._setStatus("ok");
        this.setSize(this.computeSize());
        app.graph?.setDirtyCanvas(true, true);
      } catch (e) {
        console.error("[ProjectLoaderDynamic] Refresh failed:", e);
        this._setStatus("error", "Server unreachable");
      }
    };

    // Restore state on workflow load
    const origOnConfigure = nodeType.prototype.onConfigure;
    nodeType.prototype.onConfigure = function (info) {
      origOnConfigure?.apply(this, arguments);
      this._configured = true;

      // Hide internal widgets
      for (const name of ["output_keys", "output_types"]) {
        const w = this.widgets?.find(w => w.name === name);
        if (w) { w.type = "hidden"; w.computeSize = () => [0, -4]; }
      }

      const okWidget = this.widgets?.find(w => w.name === "output_keys");
      const otWidget = this.widgets?.find(w => w.name === "output_types");

      // Parse keys/types — try JSON array first, fall back to comma-split
      // (older workflows stored the comma-joined format).
      let keys = [];
      if (okWidget?.value) {
        try { keys = JSON.parse(okWidget.value); } catch (_) {
          keys = okWidget.value.split(",").map(k => k.trim()).filter(Boolean);
        }
      }
      let types = [];
      if (otWidget?.value) {
        try { types = JSON.parse(otWidget.value); } catch (_) {
          types = otWidget.value.split(",").map(t => t.trim()).filter(Boolean);
        }
      }

      // Ensure slot 0 is total_sequences (INT)
      if (this.outputs.length === 0 || this.outputs[0].name !== "total_sequences") {
        this.outputs.unshift({ name: "total_sequences", type: "INT", links: null });
        // LiteGraph restores links AFTER onConfigure, so graph.links is
        // empty here. Defer link fixup to a microtask that runs after the
        // synchronous graph.configure() finishes (including link restoration).
        // We must also rebuild output.links arrays because LiteGraph will
        // place link IDs on the wrong outputs (shifted by the unshift above).
        const node = this;
        queueMicrotask(() => {
          if (!node.graph) return;
          // Clear all output.links — they were populated at old indices
          for (const output of node.outputs) {
            output.links = null;
          }
          // Rebuild from graph.links with corrected origin_slot (+1)
          for (const linkId in node.graph.links) {
            const link = node.graph.links[linkId];
            if (!link || link.origin_id !== node.id) continue;
            link.origin_slot += 1;
            const output = node.outputs[link.origin_slot];
            if (output) {
              if (!output.links) output.links = [];
              output.links.push(link.id);
            }
          }
          app.graph?.setDirtyCanvas(true, true);
        });
      }
      this.outputs[0].type = "INT";
      this.outputs[0].name = "total_sequences";

      if (keys.length > 0) {
        // On load, LiteGraph already restored serialized outputs with links.
        // Dynamic outputs start at slot 1. Rename and set types to match stored state.
        for (let i = 0; i < keys.length; i++) {
          const slotIdx = i + 1; // offset by 1 for total_sequences
          if (slotIdx < this.outputs.length) {
            this.outputs[slotIdx].name = keys[i];
            if (types[i]) this.outputs[slotIdx].type = types[i];
          }
        }

        // Remove any extra outputs beyond keys + total_sequences
        while (this.outputs.length > keys.length + 1) {
          this.removeOutput(this.outputs.length - 1);
        }
      } else if (this.outputs.length > 1) {
        // Widget values empty but serialized dynamic outputs exist — sync widgets
        // from the outputs LiteGraph already restored (fallback, skip slot 0).
        const dynamicOutputs = this.outputs.slice(1);
        if (okWidget) okWidget.value = JSON.stringify(dynamicOutputs.map(o => o.name));
        if (otWidget) otWidget.value = JSON.stringify(dynamicOutputs.map(o => o.type));
      }

      this.setSize(this.computeSize());
    };
  },
});
|
||||
Reference in New Issue
Block a user