Add SaveSnapshot node with visual separation and rolling limit

Introduce a SaveSnapshot custom node that triggers snapshot captures
via WebSocket. Node-triggered snapshots are visually distinct in the
sidebar (purple left border + "Node" badge) and managed with their
own independent rolling limit (maxNodeSnapshots setting), separate
from auto/manual snapshot pruning. Node snapshots skip hash-dedup
so repeated queue runs always capture.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-24 21:22:53 +01:00
parent 838b3d0b00
commit 3877c5838c
5 changed files with 160 additions and 11 deletions

View File

@@ -6,7 +6,8 @@ to browse and restore any previous version. Stored in server-side JSON files.
"""
from . import snapshot_routes
from .snapshot_node import SaveSnapshot
WEB_DIRECTORY = "./js"
NODE_CLASS_MAPPINGS = {}
NODE_DISPLAY_NAME_MAPPINGS = {}
NODE_CLASS_MAPPINGS = {"SaveSnapshot": SaveSnapshot}
NODE_DISPLAY_NAME_MAPPINGS = {"SaveSnapshot": "Save Snapshot"}

View File

@@ -22,6 +22,7 @@ let maxSnapshots = 50;
let debounceMs = 3000;
let autoCaptureEnabled = true;
let captureOnLoad = true;
let maxNodeSnapshots = 5;
// ─── State ───────────────────────────────────────────────────────────
@@ -126,7 +127,7 @@ async function pruneSnapshots(workflowKey) {
const resp = await api.fetchApi("/snapshot-manager/prune", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ workflowKey, maxSnapshots }),
body: JSON.stringify({ workflowKey, maxSnapshots, source: "regular" }),
});
if (!resp.ok) {
const err = await resp.json();
@@ -137,6 +138,22 @@ async function pruneSnapshots(workflowKey) {
}
}
// Enforce the node-snapshot rolling limit on the server side.
// Asks the backend to drop the oldest unlocked node-triggered snapshots for
// `workflowKey` beyond `maxNodeSnapshots`. Best-effort: any failure is logged
// as a warning and never propagated to the caller.
async function pruneNodeSnapshots(workflowKey) {
  const payload = { workflowKey, maxSnapshots: maxNodeSnapshots, source: "node" };
  try {
    const resp = await api.fetchApi("/snapshot-manager/prune", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(payload),
    });
    if (resp.ok) return;
    const err = await resp.json();
    throw new Error(err.error || resp.statusText);
  } catch (err) {
    console.warn(`[${EXTENSION_NAME}] Node prune failed:`, err);
  }
}
// ─── IndexedDB Migration ────────────────────────────────────────────
async function migrateFromIndexedDB() {
@@ -331,6 +348,43 @@ async function captureSnapshot(label = "Auto") {
return true;
}
// Capture a snapshot in response to a SaveSnapshot node firing during queue
// execution. Unlike the regular auto-capture path, this intentionally skips
// hash-dedup (so repeated queue runs always produce a record) and prunes
// against the independent node-snapshot limit (maxNodeSnapshots).
// Returns true when a record was persisted, false otherwise.
async function captureNodeSnapshot(label = "Node Trigger") {
  if (restoreLock) return false; // a restore is in flight; don't capture mid-swap
  const graphData = getGraphData();
  if (!graphData) return false;
  const nodes = graphData.nodes || [];
  if (nodes.length === 0) return false; // empty graph — nothing worth saving
  const workflowKey = getWorkflowKey();
  const record = {
    id: generateId(),
    workflowKey,
    timestamp: Date.now(),
    label,
    nodeCount: nodes.length,
    graphData,
    locked: false,
    source: "node", // marks the record for distinct styling + separate pruning
  };
  try {
    await db_put(record);
    await pruneNodeSnapshots(workflowKey);
  } catch (err) {
    // Don't let a storage failure break queue execution, but leave a trace
    // instead of silently swallowing the error.
    console.warn(`[${EXTENSION_NAME}] Node snapshot save failed:`, err);
    return false;
  }
  pickerDirty = true;
  if (sidebarRefresh) {
    sidebarRefresh().catch(() => {});
  }
  return true;
}
function scheduleCaptureSnapshot() {
if (!autoCaptureEnabled) return;
if (restoreLock) return;
@@ -566,6 +620,20 @@ const CSS = `
background: #dc2626;
color: #fff;
}
.snap-item-node {
border-left: 3px solid #6d28d9;
}
.snap-node-badge {
display: inline-block;
font-size: 9px;
padding: 1px 5px;
border-radius: 3px;
background: #6d28d9;
color: #fff;
margin-left: 6px;
vertical-align: middle;
font-weight: 500;
}
.snap-empty {
padding: 20px;
text-align: center;
@@ -922,7 +990,11 @@ async function buildSidebar(el) {
// newest first
records.sort((a, b) => b.timestamp - a.timestamp);
countSpan.textContent = `${records.length} / ${maxSnapshots}`;
const regularCount = records.filter(r => r.source !== "node").length;
const nodeCount = records.filter(r => r.source === "node").length;
countSpan.textContent = nodeCount > 0
? `${regularCount}/${maxSnapshots} + ${nodeCount}/${maxNodeSnapshots} node`
: `${regularCount} / ${maxSnapshots}`;
// Update selector label and styling
selectorLabel.textContent = effKey;
@@ -959,7 +1031,7 @@ async function buildSidebar(el) {
for (const rec of records) {
const item = document.createElement("div");
item.className = "snap-item";
item.className = rec.source === "node" ? "snap-item snap-item-node" : "snap-item";
const info = document.createElement("div");
info.className = "snap-item-info";
@@ -967,6 +1039,12 @@ async function buildSidebar(el) {
const labelDiv = document.createElement("div");
labelDiv.className = "snap-item-label";
labelDiv.textContent = rec.label;
if (rec.source === "node") {
const badge = document.createElement("span");
badge.className = "snap-node-badge";
badge.textContent = "Node";
labelDiv.appendChild(badge);
}
const time = document.createElement("div");
time.className = "snap-item-time";
@@ -1102,6 +1180,17 @@ if (window.__COMFYUI_FRONTEND_VERSION__) {
captureOnLoad = value;
},
},
{
id: "SnapshotManager.maxNodeSnapshots",
name: "Max node-triggered snapshots per workflow",
type: "slider",
defaultValue: 5,
attrs: { min: 1, max: 50, step: 1 },
category: ["Snapshot Manager", "Capture Settings", "Max node-triggered snapshots"],
onChange(value) {
maxNodeSnapshots = value;
},
},
],
init() {
@@ -1130,6 +1219,14 @@ if (window.__COMFYUI_FRONTEND_VERSION__) {
scheduleCaptureSnapshot();
});
// Listen for node-triggered snapshot captures via WebSocket
api.addEventListener("snapshot-manager-capture", (event) => {
const label = event.detail?.label || "Node Trigger";
captureNodeSnapshot(label).catch((err) => {
console.warn(`[${EXTENSION_NAME}] Node-triggered capture failed:`, err);
});
});
// Listen for workflow switches via Pinia store action
const workflowStore = app.extensionManager?.workflow;
if (workflowStore?.$onAction) {

39
snapshot_node.py Normal file
View File

@@ -0,0 +1,39 @@
from server import PromptServer
class _AnyType(str):
def __ne__(self, other):
return False
ANY_TYPE = _AnyType("*")
class SaveSnapshot:
    """ComfyUI output node that asks the frontend to capture a snapshot.

    Passes its input through unchanged; as a side effect, executing the node
    pushes a "snapshot-manager-capture" WebSocket event that the JS extension
    listens for to persist a node-triggered snapshot.
    """

    CATEGORY = "Snapshot Manager"
    FUNCTION = "execute"
    RETURN_TYPES = (ANY_TYPE,)
    # OUTPUT_NODE so the node executes even with nothing wired downstream.
    OUTPUT_NODE = True

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                # Wildcard input: wire any value through to trigger a capture.
                "value": (ANY_TYPE, {}),
                "label": ("STRING", {"default": "Node Trigger"}),
            }
        }

    @classmethod
    def VALIDATE_INPUTS(cls, input_types):
        # Accept any link type; the wildcard input is intentionally untyped.
        return True

    @classmethod
    def IS_CHANGED(cls, *args, **kwargs):
        # NaN never compares equal to itself, so ComfyUI re-executes this node
        # on every queue run — repeated runs should always capture.
        return float("NaN")

    def execute(self, value, label):
        # Notify the frontend over WebSocket; the JS side performs the capture.
        PromptServer.instance.send_sync(
            "snapshot-manager-capture", {"label": label}
        )
        # Pass the input through so the node can sit inline in a wiring chain.
        return (value,)

View File

@@ -84,9 +84,10 @@ async def prune_snapshots(request):
data = await request.json()
workflow_key = data.get("workflowKey")
max_snapshots = data.get("maxSnapshots")
source = data.get("source")
if not workflow_key or max_snapshots is None:
return web.json_response({"error": "Missing workflowKey or maxSnapshots"}, status=400)
deleted = storage.prune(workflow_key, int(max_snapshots))
deleted = storage.prune(workflow_key, int(max_snapshots), source=source)
return web.json_response({"deleted": deleted})
except Exception as e:
return web.json_response({"error": str(e)}, status=500)

View File

@@ -104,13 +104,24 @@ def get_all_workflow_keys():
return results
def prune(workflow_key, max_snapshots):
"""Delete oldest unlocked snapshots beyond limit. Returns count deleted."""
def prune(workflow_key, max_snapshots, source=None):
"""Delete oldest unlocked snapshots beyond limit. Returns count deleted.
source filtering:
- "node": only prune records where source == "node"
- "regular": only prune records where source is absent or not "node"
- None: prune all unlocked (existing behavior)
"""
records = get_all_for_workflow(workflow_key)
unlocked = [r for r in records if not r.get("locked")]
if len(unlocked) <= max_snapshots:
if source == "node":
candidates = [r for r in records if not r.get("locked") and r.get("source") == "node"]
elif source == "regular":
candidates = [r for r in records if not r.get("locked") and r.get("source") != "node"]
else:
candidates = [r for r in records if not r.get("locked")]
if len(candidates) <= max_snapshots:
return 0
to_delete = unlocked[: len(unlocked) - max_snapshots]
to_delete = candidates[: len(candidates) - max_snapshots]
d = _workflow_dir(workflow_key)
deleted = 0
for rec in to_delete: