Add JSONLoaderDynamic node with JS frontend for auto-discovered outputs
Dynamic node reads JSON keys and exposes them as outputs automatically via 32 AnyType slots managed by a JS extension (show/hide/rename). Includes /json_manager/get_keys API route, bool-safe type handling, and workflow save/reload support. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -1,3 +1,5 @@
|
|||||||
from .json_loader import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS
|
from .json_loader import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS
|
||||||
|
|
||||||
__all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS']
|
WEB_DIRECTORY = "./web"
|
||||||
|
|
||||||
|
__all__ = ['NODE_CLASS_MAPPINGS', 'NODE_DISPLAY_NAME_MAPPINGS', 'WEB_DIRECTORY']
|
||||||
|
|||||||
@@ -6,6 +6,22 @@ from typing import Any
|
|||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
KEY_BATCH_DATA = "batch_data"
|
KEY_BATCH_DATA = "batch_data"
|
||||||
|
MAX_DYNAMIC_OUTPUTS = 32
|
||||||
|
|
||||||
|
|
||||||
|
class AnyType(str):
    """Wildcard connector type: never compares unequal to any other value.

    ComfyUI decides whether two slots are link-compatible via ``!=`` on the
    type strings, so a type that is never "not equal" connects to anything.
    """

    def __ne__(self, other: object) -> bool:
        # Always claim equality-for-linking purposes, whatever `other` is.
        return False


# Shared singleton used for every dynamic output slot ("*" = any type).
any_type = AnyType("*")
||||||
|
|
||||||
|
|
||||||
|
# Optional server integration: outside a ComfyUI runtime (e.g. unit tests)
# these imports fail and the API route below is simply not registered.
try:
    from server import PromptServer
    from aiohttp import web
except ImportError:
    # BUGFIX: define BOTH names on failure. Previously only PromptServer was
    # set, leaving `web` undefined and any `web is None` guard a NameError.
    PromptServer = None
    web = None
||||||
def to_float(val: Any) -> float:
|
def to_float(val: Any) -> float:
|
||||||
@@ -49,6 +65,69 @@ def read_json_data(json_path: str) -> dict[str, Any]:
|
|||||||
return {}
|
return {}
|
||||||
return data
|
return data
|
||||||
|
|
||||||
|
# --- API Route ---
if PromptServer is not None:

    @PromptServer.instance.routes.get("/json_manager/get_keys")
    async def get_keys_route(request):
        """Return the top-level keys of the JSON item the node would load.

        Query params: ``path`` (JSON file path) and ``sequence_number``
        (1-based batch index; falls back to 1 on unparsable input).
        """
        json_path = request.query.get("path", "")
        raw_seq = request.query.get("sequence_number", "1")
        try:
            seq = int(raw_seq)
        except (ValueError, TypeError):
            seq = 1
        # NOTE(review): `path` comes straight from the client, so this can
        # read any JSON file the server process can — confirm acceptable.
        item = get_batch_item(read_json_data(json_path), seq)
        key_list = list(item.keys()) if isinstance(item, dict) else []
        return web.json_response({"keys": key_list})
||||||
|
|
||||||
|
|
||||||
|
# ==========================================
|
||||||
|
# 0. DYNAMIC NODE
|
||||||
|
# ==========================================
|
||||||
|
|
||||||
|
class JSONLoaderDynamic:
    """JSON loader whose outputs are driven by a comma-separated key list.

    The hidden ``output_keys`` widget is maintained by the JS extension
    (web/json_dynamic.js); each listed key becomes one of the
    MAX_DYNAMIC_OUTPUTS wildcard output slots, in order.
    """

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "json_path": ("STRING", {"default": "", "multiline": False}),
                "sequence_number": ("INT", {"default": 1, "min": 1, "max": 9999}),
            },
            "optional": {
                # Managed by the frontend extension; users never edit it directly.
                "output_keys": ("STRING", {"default": ""}),
            },
        }

    RETURN_TYPES = tuple(any_type for _ in range(MAX_DYNAMIC_OUTPUTS))
    RETURN_NAMES = tuple(f"output_{i}" for i in range(MAX_DYNAMIC_OUTPUTS))
    FUNCTION = "load_dynamic"
    CATEGORY = "utils/json"
    OUTPUT_NODE = False

    def load_dynamic(self, json_path, sequence_number, output_keys=""):
        """Load the selected batch item and emit one value per requested key.

        Returns a tuple of exactly MAX_DYNAMIC_OUTPUTS values: requested keys
        first (ints/floats kept as-is, bools lowered to "true"/"false",
        everything else stringified, missing keys -> ""), then "" padding.
        """
        data = read_json_data(json_path)
        target = get_batch_item(data, sequence_number)
        if not isinstance(target, dict):
            # BUGFIX: a non-dict batch item (e.g. JSON root is a list or
            # scalar) used to raise AttributeError on .get(); treat as empty.
            target = {}

        keys = [k.strip() for k in output_keys.split(",") if k.strip()] if output_keys else []
        # BUGFIX: clamp so the return tuple never exceeds RETURN_TYPES length.
        keys = keys[:MAX_DYNAMIC_OUTPUTS]

        results = []
        for key in keys:
            val = target.get(key, "")
            if isinstance(val, bool):
                # bool is a subclass of int — must be checked first.
                results.append(str(val).lower())
            elif isinstance(val, (int, float)):
                results.append(val)
            else:
                results.append(str(val))

        # Pad so the tuple always matches the declared output count.
        results.extend("" for _ in range(MAX_DYNAMIC_OUTPUTS - len(results)))
        return tuple(results)
||||||
|
|
||||||
|
|
||||||
# ==========================================
|
# ==========================================
|
||||||
# 1. STANDARD NODES (Single File)
|
# 1. STANDARD NODES (Single File)
|
||||||
# ==========================================
|
# ==========================================
|
||||||
@@ -270,6 +349,7 @@ class JSONLoaderCustom6:
|
|||||||
|
|
||||||
# --- Mappings ---
|
# --- Mappings ---
|
||||||
NODE_CLASS_MAPPINGS = {
|
NODE_CLASS_MAPPINGS = {
|
||||||
|
"JSONLoaderDynamic": JSONLoaderDynamic,
|
||||||
"JSONLoaderLoRA": JSONLoaderLoRA,
|
"JSONLoaderLoRA": JSONLoaderLoRA,
|
||||||
"JSONLoaderStandard": JSONLoaderStandard,
|
"JSONLoaderStandard": JSONLoaderStandard,
|
||||||
"JSONLoaderVACE": JSONLoaderVACE,
|
"JSONLoaderVACE": JSONLoaderVACE,
|
||||||
@@ -282,6 +362,7 @@ NODE_CLASS_MAPPINGS = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
NODE_DISPLAY_NAME_MAPPINGS = {
|
NODE_DISPLAY_NAME_MAPPINGS = {
|
||||||
|
"JSONLoaderDynamic": "JSON Loader (Dynamic)",
|
||||||
"JSONLoaderLoRA": "JSON Loader (LoRAs Only)",
|
"JSONLoaderLoRA": "JSON Loader (LoRAs Only)",
|
||||||
"JSONLoaderStandard": "JSON Loader (Standard/I2V)",
|
"JSONLoaderStandard": "JSON Loader (Standard/I2V)",
|
||||||
"JSONLoaderVACE": "JSON Loader (VACE Full)",
|
"JSONLoaderVACE": "JSON Loader (VACE Full)",
|
||||||
|
|||||||
@@ -3,7 +3,10 @@ import os
|
|||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from json_loader import to_float, to_int, get_batch_item, read_json_data
|
from json_loader import (
|
||||||
|
to_float, to_int, get_batch_item, read_json_data,
|
||||||
|
JSONLoaderDynamic, MAX_DYNAMIC_OUTPUTS,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class TestToFloat:
|
class TestToFloat:
|
||||||
@@ -75,3 +78,88 @@ class TestReadJsonData:
|
|||||||
p = tmp_path / "ok.json"
|
p = tmp_path / "ok.json"
|
||||||
p.write_text(json.dumps({"key": "val"}))
|
p.write_text(json.dumps({"key": "val"}))
|
||||||
assert read_json_data(str(p)) == {"key": "val"}
|
assert read_json_data(str(p)) == {"key": "val"}
|
||||||
|
|
||||||
|
|
||||||
|
class TestJSONLoaderDynamic:
    """End-to-end checks of JSONLoaderDynamic.load_dynamic()."""

    def _write(self, tmp_path, payload):
        # Dump `payload` into a temp JSON file and return its path as str.
        target = tmp_path / "test.json"
        target.write_text(json.dumps(payload))
        return str(target)

    def test_known_keys(self, tmp_path):
        src = self._write(tmp_path, {"name": "alice", "age": 30, "score": 9.5})
        out = JSONLoaderDynamic().load_dynamic(src, 1, output_keys="name,age,score")
        assert out[0] == "alice"
        assert out[1] == 30
        assert out[2] == 9.5

    def test_empty_output_keys(self, tmp_path):
        src = self._write(tmp_path, {"name": "alice"})
        out = JSONLoaderDynamic().load_dynamic(src, 1, output_keys="")
        assert len(out) == MAX_DYNAMIC_OUTPUTS
        assert all(slot == "" for slot in out)

    def test_pads_to_max(self, tmp_path):
        src = self._write(tmp_path, {"a": "1", "b": "2"})
        out = JSONLoaderDynamic().load_dynamic(src, 1, output_keys="a,b")
        assert len(out) == MAX_DYNAMIC_OUTPUTS
        assert out[:2] == ("1", "2")
        assert all(slot == "" for slot in out[2:])

    def test_type_preservation_int(self, tmp_path):
        src = self._write(tmp_path, {"count": 42})
        out = JSONLoaderDynamic().load_dynamic(src, 1, output_keys="count")
        assert out[0] == 42
        assert isinstance(out[0], int)

    def test_type_preservation_float(self, tmp_path):
        src = self._write(tmp_path, {"rate": 3.14})
        out = JSONLoaderDynamic().load_dynamic(src, 1, output_keys="rate")
        assert out[0] == 3.14
        assert isinstance(out[0], float)

    def test_type_preservation_str(self, tmp_path):
        src = self._write(tmp_path, {"label": "hello"})
        out = JSONLoaderDynamic().load_dynamic(src, 1, output_keys="label")
        assert out[0] == "hello"
        assert isinstance(out[0], str)

    def test_bool_becomes_string(self, tmp_path):
        src = self._write(tmp_path, {"flag": True, "off": False})
        out = JSONLoaderDynamic().load_dynamic(src, 1, output_keys="flag,off")
        assert out[:2] == ("true", "false")
        assert isinstance(out[0], str)

    def test_missing_key_returns_empty_string(self, tmp_path):
        src = self._write(tmp_path, {"a": "1"})
        out = JSONLoaderDynamic().load_dynamic(src, 1, output_keys="a,nonexistent")
        assert out[0] == "1"
        assert out[1] == ""

    def test_missing_file_returns_all_empty(self, tmp_path):
        out = JSONLoaderDynamic().load_dynamic(str(tmp_path / "nope.json"), 1, output_keys="a,b")
        assert len(out) == MAX_DYNAMIC_OUTPUTS
        assert out[0] == ""
        assert out[1] == ""

    def test_batch_data(self, tmp_path):
        src = self._write(tmp_path, {
            "batch_data": [
                {"sequence_number": 1, "x": "first"},
                {"sequence_number": 2, "x": "second"},
            ]
        })
        out = JSONLoaderDynamic().load_dynamic(src, 2, output_keys="x")
        assert out[0] == "second"
||||||
|
|||||||
133
web/json_dynamic.js
Normal file
133
web/json_dynamic.js
Normal file
@@ -0,0 +1,133 @@
|
|||||||
|
import { app } from "../../scripts/app.js";
|
||||||
|
import { api } from "../../scripts/api.js";
|
||||||
|
|
||||||
|
app.registerExtension({
    name: "json.manager.dynamic",

    async beforeRegisterNodeDef(nodeType, nodeData, app) {
        if (nodeData.name !== "JSONLoaderDynamic") return;

        // Must stay in sync with MAX_DYNAMIC_OUTPUTS in json_loader.py.
        const MAX_OUTPUTS = 32;

        // Hide the output_keys widget — it is state managed by this
        // extension, not something the user edits directly.
        function hideKeysWidget(node) {
            const widget = node.widgets?.find((w) => w.name === "output_keys");
            if (widget) {
                widget.type = "hidden";
                widget.computeSize = () => [0, -4];
            }
            return widget;
        }

        const origOnNodeCreated = nodeType.prototype.onNodeCreated;
        nodeType.prototype.onNodeCreated = function () {
            origOnNodeCreated?.apply(this, arguments);

            hideKeysWidget(this);

            // Start with every declared output hidden.
            this._dynamicOutputCount = 0;
            for (let i = 0; i < this.outputs.length; i++) {
                this.outputs[i]._visible = false;
            }

            this.addWidget("button", "Refresh Outputs", null, () => {
                this.refreshDynamicOutputs();
            });

            // Keep the full slot list for later show/hide, then show none.
            this._allOutputs = [...this.outputs];
            this.outputs.length = 0;
        };

        nodeType.prototype.refreshDynamicOutputs = async function () {
            const pathWidget = this.widgets?.find((w) => w.name === "json_path");
            const seqWidget = this.widgets?.find((w) => w.name === "sequence_number");
            if (!pathWidget?.value) return;

            try {
                const resp = await api.fetchApi(
                    `/json_manager/get_keys?path=${encodeURIComponent(pathWidget.value)}&sequence_number=${seqWidget?.value || 1}`
                );
                let { keys } = await resp.json();
                // BUGFIX: clamp to the declared slot count. Without this, a
                // JSON object with more than MAX_OUTPUTS keys extended
                // this.outputs past its real slots (undefined holes) and
                // desynced the frontend from Python's fixed 32-value tuple.
                keys = keys.slice(0, MAX_OUTPUTS);

                // Persist for the Python node (and for workflow save/load).
                const okWidget = this.widgets?.find((w) => w.name === "output_keys");
                if (okWidget) okWidget.value = keys.join(",");

                // Work on the full slot array again.
                if (this._allOutputs) {
                    this.outputs = this._allOutputs;
                }

                // Drop any links on slots that are about to be hidden.
                for (let i = keys.length; i < MAX_OUTPUTS; i++) {
                    if (this.outputs[i]?.links?.length) {
                        for (const linkId of [...this.outputs[i].links]) {
                            this.graph?.removeLink(linkId);
                        }
                    }
                }

                // Rename active slots after their keys; reset the rest.
                for (let i = 0; i < MAX_OUTPUTS; i++) {
                    if (i < keys.length) {
                        this.outputs[i].name = keys[i];
                        this.outputs[i]._visible = true;
                    } else {
                        this.outputs[i].name = `output_${i}`;
                        this.outputs[i]._visible = false;
                    }
                }

                // Show only the active slots.
                this._dynamicOutputCount = keys.length;
                this._allOutputs = [...this.outputs];
                this.outputs.length = keys.length;

                this.setSize(this.computeSize());
                app.graph.setDirtyCanvas(true, true);
            } catch (e) {
                console.error("[JSONLoaderDynamic] Refresh failed:", e);
            }
        };

        // Restore dynamic output state when a saved workflow is loaded.
        const origOnConfigure = nodeType.prototype.onConfigure;
        nodeType.prototype.onConfigure = function (info) {
            origOnConfigure?.apply(this, arguments);

            const okWidget = hideKeysWidget(this);

            let keys = okWidget?.value
                ? okWidget.value.split(",").filter((k) => k.trim())
                : [];
            // Same clamp as refreshDynamicOutputs — a hand-edited workflow
            // must not create more outputs than the node declares.
            keys = keys.slice(0, MAX_OUTPUTS);

            // Make sure we hold all MAX_OUTPUTS slot descriptors.
            this._allOutputs = [...this.outputs];
            while (this._allOutputs.length < MAX_OUTPUTS) {
                this._allOutputs.push({
                    name: `output_${this._allOutputs.length}`,
                    type: "*",
                    links: null,
                    _visible: false,
                });
            }

            // Rename and set visibility from the saved key list.
            for (let i = 0; i < MAX_OUTPUTS; i++) {
                if (i < keys.length) {
                    this._allOutputs[i].name = keys[i].trim();
                    this._allOutputs[i]._visible = true;
                } else {
                    this._allOutputs[i].name = `output_${i}`;
                    this._allOutputs[i]._visible = false;
                }
            }

            this._dynamicOutputCount = keys.length;
            this.outputs = this._allOutputs.slice(0, keys.length);

            this.setSize(this.computeSize());
        };
    },
});
|
||||||
Reference in New Issue
Block a user