Remove JSONLoaderDynamic, handled by ComfyUI-JSON-Dynamic extension
The separate ComfyUI-JSON-Dynamic extension already provides the same node, so this commit removes the duplicate: json_loader.py, web/json_dynamic.js, and their tests are deleted, leaving only ProjectLoaderDynamic in this extension. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -1,8 +1,7 @@
|
|||||||
from .json_loader import NODE_CLASS_MAPPINGS, NODE_DISPLAY_NAME_MAPPINGS
|
|
||||||
from .project_loader import PROJECT_NODE_CLASS_MAPPINGS, PROJECT_NODE_DISPLAY_NAME_MAPPINGS
|
from .project_loader import PROJECT_NODE_CLASS_MAPPINGS, PROJECT_NODE_DISPLAY_NAME_MAPPINGS
|
||||||
|
|
||||||
NODE_CLASS_MAPPINGS.update(PROJECT_NODE_CLASS_MAPPINGS)
|
NODE_CLASS_MAPPINGS = PROJECT_NODE_CLASS_MAPPINGS
|
||||||
NODE_DISPLAY_NAME_MAPPINGS.update(PROJECT_NODE_DISPLAY_NAME_MAPPINGS)
|
NODE_DISPLAY_NAME_MAPPINGS = PROJECT_NODE_DISPLAY_NAME_MAPPINGS
|
||||||
|
|
||||||
WEB_DIRECTORY = "./web"
|
WEB_DIRECTORY = "./web"
|
||||||
|
|
||||||
|
|||||||
153
json_loader.py
153
json_loader.py
@@ -1,153 +0,0 @@
|
|||||||
import json
|
|
||||||
import os
|
|
||||||
import logging
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)

# JSON key under which a file may supply a list of per-run entries
# (each entry optionally tagged with a "sequence_number" field).
KEY_BATCH_DATA = "batch_data"
# Fixed number of output sockets declared on the Python side; the web
# extension (json_dynamic.js) trims/renames them to match the JSON keys.
MAX_DYNAMIC_OUTPUTS = 32
|
|
||||||
|
|
||||||
|
|
||||||
class AnyType(str):
    """String subclass whose inequality check always fails.

    ComfyUI validates socket connections by comparing type strings with
    ``!=``; by always claiming equality this type connects to any socket.
    """

    def __ne__(self, _other: object) -> bool:
        # Never "not equal" -> compatible with every ComfyUI type.
        return False


# Singleton wildcard type used for the node's dynamic outputs.
any_type = AnyType("*")
|
|
||||||
|
|
||||||
|
|
||||||
# ComfyUI's server module is only importable when running inside ComfyUI.
# Outside of it (e.g. in unit tests) degrade gracefully: the API route
# below is simply not registered.
try:
    from server import PromptServer
    from aiohttp import web
except ImportError:
    # Standalone mode: sentinel checked before route registration.
    PromptServer = None
|
|
||||||
|
|
||||||
|
|
||||||
def to_float(val: Any) -> float:
    """Coerce *val* to a float, returning 0.0 for unparseable input."""
    try:
        parsed = float(val)
    except (TypeError, ValueError):
        # None, non-numeric strings, etc. all degrade to 0.0.
        return 0.0
    return parsed
|
|
||||||
|
|
||||||
def to_int(val: Any) -> int:
    """Coerce *val* to an int, parsing numeric strings via ``float``.

    Going through ``float`` first lets values like ``"3.9"`` truncate to 3
    instead of raising; anything unparseable (including NaN) yields 0.
    """
    try:
        # int() stays inside the try: int(float("nan")) raises ValueError.
        return int(float(val))
    except (TypeError, ValueError):
        return 0
|
|
||||||
|
|
||||||
def get_batch_item(data: dict[str, Any], sequence_number: int) -> dict[str, Any]:
    """Resolve batch item by sequence_number field, falling back to array index."""
    batch = data.get(KEY_BATCH_DATA)
    if not isinstance(batch, list) or not batch:
        # No batch list present: the whole document is the item.
        return data

    # Preferred lookup: explicit "sequence_number" field on each entry.
    for entry in batch:
        if int(entry.get("sequence_number", 0)) == sequence_number:
            return entry

    # Fallback: treat sequence_number as a 1-based index, clamped in range.
    idx = max(0, min(sequence_number - 1, len(batch) - 1))
    logger.warning(f"No item with sequence_number={sequence_number}, falling back to index {idx}")
    return batch[idx]
|
|
||||||
|
|
||||||
# --- Shared Helper ---
|
|
||||||
def read_json_data(json_path: str) -> dict[str, Any]:
    """Load a JSON file and return its top-level object as a dict.

    Returns ``{}`` (after logging a warning) when the file is missing,
    unreadable, unparseable, or does not contain a JSON object.
    """
    if not os.path.exists(json_path):
        logger.warning(f"File not found at {json_path}")
        return {}
    try:
        # Explicit encoding: JSON is UTF-8 by spec (RFC 8259); without it
        # Python falls back to the platform default and can misread files.
        with open(json_path, 'r', encoding='utf-8') as f:
            data = json.load(f)
    except (json.JSONDecodeError, IOError) as e:
        logger.warning(f"Error reading {json_path}: {e}")
        return {}
    if not isinstance(data, dict):
        # e.g. a top-level JSON array — callers expect a mapping.
        logger.warning(f"Expected dict from {json_path}, got {type(data).__name__}")
        return {}
    return data
|
|
||||||
|
|
||||||
# --- API Route ---
|
|
||||||
if PromptServer is not None:
    # GET /json_manager/get_keys?path=...&sequence_number=N
    # Returns the keys of the selected batch item plus a ComfyUI type name
    # ("INT"/"FLOAT"/"STRING") per value, so the front-end can build
    # matching output sockets.
    @PromptServer.instance.routes.get("/json_manager/get_keys")
    async def get_keys_route(request):
        json_path = request.query.get("path", "")
        try:
            seq = int(request.query.get("sequence_number", "1"))
        except (ValueError, TypeError):
            # Malformed query value — default to the first batch item.
            seq = 1
        data = read_json_data(json_path)
        if not data:
            # Flag "missing/unreadable file" explicitly so the UI can keep
            # the node's existing outputs instead of wiping them.
            return web.json_response({"keys": [], "types": [], "error": "file_not_found"})
        target = get_batch_item(data, seq)
        keys = []
        types = []
        if isinstance(target, dict):
            for k, v in target.items():
                keys.append(k)
                # bool must be tested before int (bool subclasses int);
                # booleans are surfaced as STRING, matching load_dynamic.
                if isinstance(v, bool):
                    types.append("STRING")
                elif isinstance(v, int):
                    types.append("INT")
                elif isinstance(v, float):
                    types.append("FLOAT")
                else:
                    types.append("STRING")
        return web.json_response({"keys": keys, "types": types})
|
|
||||||
|
|
||||||
|
|
||||||
# ==========================================
|
|
||||||
# 0. DYNAMIC NODE
|
|
||||||
# ==========================================
|
|
||||||
|
|
||||||
class JSONLoaderDynamic:
    """ComfyUI node exposing JSON values through dynamic wildcard outputs.

    The Python side declares a fixed arity of MAX_DYNAMIC_OUTPUTS sockets;
    the web extension trims/renames them to match the keys stored in the
    hidden ``output_keys``/``output_types`` widgets.
    """

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "json_path": ("STRING", {"default": "", "multiline": False}),
                "sequence_number": ("INT", {"default": 1, "min": 1, "max": 9999}),
            },
            "optional": {
                # Comma-separated; managed by the front-end, hidden from the user.
                "output_keys": ("STRING", {"default": ""}),
                "output_types": ("STRING", {"default": ""}),
            },
        }

    RETURN_TYPES = tuple(any_type for _ in range(MAX_DYNAMIC_OUTPUTS))
    RETURN_NAMES = tuple(f"output_{i}" for i in range(MAX_DYNAMIC_OUTPUTS))
    FUNCTION = "load_dynamic"
    CATEGORY = "utils/json"
    OUTPUT_NODE = False

    def load_dynamic(self, json_path, sequence_number, output_keys="", output_types=""):
        """Return one value per requested key, padded with "" to the fixed arity."""
        target = get_batch_item(read_json_data(json_path), sequence_number)

        requested = [k.strip() for k in output_keys.split(",") if k.strip()] if output_keys else []

        values = []
        for key in requested:
            raw = target.get(key, "")
            if isinstance(raw, bool):
                # bool before int (bool subclasses int): emit "true"/"false".
                values.append(str(raw).lower())
            elif isinstance(raw, (int, float)):
                # Numbers keep their native type for INT/FLOAT sockets.
                values.append(raw)
            else:
                values.append(str(raw))

        # ComfyUI expects exactly MAX_DYNAMIC_OUTPUTS return values.
        values.extend("" for _ in range(MAX_DYNAMIC_OUTPUTS - len(values)))
        return tuple(values)
|
|
||||||
|
|
||||||
|
|
||||||
# --- Mappings ---
# Exported to ComfyUI through this package's __init__.py.
NODE_CLASS_MAPPINGS = {
    "JSONLoaderDynamic": JSONLoaderDynamic,
}

NODE_DISPLAY_NAME_MAPPINGS = {
    # Human-readable title shown in the ComfyUI node menu.
    "JSONLoaderDynamic": "JSON Loader (Dynamic)",
}
|
|
||||||
@@ -1,165 +0,0 @@
|
|||||||
import json
|
|
||||||
import os
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from json_loader import (
|
|
||||||
to_float, to_int, get_batch_item, read_json_data,
|
|
||||||
JSONLoaderDynamic, MAX_DYNAMIC_OUTPUTS,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TestToFloat:
    """to_float: lenient conversion with a 0.0 fallback."""

    def test_valid(self):
        for raw, want in (("3.14", 3.14), (5, 5.0)):
            assert to_float(raw) == want

    def test_invalid(self):
        assert to_float("abc") == 0.0

    def test_none(self):
        assert to_float(None) == 0.0
|
|
||||||
|
|
||||||
|
|
||||||
class TestToInt:
    """to_int: converts via float (truncating), falling back to 0."""

    def test_valid(self):
        for raw, want in (("7", 7), (3.9, 3)):
            assert to_int(raw) == want

    def test_invalid(self):
        assert to_int("xyz") == 0

    def test_none(self):
        assert to_int(None) == 0
|
|
||||||
|
|
||||||
|
|
||||||
class TestGetBatchItem:
    """Batch resolution: sequence_number field first, clamped index second."""

    def test_lookup_by_sequence_number_field(self):
        items = [
            {"sequence_number": 1, "a": "first"},
            {"sequence_number": 5, "a": "fifth"},
            {"sequence_number": 3, "a": "third"},
        ]
        data = {"batch_data": items}
        assert get_batch_item(data, 5) == items[1]
        assert get_batch_item(data, 3) == items[2]

    def test_fallback_to_index(self):
        # No sequence_number fields -> 1-based positional fallback.
        data = {"batch_data": [{"a": 1}, {"a": 2}, {"a": 3}]}
        assert get_batch_item(data, 2) == {"a": 2}

    def test_clamp_high(self):
        data = {"batch_data": [{"a": 1}, {"a": 2}]}
        assert get_batch_item(data, 99) == {"a": 2}

    def test_clamp_low(self):
        data = {"batch_data": [{"a": 1}, {"a": 2}]}
        assert get_batch_item(data, 0) == {"a": 1}

    def test_no_batch_data(self):
        # Without a batch list the whole document is returned untouched.
        plain = {"key": "val"}
        assert get_batch_item(plain, 1) == plain
|
|
||||||
|
|
||||||
|
|
||||||
class TestReadJsonData:
    """read_json_data returns {} for every failure mode, the dict otherwise."""

    def test_missing_file(self, tmp_path):
        assert read_json_data(str(tmp_path / "nope.json")) == {}

    def test_invalid_json(self, tmp_path):
        bad = tmp_path / "bad.json"
        bad.write_text("{broken")
        assert read_json_data(str(bad)) == {}

    def test_non_dict_json(self, tmp_path):
        # A top-level array parses but is rejected.
        listfile = tmp_path / "list.json"
        listfile.write_text(json.dumps([1, 2, 3]))
        assert read_json_data(str(listfile)) == {}

    def test_valid(self, tmp_path):
        ok = tmp_path / "ok.json"
        ok.write_text(json.dumps({"key": "val"}))
        assert read_json_data(str(ok)) == {"key": "val"}
|
|
||||||
|
|
||||||
|
|
||||||
class TestJSONLoaderDynamic:
    """End-to-end behavior of the dynamic loader node."""

    def _make_json(self, tmp_path, data):
        # Write *data* to a throwaway JSON file and return its path.
        target = tmp_path / "test.json"
        target.write_text(json.dumps(data))
        return str(target)

    def test_known_keys(self, tmp_path):
        path = self._make_json(tmp_path, {"name": "alice", "age": 30, "score": 9.5})
        out = JSONLoaderDynamic().load_dynamic(path, 1, output_keys="name,age,score")
        assert out[:3] == ("alice", 30, 9.5)

    def test_empty_output_keys(self, tmp_path):
        path = self._make_json(tmp_path, {"name": "alice"})
        out = JSONLoaderDynamic().load_dynamic(path, 1, output_keys="")
        assert len(out) == MAX_DYNAMIC_OUTPUTS
        assert all(v == "" for v in out)

    def test_pads_to_max(self, tmp_path):
        path = self._make_json(tmp_path, {"a": "1", "b": "2"})
        out = JSONLoaderDynamic().load_dynamic(path, 1, output_keys="a,b")
        assert len(out) == MAX_DYNAMIC_OUTPUTS
        assert out[:2] == ("1", "2")
        assert all(v == "" for v in out[2:])

    def test_type_preservation_int(self, tmp_path):
        path = self._make_json(tmp_path, {"count": 42})
        out = JSONLoaderDynamic().load_dynamic(path, 1, output_keys="count")
        assert out[0] == 42
        assert isinstance(out[0], int)

    def test_type_preservation_float(self, tmp_path):
        path = self._make_json(tmp_path, {"rate": 3.14})
        out = JSONLoaderDynamic().load_dynamic(path, 1, output_keys="rate")
        assert out[0] == 3.14
        assert isinstance(out[0], float)

    def test_type_preservation_str(self, tmp_path):
        path = self._make_json(tmp_path, {"label": "hello"})
        out = JSONLoaderDynamic().load_dynamic(path, 1, output_keys="label")
        assert out[0] == "hello"
        assert isinstance(out[0], str)

    def test_bool_becomes_string(self, tmp_path):
        # Booleans are deliberately flattened to "true"/"false" strings.
        path = self._make_json(tmp_path, {"flag": True, "off": False})
        out = JSONLoaderDynamic().load_dynamic(path, 1, output_keys="flag,off")
        assert out[:2] == ("true", "false")
        assert isinstance(out[0], str)

    def test_missing_key_returns_empty_string(self, tmp_path):
        path = self._make_json(tmp_path, {"a": "1"})
        out = JSONLoaderDynamic().load_dynamic(path, 1, output_keys="a,nonexistent")
        assert out[:2] == ("1", "")

    def test_missing_file_returns_all_empty(self, tmp_path):
        out = JSONLoaderDynamic().load_dynamic(str(tmp_path / "nope.json"), 1, output_keys="a,b")
        assert len(out) == MAX_DYNAMIC_OUTPUTS
        assert out[:2] == ("", "")

    def test_batch_data(self, tmp_path):
        path = self._make_json(tmp_path, {
            "batch_data": [
                {"sequence_number": 1, "x": "first"},
                {"sequence_number": 2, "x": "second"},
            ]
        })
        out = JSONLoaderDynamic().load_dynamic(path, 2, output_keys="x")
        assert out[0] == "second"
|
|
||||||
@@ -1,168 +0,0 @@
|
|||||||
import { app } from "../../scripts/app.js";
|
|
||||||
import { api } from "../../scripts/api.js";
|
|
||||||
|
|
||||||
// Front-end companion for the JSONLoaderDynamic node: queries the
// /json_manager/get_keys route and rebuilds the node's output sockets to
// match the keys in the selected JSON file, preserving existing links.
app.registerExtension({
    name: "json.manager.dynamic",

    async beforeRegisterNodeDef(nodeType, nodeData, app) {
        if (nodeData.name !== "JSONLoaderDynamic") return;

        const origOnNodeCreated = nodeType.prototype.onNodeCreated;
        nodeType.prototype.onNodeCreated = function () {
            origOnNodeCreated?.apply(this, arguments);

            // Hide internal widgets (managed by JS)
            for (const name of ["output_keys", "output_types"]) {
                const w = this.widgets?.find(w => w.name === name);
                if (w) { w.type = "hidden"; w.computeSize = () => [0, -4]; }
            }

            // Do NOT remove default outputs synchronously here.
            // During graph loading, ComfyUI creates all nodes (firing onNodeCreated)
            // before configuring them. Other nodes (e.g. Kijai Set/Get) may resolve
            // links to our outputs during their configure step. If we remove outputs
            // here, those nodes find no output slot and error out.
            //
            // Instead, defer cleanup: for loaded workflows onConfigure sets _configured
            // before this runs; for new nodes the defaults are cleaned up.
            this._configured = false;

            // Add Refresh button
            this.addWidget("button", "Refresh Outputs", null, () => {
                this.refreshDynamicOutputs();
            });

            // Deferred past the current task so onConfigure (if any) runs first.
            queueMicrotask(() => {
                if (!this._configured) {
                    // New node (not loading) — remove the 32 Python default outputs
                    while (this.outputs.length > 0) {
                        this.removeOutput(0);
                    }
                    this.setSize(this.computeSize());
                    app.graph?.setDirtyCanvas(true, true);
                }
            });
        };

        // Rebuild output slots from the server's key/type listing.
        nodeType.prototype.refreshDynamicOutputs = async function () {
            const pathWidget = this.widgets?.find(w => w.name === "json_path");
            const seqWidget = this.widgets?.find(w => w.name === "sequence_number");
            if (!pathWidget?.value) return;

            try {
                const resp = await api.fetchApi(
                    `/json_manager/get_keys?path=${encodeURIComponent(pathWidget.value)}&sequence_number=${seqWidget?.value || 1}`
                );
                const data = await resp.json();
                const { keys, types } = data;

                // If the file wasn't found, keep existing outputs and links intact
                if (data.error === "file_not_found") {
                    console.warn("[JSONLoaderDynamic] File not found, keeping existing outputs:", pathWidget.value);
                    return;
                }

                // Store keys and types in hidden widgets for persistence
                const okWidget = this.widgets?.find(w => w.name === "output_keys");
                if (okWidget) okWidget.value = keys.join(",");
                const otWidget = this.widgets?.find(w => w.name === "output_types");
                if (otWidget) otWidget.value = types.join(",");

                // Build a map of current output names to slot indices
                const oldSlots = {};
                for (let i = 0; i < this.outputs.length; i++) {
                    oldSlots[this.outputs[i].name] = i;
                }

                // Build new outputs, reusing existing slots to preserve links
                const newOutputs = [];
                for (let k = 0; k < keys.length; k++) {
                    const key = keys[k];
                    const type = types[k] || "*";
                    if (key in oldSlots) {
                        // Reuse existing slot object (keeps links intact)
                        const slot = this.outputs[oldSlots[key]];
                        slot.type = type;
                        newOutputs.push(slot);
                        delete oldSlots[key];
                    } else {
                        // New key — create a fresh slot
                        newOutputs.push({ name: key, type: type, links: null });
                    }
                }

                // Disconnect links on slots that are being removed
                for (const name in oldSlots) {
                    const idx = oldSlots[name];
                    if (this.outputs[idx]?.links?.length) {
                        // Copy before iterating: removeLink mutates the array.
                        for (const linkId of [...this.outputs[idx].links]) {
                            this.graph?.removeLink(linkId);
                        }
                    }
                }

                // Reassign the outputs array and fix link slot indices
                this.outputs = newOutputs;
                if (this.graph) {
                    for (let i = 0; i < this.outputs.length; i++) {
                        const links = this.outputs[i].links;
                        if (!links) continue;
                        for (const linkId of links) {
                            const link = this.graph.links[linkId];
                            if (link) link.origin_slot = i;
                        }
                    }
                }

                this.setSize(this.computeSize());
                app.graph.setDirtyCanvas(true, true);
            } catch (e) {
                console.error("[JSONLoaderDynamic] Refresh failed:", e);
            }
        };

        // Restore state on workflow load
        const origOnConfigure = nodeType.prototype.onConfigure;
        nodeType.prototype.onConfigure = function (info) {
            origOnConfigure?.apply(this, arguments);
            this._configured = true;

            // Hide internal widgets
            for (const name of ["output_keys", "output_types"]) {
                const w = this.widgets?.find(w => w.name === name);
                if (w) { w.type = "hidden"; w.computeSize = () => [0, -4]; }
            }

            const okWidget = this.widgets?.find(w => w.name === "output_keys");
            const otWidget = this.widgets?.find(w => w.name === "output_types");

            const keys = okWidget?.value
                ? okWidget.value.split(",").filter(k => k.trim())
                : [];
            const types = otWidget?.value
                ? otWidget.value.split(",")
                : [];

            if (keys.length > 0) {
                // On load, LiteGraph already restored serialized outputs with links.
                // Rename and set types to match stored state (preserves links).
                for (let i = 0; i < this.outputs.length && i < keys.length; i++) {
                    this.outputs[i].name = keys[i].trim();
                    if (types[i]) this.outputs[i].type = types[i];
                }

                // Remove any extra outputs beyond the key count
                while (this.outputs.length > keys.length) {
                    this.removeOutput(this.outputs.length - 1);
                }
            } else if (this.outputs.length > 0) {
                // Widget values empty but serialized outputs exist — sync widgets
                // from the outputs LiteGraph already restored (fallback).
                if (okWidget) okWidget.value = this.outputs.map(o => o.name).join(",");
                if (otWidget) otWidget.value = this.outputs.map(o => o.type).join(",");
            }

            this.setSize(this.computeSize());
        };
    },
});
|
|
||||||
Reference in New Issue
Block a user