Add SQLite project database + ComfyUI connector nodes
- db.py: ProjectDB class with SQLite schema (projects, data_files, sequences, history_trees), WAL mode, CRUD, import, and query helpers - api_routes.py: REST API endpoints on NiceGUI/FastAPI for ComfyUI to query project data over the network - project_loader.py: ComfyUI nodes (ProjectLoaderDynamic, Standard, VACE, LoRA) that fetch data from NiceGUI REST API via HTTP - web/project_dynamic.js: Frontend JS for dynamic project loader node - tab_projects_ng.py: Projects management tab in NiceGUI UI - state.py: Added db, current_project, db_enabled fields - main.py: DB init, API route registration, projects tab - utils.py: sync_to_db() dual-write helper - tab_batch_ng.py, tab_raw_ng.py, tab_timeline_ng.py: dual-write sync calls after save_json when project DB is enabled - __init__.py: Merged project node class mappings - tests/test_db.py: 30 tests for database layer - tests/test_project_loader.py: 17 tests for ComfyUI connector nodes Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
286
tests/test_db.py
Normal file
286
tests/test_db.py
Normal file
@@ -0,0 +1,286 @@
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from db import ProjectDB
|
||||
from utils import KEY_BATCH_DATA, KEY_HISTORY_TREE
|
||||
|
||||
|
||||
@pytest.fixture
def db(tmp_path):
    """Provide a fresh ProjectDB backed by a SQLite file in a temp dir.

    The database is closed during fixture teardown so the temp directory
    can be removed cleanly on every platform.
    """
    database = ProjectDB(tmp_path / "test.db")
    yield database
    database.close()
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Projects CRUD
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
class TestProjects:
    """CRUD behaviour of the projects table."""

    def test_create_and_get(self, db):
        project_id = db.create_project("proj1", "/some/path", "A test project")
        assert project_id > 0
        record = db.get_project("proj1")
        assert record is not None
        assert record["name"] == "proj1"
        assert record["folder_path"] == "/some/path"
        assert record["description"] == "A test project"

    def test_list_projects(self, db):
        # Inserted out of alphabetical order; listing should return them sorted.
        db.create_project("beta", "/b")
        db.create_project("alpha", "/a")
        names = [p["name"] for p in db.list_projects()]
        assert names == ["alpha", "beta"]

    def test_get_nonexistent(self, db):
        assert db.get_project("nope") is None

    def test_delete_project(self, db):
        db.create_project("to_delete", "/x")
        assert db.delete_project("to_delete") is True
        assert db.get_project("to_delete") is None

    def test_delete_nonexistent(self, db):
        assert db.delete_project("nope") is False

    def test_unique_name_constraint(self, db):
        db.create_project("dup", "/a")
        # NOTE(review): Exception is deliberately broad — the concrete error
        # type ProjectDB raises on a UNIQUE violation isn't visible here;
        # narrow to it (e.g. sqlite3.IntegrityError) once confirmed.
        with pytest.raises(Exception):
            db.create_project("dup", "/b")
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Data files
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
class TestDataFiles:
    """CRUD, uniqueness and cascade-delete behaviour of data_files."""

    def test_create_and_list(self, db):
        project_id = db.create_project("p1", "/p1")
        file_id = db.create_data_file(project_id, "batch_i2v", "i2v", {"extra": "meta"})
        assert file_id > 0
        listed = db.list_data_files(project_id)
        assert len(listed) == 1
        entry = listed[0]
        assert entry["name"] == "batch_i2v"
        assert entry["data_type"] == "i2v"

    def test_get_data_file(self, db):
        project_id = db.create_project("p1", "/p1")
        db.create_data_file(project_id, "batch_i2v", "i2v", {"key": "value"})
        record = db.get_data_file(project_id, "batch_i2v")
        assert record is not None
        # Metadata supplied at creation time round-trips via "top_level".
        assert record["top_level"] == {"key": "value"}

    def test_get_data_file_by_names(self, db):
        project_id = db.create_project("p1", "/p1")
        db.create_data_file(project_id, "batch_i2v", "i2v")
        record = db.get_data_file_by_names("p1", "batch_i2v")
        assert record is not None
        assert record["name"] == "batch_i2v"

    def test_get_nonexistent_data_file(self, db):
        project_id = db.create_project("p1", "/p1")
        assert db.get_data_file(project_id, "nope") is None

    def test_unique_constraint(self, db):
        # Same (project, name) pair twice must be rejected even if the
        # data_type differs.
        project_id = db.create_project("p1", "/p1")
        db.create_data_file(project_id, "batch_i2v", "i2v")
        with pytest.raises(Exception):
            db.create_data_file(project_id, "batch_i2v", "vace")

    def test_cascade_delete(self, db):
        # Deleting a project should take its files (and their sequences and
        # history trees) with it.
        project_id = db.create_project("p1", "/p1")
        file_id = db.create_data_file(project_id, "batch_i2v", "i2v")
        db.upsert_sequence(file_id, 1, {"prompt": "hello"})
        db.save_history_tree(file_id, {"nodes": {}})
        db.delete_project("p1")
        assert db.get_data_file(project_id, "batch_i2v") is None
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Sequences
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
class TestSequences:
    """Upsert/get/list behaviour of per-file sequence rows."""

    def test_upsert_and_get(self, db):
        project_id = db.create_project("p1", "/p1")
        file_id = db.create_data_file(project_id, "batch", "generic")
        db.upsert_sequence(file_id, 1, {"prompt": "hello", "seed": 42})
        stored = db.get_sequence(file_id, 1)
        assert stored == {"prompt": "hello", "seed": 42}

    def test_upsert_updates_existing(self, db):
        project_id = db.create_project("p1", "/p1")
        file_id = db.create_data_file(project_id, "batch", "generic")
        db.upsert_sequence(file_id, 1, {"prompt": "v1"})
        db.upsert_sequence(file_id, 1, {"prompt": "v2"})
        # Second upsert for the same sequence number replaces, not duplicates.
        assert db.get_sequence(file_id, 1)["prompt"] == "v2"

    def test_list_sequences(self, db):
        project_id = db.create_project("p1", "/p1")
        file_id = db.create_data_file(project_id, "batch", "generic")
        # Inserted out of order; listing should return sorted numbers.
        for seq_no, payload in ((3, {"a": 1}), (1, {"b": 2}), (2, {"c": 3})):
            db.upsert_sequence(file_id, seq_no, payload)
        assert db.list_sequences(file_id) == [1, 2, 3]

    def test_get_nonexistent_sequence(self, db):
        project_id = db.create_project("p1", "/p1")
        file_id = db.create_data_file(project_id, "batch", "generic")
        assert db.get_sequence(file_id, 99) is None

    def test_get_sequence_keys(self, db):
        project_id = db.create_project("p1", "/p1")
        file_id = db.create_data_file(project_id, "batch", "generic")
        db.upsert_sequence(file_id, 1, {
            "prompt": "hello",
            "seed": 42,
            "cfg": 1.5,
            "flag": True,
        })
        keys, types = db.get_sequence_keys(file_id, 1)
        assert "prompt" in keys
        assert "seed" in keys
        # keys and types are parallel lists; pair them up for lookup.
        type_of = dict(zip(keys, types))
        assert type_of["prompt"] == "STRING"
        assert type_of["seed"] == "INT"
        assert type_of["cfg"] == "FLOAT"
        assert type_of["flag"] == "STRING"  # bools -> STRING

    def test_get_sequence_keys_nonexistent(self, db):
        project_id = db.create_project("p1", "/p1")
        file_id = db.create_data_file(project_id, "batch", "generic")
        keys, types = db.get_sequence_keys(file_id, 99)
        assert keys == []
        assert types == []

    def test_delete_sequences_for_file(self, db):
        project_id = db.create_project("p1", "/p1")
        file_id = db.create_data_file(project_id, "batch", "generic")
        db.upsert_sequence(file_id, 1, {"a": 1})
        db.upsert_sequence(file_id, 2, {"b": 2})
        db.delete_sequences_for_file(file_id)
        assert db.list_sequences(file_id) == []
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# History trees
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
class TestHistoryTrees:
    """Save/load round-trips for per-file history trees."""

    def test_save_and_get(self, db):
        project_id = db.create_project("p1", "/p1")
        file_id = db.create_data_file(project_id, "batch", "generic")
        tree = {"nodes": {"abc": {"id": "abc"}}, "head_id": "abc"}
        db.save_history_tree(file_id, tree)
        assert db.get_history_tree(file_id) == tree

    def test_upsert_updates(self, db):
        project_id = db.create_project("p1", "/p1")
        file_id = db.create_data_file(project_id, "batch", "generic")
        # Saving twice for the same file replaces the stored tree.
        db.save_history_tree(file_id, {"v": 1})
        db.save_history_tree(file_id, {"v": 2})
        assert db.get_history_tree(file_id) == {"v": 2}

    def test_get_nonexistent(self, db):
        project_id = db.create_project("p1", "/p1")
        file_id = db.create_data_file(project_id, "batch", "generic")
        assert db.get_history_tree(file_id) is None
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Import
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
class TestImport:
    """Importing legacy JSON batch files into the database."""

    def test_import_json_file(self, db, tmp_path):
        project_id = db.create_project("p1", "/p1")
        payload_path = tmp_path / "batch_prompt_i2v.json"
        payload = {
            KEY_BATCH_DATA: [
                {"sequence_number": 1, "prompt": "hello", "seed": 42},
                {"sequence_number": 2, "prompt": "world", "seed": 99},
            ],
            KEY_HISTORY_TREE: {"nodes": {}, "head_id": None},
        }
        payload_path.write_text(json.dumps(payload))

        file_id = db.import_json_file(project_id, payload_path, "i2v")
        assert file_id > 0

        # Every batch entry becomes one sequence row.
        assert db.list_sequences(file_id) == [1, 2]

        first = db.get_sequence(file_id, 1)
        assert first["prompt"] == "hello"
        assert first["seed"] == 42

        # The history tree is imported alongside the sequences.
        assert db.get_history_tree(file_id) == {"nodes": {}, "head_id": None}

    def test_import_file_name_from_stem(self, db, tmp_path):
        project_id = db.create_project("p1", "/p1")
        payload_path = tmp_path / "my_batch.json"
        payload_path.write_text(json.dumps({KEY_BATCH_DATA: [{"sequence_number": 1}]}))
        db.import_json_file(project_id, payload_path)
        # With no explicit name the file stem becomes the data-file name.
        assert db.get_data_file(project_id, "my_batch") is not None

    def test_import_no_batch_data(self, db, tmp_path):
        project_id = db.create_project("p1", "/p1")
        payload_path = tmp_path / "simple.json"
        payload_path.write_text(json.dumps({"prompt": "flat file"}))
        file_id = db.import_json_file(project_id, payload_path)
        # A flat JSON file without KEY_BATCH_DATA yields zero sequences.
        assert db.list_sequences(file_id) == []
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Query helpers
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
class TestQueryHelpers:
    """Name-based query helpers used by the REST API layer."""

    def test_query_sequence_data(self, db):
        project_id = db.create_project("myproject", "/mp")
        file_id = db.create_data_file(project_id, "batch_i2v", "i2v")
        db.upsert_sequence(file_id, 1, {"prompt": "test", "seed": 7})
        assert db.query_sequence_data("myproject", "batch_i2v", 1) == {
            "prompt": "test",
            "seed": 7,
        }

    def test_query_sequence_data_not_found(self, db):
        assert db.query_sequence_data("nope", "nope", 1) is None

    def test_query_sequence_keys(self, db):
        project_id = db.create_project("myproject", "/mp")
        file_id = db.create_data_file(project_id, "batch_i2v", "i2v")
        db.upsert_sequence(file_id, 1, {"prompt": "test", "seed": 7})
        keys, types = db.query_sequence_keys("myproject", "batch_i2v", 1)
        assert "prompt" in keys
        assert "seed" in keys

    def test_list_project_files(self, db):
        project_id = db.create_project("p1", "/p1")
        db.create_data_file(project_id, "file_a", "i2v")
        db.create_data_file(project_id, "file_b", "vace")
        assert len(db.list_project_files("p1")) == 2

    def test_list_project_sequences(self, db):
        project_id = db.create_project("p1", "/p1")
        file_id = db.create_data_file(project_id, "batch", "generic")
        db.upsert_sequence(file_id, 1, {})
        db.upsert_sequence(file_id, 2, {})
        assert db.list_project_sequences("p1", "batch") == [1, 2]
|
||||
201
tests/test_project_loader.py
Normal file
201
tests/test_project_loader.py
Normal file
@@ -0,0 +1,201 @@
|
||||
import json
|
||||
from unittest.mock import patch, MagicMock
|
||||
from io import BytesIO
|
||||
|
||||
import pytest
|
||||
|
||||
from project_loader import (
|
||||
ProjectLoaderDynamic,
|
||||
ProjectLoaderStandard,
|
||||
ProjectLoaderVACE,
|
||||
ProjectLoaderLoRA,
|
||||
_fetch_json,
|
||||
_fetch_data,
|
||||
_fetch_keys,
|
||||
MAX_DYNAMIC_OUTPUTS,
|
||||
)
|
||||
|
||||
|
||||
def _mock_urlopen(data: dict):
|
||||
"""Create a mock context manager for urllib.request.urlopen."""
|
||||
response = MagicMock()
|
||||
response.read.return_value = json.dumps(data).encode()
|
||||
response.__enter__ = lambda s: s
|
||||
response.__exit__ = MagicMock(return_value=False)
|
||||
return response
|
||||
|
||||
|
||||
class TestFetchHelpers:
    """HTTP fetch helpers: URL construction and error fallback."""

    def test_fetch_json_success(self):
        payload = {"key": "value"}
        with patch("project_loader.urllib.request.urlopen", return_value=_mock_urlopen(payload)):
            assert _fetch_json("http://example.com/api") == payload

    def test_fetch_json_failure(self):
        # Fix: dropped an unused local `import urllib.error` — the test
        # simulates the failure with a plain OSError (the base class of
        # urllib.error.URLError), so the import was dead code.
        with patch("project_loader.urllib.request.urlopen", side_effect=OSError("connection refused")):
            # Network failure degrades to an empty dict rather than raising.
            assert _fetch_json("http://example.com/api") == {}

    def test_fetch_data_builds_url(self):
        payload = {"prompt": "hello"}
        with patch("project_loader.urllib.request.urlopen", return_value=_mock_urlopen(payload)) as mock:
            result = _fetch_data("http://localhost:8080", "proj1", "batch_i2v", 1)
        assert result == payload
        called_url = mock.call_args[0][0]
        assert "/api/projects/proj1/files/batch_i2v/data?seq=1" in called_url

    def test_fetch_keys_builds_url(self):
        payload = {"keys": ["prompt"], "types": ["STRING"]}
        with patch("project_loader.urllib.request.urlopen", return_value=_mock_urlopen(payload)) as mock:
            result = _fetch_keys("http://localhost:8080", "proj1", "batch_i2v", 1)
        assert result == payload
        called_url = mock.call_args[0][0]
        assert "/api/projects/proj1/files/batch_i2v/keys?seq=1" in called_url

    def test_fetch_data_strips_trailing_slash(self):
        payload = {"prompt": "hello"}
        with patch("project_loader.urllib.request.urlopen", return_value=_mock_urlopen(payload)) as mock:
            _fetch_data("http://localhost:8080/", "proj1", "file1", 1)
        # A trailing slash on the base URL must not produce "//api/..." paths.
        called_url = mock.call_args[0][0]
        assert "//api" not in called_url
|
||||
|
||||
|
||||
class TestProjectLoaderDynamic:
    """Behaviour of the dynamic-output project loader node."""

    def test_load_dynamic_with_keys(self):
        payload = {"prompt": "hello", "seed": 42, "cfg": 1.5}
        loader = ProjectLoaderDynamic()
        with patch("project_loader._fetch_data", return_value=payload):
            result = loader.load_dynamic(
                "http://localhost:8080", "proj1", "batch_i2v", 1,
                output_keys="prompt,seed,cfg"
            )
        # Requested keys map positionally onto the first outputs...
        assert result[:3] == ("hello", 42, 1.5)
        # ...and the output tuple is always padded to the fixed width.
        assert len(result) == MAX_DYNAMIC_OUTPUTS

    def test_load_dynamic_empty_keys(self):
        loader = ProjectLoaderDynamic()
        with patch("project_loader._fetch_data", return_value={"prompt": "hello"}):
            result = loader.load_dynamic(
                "http://localhost:8080", "proj1", "batch_i2v", 1,
                output_keys=""
            )
        assert all(value == "" for value in result)

    def test_load_dynamic_missing_key(self):
        loader = ProjectLoaderDynamic()
        with patch("project_loader._fetch_data", return_value={"prompt": "hello"}):
            result = loader.load_dynamic(
                "http://localhost:8080", "proj1", "batch_i2v", 1,
                output_keys="nonexistent"
            )
        # Keys absent from the data fall back to an empty string.
        assert result[0] == ""

    def test_load_dynamic_bool_becomes_string(self):
        loader = ProjectLoaderDynamic()
        with patch("project_loader._fetch_data", return_value={"flag": True}):
            result = loader.load_dynamic(
                "http://localhost:8080", "proj1", "batch_i2v", 1,
                output_keys="flag"
            )
        assert result[0] == "true"

    def test_input_types_has_manager_url(self):
        required = ProjectLoaderDynamic.INPUT_TYPES()["required"]
        for input_name in ("manager_url", "project_name", "file_name", "sequence_number"):
            assert input_name in required

    def test_category(self):
        assert ProjectLoaderDynamic.CATEGORY == "utils/json/project"
|
||||
|
||||
|
||||
class TestProjectLoaderStandard:
    """Fixed-output standard loader node."""

    def test_load_standard(self):
        payload = {
            "general_prompt": "hello",
            "general_negative": "bad",
            "current_prompt": "specific",
            "negative": "neg",
            "camera": "pan",
            "flf": 0.5,
            "seed": 42,
            "video file path": "/v.mp4",
            "reference image path": "/r.png",
            "flf image path": "/f.png",
        }
        loader = ProjectLoaderStandard()
        with patch("project_loader._fetch_data", return_value=payload):
            result = loader.load_standard("http://localhost:8080", "proj1", "batch", 1)
        assert result == ("hello", "bad", "specific", "neg", "pan", 0.5, 42, "/v.mp4", "/r.png", "/f.png")

    def test_load_standard_defaults(self):
        # With no data at all, each slot falls back to its type's default.
        loader = ProjectLoaderStandard()
        with patch("project_loader._fetch_data", return_value={}):
            result = loader.load_standard("http://localhost:8080", "proj1", "batch", 1)
        assert result[0] == ""   # general_prompt
        assert result[5] == 0.0  # flf
        assert result[6] == 0    # seed
|
||||
|
||||
|
||||
class TestProjectLoaderVACE:
    """VACE loader node: spot-check the VACE-specific output slots."""

    def test_load_vace(self):
        payload = {
            "general_prompt": "hello",
            "general_negative": "bad",
            "current_prompt": "specific",
            "negative": "neg",
            "camera": "pan",
            "flf": 0.5,
            "seed": 42,
            "frame_to_skip": 81,
            "input_a_frames": 16,
            "input_b_frames": 16,
            "reference path": "/ref",
            "reference switch": 1,
            "vace schedule": 2,
            "video file path": "/v.mp4",
            "reference image path": "/r.png",
        }
        loader = ProjectLoaderVACE()
        with patch("project_loader._fetch_data", return_value=payload):
            result = loader.load_vace("http://localhost:8080", "proj1", "batch", 1)
        assert result[7] == 81   # frame_to_skip
        assert result[12] == 2   # vace_schedule
|
||||
|
||||
|
||||
class TestProjectLoaderLoRA:
    """LoRA loader node: high/low pairs and empty fallback."""

    def test_load_loras(self):
        payload = {
            "lora 1 high": "<lora:model1:1.0>",
            "lora 1 low": "<lora:model1:0.5>",
            "lora 2 high": "",
            "lora 2 low": "",
            "lora 3 high": "",
            "lora 3 low": "",
        }
        loader = ProjectLoaderLoRA()
        with patch("project_loader._fetch_data", return_value=payload):
            result = loader.load_loras("http://localhost:8080", "proj1", "batch", 1)
        assert result[:2] == ("<lora:model1:1.0>", "<lora:model1:0.5>")

    def test_load_loras_empty(self):
        loader = ProjectLoaderLoRA()
        with patch("project_loader._fetch_data", return_value={}):
            result = loader.load_loras("http://localhost:8080", "proj1", "batch", 1)
        assert all(value == "" for value in result)
|
||||
|
||||
|
||||
class TestNodeMappings:
    """The exported ComfyUI node registration tables."""

    def test_mappings_exist(self):
        from project_loader import PROJECT_NODE_CLASS_MAPPINGS, PROJECT_NODE_DISPLAY_NAME_MAPPINGS
        expected_nodes = (
            "ProjectLoaderDynamic",
            "ProjectLoaderStandard",
            "ProjectLoaderVACE",
            "ProjectLoaderLoRA",
        )
        for node_name in expected_nodes:
            assert node_name in PROJECT_NODE_CLASS_MAPPINGS
        assert len(PROJECT_NODE_DISPLAY_NAME_MAPPINGS) == 4
|
||||
Reference in New Issue
Block a user