Fix 8 bugs from second code review

HIGH:
- Fix JS TypeError on empty API response: validate keys/types are arrays
  before using them; add HTTP status check (resp.ok)
- Fix BEGIN IMMEDIATE conflict: set isolation_level=None (autocommit) on
  SQLite connection so explicit transactions work without implicit ones

MEDIUM:
- Fix import_json_file non-atomic: wrap entire operation in BEGIN/COMMIT
  with ROLLBACK on error — no more partial imports
- Fix crash on non-dict batch_data items: skip non-dict elements
- Fix comma-in-key corruption: store keys/types as JSON arrays in hidden
  widgets instead of comma-delimited strings (backward-compat fallback)
- Fix blocking I/O in API routes: change async def to def so FastAPI
  auto-threads the synchronous SQLite calls

LOW:
- Fix missing ?. on app.graph.setDirtyCanvas in refreshDynamicOutputs

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-28 21:32:35 +01:00
parent ba8f104bc1
commit b499eb4dfd
7 changed files with 155 additions and 52 deletions

View File

@@ -272,6 +272,44 @@ class TestImport:
s1 = db.get_sequence(df_id_2, 1)
assert s1["prompt"] == "v2"
def test_import_skips_non_dict_batch_items(self, db, tmp_path):
    """Non-dict elements in batch_data should be silently skipped, not crash."""
    project_id = db.create_project("p1", "/p1")
    # Mix valid sequence dicts with junk values of several non-dict types.
    payload = {
        KEY_BATCH_DATA: [
            {"sequence_number": 1, "prompt": "valid"},
            "not a dict",
            42,
            None,
            {"sequence_number": 3, "prompt": "also valid"},
        ]
    }
    target = tmp_path / "mixed.json"
    target.write_text(json.dumps(payload))
    data_file_id = db.import_json_file(project_id, target)
    # Only the two dict entries should survive the import.
    assert db.list_sequences(data_file_id) == [1, 3]
def test_import_atomic_on_error(self, db, tmp_path):
    """If import fails partway, no partial data should be committed."""
    project_id = db.create_project("p1", "/p1")
    source = tmp_path / "batch.json"
    # Seed the database with one good import.
    good = {KEY_BATCH_DATA: [{"sequence_number": 1, "prompt": "hello"}]}
    source.write_text(json.dumps(good))
    db.import_json_file(project_id, source)
    # Overwrite the file with a sequence_number that makes int() raise,
    # then attempt a second import that must fail.
    bad = {KEY_BATCH_DATA: [{"sequence_number": "not_a_number", "prompt": "bad"}]}
    source.write_text(json.dumps(bad))
    with pytest.raises(ValueError):
        db.import_json_file(project_id, source)
    # The failed import must have rolled back: the original row is untouched.
    data_file = db.get_data_file(project_id, "batch")
    assert data_file is not None
    first = db.get_sequence(data_file["id"], 1)
    assert first["prompt"] == "hello"
# ------------------------------------------------------------------
# Query helpers

View File

@@ -86,6 +86,20 @@ class TestProjectLoaderDynamic:
assert result[2] == 1.5
assert len(result) == MAX_DYNAMIC_OUTPUTS
def test_load_dynamic_with_json_encoded_keys(self):
    """JSON-encoded output_keys should be parsed correctly."""
    import json as _json

    # A key containing a comma must round-trip intact via JSON encoding
    # (the old comma-delimited format would have split it).
    fake_response = {"my,key": "comma_val", "normal": "ok"}
    encoded_keys = _json.dumps(["my,key", "normal"])
    loader = ProjectLoaderDynamic()
    with patch("project_loader._fetch_data", return_value=fake_response):
        outputs = loader.load_dynamic(
            "http://localhost:8080",
            "proj1",
            "batch_i2v",
            1,
            output_keys=encoded_keys,
        )
    assert outputs[0] == "comma_val"
    assert outputs[1] == "ok"
def test_load_dynamic_empty_keys(self):
node = ProjectLoaderDynamic()
with patch("project_loader._fetch_data", return_value={"prompt": "hello"}):