Fix shared tensor warning by cloning before save
Aliased tensors (patch_embedding / expanded_patch_embedding / original_patch_embedding) share memory, causing safetensors save_file to warn about duplicates. Clone each tensor to break sharing while keeping all keys for loader compatibility. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
nodes.py — 6 changed lines (5 additions, 1 deletion)
@@ -86,7 +86,11 @@ class WanVideoSaveMergedModel:
                 tensor = v.cpu()
                 if target_dtype is not None:
                     tensor = tensor.to(target_dtype)
-                clean_sd[k] = tensor
+                # Clone to break shared memory between aliased tensors
+                # (e.g. patch_embedding / expanded_patch_embedding / original_patch_embedding)
+                # safetensors save_file doesn't handle shared tensors, and save_model
+                # deduplicates keys which breaks compatibility with ComfyUI's load_file
+                clean_sd[k] = tensor.clone()
                 pbar.update(1)

         log.info(f"Saving merged WanVideo model to: {output_path}")
Reference in New Issue
Block a user