parallel #2
@@ -1,13 +1,16 @@
|
|||||||
from .sharp_node import SharpnessAnalyzer, SharpFrameSelector
|
from .sharp_node import SharpnessAnalyzer, SharpFrameSelector
|
||||||
|
from .parallel_loader import ParallelSharpnessLoader
|
||||||
|
|
||||||
# Registry mapping node identifiers to their implementing classes;
# ComfyUI reads this at import time to register the custom nodes.
NODE_CLASS_MAPPINGS = {
    "SharpnessAnalyzer": SharpnessAnalyzer,
    "SharpFrameSelector": SharpFrameSelector,
    "ParallelSharpnessLoader": ParallelSharpnessLoader,
}

# Human-readable labels shown in the ComfyUI node picker for each node above.
NODE_DISPLAY_NAME_MAPPINGS = {
    "SharpnessAnalyzer": "1. Sharpness Analyzer",
    "SharpFrameSelector": "2. Sharp Frame Selector",
    "ParallelSharpnessLoader": "3. Parallel Video Loader (Sharpness)",
}

# Explicit public API of this package.
__all__ = ["NODE_CLASS_MAPPINGS", "NODE_DISPLAY_NAME_MAPPINGS"]
|
||||||
BIN
assets/nodes.png
Normal file
BIN
assets/nodes.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 169 KiB |
349
example_workflows/comfyui-sharp-example.json
Normal file
349
example_workflows/comfyui-sharp-example.json
Normal file
@@ -0,0 +1,349 @@
|
|||||||
|
{
|
||||||
|
"id": "4fbf6f31-0f7b-4465-8ec8-25df4862e076",
|
||||||
|
"revision": 0,
|
||||||
|
"last_node_id": 34,
|
||||||
|
"last_link_id": 42,
|
||||||
|
"nodes": [
|
||||||
|
{
|
||||||
|
"id": 31,
|
||||||
|
"type": "PrimitiveNode",
|
||||||
|
"pos": [
|
||||||
|
4672,
|
||||||
|
-928
|
||||||
|
],
|
||||||
|
"size": [
|
||||||
|
210,
|
||||||
|
82
|
||||||
|
],
|
||||||
|
"flags": {},
|
||||||
|
"order": 0,
|
||||||
|
"mode": 0,
|
||||||
|
"inputs": [],
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "connect to widget input",
|
||||||
|
"type": "*",
|
||||||
|
"links": []
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"Run widget replace on values": false,
|
||||||
|
"ue_properties": {
|
||||||
|
"widget_ue_connectable": {},
|
||||||
|
"version": "7.5.2",
|
||||||
|
"input_ue_unconnectable": {}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"widgets_values": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": 29,
|
||||||
|
"type": "ParallelSharpnessLoader",
|
||||||
|
"pos": [
|
||||||
|
4992,
|
||||||
|
-1024
|
||||||
|
],
|
||||||
|
"size": [
|
||||||
|
320,
|
||||||
|
262
|
||||||
|
],
|
||||||
|
"flags": {},
|
||||||
|
"order": 1,
|
||||||
|
"mode": 0,
|
||||||
|
"inputs": [],
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "images",
|
||||||
|
"type": "IMAGE",
|
||||||
|
"links": [
|
||||||
|
42
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "scores_info",
|
||||||
|
"type": "STRING",
|
||||||
|
"links": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "batch_int",
|
||||||
|
"type": "INT",
|
||||||
|
"links": null
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "batch_status",
|
||||||
|
"type": "STRING",
|
||||||
|
"links": [
|
||||||
|
39
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"aux_id": "ComfyUI-Sharp-Selector.git",
|
||||||
|
"ver": "dab38a1fbf0077655fe568d500866fce6ecc857d",
|
||||||
|
"Node name for S&R": "ParallelSharpnessLoader",
|
||||||
|
"ue_properties": {
|
||||||
|
"widget_ue_connectable": {},
|
||||||
|
"input_ue_unconnectable": {},
|
||||||
|
"version": "7.5.2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"widgets_values": [
|
||||||
|
"C:\\path\\to\\video.mp4",
|
||||||
|
0,
|
||||||
|
1440,
|
||||||
|
1,
|
||||||
|
30,
|
||||||
|
24,
|
||||||
|
2000
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": 1,
|
||||||
|
"type": "SharpFrameSelector",
|
||||||
|
"pos": [
|
||||||
|
4992,
|
||||||
|
-704
|
||||||
|
],
|
||||||
|
"size": [
|
||||||
|
288,
|
||||||
|
174
|
||||||
|
],
|
||||||
|
"flags": {},
|
||||||
|
"order": 6,
|
||||||
|
"mode": 0,
|
||||||
|
"inputs": [
|
||||||
|
{
|
||||||
|
"name": "images",
|
||||||
|
"type": "IMAGE",
|
||||||
|
"link": null
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "scores",
|
||||||
|
"type": "SHARPNESS_SCORES",
|
||||||
|
"link": 3
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "selected_images",
|
||||||
|
"type": "IMAGE",
|
||||||
|
"links": [
|
||||||
|
32
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "count",
|
||||||
|
"type": "INT",
|
||||||
|
"links": null
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"aux_id": "ComfyUI-Sharp-Selector.git",
|
||||||
|
"ver": "f30f948c9fa8acf9b7fe09559f172d8a63468c8d",
|
||||||
|
"Node name for S&R": "SharpFrameSelector",
|
||||||
|
"ue_properties": {
|
||||||
|
"widget_ue_connectable": {},
|
||||||
|
"input_ue_unconnectable": {},
|
||||||
|
"version": "7.5.2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"widgets_values": [
|
||||||
|
"best_n",
|
||||||
|
144,
|
||||||
|
24,
|
||||||
|
3,
|
||||||
|
0
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": 34,
|
||||||
|
"type": "Note",
|
||||||
|
"pos": [
|
||||||
|
4224,
|
||||||
|
-1120
|
||||||
|
],
|
||||||
|
"size": [
|
||||||
|
416,
|
||||||
|
736
|
||||||
|
],
|
||||||
|
"flags": {},
|
||||||
|
"order": 2,
|
||||||
|
"mode": 0,
|
||||||
|
"inputs": [],
|
||||||
|
"outputs": [],
|
||||||
|
"properties": {},
|
||||||
|
"widgets_values": [
|
||||||
|
"📌 ComfyUI Sharpness Tools Explained\n\n1. Parallel Video Loader (The \"Loader\")\n\n Best for: Processing long videos or movie files directly from disk.\n\n How it works: It opens the video file itself and uses multi-threading (parallel CPU cores) to scan thousands of frames without loading them into memory. It only \"decodes\" the final few sharpest frames.\n\n Use Case: Extracting dataset images, finding high-quality frames from a raw movie file, or scanning 10,000 frames without crashing your RAM.\n\n Key Feature: Features a \"Batch Counter\" to automatically page through long videos (e.g., scan minute 0-1, then minute 1-2).\n\n2. Standard Sharpness Duo (The \"Filter\")\n\n Best for: Processing images already inside your workflow (e.g., after an img2img pass, or a short generated GIF).\n\n How it works:\n\n Node A (Analyzer): Assigns a score to every image in the batch.\n\n Node B (Selector): Picks the best ones based on those scores.\n\n Use Case: Filtering bad generations, picking the best frame from a small batch of AnimateDiff results, or cleaning up a sequence.\n\n Limitation: It is single-threaded and requires all images to be loaded in VRAM/RAM first (slow for long videos).\n\n🚀 Which one to use?\n\n Starting from a Video File? → Use Parallel Loader.\n\n Starting from a Generation/Latent? → Use Standard Duo."
|
||||||
|
],
|
||||||
|
"color": "#432",
|
||||||
|
"bgcolor": "#653"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": 33,
|
||||||
|
"type": "SaveImage",
|
||||||
|
"pos": [
|
||||||
|
5824,
|
||||||
|
-1024
|
||||||
|
],
|
||||||
|
"size": [
|
||||||
|
270,
|
||||||
|
58
|
||||||
|
],
|
||||||
|
"flags": {},
|
||||||
|
"order": 4,
|
||||||
|
"mode": 0,
|
||||||
|
"inputs": [
|
||||||
|
{
|
||||||
|
"name": "images",
|
||||||
|
"type": "IMAGE",
|
||||||
|
"link": 42
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"outputs": [],
|
||||||
|
"properties": {
|
||||||
|
"cnr_id": "comfy-core",
|
||||||
|
"ver": "0.9.2",
|
||||||
|
"ue_properties": {
|
||||||
|
"widget_ue_connectable": {},
|
||||||
|
"input_ue_unconnectable": {}
|
||||||
|
},
|
||||||
|
"Node name for S&R": "SaveImage"
|
||||||
|
},
|
||||||
|
"widgets_values": [
|
||||||
|
"sharp/img_"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": 32,
|
||||||
|
"type": "easy showAnything",
|
||||||
|
"pos": [
|
||||||
|
5344,
|
||||||
|
-1024
|
||||||
|
],
|
||||||
|
"size": [
|
||||||
|
448,
|
||||||
|
96
|
||||||
|
],
|
||||||
|
"flags": {},
|
||||||
|
"order": 5,
|
||||||
|
"mode": 0,
|
||||||
|
"inputs": [
|
||||||
|
{
|
||||||
|
"name": "anything",
|
||||||
|
"shape": 7,
|
||||||
|
"type": "*",
|
||||||
|
"link": 39
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "output",
|
||||||
|
"type": "*",
|
||||||
|
"links": null
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"cnr_id": "comfyui-easy-use",
|
||||||
|
"ver": "5dfcbcf51d8a6efed947bc7bdd6797827fecab55",
|
||||||
|
"Node name for S&R": "easy showAnything",
|
||||||
|
"ue_properties": {
|
||||||
|
"widget_ue_connectable": {},
|
||||||
|
"input_ue_unconnectable": {},
|
||||||
|
"version": "7.5.2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"widgets_values": [
|
||||||
|
"Batch 1: Skipped 3440 frames. Scanning range 3440 -> 4880."
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": 5,
|
||||||
|
"type": "SharpnessAnalyzer",
|
||||||
|
"pos": [
|
||||||
|
4672,
|
||||||
|
-672
|
||||||
|
],
|
||||||
|
"size": [
|
||||||
|
185.9771484375,
|
||||||
|
26
|
||||||
|
],
|
||||||
|
"flags": {},
|
||||||
|
"order": 3,
|
||||||
|
"mode": 0,
|
||||||
|
"inputs": [
|
||||||
|
{
|
||||||
|
"name": "images",
|
||||||
|
"type": "IMAGE",
|
||||||
|
"link": null
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "scores",
|
||||||
|
"type": "SHARPNESS_SCORES",
|
||||||
|
"links": [
|
||||||
|
3,
|
||||||
|
21
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"aux_id": "ComfyUI-Sharp-Selector.git",
|
||||||
|
"ver": "0df11447abb7f41bf7f12a2906aa868a5d2027b4",
|
||||||
|
"Node name for S&R": "SharpnessAnalyzer",
|
||||||
|
"ue_properties": {
|
||||||
|
"widget_ue_connectable": {},
|
||||||
|
"input_ue_unconnectable": {},
|
||||||
|
"version": "7.5.2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"widgets_values": []
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"links": [
|
||||||
|
[
|
||||||
|
3,
|
||||||
|
5,
|
||||||
|
0,
|
||||||
|
1,
|
||||||
|
1,
|
||||||
|
"SHARPNESS_SCORES"
|
||||||
|
],
|
||||||
|
[
|
||||||
|
39,
|
||||||
|
29,
|
||||||
|
3,
|
||||||
|
32,
|
||||||
|
0,
|
||||||
|
"STRING"
|
||||||
|
],
|
||||||
|
[
|
||||||
|
42,
|
||||||
|
29,
|
||||||
|
0,
|
||||||
|
33,
|
||||||
|
0,
|
||||||
|
"IMAGE"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"groups": [],
|
||||||
|
"config": {},
|
||||||
|
"extra": {
|
||||||
|
"workflowRendererVersion": "LG",
|
||||||
|
"ue_links": [],
|
||||||
|
"links_added_by_ue": [],
|
||||||
|
"ds": {
|
||||||
|
"scale": 0.8264462809917354,
|
||||||
|
"offset": [
|
||||||
|
-2054.6525613149634,
|
||||||
|
1737.5186871750661
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"frontendVersion": "1.36.14",
|
||||||
|
"VHS_latentpreview": true,
|
||||||
|
"VHS_latentpreviewrate": 0,
|
||||||
|
"VHS_MetadataImage": true,
|
||||||
|
"VHS_KeepIntermediate": true
|
||||||
|
},
|
||||||
|
"version": 0.4
|
||||||
|
}
|
||||||
File diff suppressed because one or more lines are too long
@@ -1,35 +0,0 @@
|
|||||||
import { app } from "../../scripts/app.js";
|
|
||||||
|
|
||||||
// Frontend extension: attach hover tooltips to the SharpFrameSelector
// node's widgets so the parameters are self-documenting in the UI.
app.registerExtension({
    name: "SharpFrames.Tooltips",
    async beforeRegisterNodeDef(nodeType, nodeData, app) {
        // Only decorate our own node; leave every other node untouched.
        if (nodeData.name === "SharpFrameSelector") {
            // Keys must match the Python INPUT_TYPES keys exactly.
            const TIP_TEXT = {
                "selection_method": "Strategy:\n• 'batched': Best for video. Splits time into slots.\n• 'best_n': Global top sharpest frames.",
                "batch_size": "For 'batched' mode.\nSize of the analysis window (in frames).",
                "batch_buffer": "For 'batched' mode.\nFrames to skip AFTER each batch (dead zone).",
                "num_frames": "For 'best_n' mode.\nTotal frames to output.",
                "min_sharpness": "Threshold Filter.\nDiscard frames with score below this.\nNote: Scores are lower on resized images.",
                "images": "Input High-Res images.",
                "scores": "Input Sharpness Scores from Analyzer."
            };

            // Chain onto any existing onNodeCreated rather than replacing it.
            const previousOnNodeCreated = nodeType.prototype.onNodeCreated;
            nodeType.prototype.onNodeCreated = function () {
                previousOnNodeCreated?.apply(this, arguments);

                if (this.widgets) {
                    for (const widget of this.widgets) {
                        const tip = TIP_TEXT[widget.name];
                        if (tip) {
                            widget.tooltip = tip;
                            // Also mirror into widget.options for immediate feedback.
                            widget.options = widget.options || {};
                            widget.options.tooltip = tip;
                        }
                    }
                }
            };
        }
    },
});
|
|
||||||
129
parallel_loader.py
Normal file
129
parallel_loader.py
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
import cv2
|
||||||
|
import torch
|
||||||
|
import numpy as np
|
||||||
|
import concurrent.futures
|
||||||
|
import os
|
||||||
|
|
||||||
|
class ParallelSharpnessLoader:
    """ComfyUI node: scan a video file straight from disk, score frame
    sharpness on parallel CPU threads (Laplacian variance), and decode
    only the sharpest frames into an IMAGE batch.

    Memory-friendly for long videos: pass 1 keeps only (index, score)
    pairs; pass 2 re-opens the file and decodes just the winners.
    """

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "video_path": ("STRING", {"default": "C:\\path\\to\\video.mp4"}),

                # BATCHING CONTROLS
                "batch_index": ("INT", {"default": 0, "min": 0, "max": 10000, "step": 1, "label": "Batch Counter (Auto-Increment)"}),
                "scan_limit": ("INT", {"default": 1440, "min": 1, "max": 10000000, "step": 1, "label": "Frames per Batch"}),

                # STANDARD CONTROLS
                "frame_scan_step": ("INT", {"default": 5, "min": 1, "step": 1, "label": "Analyze Every Nth Frame"}),
                "return_count": ("INT", {"default": 4, "min": 1, "max": 1024, "step": 1, "label": "Best Frames to Return"}),
                "min_distance": ("INT", {"default": 24, "min": 0, "max": 10000, "step": 1, "label": "Min Distance (Frames)"}),
                "manual_skip_start": ("INT", {"default": 0, "min": 0, "max": 10000000, "step": 1, "label": "Global Start Offset"}),
            },
        }

    # 4 outputs: the selected frames, a per-frame score log, the batch
    # counter passed through, and a status sentence for display nodes.
    RETURN_TYPES = ("IMAGE", "STRING", "INT", "STRING")
    RETURN_NAMES = ("images", "scores_info", "batch_int", "batch_status")
    FUNCTION = "load_video"
    CATEGORY = "BetaHelper/Video"

    def calculate_sharpness(self, frame_data):
        """Return the Laplacian variance of a BGR frame (higher = sharper)."""
        gray = cv2.cvtColor(frame_data, cv2.COLOR_BGR2GRAY)
        return cv2.Laplacian(gray, cv2.CV_64F).var()

    def load_video(self, video_path, batch_index, scan_limit, frame_scan_step, return_count, min_distance, manual_skip_start):
        """Scan one batch-window of the video and return the sharpest frames.

        Raises:
            FileNotFoundError: if video_path does not exist (quotes stripped).
            ValueError: if OpenCV cannot open the file (corrupt/unsupported).
        """
        # 1. Validation -- tolerate paths pasted with surrounding quotes.
        if not os.path.exists(video_path):
            video_path = video_path.strip('"')
            if not os.path.exists(video_path):
                raise FileNotFoundError(f"Video not found: {video_path}")

        # 2. Calculate offsets for this "page" of the video.
        current_skip = (batch_index * scan_limit) + manual_skip_start
        range_end = current_skip + scan_limit
        # NOTE(review): with frame_scan_step > 1, a batch actually advances
        # roughly scan_limit * frame_scan_step source frames, while the next
        # batch_index only moves forward by scan_limit -- pages overlap.
        # Confirm intended paging semantics before changing this math.

        # Status string surfaced on the batch_status output.
        status_msg = f"Batch {batch_index}: Skipped {current_skip} frames. Scanning range {current_skip} -> {range_end}."
        print(f"xx- Parallel Loader | {status_msg}")

        cap = cv2.VideoCapture(video_path)
        # Fix: fail loudly on unreadable files instead of silently reporting
        # "End of Video" (a failed open makes CAP_PROP_FRAME_COUNT read 0).
        if not cap.isOpened():
            raise ValueError(f"Could not open video: {video_path}")

        try:
            total_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))

            if current_skip >= total_frames:
                print("xx- End of video reached.")
                empty_img = torch.zeros((1, 64, 64, 3))
                return (empty_img, "End of Video", batch_index, "End of Video Reached")

            # 3. Scanning (Pass 1): decode sequentially on the main thread,
            # score concurrently on the pool.
            if current_skip > 0:
                cap.set(cv2.CAP_PROP_POS_FRAMES, current_skip)

            frame_scores = []
            current_frame = current_skip
            scanned_count = 0

            with concurrent.futures.ThreadPoolExecutor(max_workers=16) as executor:
                futures = []

                while scanned_count < scan_limit:
                    ret, frame = cap.read()
                    if not ret:
                        break

                    futures.append((current_frame, executor.submit(self.calculate_sharpness, frame)))
                    scanned_count += 1

                    # Manual stepping: cap.grab() advances without the cost
                    # of decoding frames we will not analyze.
                    if frame_scan_step > 1:
                        for _ in range(frame_scan_step - 1):
                            if not cap.grab():
                                break
                            current_frame += 1

                    current_frame += 1

                for idx, future in futures:
                    frame_scores.append((idx, future.result()))
        finally:
            # Fix: release the capture even if scanning raises mid-way.
            cap.release()

        # 4. Selection: greedy best-score-first with a minimum temporal
        # spacing so adjacent near-duplicate frames are not all picked.
        if not frame_scores:
            return (torch.zeros((1, 64, 64, 3)), "No frames in batch", batch_index, status_msg + " (No frames found)")

        frame_scores.sort(key=lambda x: x[1], reverse=True)
        selected = []

        for idx, score in frame_scores:
            if len(selected) >= return_count:
                break
            if all(abs(s[0] - idx) >= min_distance for s in selected):
                selected.append((idx, score))

        selected.sort(key=lambda x: x[0])  # restore chronological order

        # 5. Extraction (Pass 2): re-open and decode only the winners.
        cap = cv2.VideoCapture(video_path)
        output_tensors = []
        info_log = []

        try:
            for idx, score in selected:
                cap.set(cv2.CAP_PROP_POS_FRAMES, idx)
                ret, frame = cap.read()
                if ret:
                    frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
                    frame = frame.astype(np.float32) / 255.0
                    output_tensors.append(torch.from_numpy(frame))
                    info_log.append(f"F:{idx} (Score:{int(score)})")
        finally:
            cap.release()

        if not output_tensors:
            return (torch.zeros((1, 64, 64, 3)), "Extraction Failed", batch_index, status_msg)

        # Return all 4 outputs.
        return (torch.stack(output_tensors), ", ".join(info_log), batch_index, status_msg)
|
||||||
42
readme
42
readme
@@ -1,29 +1,31 @@
|
|||||||
# 🔪 ComfyUI Sharp Frame Selector
|
# 🔪 ComfyUI Sharp Frame Selector
|
||||||
|
|
||||||
A suite of custom nodes for [ComfyUI](https://github.com/comfyanonymous/ComfyUI) designed to intelligently extract the sharpest frames from video footage.
|
A collection of high-performance custom nodes for **ComfyUI** designed to detect blur, calculate sharpness scores, and automatically extract the best frames from videos or image batches.
|
||||||
|
|
||||||
Based on the [sharp-frames](https://github.com/Reflct/sharp-frames-python) logic, this tool uses **Laplacian Variance** to score image clarity. It is optimized for high-resolution video processing using a **Sidechain Workflow** that saves massive amounts of RAM.
|
This pack includes two distinct approaches:
|
||||||
|
1. **Parallel Video Loader:** A multi-threaded, path-based loader for processing massive video files directly from disk (Low RAM usage).
|
||||||
|
2. **Standard Sharpness Duo:** A classic filter setup for processing images/latents *inside* your existing workflow.
|
||||||
|
|
||||||
## ✨ Key Features
|
---
|
||||||
|
---
|
||||||
|
|
||||||
* **Sidechain Optimization:** Analyze lightweight 512px proxy images to control the selection of heavy 4K raw frames.
|
## 🚀 Key Features
|
||||||
* **Batched Extraction:** Splits video into time slots (e.g., 1 second) and picks the single best frame from each slot. Perfect for ensuring action scenes are not missed.
|
|
||||||
* **Threshold Filtering:** Automatically discards frames that are too blurry, even if they are the "winner" of their batch.
|
### 1. New: Parallel Video Loader (Path-Based)
|
||||||
* **Buffer Control:** Optional dead-zones between batches to reduce frame count or ensure temporal separation.
|
* **Zero-RAM Scanning:** Scans video files directly from disk without decoding every frame to memory.
|
||||||
|
* **Multi-Threaded:** Uses all CPU cores to calculate sharpness scores at high speed.
|
||||||
|
* **Batching Support:** Includes a "Page" system to process long movies in chunks (e.g., minute-by-minute) without restarting ComfyUI.
|
||||||
|
* **Smart Selection:** Automatically skips "adjacent" frames to ensure you get a diverse selection of sharp images.
|
||||||
|
|
||||||
|
### 2. Standard Sharpness Duo (Tensor-Based)
|
||||||
|
* **Workflow Integration:** Works with any node that outputs an `IMAGE` batch (e.g., AnimateDiff, VideoHelperSuite).
|
||||||
|
* **Precision Filtering:** Sorts and filters generated frames before saving or passing to a second pass (img2img).
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## 🚀 Installation
|
## 📦 Installation
|
||||||
|
|
||||||
### Option 1: ComfyUI Manager (Recommended)
|
1. Clone this repository into your `custom_nodes` folder:
|
||||||
1. Open ComfyUI Manager.
|
```bash
|
||||||
2. Search for **"Sharp Frame Selector"**.
|
cd ComfyUI/custom_nodes/
|
||||||
3. Click **Install**.
|
git clone https://github.com/ethanfel/ComfyUI-Sharp-Selector.git
|
||||||
|
|
||||||
### Option 2: Manual Installation
|
|
||||||
Clone this repository into your `custom_nodes` folder:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd ComfyUI/custom_nodes/
|
|
||||||
git clone https://github.com/ethanfel/ComfyUI-Sharp-Selector.git
|
|
||||||
pip install -r ComfyUI-Sharp-Selector/requirements.txt
|
|
||||||
Reference in New Issue
Block a user