Merged
Changes from all commits
102 commits
069ec7a
Added hook_patches to ModelPatcher for weights (model)
Kosinkadink Sep 13, 2024
3cbd40a
Initial changes to calc_cond_batch to eventually support hook_patches
Kosinkadink Sep 13, 2024
9ae7581
Added current_patcher property to BaseModel
Kosinkadink Sep 13, 2024
1268d04
Consolidated add_hook_patches_as_diffs into add_hook_patches func, fi…
Kosinkadink Sep 14, 2024
f160d46
Added call to initialize_timesteps on hooks in process_conds func, an…
Kosinkadink Sep 14, 2024
5dadd97
Added default_conds support in calc_cond_batch func
Kosinkadink Sep 14, 2024
f5abdc6
Merge branch 'master' into patch_hooks
Kosinkadink Sep 14, 2024
9ded65a
Added initial set of hook-related nodes, added code to register hooks…
Kosinkadink Sep 14, 2024
a5034df
Made CLIP work with hook patches
Kosinkadink Sep 15, 2024
5a9aa58
Added initial hook scheduling nodes, small renaming/refactoring
Kosinkadink Sep 15, 2024
f5c899f
Fixed MaxSpeed and default conds implementations
Kosinkadink Sep 15, 2024
4b472ba
Added support for adding weight hooks that aren't registered on the M…
Kosinkadink Sep 16, 2024
cfb1451
Made Set Clip Hooks node work with hooks from Create Hook nodes, bega…
Kosinkadink Sep 17, 2024
c29006e
Initial work on adding 'model_as_lora' lora type to calculate_weight
Kosinkadink Sep 17, 2024
6b14fc8
Merge branch 'master' into patch_hooks
Kosinkadink Sep 17, 2024
787ef34
Continued work on simpler Create Hook Model As LoRA node, started to …
Kosinkadink Sep 19, 2024
e80dc96
Fix incorrect ref to create_hook_patches_clone after moving function
Kosinkadink Sep 19, 2024
5501429
Added injections support to ModelPatcher + necessary bookkeeping, add…
Kosinkadink Sep 19, 2024
59d72b4
Added wrappers to ModelPatcher to facilitate standardized function wr…
Kosinkadink Sep 20, 2024
5f450d3
Started scaffolding for other hook types, refactored get_hooks_from_c…
Kosinkadink Sep 21, 2024
f28d892
Fix skip_until_exit logic bug breaking injection after first run of m…
Kosinkadink Sep 21, 2024
298397d
Updated clone_has_same_weights function to account for new ModelPatch…
Kosinkadink Sep 21, 2024
5052a78
Added WrapperExecutor for non-classbound functions, added calc_cond_b…
Kosinkadink Sep 22, 2024
a154d0d
Merge branch 'master' into patch_hooks
Kosinkadink Sep 22, 2024
7c86407
Refactored callbacks+wrappers to allow storing lists by id
Kosinkadink Sep 22, 2024
da6c045
Added forward_timestep_embed_patch type, added helper functions on Mo…
Kosinkadink Sep 24, 2024
c422553
Added get_attachment func on ModelPatcher
Kosinkadink Sep 24, 2024
d3229cb
Implement basic MemoryCounter system for determining with cached weight…
Kosinkadink Sep 24, 2024
fd2d572
Modified ControlNet/T2IAdapter get_control function to receive transf…
Kosinkadink Sep 25, 2024
09cbd69
Added create_model_options_clone func, modified type annotations to u…
Kosinkadink Sep 25, 2024
0f7d379
Refactored WrapperExecutor code to remove need for WrapperClassExecut…
Kosinkadink Sep 27, 2024
0c8bd63
Added Combine versions of Cond/Cond Pair Set Props nodes, renamed Pai…
Kosinkadink Sep 27, 2024
5bf2647
Renamed Create Hook Model As LoRA nodes to make the test node the mai…
Kosinkadink Sep 27, 2024
06fbdb0
Merge branch 'master' into patch_hooks
Kosinkadink Oct 5, 2024
4fdfe2f
Merge branch 'master' into patch_hooks
Kosinkadink Oct 7, 2024
1e2777b
Added uuid to conds in CFGGuider and uuids to transformer_options to …
Kosinkadink Oct 8, 2024
1f8d9c0
Fixed models not being unloaded properly due to current_patcher refer…
Kosinkadink Oct 11, 2024
4bbdf2b
Merge branch 'master' into patch_hooks
Kosinkadink Oct 24, 2024
daeb262
Fixed default conds not respecting hook keyframes, made keyframes not…
Kosinkadink Oct 25, 2024
2047bf2
Changed CreateHookModelAsLoraTest to be the new CreateHookModelAsLora…
Kosinkadink Oct 26, 2024
d5169df
Added initial support within CLIP Text Encode (Prompt) node for sched…
Kosinkadink Oct 30, 2024
7a4d2fe
Fix range check in get_hooks_for_clip_schedule so that proper keyfram…
Kosinkadink Oct 30, 2024
3bcbcce
Merge branch 'master' into patch_hooks
Kosinkadink Oct 31, 2024
4898469
Optimized CLIP hook scheduling to treat same strength as same keyframe
Kosinkadink Nov 1, 2024
d8bd2a9
Less fragile memory management.
comfyanonymous Nov 1, 2024
16735c9
Make encode_from_tokens_scheduled call cleaner, rollback change in mo…
Kosinkadink Nov 1, 2024
1735d4f
Fix issue.
comfyanonymous Nov 1, 2024
975927c
Remove useless function.
comfyanonymous Nov 1, 2024
45f16c2
Merge branch 'improved_memory' into patch_hooks_improved_memory
Kosinkadink Nov 1, 2024
89934a4
Merge branch 'improved_memory' into patch_hooks_improved_memory
Kosinkadink Nov 1, 2024
bd5d8f1
Prevent and detect some types of memory leaks.
comfyanonymous Nov 1, 2024
e3c3722
Merge branch 'improved_memory' into patch_hooks_improved_memory
Kosinkadink Nov 1, 2024
5c106a0
Run garbage collector when switching workflow if needed.
comfyanonymous Nov 2, 2024
51e8d55
Moved WrappersMP/CallbacksMP/WrapperExecutor to patcher_extension.py
Kosinkadink Nov 3, 2024
0fbefb8
Refactored code to store wrappers and callbacks in transformer_option…
Kosinkadink Nov 3, 2024
95972ba
Fix issue.
comfyanonymous Nov 4, 2024
9dde713
Refactored hooks in calc_cond_batch to be part of get_area_and_mult t…
Kosinkadink Nov 4, 2024
638c408
Fixed inconsistency of results when schedule_clip is set to False, sm…
Kosinkadink Nov 11, 2024
b12cc83
Modified callbacks and wrappers so that unregistered types can be use…
Kosinkadink Nov 11, 2024
66b3386
Updated different hook types to reflect actual progress of implementa…
Kosinkadink Nov 11, 2024
4195dfb
Merge branch 'master' into patch_hooks_improved_memory
Kosinkadink Nov 11, 2024
9330745
Merge branch 'improved_memory' into patch_hooks_improved_memory
Kosinkadink Nov 11, 2024
1766d90
Fixed existing weight hook_patches (pre-registered) not working prope…
Kosinkadink Nov 12, 2024
5909b06
Removed Register/Direct hook nodes since they were present only for t…
Kosinkadink Nov 12, 2024
1470719
Added clip scheduling support to all other native ComfyUI text encodi…
Kosinkadink Nov 12, 2024
96b2080
Merge branch 'master' into patch_hooks_improved_memory
Kosinkadink Nov 12, 2024
bcc6a22
Made WrapperHook functional, added another wrapper/callback getter, a…
Kosinkadink Nov 14, 2024
e177149
Made opt_hooks append by default instead of replace, renamed comfy.ho…
Kosinkadink Nov 16, 2024
a20be20
Added apply_to_conds to Set CLIP Hooks, modified relevant code to all…
Kosinkadink Nov 16, 2024
f465004
Fix cached_hook_patches not respecting target_device/memory_counter r…
Kosinkadink Nov 17, 2024
e844695
Fixed issue with setting weights from hooks instead of copying them, …
Kosinkadink Nov 18, 2024
0850ae5
Remove unnecessary torch.no_grad calls for hook patches
Kosinkadink Nov 18, 2024
de6013c
Increased MemoryCounter minimum memory to leave free by *2 until a be…
Kosinkadink Nov 18, 2024
365170a
Merge branch 'master' into patch_hooks_improved_memory
Kosinkadink Nov 18, 2024
9fe3db4
For encode_from_tokens_scheduled, allow start_percent and end_percent…
Kosinkadink Nov 18, 2024
9b2b130
Removed a .to call on results of calculate_weight in patch_hook_weigh…
Kosinkadink Nov 19, 2024
59891b0
Made encode_from_tokens_scheduled work when no hooks are set on patcher
Kosinkadink Nov 19, 2024
3501698
Small cleanup of comments
Kosinkadink Nov 19, 2024
d38c535
Turn off hook patch caching when only 1 hook present in sampling, rep…
Kosinkadink Nov 21, 2024
1c86976
On Cond/Cond Pair nodes, removed opt_ prefix from optional inputs
Kosinkadink Nov 21, 2024
9a69ccf
Allow both FLOATS and FLOAT for floats_strength input
Kosinkadink Nov 21, 2024
c044c3b
Revert change, does not work
Kosinkadink Nov 21, 2024
76b9ed1
Merge branch 'master' into patch_hooks_improved_memory
Kosinkadink Nov 21, 2024
0a432c1
Merge branch 'master' into patch_hooks_improved_memory
Kosinkadink Nov 22, 2024
815c6f3
Merge branch 'master' into patch_hooks_improved_memory
Kosinkadink Nov 22, 2024
8b2c324
Made patch_hook_weight_to_device respect set_func and convert_func
Kosinkadink Nov 24, 2024
602c12b
Make discard_model_sampling True by default
Kosinkadink Nov 24, 2024
ac5a3bd
Add changes manually from 'master' so merge conflict resolution goes …
Kosinkadink Nov 24, 2024
26ccd3b
Merge branch 'master' into patch_hooks_improved_memory
Kosinkadink Nov 24, 2024
57f1ea8
Merge branch 'master' into patch_hooks_improved_memory
Kosinkadink Nov 27, 2024
5994cd8
Merge branch 'master' into patch_hooks_improved_memory
Kosinkadink Nov 27, 2024
3911241
Merge branch 'master' into patch_hooks_improved_memory
Kosinkadink Nov 28, 2024
a54e734
Cleaned up text encode nodes with just a single clip.encode_from_toke…
Kosinkadink Nov 28, 2024
f48c0c1
Make sure encode_from_tokens_scheduled will respect use_clip_schedule…
Kosinkadink Nov 30, 2024
b30d5c4
Merge branch 'master' into patch_hooks_improved_memory
Kosinkadink Nov 30, 2024
bdde26b
Made nodes in nodes_hooks be marked as experimental (beta)
Kosinkadink Nov 30, 2024
000a21a
Add get_nested_additional_models for cases where additional_models co…
Kosinkadink Dec 1, 2024
dff03e5
Made finalize_default_conds area math consistent with other sampling …
Kosinkadink Dec 2, 2024
abebf91
Changed 'opt_hooks' input of Cond/Cond Pair Set Default Combine nodes…
Kosinkadink Dec 2, 2024
746edf4
Remove a couple old TODO's and a no longer necessary workaround
Kosinkadink Dec 2, 2024
a9c1fb9
Merge commit '2d5b3e0078c927ec6fcf47f80bf4035706934605' into patch_ho…
comfyanonymous Dec 2, 2024
3cc408a
Merge branch 'master' into patch_hooks_improved_memory
comfyanonymous Dec 2, 2024
22 changes: 18 additions & 4 deletions comfy/controlnet.py
@@ -36,6 +36,10 @@
 import comfy.ldm.hydit.controlnet
 import comfy.ldm.flux.controlnet
 import comfy.cldm.dit_embedder
+from typing import TYPE_CHECKING
+if TYPE_CHECKING:
+    from comfy.hooks import HookGroup
+
 
 def broadcast_image_to(tensor, target_batch_size, batched_number):
     current_batch_size = tensor.shape[0]
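
The `TYPE_CHECKING` guard above imports `HookGroup` for annotations only, avoiding a runtime import (and any import cycle between `comfy/controlnet.py` and `comfy.hooks`). A minimal sketch of the pattern, using a hypothetical module name:

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen only by static type checkers (mypy, pyright); never run at import time.
    from heavy_module import HeavyType  # hypothetical module


class Example:
    def __init__(self):
        # Quoting the annotation guarantees the name is never evaluated at
        # runtime, so it stays safe even though heavy_module was not imported.
        self.value: "HeavyType" = None


Example()  # constructs fine without heavy_module installed
```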
@@ -78,6 +82,7 @@ def __init__(self):
         self.concat_mask = False
         self.extra_concat_orig = []
         self.extra_concat = None
+        self.extra_hooks: HookGroup = None
         self.preprocess_image = lambda a: a
 
     def set_cond_hint(self, cond_hint, strength=1.0, timestep_percent_range=(0.0, 1.0), vae=None, extra_concat=[]):
@@ -115,6 +120,14 @@ def get_models(self):
         if self.previous_controlnet is not None:
             out += self.previous_controlnet.get_models()
         return out
+
+    def get_extra_hooks(self):
+        out = []
+        if self.extra_hooks is not None:
+            out.append(self.extra_hooks)
+        if self.previous_controlnet is not None:
+            out += self.previous_controlnet.get_extra_hooks()
+        return out
 
     def copy_to(self, c):
         c.cond_hint_original = self.cond_hint_original
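
`get_extra_hooks` walks the linked list that chained ControlNets form through `previous_controlnet`, collecting every attached `HookGroup`. A standalone sketch of that traversal, with strings standing in for real `HookGroup` instances:

```python
class ControlStub:
    """Reduced stand-in for ControlBase: only the chaining fields."""
    def __init__(self, previous=None):
        self.previous_controlnet = previous  # next link in the chain
        self.extra_hooks = None              # a HookGroup in the real class

    def get_extra_hooks(self):
        out = []
        if self.extra_hooks is not None:
            out.append(self.extra_hooks)
        if self.previous_controlnet is not None:
            # Recurse down the chain so hooks from every ControlNet are kept.
            out += self.previous_controlnet.get_extra_hooks()
        return out


first = ControlStub()
first.extra_hooks = "hooks_for_first"
second = ControlStub(previous=first)   # second is applied on top of first
second.extra_hooks = "hooks_for_second"
# The nearest ControlNet's hooks come first, then the rest of the chain:
assert second.get_extra_hooks() == ["hooks_for_second", "hooks_for_first"]
```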
@@ -130,6 +143,7 @@ def copy_to(self, c):
         c.strength_type = self.strength_type
         c.concat_mask = self.concat_mask
         c.extra_concat_orig = self.extra_concat_orig.copy()
+        c.extra_hooks = self.extra_hooks.clone() if self.extra_hooks else None
         c.preprocess_image = self.preprocess_image
 
     def inference_memory_requirements(self, dtype):
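
The `.clone()` in `copy_to` gives each ControlNet copy its own hook container, so hooks added to the copy later do not leak into the original; the conditional also tolerates `extra_hooks` being `None`. A sketch with a minimal stand-in for `HookGroup` (the real class lives in `comfy.hooks`):

```python
class HookGroupStub:
    """Stand-in exposing only the clone() semantics relied on above."""
    def __init__(self, hooks=None):
        self.hooks = list(hooks or [])

    def clone(self):
        # Shallow copy of the container: the clone owns its own list.
        return HookGroupStub(self.hooks)


original = HookGroupStub(["style_lora_hook"])
# Mirrors copy_to: clone when present, keep None otherwise.
copied = original.clone() if original else None
copied.hooks.append("extra_hook")
assert original.hooks == ["style_lora_hook"]  # original is untouched
```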
@@ -200,10 +214,10 @@ def __init__(self, control_model=None, global_average_pooling=False, compression
         self.concat_mask = concat_mask
         self.preprocess_image = preprocess_image
 
-    def get_control(self, x_noisy, t, cond, batched_number):
+    def get_control(self, x_noisy, t, cond, batched_number, transformer_options):
         control_prev = None
         if self.previous_controlnet is not None:
-            control_prev = self.previous_controlnet.get_control(x_noisy, t, cond, batched_number)
+            control_prev = self.previous_controlnet.get_control(x_noisy, t, cond, batched_number, transformer_options)
 
         if self.timestep_range is not None:
             if t[0] > self.timestep_range[0] or t[0] < self.timestep_range[1]:
@@ -758,10 +772,10 @@ def scale_image_to(self, width, height):
         height = math.ceil(height / unshuffle_amount) * unshuffle_amount
         return width, height
 
-    def get_control(self, x_noisy, t, cond, batched_number):
+    def get_control(self, x_noisy, t, cond, batched_number, transformer_options):
         control_prev = None
         if self.previous_controlnet is not None:
-            control_prev = self.previous_controlnet.get_control(x_noisy, t, cond, batched_number)
+            control_prev = self.previous_controlnet.get_control(x_noisy, t, cond, batched_number, transformer_options)
 
         if self.timestep_range is not None:
             if t[0] > self.timestep_range[0] or t[0] < self.timestep_range[1]:
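
Both `get_control` overrides (ControlNet and T2IAdapter) gain a `transformer_options` parameter and forward it down the `previous_controlnet` chain, so state stored there during sampling, such as hooks and cond uuids (see commits fd2d572 and 1e2777b above), reaches every link. A reduced sketch of the threading, with a hypothetical dict key:

```python
class ControlStub:
    """Reduced stand-in: only the recursion that forwards transformer_options."""
    def __init__(self, name, previous=None):
        self.name = name
        self.previous_controlnet = previous

    def get_control(self, x_noisy, t, cond, batched_number, transformer_options):
        control_prev = None
        if self.previous_controlnet is not None:
            # Every link in the chain receives the same transformer_options dict.
            control_prev = self.previous_controlnet.get_control(
                x_noisy, t, cond, batched_number, transformer_options)
        # The real implementations run the control model here; this stub just
        # records that the options reached this link.
        return {"name": self.name, "prev": control_prev,
                "saw_hooks": "hooks" in transformer_options}


chain = ControlStub("outer", previous=ControlStub("inner"))
opts = {"hooks": object()}  # hypothetical key; real contents come from sampling
result = chain.get_control(None, None, None, 1, opts)
assert result["saw_hooks"] and result["prev"]["saw_hooks"]
```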