Skip to content

Commit

Permalink
Change clip workaround for SD3
Browse files — browse the repository at this point in the history
  • Loading branch information
city96 committed Dec 22, 2024
1 parent 8d1b56c commit eaffa0a
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 10 deletions.
9 changes: 0 additions & 9 deletions nodes.py
Original file line number Diff line number Diff line change
Expand Up @@ -232,15 +232,6 @@ def load_patcher(self, clip_paths, clip_type, clip_data):
embedding_directory = folder_paths.get_folder_paths("embeddings"),
)
clip.patcher = GGUFModelPatcher.clone(clip.patcher)

# for some reason this is just missing in some SAI checkpoints
if getattr(clip.cond_stage_model, "clip_l", None) is not None:
if getattr(clip.cond_stage_model.clip_l.transformer.text_projection.weight, "tensor_shape", None) is None:
clip.cond_stage_model.clip_l.transformer.text_projection = comfy.ops.manual_cast.Linear(768, 768)
if getattr(clip.cond_stage_model, "clip_g", None) is not None:
if getattr(clip.cond_stage_model.clip_g.transformer.text_projection.weight, "tensor_shape", None) is None:
clip.cond_stage_model.clip_g.transformer.text_projection = comfy.ops.manual_cast.Linear(1280, 1280)

return clip

def load_clip(self, clip_name, type="stable_diffusion"):
Expand Down
7 changes: 6 additions & 1 deletion ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,12 @@ def ggml_load_from_state_dict(self, state_dict, prefix, local_metadata, strict,
elif k[prefix_len:] == "bias" and v is not None:
self.bias = torch.nn.Parameter(v, requires_grad=False)
else:
missing_keys.append(k)
unexpected_keys.append(k)
# For Linear layer with missing weight
if self.weight is None and isinstance(self, torch.nn.Linear):
v = torch.zeros(self.in_features, self.out_features)
self.weight = torch.nn.Parameter(v, requires_grad=False)
missing_keys.append(prefix+"weight")

def _save_to_state_dict(self, *args, **kwargs):
if self.is_ggml_quantized():
Expand Down

0 comments on commit eaffa0a

Please sign in to comment.