Add filename and sha256 outputs to UnetLoaderGGUF #192

Open
wants to merge 1 commit into base: main
20 changes: 18 additions & 2 deletions nodes.py
@@ -2,6 +2,8 @@
 import torch
 import logging
 import collections
+import hashlib
+from functools import lru_cache

 import comfy.sd
 import comfy.utils
@@ -125,11 +127,21 @@ def INPUT_TYPES(s):
             }
         }

-    RETURN_TYPES = ("MODEL",)
+    RETURN_TYPES = ("MODEL", "STRING", "STRING")
+    RETURN_NAMES = ("MODEL", "filename", "sha256")
     FUNCTION = "load_unet"
     CATEGORY = "bootleg"
     TITLE = "Unet Loader (GGUF)"

+    @staticmethod
+    @lru_cache(maxsize=None)
+    def hash_file(file_path):
+        hash_func = hashlib.sha256()
+        with open(file_path, "rb") as file:
+            while chunk := file.read(256 * 1024):
+                hash_func.update(chunk)
+        return hash_func.hexdigest()
+
     def load_unet(self, unet_name, dequant_dtype=None, patch_dtype=None, patch_on_device=None):
         ops = GGMLOps()

@@ -158,7 +170,11 @@ def load_unet(self, unet_name, dequant_dtype=None, patch_dtype=None, patch_on_device=None):
             raise RuntimeError("ERROR: Could not detect model type of: {}".format(unet_path))
         model = GGUFModelPatcher.clone(model)
         model.patch_on_device = patch_on_device
-        return (model,)
+
+        hash = self.hash_file(unet_path)
+
+        return (model, unet_name, hash)
+

 class UnetLoaderGGUFAdvanced(UnetLoaderGGUF):
     @classmethod
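
For anyone who wants to sanity-check the new "sha256" output outside ComfyUI, the sketch below reproduces the same hashing pattern the diff adds: a streaming SHA-256 over 256 KiB chunks, memoized per path with lru_cache. The model path in the example is a placeholder, not something defined by this PR.

import hashlib
from functools import lru_cache

@lru_cache(maxsize=None)
def hash_file(file_path: str) -> str:
    """Stream the file in 256 KiB chunks so multi-GB GGUF files are never read into RAM at once."""
    hash_func = hashlib.sha256()
    with open(file_path, "rb") as file:
        while chunk := file.read(256 * 1024):
            hash_func.update(chunk)
    return hash_func.hexdigest()

if __name__ == "__main__":
    # Placeholder path: point this at a local .gguf file and compare the digest
    # against the node's "sha256" output or a published checksum.
    print(hash_file("models/unet/example-model.gguf"))

One design note: because lru_cache keys on the path string, a workflow that loads the same GGUF repeatedly only pays the hashing cost once per process, but a file replaced on disk under the same name will keep returning the stale cached digest until the process restarts.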