stable-diffusion-webui/modules/hashes.py

114 lines
3.1 KiB
Python
Raw Normal View History

2023-01-14 06:56:59 +00:00
import hashlib
import json
import os.path
import filelock
2023-02-04 08:38:56 +00:00
from modules import shared
2023-07-13 20:48:14 +00:00
from modules.paths import data_path, script_path
2023-01-14 06:56:59 +00:00
# Path of the JSON file used to persist computed hashes between runs.
cache_filename = os.path.join(data_path, "cache.json")

# In-memory copy of the cache file; stays None until cache() lazily loads it.
cache_data = None
def dump_cache():
    """Serialize the in-memory cache (``cache_data``) to cache.json.

    Acquires the cache file lock for the duration of the write so that
    concurrent processes do not interleave writes.
    """
    lock = filelock.FileLock(f"{cache_filename}.lock")
    with lock, open(cache_filename, "w", encoding="utf8") as file:
        json.dump(cache_data, file, indent=4)
def cache(subsection):
    """Return the mutable dict for a named subsection of the on-disk cache.

    On first call, loads cache.json (under the file lock) into the
    module-level ``cache_data``; if the file is unreadable/corrupt, it is
    moved aside to ``tmp/cache.json`` and a fresh empty cache is started.
    The returned dict is stored back into ``cache_data``, so mutations made
    by callers are picked up by dump_cache().
    """
    global cache_data

    if cache_data is None:
        with filelock.FileLock(f"{cache_filename}.lock"):
            if not os.path.isfile(cache_filename):
                cache_data = {}
            else:
                try:
                    with open(cache_filename, "r", encoding="utf8") as file:
                        cache_data = json.load(file)
                except Exception:
                    # Bug fix: os.replace raises FileNotFoundError if the tmp
                    # directory does not exist yet — ensure it does before
                    # moving the corrupt cache aside.
                    os.makedirs(os.path.join(script_path, "tmp"), exist_ok=True)
                    os.replace(cache_filename, os.path.join(script_path, "tmp", "cache.json"))
                    print('[ERROR] issue occurred while trying to read cache.json, move current cache to tmp/cache.json and create new cache')
                    cache_data = {}

    s = cache_data.get(subsection, {})
    cache_data[subsection] = s

    return s
def calculate_sha256(filename):
    """Compute and return the SHA-256 hex digest of a file.

    Reads the file in 1 MiB chunks so arbitrarily large files can be
    hashed without loading them fully into memory.
    """
    digest = hashlib.sha256()
    chunk_size = 1024 * 1024

    with open(filename, "rb") as fp:
        while True:
            chunk = fp.read(chunk_size)
            if not chunk:
                break
            digest.update(chunk)

    return digest.hexdigest()
def sha256_from_cache(filename, title, use_addnet_hash=False):
    """Look up a previously computed sha256 for *title* in the cache.

    Returns the cached hex digest, or None when the entry is missing or
    stale (the file on disk is newer than the cached mtime).
    """
    hashes = cache("hashes-addnet" if use_addnet_hash else "hashes")
    ondisk_mtime = os.path.getmtime(filename)

    entry = hashes.get(title)
    if entry is None:
        return None

    cached_sha256 = entry.get("sha256", None)
    cached_mtime = entry.get("mtime", 0)

    # A missing digest or a file modified after it was hashed invalidates the entry.
    if cached_sha256 is None or ondisk_mtime > cached_mtime:
        return None

    return cached_sha256
def sha256(filename, title, use_addnet_hash=False):
    """Return the sha256 hex digest for *filename*, using the cache when possible.

    On a cache miss the hash is computed (addnet/kohya-ss style when
    use_addnet_hash is set, plain file hash otherwise), stored in the cache
    keyed by *title* together with the file's mtime, and the cache is
    written to disk. Returns None when hashing is disabled via --no-hashing.
    """
    hashes = cache("hashes-addnet") if use_addnet_hash else cache("hashes")

    sha256_value = sha256_from_cache(filename, title, use_addnet_hash)
    if sha256_value is not None:
        return sha256_value

    if shared.cmd_opts.no_hashing:
        return None

    # Bug fix: the f-string had no placeholder and printed the literal
    # "(unknown)" — report the actual file being hashed.
    print(f"Calculating sha256 for {filename}: ", end='')
    if use_addnet_hash:
        with open(filename, "rb") as file:
            sha256_value = addnet_hash_safetensors(file)
    else:
        sha256_value = calculate_sha256(filename)
    print(sha256_value)

    hashes[title] = {
        "mtime": os.path.getmtime(filename),
        "sha256": sha256_value,
    }

    dump_cache()

    return sha256_value
def addnet_hash_safetensors(b):
    """kohya-ss hash for safetensors from https://github.com/kohya-ss/sd-scripts/blob/main/library/train_util.py"""
    digest = hashlib.sha256()
    chunk_size = 1024 * 1024

    # A safetensors file begins with an 8-byte little-endian header length;
    # the hash covers only the tensor data that follows the JSON header.
    b.seek(0)
    header_size = int.from_bytes(b.read(8), "little")
    b.seek(header_size + 8)

    while True:
        chunk = b.read(chunk_size)
        if not chunk:
            break
        digest.update(chunk)

    return digest.hexdigest()