Mirror of https://github.com/AUTOMATIC1111/stable-diffusion-webui.git, synced 2024-06-07 21:20:49 +00:00
Fix params.txt saving for infotexts modified by process_batch
This commit is contained in:
parent 3715ece0ad
commit b20737815a
@@ -585,10 +585,6 @@ def process_images_inner(p: StableDiffusionProcessing) -> Processed:
         if not p.disable_extra_networks:
             extra_networks.activate(p, extra_network_data)
 
-        with open(os.path.join(paths.data_path, "params.txt"), "w", encoding="utf8") as file:
-            processed = Processed(p, [], p.seed, "")
-            file.write(processed.infotext(p, 0))
-
         if state.job_count == -1:
             state.job_count = p.n_iter
 
@@ -614,6 +610,15 @@ def process_images_inner(p: StableDiffusionProcessing) -> Processed:
             if p.scripts is not None:
                 p.scripts.process_batch(p, batch_number=n, prompts=prompts, seeds=seeds, subseeds=subseeds)
 
+            # params.txt should be saved after scripts.process_batch, since the
+            # infotext could be modified by that callback
+            # Example: a wildcard processed by process_batch sets an extra model
+            # strength, which is saved as "Model Strength: 1.0" in the infotext
+            if n == 0:
+                with open(os.path.join(paths.data_path, "params.txt"), "w", encoding="utf8") as file:
+                    processed = Processed(p, [], p.seed, "")
+                    file.write(processed.infotext(p, 0))
+
             uc = get_conds_with_caching(prompt_parser.get_learned_conditioning, negative_prompts, p.steps, cached_uc)
             c = get_conds_with_caching(prompt_parser.get_multicond_learned_conditioning, prompts, p.steps, cached_c)
 
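For context, the comments added in the hunk above describe a script callback that changes the infotext during process_batch. The sketch below is an assumption for illustration, not part of this commit: it shows the kind of extension-style script that motivates the change, using the webui's Script class and the p.extra_generation_params dict; the class name and the "Model Strength" entry are hypothetical.

# Minimal sketch (assumption, not from this commit): an extension-style script
# whose process_batch callback extends the infotext, which is why params.txt
# must be written after the callback has run.
from modules import scripts


class ModelStrengthExample(scripts.Script):
    def title(self):
        return "Model strength example"

    def show(self, is_img2img):
        # Run without needing to be selected in the scripts dropdown.
        return scripts.AlwaysVisible

    def process_batch(self, p, *args, **kwargs):
        # Entries added here are merged into the generation infotext, so a
        # params.txt written before this callback would not contain them.
        p.extra_generation_params["Model Strength"] = 1.0

With the change above, params.txt is written on the first batch (n == 0) only after scripts.process_batch has run, so an entry like the one in this sketch ends up in the saved infotext.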