import copy
import random
import shlex

import modules.scripts as scripts
import gradio as gr

from modules import sd_samplers, errors, sd_models
from modules.processing import Processed, process_images
from modules.shared import state


def process_model_tag(tag):
    info = sd_models.get_closet_checkpoint_match(tag)
    assert info is not None, f'Unknown checkpoint: {tag}'
    return info.name


def process_string_tag(tag):
    return tag


def process_int_tag(tag):
    return int(tag)


def process_float_tag(tag):
    return float(tag)


def process_boolean_tag(tag):
    return tag == "true"


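# Maps each supported "--<option>" name to the converter applied to its value.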
prompt_tags = {
    "sd_model": process_model_tag,
    "outpath_samples": process_string_tag,
    "outpath_grids": process_string_tag,
    "prompt_for_display": process_string_tag,
    "prompt": process_string_tag,
    "negative_prompt": process_string_tag,
    "styles": process_string_tag,
    "seed": process_int_tag,
    "subseed_strength": process_float_tag,
    "subseed": process_int_tag,
    "seed_resize_from_h": process_int_tag,
    "seed_resize_from_w": process_int_tag,
    "sampler_index": process_int_tag,
    "sampler_name": process_string_tag,
    "batch_size": process_int_tag,
    "n_iter": process_int_tag,
    "steps": process_int_tag,
    "cfg_scale": process_float_tag,
    "width": process_int_tag,
    "height": process_int_tag,
    "restore_faces": process_boolean_tag,
    "tiling": process_boolean_tag,
    "do_not_save_samples": process_boolean_tag,
    "do_not_save_grid": process_boolean_tag,
}


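# Parses a single "--option value ..." line into a dict of processing overrides.
# Illustrative example (values made up for this comment): the line
#   --prompt "a lone tree" --steps 20 --cfg_scale 7.5
# would produce {"prompt": "a lone tree", "steps": 20, "cfg_scale": 7.5}.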
def cmdargs(line):
    args = shlex.split(line)
    pos = 0
    res = {}

    while pos < len(args):
        arg = args[pos]

        assert arg.startswith("--"), f'must start with "--": {arg}'
        assert pos + 1 < len(args), f'missing argument for command line option {arg}'

        tag = arg[2:]

        if tag in ("prompt", "negative_prompt"):
            # Prompts may span several shlex tokens; join everything up to the next "--option".
            pos += 1
            prompt = args[pos]
            pos += 1
            while pos < len(args) and not args[pos].startswith("--"):
                prompt += " "
                prompt += args[pos]
                pos += 1
            res[tag] = prompt
            continue

        func = prompt_tags.get(tag, None)
        assert func, f'unknown commandline option: {arg}'

        val = args[pos + 1]
        if tag == "sampler_name":
            val = sd_samplers.samplers_map.get(val.lower(), None)

        res[tag] = func(val)

        pos += 2

    return res


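# Gradio callback for the file upload below; its three return values feed
# outputs=[file, prompt_txt, prompt_txt]: clear the upload widget, replace the
# textbox contents (or leave them unchanged when no file is given), and expand
# the textbox to 7 lines.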
def load_prompt_file(file):
    if file is None:
        return None, gr.update(), gr.update(lines=7)
    else:
        lines = [x.strip() for x in file.decode('utf8', errors='ignore').split("\n")]
        return None, "\n".join(lines), gr.update(lines=7)


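# Script implementation picked up by the WebUI's script loader; it is listed in
# the Script dropdown under the title returned by title().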
class Script(scripts.Script):
    def title(self):
        return "Prompts from file or textbox"

    def ui(self, is_img2img):
        checkbox_iterate = gr.Checkbox(label="Iterate seed every line", value=False, elem_id=self.elem_id("checkbox_iterate"))
        checkbox_iterate_batch = gr.Checkbox(label="Use same random seed for all lines", value=False, elem_id=self.elem_id("checkbox_iterate_batch"))

        prompt_txt = gr.Textbox(label="List of prompt inputs", lines=1, elem_id=self.elem_id("prompt_txt"))
        file = gr.File(label="Upload prompt inputs", type='binary', elem_id=self.elem_id("file"))

        file.change(fn=load_prompt_file, inputs=[file], outputs=[file, prompt_txt, prompt_txt], show_progress=False)

        # We start at one line. When the text changes, we jump to seven lines, or two lines if no \n.
        # We don't shrink back to 1, because that causes the control to ignore [enter], and it may
        # be unclear to the user that shift-enter is needed.
        prompt_txt.change(lambda tb: gr.update(lines=7) if ("\n" in tb) else gr.update(lines=2), inputs=[prompt_txt], outputs=[prompt_txt], show_progress=False)

        return [checkbox_iterate, checkbox_iterate_batch, prompt_txt]

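    # Called once per generation; builds one processing job per non-empty line of
    # the textbox and runs them sequentially.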
    def run(self, p, checkbox_iterate, checkbox_iterate_batch, prompt_txt: str):
        lines = [x for x in (x.strip() for x in prompt_txt.splitlines()) if x]

        p.do_not_save_grid = True

        job_count = 0
        jobs = []

        for line in lines:
            # Lines containing "--" are parsed as command-line style overrides; if
            # parsing fails, the whole line is used as the prompt instead.
            if "--" in line:
                try:
                    args = cmdargs(line)
                except Exception:
                    errors.report(f"Error parsing line {line} as commandline", exc_info=True)
                    args = {"prompt": line}
            else:
                args = {"prompt": line}

            job_count += args.get("n_iter", p.n_iter)
            jobs.append(args)

        print(f"Will process {len(lines)} lines in {job_count} jobs.")

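        # When either checkbox is enabled and the seed is random (-1), fix a concrete
        # starting seed up front so it can be kept or iterated per line below.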
        if (checkbox_iterate or checkbox_iterate_batch) and p.seed == -1:
            p.seed = random.randrange(4294967294)

        state.job_count = job_count

        images = []
        all_prompts = []
        infotexts = []
        for args in jobs:
            state.job = f"{state.job_no + 1} out of {state.job_count}"

            copy_p = copy.copy(p)
            for k, v in args.items():
                # Checkpoint switches go through override_settings; every other tag is a
                # plain attribute of the copied processing object.
                if k == "sd_model":
                    copy_p.override_settings['sd_model_checkpoint'] = v
                else:
                    setattr(copy_p, k, v)

            proc = process_images(copy_p)
            images += proc.images

            if checkbox_iterate:
                p.seed += p.batch_size * p.n_iter
            all_prompts += proc.all_prompts
            infotexts += proc.infotexts

        return Processed(p, images, p.seed, "", all_prompts=all_prompts, infotexts=infotexts)