From 095830e1e8a99276b3055c720981e89fc6af853d Mon Sep 17 00:00:00 2001
From: AUTOMATIC <16777216c@gmail.com>
Date: Mon, 12 Sep 2022 19:13:03 +0300
Subject: [PATCH] Prompts from file. How to? #248

---
 scripts/prompts_from_file.py | 42 ++++++++++++++++++++++++++++++++++++
 1 file changed, 42 insertions(+)
 create mode 100644 scripts/prompts_from_file.py

diff --git a/scripts/prompts_from_file.py b/scripts/prompts_from_file.py
new file mode 100644
index 000000000..da2ddd54e
--- /dev/null
+++ b/scripts/prompts_from_file.py
@@ -0,0 +1,42 @@
+import math
+import os
+import sys
+import traceback
+
+import modules.scripts as scripts
+import gradio as gr
+
+from modules.processing import Processed, process_images
+from PIL import Image
+from modules.shared import opts, cmd_opts, state
+
+
+class Script(scripts.Script):
+    def title(self):
+        return "Prompts from file"
+
+    def ui(self, is_img2img):
+        file = gr.File(label="File with inputs", type='bytes')
+
+        return [file]
+
+    def run(self, p, data: bytes):
+        lines = [x.strip() for x in data.decode('utf8', errors='ignore').split("\n")]
+        lines = [x for x in lines if len(x) > 0]
+
+        batch_count = math.ceil(len(lines) / p.batch_size)
+        print(f"Will process {len(lines)} images in {batch_count} batches.")
+
+        p.batch_count = 1
+        p.do_not_save_grid = True
+
+        state.job_count = batch_count
+
+        images = []
+        for batch_no in range(batch_count):
+            state.job = f"{batch_no} out of {batch_count}"
+            p.prompt = lines[batch_no*p.batch_size:(batch_no+1)*p.batch_size]
+            proc = process_images(p)
+            images += proc.images
+
+        return Processed(p, images, p.seed, "")
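
For anyone coming from #248: the script reads the uploaded file as raw bytes, keeps every non-empty line as a prompt, and feeds process_images() one list of prompts per batch (the patch relies on p.prompt accepting a list, producing one image per entry). Below is a minimal standalone sketch of that parsing and batching logic, with a hypothetical prompts.txt and a fixed batch_size standing in for the uploaded bytes and p.batch_size; it is not part of the patch.

    import math

    # Standalone sketch of the script's parsing/batching, outside the webui.
    # "prompts.txt" is a hypothetical stand-in for the uploaded file's bytes.
    with open("prompts.txt", "rb") as f:
        data = f.read()

    # Same parsing as run(): decode bytes, strip whitespace, drop empty lines.
    lines = [x.strip() for x in data.decode('utf8', errors='ignore').split("\n")]
    lines = [x for x in lines if len(x) > 0]

    batch_size = 4  # stands in for p.batch_size
    batch_count = math.ceil(len(lines) / batch_size)
    print(f"Will process {len(lines)} images in {batch_count} batches.")

    for batch_no in range(batch_count):
        # Each slice holds up to batch_size prompts; the script assigns this
        # list to p.prompt so process_images generates one image per prompt.
        batch = lines[batch_no * batch_size:(batch_no + 1) * batch_size]
        print(f"batch {batch_no + 1} of {batch_count}: {batch}")

So a plain text file with one prompt per line is all the script expects; the last batch is simply shorter when the line count is not a multiple of the batch size.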