Spaces: Running on Zero
Commit • b31f6c0
Parent(s): f0435a3
Debug and not add no prompters to the queue
app.py CHANGED
@@ -13,6 +13,7 @@ from diffusers import (
     DPMSolverMultistepScheduler, # <-- Added import
     EulerDiscreteScheduler # <-- Added import
 )
+import time
 from share_btn import community_icon_html, loading_icon_html, share_js
 from gallery_history import fetch_gallery_history, show_gallery_history
 from illusion_style import css
@@ -89,6 +90,10 @@ def upscale(samples, upscale_method, scale_by):
     s = common_upscale(samples["images"], width, height, upscale_method, "disabled")
     return (s)

+def check_prompt(prompt: str):
+    if prompt is None or prompt == "":
+        raise gr.Error("Prompt is required")
+
 # Inference function
 def inference(
     control_image: Image.Image,
@@ -103,8 +108,10 @@ def inference(
     sampler = "DPM++ Karras SDE",
     progress = gr.Progress(track_tqdm=True)
 ):
-
-
+    start_time = time.time()
+    start_time_struct = time.localtime(start_time)
+    start_time_formatted = time.strftime("%H:%M:%S", start_time_struct)
+    print(f"Inference started at {start_time_formatted}")

     # Generate the initial image
     #init_image = init_pipe(prompt).images[0]
@@ -143,6 +150,10 @@ def inference(
         control_guidance_end=float(control_guidance_end),
         controlnet_conditioning_scale=float(controlnet_conditioning_scale)
     )
+    end_time = time.time()
+    end_time_struct = time.localtime(end_time)
+    end_time_formatted = time.strftime("%H:%M:%S", end_time_struct)
+    print(f"Inference ended at {end_time_formatted}, taking {end_time-start_time}s")
     return out_image["images"][0], gr.update(visible=True), my_seed

     #return out
@@ -186,6 +197,10 @@ with gr.Blocks(css=css) as app:

     history = show_gallery_history()
     prompt.submit(
+        check_prompt,
+        inputs=[prompt],
+        queue=False
+    ).then(
         inference,
         inputs=[control_image, prompt, negative_prompt, guidance_scale, controlnet_conditioning_scale, control_start, control_end, strength, seed, sampler],
         outputs=[result_image, share_group, used_seed]
@@ -193,6 +208,10 @@ with gr.Blocks(css=css) as app:
         fn=fetch_gallery_history, inputs=[prompt, result_image], outputs=history, queue=False
     )
     run_btn.click(
+        check_prompt,
+        inputs=[prompt],
+        queue=False
+    ).then(
         inference,
         inputs=[control_image, prompt, negative_prompt, guidance_scale, controlnet_conditioning_scale, control_start, control_end, strength, seed, sampler],
         outputs=[result_image, share_group, used_seed]
@@ -203,4 +222,4 @@ with gr.Blocks(css=css) as app:
 app.queue(max_size=20)

 if __name__ == "__main__":
-    app.launch(max_threads=
+    app.launch(max_threads=240)
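The wiring change above follows Gradio's event-chaining pattern: the cheap prompt check is attached directly to the submit/click event with queue=False, and the expensive inference call is chained behind it with .then(), so empty-prompt requests fail fast instead of taking a queue slot. Below is a minimal, self-contained sketch of that pattern, not the Space's actual code: it assumes a recent Gradio install, and slow_step plus the component names are placeholders standing in for the real inference function and UI.

import time

import gradio as gr


def check_prompt(prompt: str):
    # Fail fast: raising gr.Error aborts the event and shows the message in the UI.
    if prompt is None or prompt == "":
        raise gr.Error("Prompt is required")


def slow_step(prompt: str) -> str:
    # Placeholder for the real inference call; logs wall-clock time the way the commit does.
    start_time = time.time()
    result = prompt.upper()
    print(f"Inference took {time.time() - start_time:.2f}s")
    return result


with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    output = gr.Textbox(label="Output")
    run_btn = gr.Button("Run")

    # The validation step runs with queue=False, so an empty prompt is rejected
    # immediately; only the chained slow_step goes through the queue.
    run_btn.click(
        check_prompt, inputs=[prompt], queue=False
    ).then(
        slow_step, inputs=[prompt], outputs=[output]
    )

demo.queue(max_size=20)

if __name__ == "__main__":
    demo.launch()

One hedged caveat on the design: in recent Gradio releases .then() runs even when the preceding step raised an error, while .success() only continues when the check passed, so which of the two fully keeps failed submissions out of the queue depends on the Gradio version the Space pins.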