Spaces:
Running
on
Zero
Running
on
Zero
Muhammadreza
committed on
Commit
•
dd360f6
1
Parent(s):
3c73edd
Update app.py
Browse files
app.py
CHANGED
@@ -11,13 +11,15 @@ pipe.scheduler = DPMSolverSinglestepScheduler.from_config(pipe.scheduler.config,
|
|
11 |
torch.cuda.empty_cache()
|
12 |
|
13 |
@spaces.GPU
|
14 |
-
def genie (prompt, negative_prompt, steps, seed):
|
15 |
generator = np.random.seed(0) if seed == 0 else torch.manual_seed(seed)
|
16 |
int_image = pipe(prompt=prompt, negative_prompt=negative_prompt, generator=generator, num_inference_steps=steps, guidance_scale=3.0).images[0]
|
17 |
return int_image
|
18 |
|
19 |
gr.Interface(fn=genie, inputs=[gr.Textbox(label='What you want the AI to generate. 75 Token Limit.'),
|
20 |
gr.Textbox(label='What you DO NOT want the AI to generate. 75 Token Limit.'),
|
|
|
|
|
21 |
gr.Slider(1, maximum=8, value=6, step=1, label='Number of Iterations'),
|
22 |
gr.Slider(minimum=0, step=1, maximum=999999999999999999, randomize=True),
|
23 |
],
|
|
|
11 |
torch.cuda.empty_cache()
|
12 |
|
13 |
@spaces.GPU
def genie(prompt, negative_prompt, width, height, steps, seed):
    """Generate one image with the module-level diffusers `pipe`.

    Args:
        prompt: Text describing what the AI should generate.
        negative_prompt: Text describing what to avoid.
        width: Output image width in pixels.
        height: Output image height in pixels.
        steps: Number of denoising inference steps.
        seed: RNG seed; 0 means "randomize".

    Returns:
        The first PIL image produced by the pipeline.
    """
    # NOTE(review): np.random.seed(0) returns None, so seed == 0 yields
    # generator=None and the pipeline picks its own random seed — presumably
    # the intended "randomize" behavior; confirm with the author.
    generator = np.random.seed(0) if seed == 0 else torch.manual_seed(seed)
    # Fix: width/height were accepted (and exposed as UI sliders in this
    # commit) but never forwarded to the pipeline, so the resolution sliders
    # had no effect. Forward them explicitly.
    int_image = pipe(
        prompt=prompt,
        negative_prompt=negative_prompt,
        generator=generator,
        num_inference_steps=steps,
        guidance_scale=3.0,
        width=width,
        height=height,
    ).images[0]
    return int_image
|
18 |
|
19 |
gr.Interface(fn=genie, inputs=[gr.Textbox(label='What you want the AI to generate. 75 Token Limit.'),
|
20 |
gr.Textbox(label='What you DO NOT want the AI to generate. 75 Token Limit.'),
|
21 |
+
gr.Slider(768, maximum=1024, value=768, step=16, label='Width'),
|
22 |
+
gr.Slider(768, maximum=1024, value=768, step=16, label='Height'),
|
23 |
gr.Slider(1, maximum=8, value=6, step=1, label='Number of Iterations'),
|
24 |
gr.Slider(minimum=0, step=1, maximum=999999999999999999, randomize=True),
|
25 |
],
|