Spaces:
Running
on
Zero
Running
on
Zero
Muhammadreza
committed on
Commit
•
95f2997
1
Parent(s):
dd360f6
Update app.py
Browse files
app.py
CHANGED
@@ -13,7 +13,7 @@ torch.cuda.empty_cache()
|
|
13 |
@spaces.GPU
|
14 |
def genie (prompt, negative_prompt, width, height, steps, seed):
|
15 |
generator = np.random.seed(0) if seed == 0 else torch.manual_seed(seed)
|
16 |
-
int_image = pipe(prompt=prompt, negative_prompt=negative_prompt, generator=generator, num_inference_steps=steps, guidance_scale=3.0).images[0]
|
17 |
return int_image
|
18 |
|
19 |
gr.Interface(fn=genie, inputs=[gr.Textbox(label='What you want the AI to generate. 75 Token Limit.'),
|
|
|
13 |
@spaces.GPU
|
14 |
def genie (prompt, negative_prompt, width, height, steps, seed):
|
15 |
generator = np.random.seed(0) if seed == 0 else torch.manual_seed(seed)
|
16 |
+
int_image = pipe(prompt=prompt, negative_prompt=negative_prompt, width=width, height=height, generator=generator, num_inference_steps=steps, guidance_scale=3.0).images[0]
|
17 |
return int_image
|
18 |
|
19 |
gr.Interface(fn=genie, inputs=[gr.Textbox(label='What you want the AI to generate. 75 Token Limit.'),
|