multimodalart HF staff committed on
Commit
d94d7b7
1 Parent(s): 1d162bd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -25,12 +25,12 @@ pipe = pipe.to(device)
25
  @spaces.GPU
26
  def run(prompt, negative_prompt=None, guidance_scale=7.0, pag_scale=3.0, pag_layers=["mid"], randomize_seed=True, seed=42, progress=gr.Progress(track_tqdm=True)):
27
  prompt = prompt.strip()
28
- negative_prompt = negative_prompt.strip()
 
29
  if(randomize_seed):
30
  seed = random.randint(0, sys.maxsize)
31
- if(negative_prompt == ""):
32
- negative_prompt = None
33
- if(prompt == "" and negative_prompt == ""):
34
  guidance_scale = 0.0
35
 
36
  generator = torch.Generator(device="cuda").manual_seed(seed)
 
25
  @spaces.GPU
26
  def run(prompt, negative_prompt=None, guidance_scale=7.0, pag_scale=3.0, pag_layers=["mid"], randomize_seed=True, seed=42, progress=gr.Progress(track_tqdm=True)):
27
  prompt = prompt.strip()
28
+ negative_prompt = negative_prompt.strip() if negative_prompt and negative_prompt.strip() else None
29
+ print("negative prompt", negative_prompt)
30
  if(randomize_seed):
31
  seed = random.randint(0, sys.maxsize)
32
+
33
+ if not prompt and not negative_prompt:
 
34
  guidance_scale = 0.0
35
 
36
  generator = torch.Generator(device="cuda").manual_seed(seed)