nick911 committed
Commit 3521ec1
1 Parent(s): 42db233

Update app.py

Files changed (1): app.py (+8 -6)
app.py CHANGED
@@ -19,12 +19,14 @@ pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config, times
 def generate(prompt):
     image = pipe(prompt, num_inference_steps=1, guidance_scale=0).images[0]
     return image
-# Ensure using the same inference steps as the loaded model and CFG set to 0.
 
-# @spaces.GPU
-# def greet(prompt):
-#     image = pipe(prompt, num_inference_steps=1, guidance_scale=0).images[0].save("output.png")
-#     return image
 output_image = gr.Image(type="pil")
 demo = gr.Interface(fn=generate, inputs="text", outputs=output_image)
-demo.launch()
+
+if __name__ == "__main__":
+    unet = UNet2DConditionModel.from_config(base, subfolder="unet").to("cuda", torch.float16)
+    unet.load_state_dict(load_file(hf_hub_download(repo, ckpt), device="cuda"))
+    pipe = StableDiffusionXLPipeline.from_pretrained(base, unet=unet, torch_dtype=torch.float16, variant="fp16").to("cuda")
+    # Ensure sampler uses "trailing" timesteps and "sample" prediction type.
+    pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config, timestep_spacing="trailing", prediction_type="sample")
+    demo.launch()
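
For context, the hunk above relies on imports and on `base`, `repo`, and `ckpt` definitions that sit earlier in app.py and are not part of this diff. Below is a minimal self-contained sketch of the same setup; the model ids are placeholder assumptions rather than the Space's actual values, and it assumes a CUDA GPU is available.

# Minimal sketch of the setup this hunk relies on. The ids assigned to `base`,
# `repo`, and `ckpt` are assumed placeholders, not taken from this commit.
import torch
import gradio as gr
from diffusers import StableDiffusionXLPipeline, UNet2DConditionModel, EulerDiscreteScheduler
from huggingface_hub import hf_hub_download
from safetensors.torch import load_file

base = "stabilityai/stable-diffusion-xl-base-1.0"   # assumed SDXL base checkpoint
repo = "ByteDance/SDXL-Lightning"                    # assumed repo holding the distilled UNet
ckpt = "sdxl_lightning_1step_unet_x0.safetensors"    # assumed 1-step (x0-prediction) weights

# Build a UNet from the base model's config, then load the distilled weights into it.
unet = UNet2DConditionModel.from_config(base, subfolder="unet").to("cuda", torch.float16)
unet.load_state_dict(load_file(hf_hub_download(repo, ckpt), device="cuda"))

# Assemble the SDXL pipeline around the replaced UNet.
pipe = StableDiffusionXLPipeline.from_pretrained(base, unet=unet, torch_dtype=torch.float16, variant="fp16").to("cuda")

# "trailing" spacing makes the single inference step start at the final (fully noised)
# timestep; prediction_type="sample" tells the scheduler the UNet predicts the denoised
# image directly rather than the noise.
pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config, timestep_spacing="trailing", prediction_type="sample")

def generate(prompt):
    # One denoising step with CFG disabled (guidance_scale=0), matching the distilled model.
    image = pipe(prompt, num_inference_steps=1, guidance_scale=0).images[0]
    return image

output_image = gr.Image(type="pil")
demo = gr.Interface(fn=generate, inputs="text", outputs=output_image)

if __name__ == "__main__":
    demo.launch()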