KingNish committed
Commit 9413574
1 Parent(s): 647a1db

Update app.py

Files changed (1)
  1. app.py +3 -3
app.py CHANGED
@@ -5,7 +5,7 @@ import spaces
 import torch
 import time
 from diffusers import DiffusionPipeline, AutoencoderTiny
-from custom_pipeline import HighSpeedFluxPipeline
+from custom_pipeline import FLUXPipelineWithIntermediateOutputs
 
 # Constants
 MAX_SEED = np.iinfo(np.int32).max
@@ -16,7 +16,7 @@ DEFAULT_INFERENCE_STEPS = 1
 
 # Device and model setup
 dtype = torch.float16
-pipe = HighSpeedFluxPipeline.from_pretrained(
+pipe = FLUXPipelineWithIntermediateOutputs.from_pretrained(
     "black-forest-labs/FLUX.1-schnell", torch_dtype=dtype
 ).to("cuda")
 pipe.vae = AutoencoderTiny.from_pretrained("madebyollin/taef1", torch_dtype=torch.float16).to("cuda")
@@ -24,7 +24,7 @@ torch.cuda.empty_cache()
 
 # Inference function
 @spaces.GPU(duration=25)
-def generate_image(prompt, seed=42, width=DEFAULT_WIDTH, height=DEFAULT_HEIGHT, randomize_seed=False, num_inference_steps=2, progress=gr.Progress(track_tqdm=True)):
+def generate_image(prompt, seed=24, width=DEFAULT_WIDTH, height=DEFAULT_HEIGHT, randomize_seed=False, num_inference_steps=2, progress=gr.Progress(track_tqdm=True)):
     if randomize_seed:
         seed = random.randint(0, MAX_SEED)
     generator = torch.Generator().manual_seed(int(float(seed)))
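
Note: the renamed class suggests a pipeline that yields intermediate denoising results rather than only the final image. Below is a minimal sketch of how such a generator-style pipeline could be consumed so the Gradio app streams each partial frame; the method name generate_images and its yield behavior are hypothetical illustrations, not the actual custom_pipeline API.

import time

def stream_images(pipe, prompt, seed, width, height, steps, generator):
    # Hypothetical sketch: assumes the pipeline exposes a generator method that
    # yields partially denoised images (the real method in custom_pipeline.py may differ).
    start = time.time()
    for img in pipe.generate_images(
        prompt=prompt,
        width=width,
        height=height,
        num_inference_steps=steps,
        generator=generator,
    ):
        latency = f"Latency: {time.time() - start:.2f} seconds"
        # Each yielded frame can be streamed to the Gradio UI alongside the seed.
        yield img, seed, latency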