vilarin committed
Commit: b351bc6
Parent: 4b9fa98

Update app.py

Files changed (1):
  1. app.py: +12 -1
app.py CHANGED
@@ -7,6 +7,7 @@ from diffusers import StableDiffusion3Pipeline, AutoencoderKL, SD3Transformer2DModel
 import spaces
 from PIL import Image
 import requests
+import transformers
 from translatepy import Translator
 
 os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"
@@ -40,11 +41,21 @@ vae = AutoencoderKL.from_pretrained(
     torch_dtype=torch.float16,
 )
 
+transformer = SD3Transformer2DModel.from_pretrained(
+    repo,
+    subfolder="transformer",
+    torch_dtype=torch.float16,
+)
 
+text_encoder_3 = T5EncoderModel.from_pretrained(
+    repo,
+    subfolder="text_encoder_3",
+    torch_dtype=torch.float16,
+)
 
 # Ensure model and scheduler are initialized in GPU-enabled function
 if torch.cuda.is_available():
-    pipe = StableDiffusion3Pipeline.from_pretrained(repo, vae=vae, torch_dtype=torch.float16).to("cuda")
+    pipe = StableDiffusion3Pipeline.from_pretrained(repo, vae=vae, transformer=transformer, text_encoder_3=text_encoder_3, torch_dtype=torch.float16).to("cuda")
 
     pipe.scheduler = FlowMatchEulerDiscreteScheduler.from_config(pipe.scheduler.config)
 
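For context, a minimal usage sketch of the pipeline assembled above, as a ZeroGPU Space would typically call it. The generate() wrapper, its parameter names, and the default values are illustrative assumptions, not code from the rest of app.py; only pipe and the import of spaces come from this diff, and the @spaces.GPU decorator is the usual ZeroGPU pattern implied by that import.

# Minimal usage sketch, not part of the commit. It assumes the `pipe` object
# built above; the generate() wrapper and its defaults are hypothetical.
import spaces


@spaces.GPU  # ZeroGPU pattern implied by the `import spaces` line in app.py
def generate(prompt: str, steps: int = 28, guidance: float = 7.0):
    # Standard diffusers text-to-image call; the SD3 pipeline output exposes
    # a list of PIL images via .images.
    result = pipe(
        prompt=prompt,
        num_inference_steps=steps,
        guidance_scale=guidance,
    )
    return result.images[0]


# Example call (in a Space this is usually wired to a Gradio event handler):
# image = generate("a watercolor painting of a lighthouse at dawn")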