Spaces: Running on Zero
Update app.py
app.py CHANGED

@@ -14,6 +14,7 @@ translator = Translator()
 HF_TOKEN = os.environ.get("HF_TOKEN", None)
 # Constants
 model = "stabilityai/stable-diffusion-3-medium"
+repo= "stabilityai/stable-diffusion-3-medium-diffusers"
 MAX_SEED = np.iinfo(np.int32).max
 
 CSS = """
@@ -37,7 +38,7 @@ JS = """function () {
 
 # Ensure model and scheduler are initialized in GPU-enabled function
 if torch.cuda.is_available():
-    pipe = StableDiffusion3Pipeline.from_pretrained(
+    pipe = StableDiffusion3Pipeline.from_pretrained(repo, torch_dtype=torch.float16).to("cuda")
 
 
 # Function
@@ -48,7 +49,7 @@ def generate_image(
         width=1024,
         height=1024,
         scale=1.5,
-        steps=
+        steps=28,
         clip=3):
 
 
@@ -124,7 +125,7 @@ with gr.Blocks(css=CSS, js=JS, theme="soft") as demo:
             minimum=1,
             maximum=50,
             step=1,
-            value=
+            value=28,
         )
         clip = gr.Slider(
            label="Clip Skip",
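
For orientation, the snippet below is a minimal sketch of how the pieces touched by this commit fit together in app.py: the new diffusers-format repo id, the fp16 pipeline moved to CUDA, and the 28-step default that now matches the slider. It is not the full Space; the prompt argument, the pipe(...) call inside generate_image, and the returned image are assumptions about code outside this diff (translation, seeding, clip-skip handling, and the Gradio wiring are omitted).

import os

import numpy as np
import torch
from diffusers import StableDiffusion3Pipeline

HF_TOKEN = os.environ.get("HF_TOKEN", None)

# Constants
model = "stabilityai/stable-diffusion-3-medium"
# Added in this commit: the -diffusers repo, which ships the weights in diffusers format
repo = "stabilityai/stable-diffusion-3-medium-diffusers"
MAX_SEED = np.iinfo(np.int32).max

# Ensure model and scheduler are initialized in GPU-enabled function
if torch.cuda.is_available():
    pipe = StableDiffusion3Pipeline.from_pretrained(repo, torch_dtype=torch.float16).to("cuda")


def generate_image(
        prompt,      # assumed: the prompt parameter is not visible in this diff
        width=1024,
        height=1024,
        scale=1.5,
        steps=28,    # default bumped to 28, matching the slider's new value
        clip=3):
    # Sketch only: the real app also translates the prompt, seeds the generator,
    # and applies clip skip; those parts are outside this diff.
    image = pipe(
        prompt=prompt,
        width=width,
        height=height,
        guidance_scale=scale,
        num_inference_steps=steps,
    ).images[0]
    return image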