Spaces:
Running
on
Zero
Running
on
Zero
File size: 2,901 Bytes
9a6c912 876d6e2 9a6c912 876d6e2 9a6c912 0280229 9a6c912 876d6e2 0280229 9a6c912 0280229 9a6c912 5c45880 31997ba 9a6c912 31997ba 9a6c912 876d6e2 9a6c912 876d6e2 9a6c912 876d6e2 9a6c912 31997ba 9a6c912 31997ba |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 |
import gradio as gr
import requests
import os
import shutil
from pathlib import Path
from typing import Any
from tempfile import TemporaryDirectory
from typing import Optional
import torch
from io import BytesIO
from huggingface_hub import CommitInfo, Discussion, HfApi, hf_hub_download
from huggingface_hub.file_download import repo_folder_name
from diffusers import StableDiffusionXLPipeline
from transformers import CONFIG_MAPPING
import spaces
# Commit description template; `{}` is filled in with the target repo id.
# NOTE: the leading space is preserved exactly as the app has always emitted it.
COMMIT_MESSAGE = " This PR adds fp32 and fp16 weights in safetensors format to {}"

# Prefer the GPU when one is visible; otherwise fall back to CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"

# bfloat16 on GPU, full float32 precision on CPU.
torch_dtype = torch.bfloat16 if torch.cuda.is_available() else torch.float32
@spaces.GPU()
def convert_single(model_id: str, filename: str, folder: str, progress: Any, token: str) -> str:
    """Convert one single-file SDXL checkpoint into diffusers layout on disk.

    Downloads ``filename`` from ``model_id`` (unless it already exists as a
    local file at ``model_id/filename``), loads it with
    ``StableDiffusionXLPipeline.from_single_file``, and saves two safetensors
    variants into ``folder``: the default (fp32) weights and an ``fp16``
    variant.

    Args:
        model_id: Hub repo id (or local directory) holding the checkpoint.
        filename: Checkpoint filename inside the repo/directory.
        folder: Destination directory for the converted pipeline.
        progress: Gradio progress callback (``progress(frac, desc=...)``).
        token: Hub token used for the (possibly gated) download.

    Returns:
        The same ``folder`` path, now populated with converted weights.
    """
    progress(0, desc="Downloading model")
    local_file = os.path.join(model_id, filename)
    # Prefer a local checkout when present; otherwise pull from the Hub.
    ckpt_file = local_file if os.path.isfile(local_file) else hf_hub_download(repo_id=model_id, filename=filename, token=token)
    pipeline = StableDiffusionXLPipeline.from_single_file(ckpt_file)
    # First save: default precision (fp32) weights.
    pipeline.save_pretrained(folder, safe_serialization=True)
    # Second save: half precision. `.to(torch.float16)` is the supported call;
    # the `torch_dtype=` keyword form is deprecated in diffusers.
    pipeline = pipeline.to(torch.float16)
    pipeline.save_pretrained(folder, safe_serialization=True, variant="fp16")
    return folder
def previous_pr(api: "HfApi", model_id: str, pr_title: str) -> Optional["Discussion"]:
    """Find an existing open PR on ``model_id`` with the given title.

    Args:
        api: HfApi client used for discussion lookups.
        model_id: Hub repo id to inspect.
        pr_title: Exact title the candidate PR must carry.

    Returns:
        The first open pull request titled ``pr_title`` that targets
        ``refs/heads/main``, or ``None`` when no such PR exists (or the
        discussions cannot be listed at all).
    """
    try:
        discussions = api.get_repo_discussions(repo_id=model_id)
    except Exception:
        # Repo may be missing or discussions disabled — treat as "no prior PR".
        return None
    for discussion in discussions:
        if discussion.status == "open" and discussion.is_pull_request and discussion.title == pr_title:
            details = api.get_discussion_details(repo_id=model_id, discussion_num=discussion.num)
            if details.target_branch == "refs/heads/main":
                return discussion
    # Explicit fall-through: nothing matched.
    return None
def convert(token: str, model_id: str, filename: str, progress=gr.Progress()):
    """Convert a Hub checkpoint to diffusers format and open a PR with the result.

    Runs :func:`convert_single` inside a temporary directory, uploads the
    converted folder to ``model_id`` as a pull request, and returns a short
    message containing the PR link.

    Args:
        token: Hub write token used for download and upload.
        model_id: Target Hub repo id.
        filename: Single-file checkpoint name inside the repo.
        progress: Gradio progress tracker (the default instance is the
            standard Gradio idiom for progress injection).

    Returns:
        A human-readable string pointing at the created PR.

    Raises:
        gr.exceptions.Error: wrapping any failure during conversion or upload.
    """
    api = HfApi()
    pr_title = "Adding `diffusers` weights of this model"

    with TemporaryDirectory() as d:
        folder = os.path.join(d, repo_folder_name(repo_id=model_id, repo_type="models"))
        os.makedirs(folder)
        try:
            folder = convert_single(model_id, filename, folder, progress, token)
            progress(0.7, desc="Uploading to Hub")
            new_pr = api.upload_folder(
                folder_path=folder,
                path_in_repo="./",
                repo_id=model_id,
                repo_type="model",
                token=token,
                commit_message=pr_title,
                commit_description=COMMIT_MESSAGE.format(model_id),
                create_pr=True,
            )
            # NOTE(review): this assumes upload_folder returns a URL-encoded
            # string like ".../discussions%2F<num>/..." — confirm against the
            # installed huggingface_hub version, which may return a CommitInfo.
            pr_number = new_pr.split("%2F")[-1].split("/")[0]
            link = f"Pr created at: {'https://huggingface.co/' + os.path.join(model_id, 'discussions', pr_number)}"
            progress(1, desc="Done")
        except Exception as e:
            # Surface the failure in the Gradio UI rather than crashing the worker.
            raise gr.exceptions.Error(str(e))
        finally:
            # Redundant with TemporaryDirectory cleanup, but frees disk early.
            shutil.rmtree(folder)
        return link