# sft/app.py
import gradio as gr
import os
from huggingface_hub import HfApi, login
from transformers import AutoTokenizer, AutoModelForCausalLM

def process(model_id, dataset):
    # Note: `dataset` is collected by the UI but not used yet (see the sketch below).
    # Download the model and tokenizer from Hugging Face so they can be republished
    print("Downloading model and tokenizer...")
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModelForCausalLM.from_pretrained(model_id)
    # Save the model to a local directory named after the model ID
    print("Saving model locally...")
    model_path = model_id
    model.save_pretrained(model_path)
    # Authenticate and create the target repo on Hugging Face
    print("Creating target repo...")
    login(token=os.environ["HF_TOKEN"])
    api = HfApi()
    model_repo_name = "bstraehle/Meta-Llama-3-8B"
    api.create_repo(repo_id=model_repo_name, exist_ok=True)
    # Upload the local model folder to the new repo
    print("Uploading model folder...")
    api.upload_folder(
        folder_path=model_path,
        repo_id=model_repo_name,
    )
    # Publish the tokenizer to the same repo
    print("Pushing tokenizer...")
    tokenizer.push_to_hub(model_repo_name)

    return "Done"
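
# A minimal sketch (not part of the original app) of how the unused `dataset`
# argument could drive a supervised fine-tuning pass before republishing.
# Assumes the `datasets` package is installed and that the dataset has a
# "text" column (as "imdb" does); the name `finetune` and all hyperparameters
# are illustrative only.
def finetune(model, tokenizer, dataset_id, output_dir="./sft-output"):
    from datasets import load_dataset
    from transformers import (
        DataCollatorForLanguageModeling,
        Trainer,
        TrainingArguments,
    )

    # Load a small slice of the training split for a quick run
    raw = load_dataset(dataset_id, split="train[:1%]")

    # Llama-style tokenizers ship without a pad token; reuse EOS for padding
    if tokenizer.pad_token is None:
        tokenizer.pad_token = tokenizer.eos_token

    def tokenize(batch):
        return tokenizer(batch["text"], truncation=True, max_length=512)

    tokenized = raw.map(tokenize, batched=True, remove_columns=raw.column_names)

    # Causal-LM collator: pads batches and copies input_ids into labels
    collator = DataCollatorForLanguageModeling(tokenizer, mlm=False)

    trainer = Trainer(
        model=model,
        args=TrainingArguments(
            output_dir=output_dir,
            per_device_train_batch_size=1,
            num_train_epochs=1,
            logging_steps=10,
        ),
        train_dataset=tokenized,
        data_collator=collator,
    )
    trainer.train()
    trainer.save_model(output_dir)

# Example usage (inside `process`, before saving and uploading):
#   finetune(model, tokenizer, dataset)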
demo = gr.Interface(
    fn=process,
    inputs=[
        gr.Textbox(label="Model ID", value="meta-llama/Meta-Llama-3-8B", lines=1),  # e.g. google/gemma-2b
        gr.Textbox(label="Dataset", value="imdb", lines=1),
    ],
    outputs=[gr.Textbox(label="Completion")],
)

demo.launch()