import spaces
import gradio as gr
from utils import gradio_copy_text, COPY_ACTION_JS
from tagger import convert_danbooru_to_e621_prompt, insert_recom_prompt
from genimage import generate_image
from llmdolphin import (get_llm_formats, get_dolphin_model_format,
                        get_dolphin_models, get_dolphin_model_info, select_dolphin_model,
                        select_dolphin_format, add_dolphin_models, get_dolphin_sysprompt,
                        get_dolphin_sysprompt_mode, select_dolphin_sysprompt, get_dolphin_languages,
                        select_dolphin_language, dolphin_respond, dolphin_parse)
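# llmdolphin provides the LLM model list, prompt formats, system prompts, and the
# respond/parse helpers wired up below; tagger converts Danbooru tags to e621 style
# and inserts recommended prompt snippets; genimage renders the final prompt to an image.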

with gr.Blocks(theme="NoCrypt/miku@>=1.2.2", fill_width=True, css="", delete_cache=(60, 3600)) as app:
    gr.Markdown("""# Natural Text to SD Prompt Translator with LLM (alpha)
Text in natural language (English, Japanese, ...) => Prompt
""")
    with gr.Column():
        with gr.Group():
            chatbot = gr.Chatbot(likeable=False, show_copy_button=True, show_share_button=False, layout="bubble", container=True)
            with gr.Row():
                chat_msg = gr.Textbox(show_label=False, placeholder="Input text in English, Japanese, or any other language and press Enter or click Send.", scale=4)
                chat_submit = gr.Button("Send", scale=1)
                chat_clear = gr.Button("Clear", scale=1)
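            # LLM generation settings: message format, system prompt, and sampling parameters.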
            with gr.Accordion("Additional inputs", open=False):
                chat_format = gr.Dropdown(choices=get_llm_formats(), value=get_dolphin_model_format(get_dolphin_models()[0][1]), label="Message format")
                chat_sysmsg = gr.Textbox(value=get_dolphin_sysprompt(), label="System message")
                with gr.Row():
                    chat_tokens = gr.Slider(minimum=1, maximum=4096, value=512, step=1, label="Max tokens")
                    chat_temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
                    chat_topp = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p")
                    chat_topk = gr.Slider(minimum=0, maximum=100, value=40, step=1, label="Top-k")
                    chat_rp = gr.Slider(minimum=0.0, maximum=2.0, value=1.1, step=0.1, label="Repetition penalty")
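            # Register extra models by URL or Hugging Face repo ID (e.g. a GGUF file).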
            with gr.Accordion("Add models", open=False):
                chat_add_text = gr.Textbox(label="URL or Repo ID", placeholder="https://huggingface.co/mradermacher/MagnumChronos-i1-GGUF/blob/main/MagnumChronos.i1-Q4_K_M.gguf", lines=1)
                chat_add_format = gr.Dropdown(choices=get_llm_formats(), value=get_llm_formats()[0], label="Message format")
                chat_add_submit = gr.Button("Update list of models")
            with gr.Accordion("Modes", open=True):
                chat_model = gr.Dropdown(choices=get_dolphin_models(), value=get_dolphin_models()[0][1], allow_custom_value=True, label="Model")
                chat_model_info = gr.Markdown(value=get_dolphin_model_info(get_dolphin_models()[0][1]), label="Model info")
                with gr.Row():
                    chat_mode = gr.Dropdown(choices=get_dolphin_sysprompt_mode(), value=get_dolphin_sysprompt_mode()[0], allow_custom_value=False, label="Mode")
                    chat_lang = gr.Dropdown(choices=get_dolphin_languages(), value="English", allow_custom_value=True, label="Output language")
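        # Parsed prompt output: Danbooru-style tags and a Pony (e621-style) variant.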
        with gr.Row():
            with gr.Group():
                output_text = gr.TextArea(label="Output tags", interactive=False, show_copy_button=True)
                copy_btn = gr.Button(value="Copy to clipboard", size="sm", interactive=False)
            with gr.Group():
                output_text_pony = gr.TextArea(label="Output tags (Pony e621 style)", interactive=False, show_copy_button=True)
                copy_btn_pony = gr.Button(value="Copy to clipboard", size="sm", interactive=False)
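        # Hidden inputs that feed the tag conversion and recommended-prompt insertion steps.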
        with gr.Accordion(label="Advanced options", open=False, visible=False):
            tag_type = gr.Radio(label="Output tag conversion", info="danbooru for Animagine, e621 for Pony.", choices=["danbooru", "e621"], value="e621", visible=False)
            dummy_np = gr.Textbox(label="Negative prompt", value="", visible=False)
            dummy_np_pony = gr.Textbox(label="Negative prompt", value="", visible=False)
            recom_animagine = gr.Textbox(label="Animagine recommended prompt", value="Animagine", visible=False)
            recom_pony = gr.Textbox(label="Pony recommended prompt", value="Pony", visible=False)
        generate_image_btn = gr.Button(value="GENERATE IMAGE", size="lg", variant="primary")
        result_image = gr.Gallery(label="Generated images", columns=1, object_fit="contain", container=True, preview=True, show_label=False, show_share_button=False, show_download_button=True, interactive=False, visible=True, format="png")
    gr.LoginButton()
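
    # Event chain: the LLM responds in the chat, its reply is parsed into tags,
    # converted to e621 style, and the recommended prompt snippets are inserted.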
    gr.on(
        triggers=[chat_msg.submit, chat_submit.click],
        fn=dolphin_respond,
        inputs=[chat_msg, chatbot, chat_model, chat_sysmsg, chat_tokens, chat_temperature, chat_topp, chat_topk, chat_rp],
        outputs=[chatbot],
        queue=True,
        show_progress="full",
        trigger_mode="once",
    ).success(dolphin_parse, [chatbot], [output_text, copy_btn, copy_btn_pony]).success(
        convert_danbooru_to_e621_prompt, [output_text, tag_type], [output_text_pony], queue=False,
    ).success(insert_recom_prompt, [output_text, dummy_np, recom_animagine], [output_text, dummy_np], queue=False,
    ).success(insert_recom_prompt, [output_text_pony, dummy_np_pony, recom_pony], [output_text_pony, dummy_np_pony], queue=False)
    chat_clear.click(lambda: None, None, chatbot, queue=False)
    # Switching the model or message format also clears the chat history.
    chat_model.change(select_dolphin_model, [chat_model], [chat_model, chat_format, chat_model_info], queue=True, show_progress="full")\
        .success(lambda: None, None, chatbot, queue=False)
    chat_format.change(select_dolphin_format, [chat_format], [chat_format], queue=False)\
        .success(lambda: None, None, chatbot, queue=False)
    chat_mode.change(select_dolphin_sysprompt, [chat_mode], [chat_sysmsg], queue=False)
    chat_lang.change(select_dolphin_language, [chat_lang], [chat_sysmsg], queue=False)
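    # Registering a new model refreshes the model dropdown.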
    gr.on(
        triggers=[chat_add_text.submit, chat_add_submit.click],
        fn=add_dolphin_models,
        inputs=[chat_add_text, chat_add_format],
        outputs=[chat_model],
        queue=False,
        trigger_mode="once",
    )
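    # Clipboard copy (handled in JS) and image generation from the composed prompt.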
    copy_btn.click(gradio_copy_text, [output_text], js=COPY_ACTION_JS)
    copy_btn_pony.click(gradio_copy_text, [output_text_pony], js=COPY_ACTION_JS)
    generate_image_btn.click(generate_image, [output_text, dummy_np], [result_image], show_progress="full")

if __name__ == "__main__":
    app.queue()
    app.launch()