import gradio as gr
from utils import (
    gradio_copy_text,
    COPY_ACTION_JS,
)
from tagger import (
    convert_danbooru_to_e621_prompt,
    insert_recom_prompt,
)
from genimage import (
    generate_image,
)
from llmdolphin import (
    get_llm_formats,
    get_dolphin_model_format,
    get_dolphin_models,
    get_dolphin_model_info,
    select_dolphin_model,
    select_dolphin_format,
    add_dolphin_models,
    get_dolphin_sysprompt,
    get_dolphin_sysprompt_mode,
    select_dolphin_sysprompt,
    get_dolphin_languages,
    select_dolphin_language,
    dolphin_respond,
    dolphin_parse,
)
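# UI: a chat panel where the selected LLM turns natural-language text into
# Stable Diffusion prompt tags, plus output panes, tag conversion, and image generation.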


with gr.Blocks(theme="NoCrypt/miku@>=1.2.2", css="") as app:
    gr.Markdown("""# Natural Text to SD Prompt Translator With LLM alpha

Text in natural language (English, Japanese, ...) => Prompt
""")
    with gr.Column(scale=1):
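        # Left column: chat interface, LLM sampling settings, and model selection.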
        with gr.Group():
            chatbot = gr.Chatbot(likeable=False, show_copy_button=True, show_share_button=False, layout="panel", container=True)
            with gr.Row():
                chat_msg = gr.Textbox(show_label=False, placeholder="Input text in English, Japanese, or any other language and press Enter or click Send.", scale=4)
                chat_submit = gr.Button("Send", scale=1)
                chat_clear = gr.Button("Clear", scale=1)
            with gr.Accordion("Additional inputs", open=False):
                chat_format = gr.Dropdown(choices=get_llm_formats(), value=get_dolphin_model_format(get_dolphin_models()[0][1]), label="Message format")
                chat_sysmsg = gr.Textbox(value=get_dolphin_sysprompt(), label="System message")
                chat_tokens = gr.Slider(minimum=1, maximum=4096, value=1024, step=1, label="Max tokens")
                chat_temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
                chat_topp = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p")
                chat_topk = gr.Slider(minimum=0, maximum=100, value=40, step=1, label="Top-k")
                chat_rp = gr.Slider(minimum=0.0, maximum=2.0, value=1.1, step=0.1, label="Repetition penalty")
                with gr.Accordion("Add models", open=True):
                    chat_add_text = gr.Textbox(label="URL or Repo ID", placeholder="http://huggingface.co/.../...gguf or author/model", lines=1)
                    chat_add_format = gr.Dropdown(choices=get_llm_formats(), value=get_llm_formats()[0], label="Message format")
                    chat_add_submit = gr.Button("Update lists of models")
            with gr.Accordion("Modes", open=True):
                chat_model = gr.Dropdown(choices=get_dolphin_models(), value=get_dolphin_models()[0][1], allow_custom_value=True, label="Model")
                chat_model_info = gr.Markdown(value=get_dolphin_model_info(get_dolphin_models()[0][1]), label="Model info")
                with gr.Row():
                    chat_mode = gr.Dropdown(choices=get_dolphin_sysprompt_mode(), value=get_dolphin_sysprompt_mode()[0], allow_custom_value=False, label="Mode")
                    chat_lang = gr.Dropdown(choices=get_dolphin_languages(), value="English", allow_custom_value=True, label="Output language")

    with gr.Column(scale=1):
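        # Right column: generated prompt tags (Danbooru and Pony/e621 styles) and image generation.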
        with gr.Row():
            with gr.Group():
                output_text = gr.TextArea(label="Output tags", interactive=False, show_copy_button=True)
                copy_btn = gr.Button(value="Copy to clipboard", size="sm", interactive=False)
                elapsed_time_md = gr.Markdown(label="Elapsed time", value="", visible=False)
            with gr.Group():
                output_text_pony = gr.TextArea(label="Output tags (Pony e621 style)", interactive=False, show_copy_button=True)
                copy_btn_pony = gr.Button(value="Copy to clipboard", size="sm", interactive=False)
            with gr.Accordion(label="Advanced options", open=False, visible=False):
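                # Hidden fields consumed by the tag-conversion and recommended-prompt steps below.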
                tag_type = gr.Radio(label="Output tag conversion", info="danbooru for Animagine, e621 for Pony.", choices=["danbooru", "e621"], value="e621", visible=False)
                dummy_np = gr.Textbox(label="Negative prompt", value="", visible=False)
                dummy_np_pony = gr.Textbox(label="Negative prompt", value="", visible=False)
                recom_animagine = gr.Textbox(label="Animagine recommended prompt", value="Animagine", visible=False)
                recom_pony = gr.Textbox(label="Pony recommended prompt", value="Pony", visible=False)
        generate_image_btn = gr.Button(value="GENERATE IMAGE", size="lg", variant="primary")
        result_image = gr.Gallery(label="Generated images", columns=1, object_fit="contain", container=True, preview=True, show_label=False, show_share_button=False, show_download_button=True, interactive=False, visible=True, format="png")
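        # On submit: ask the LLM, parse its reply into tags, convert to e621 style,
        # then insert the recommended prompts for Animagine and Pony.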

        gr.on(
            triggers=[chat_msg.submit, chat_submit.click],
            fn=dolphin_respond,
            inputs=[chat_msg, chatbot, chat_model, chat_sysmsg, chat_tokens, chat_temperature, chat_topp, chat_topk, chat_rp],
            outputs=[chatbot],
            queue=True,
            show_progress="full",
            trigger_mode="once",
        ).success(dolphin_parse, [chatbot], [output_text, copy_btn, copy_btn_pony]).success(
            convert_danbooru_to_e621_prompt, [output_text, tag_type], [output_text_pony], queue=False,
        ).success(
            insert_recom_prompt, [output_text, dummy_np, recom_animagine], [output_text, dummy_np], queue=False,
        ).success(
            insert_recom_prompt, [output_text_pony, dummy_np_pony, recom_pony], [output_text_pony, dummy_np_pony], queue=False,
        )
        chat_clear.click(lambda: None, None, chatbot, queue=False)
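        # Switching the model or message format also clears the chat history.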
        chat_model.change(select_dolphin_model, [chat_model], [chat_model, chat_format, chat_model_info], queue=True, show_progress="full")\
        .success(lambda: None, None, chatbot, queue=False)
        chat_format.change(select_dolphin_format, [chat_format], [chat_format], queue=False)\
        .success(lambda: None, None, chatbot, queue=False)
        chat_mode.change(select_dolphin_sysprompt, [chat_mode], [chat_sysmsg], queue=False)
        chat_lang.change(select_dolphin_language, [chat_lang], [chat_sysmsg], queue=False)
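        # Adding a model refreshes the model dropdown.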
        gr.on(
            triggers=[chat_add_text.submit, chat_add_submit.click],
            fn=add_dolphin_models,
            inputs=[chat_add_text, chat_add_format],
            outputs=[chat_model],
            queue=False,
            trigger_mode="once",
        )

        copy_btn.click(gradio_copy_text, [output_text], js=COPY_ACTION_JS)
        copy_btn_pony.click(gradio_copy_text, [output_text_pony], js=COPY_ACTION_JS)

        generate_image_btn.click(generate_image, [output_text, dummy_np], [result_image], show_progress="full")


if __name__ == "__main__":
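    # Enable the request queue for long-running LLM and image generation calls.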
    app.queue()
    app.launch()