John6666 committed on
Commit
f03d430
β€’
1 Parent(s): 1b196e1

Upload 2 files

Browse files
Files changed (2) hide show
  1. app.py +20 -23
  2. llmdolphin.py +4 -2
app.py CHANGED
@@ -32,38 +32,35 @@ with gr.Blocks(theme="NoCrypt/miku@>=1.2.2", css="") as app:
32
  gr.Markdown("""# Natural Text to SD Prompt Translator With LLM alpha
33
  Text in natural language (English, Japanese, ...) => Prompt
34
  """)
35
- with gr.Column(scale=1):
36
  with gr.Group():
37
- chatbot = gr.Chatbot(likeable=False, show_copy_button=True, show_share_button=False, layout="panel", container=True, )
38
  with gr.Row():
39
  chat_msg = gr.Textbox(show_label=False, placeholder="Input text in English, Japanese, or any other languages and press Enter or click Send.", scale=4)
40
  chat_submit = gr.Button("Send", scale=1)
41
  chat_clear = gr.Button("Clear", scale=1)
42
- with gr.Accordion("Additional inputs", open=False):
43
- chat_format = gr.Dropdown(choices=get_llm_formats(), value=get_dolphin_model_format(get_dolphin_models()[0][1]), label="Message format")
44
- chat_sysmsg = gr.Textbox(value=get_dolphin_sysprompt(), label="System message")
45
- chat_tokens = gr.Slider(minimum=1, maximum=4096, value=1024, step=1, label="Max tokens")
46
- chat_temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
47
- chat_topp = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p")
48
- chat_topk = gr.Slider(minimum=0, maximum=100, value=40, step=1, label="Top-k")
49
- chat_rp = gr.Slider(minimum=0.0, maximum=2.0, value=1.1, step=0.1, label="Repetition penalty")
50
- with gr.Accordion("Add models", open=True):
51
- chat_add_text = gr.Textbox(label="URL or Repo ID", placeholder="http://huggingface.co/.../...gguf or author/model", lines=1)
52
- chat_add_format = gr.Dropdown(choices=get_llm_formats(), value=get_llm_formats()[0], label="Message format")
53
- chat_add_submit = gr.Button("Update lists of models")
54
- with gr.Accordion("Modes", open=True):
55
- chat_model = gr.Dropdown(choices=get_dolphin_models(), value=get_dolphin_models()[0][1], allow_custom_value=True, label="Model")
56
- chat_model_info = gr.Markdown(value=get_dolphin_model_info(get_dolphin_models()[0][1]), label="Model info")
57
- with gr.Row():
58
- chat_mode = gr.Dropdown(choices=get_dolphin_sysprompt_mode(), value=get_dolphin_sysprompt_mode()[0], allow_custom_value=False, label="Mode")
59
- chat_lang = gr.Dropdown(choices=get_dolphin_languages(), value="English", allow_custom_value=True, label="Output language")
60
-
61
- with gr.Column(scale=1):
62
  with gr.Row():
63
  with gr.Group():
64
  output_text = gr.TextArea(label="Output tags", interactive=False, show_copy_button=True)
65
  copy_btn = gr.Button(value="Copy to clipboard", size="sm", interactive=False)
66
- elapsed_time_md = gr.Markdown(label="Elapsed time", value="", visible=False)
67
  with gr.Group():
68
  output_text_pony = gr.TextArea(label="Output tags (Pony e621 style)", interactive=False, show_copy_button=True)
69
  copy_btn_pony = gr.Button(value="Copy to clipboard", size="sm", interactive=False)
 
32
  gr.Markdown("""# Natural Text to SD Prompt Translator With LLM alpha
33
  Text in natural language (English, Japanese, ...) => Prompt
34
  """)
35
+ with gr.Column():
36
  with gr.Group():
37
+ chatbot = gr.Chatbot(likeable=False, show_copy_button=True, show_share_button=False, layout="bubble", container=True)
38
  with gr.Row():
39
  chat_msg = gr.Textbox(show_label=False, placeholder="Input text in English, Japanese, or any other languages and press Enter or click Send.", scale=4)
40
  chat_submit = gr.Button("Send", scale=1)
41
  chat_clear = gr.Button("Clear", scale=1)
42
+ with gr.Accordion("Additional inputs", open=False):
43
+ chat_format = gr.Dropdown(choices=get_llm_formats(), value=get_dolphin_model_format(get_dolphin_models()[0][1]), label="Message format")
44
+ chat_sysmsg = gr.Textbox(value=get_dolphin_sysprompt(), label="System message")
45
+ chat_tokens = gr.Slider(minimum=1, maximum=4096, value=1024, step=1, label="Max tokens")
46
+ chat_temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
47
+ chat_topp = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p")
48
+ chat_topk = gr.Slider(minimum=0, maximum=100, value=40, step=1, label="Top-k")
49
+ chat_rp = gr.Slider(minimum=0.0, maximum=2.0, value=1.1, step=0.1, label="Repetition penalty")
50
+ with gr.Accordion("Add models", open=True):
51
+ chat_add_text = gr.Textbox(label="URL or Repo ID", placeholder="http://huggingface.co/.../...gguf or author/model", lines=1)
52
+ chat_add_format = gr.Dropdown(choices=get_llm_formats(), value=get_llm_formats()[0], label="Message format")
53
+ chat_add_submit = gr.Button("Update lists of models")
54
+ with gr.Accordion("Modes", open=True):
55
+ chat_model = gr.Dropdown(choices=get_dolphin_models(), value=get_dolphin_models()[0][1], allow_custom_value=True, label="Model")
56
+ chat_model_info = gr.Markdown(value=get_dolphin_model_info(get_dolphin_models()[0][1]), label="Model info")
57
+ with gr.Row():
58
+ chat_mode = gr.Dropdown(choices=get_dolphin_sysprompt_mode(), value=get_dolphin_sysprompt_mode()[0], allow_custom_value=False, label="Mode")
59
+ chat_lang = gr.Dropdown(choices=get_dolphin_languages(), value="English", allow_custom_value=True, label="Output language")
 
 
60
  with gr.Row():
61
  with gr.Group():
62
  output_text = gr.TextArea(label="Output tags", interactive=False, show_copy_button=True)
63
  copy_btn = gr.Button(value="Copy to clipboard", size="sm", interactive=False)
 
64
  with gr.Group():
65
  output_text_pony = gr.TextArea(label="Output tags (Pony e621 style)", interactive=False, show_copy_button=True)
66
  copy_btn_pony = gr.Button(value="Copy to clipboard", size="sm", interactive=False)
llmdolphin.py CHANGED
@@ -17,6 +17,8 @@ llm_models = {
17
  "MN-12B-Starcannon-v2.i1-Q4_K_M.gguf": ["mradermacher/MN-12B-Starcannon-v2-i1-GGUF", MessagesFormatterType.CHATML],
18
  "Lumimaid-Magnum-12B.i1-Q4_K_M.gguf": ["mradermacher/Lumimaid-Magnum-12B-i1-GGUF", MessagesFormatterType.MISTRAL],
19
  "Nemo-12B-Marlin-v1.i1-Q4_K_M.gguf": ["mradermacher/Nemo-12B-Marlin-v1-i1-GGUF", MessagesFormatterType.MISTRAL],
 
 
20
  "DarkIdol-Llama-3.1-8B-Instruct-1.2-Uncensored-Q5_K_M.gguf": ["bartowski/DarkIdol-Llama-3.1-8B-Instruct-1.2-Uncensored-GGUF", MessagesFormatterType.LLAMA_3],
21
  "Llama-3-Swallow-8B-Instruct-v0.1.Q5_K_M.gguf": ["YukiTomita-CC/Llama-3-Swallow-8B-Instruct-v0.1-IMat-GGUF_dolly-15k-ja-prompt", MessagesFormatterType.ALPACA],
22
  "natsumura-storytelling-rp-1.0-llama-3.1-8B.Q5_K_M.gguf": ["tohur/natsumura-storytelling-rp-1.0-llama-3.1-8b-GGUF", MessagesFormatterType.LLAMA_3],
@@ -703,7 +705,7 @@ def dolphin_parse_simple(
703
  return ""
704
  prompts = []
705
  if dolphin_sysprompt_mode == "Japanese to Danbooru Dictionary" and is_japanese(raw_prompt):
706
- prompts = list_uniq(jatags_to_danbooru_tags(to_list_ja(raw_prompt)) + ["nsfw", "explicit"])
707
  else:
708
- prompts = list_uniq(to_list(raw_prompt) + ["nsfw", "explicit"])
709
  return ", ".join(prompts)
 
17
  "MN-12B-Starcannon-v2.i1-Q4_K_M.gguf": ["mradermacher/MN-12B-Starcannon-v2-i1-GGUF", MessagesFormatterType.CHATML],
18
  "Lumimaid-Magnum-12B.i1-Q4_K_M.gguf": ["mradermacher/Lumimaid-Magnum-12B-i1-GGUF", MessagesFormatterType.MISTRAL],
19
  "Nemo-12B-Marlin-v1.i1-Q4_K_M.gguf": ["mradermacher/Nemo-12B-Marlin-v1-i1-GGUF", MessagesFormatterType.MISTRAL],
20
+ "Nemo-12B-Marlin-v2.i1-Q4_K_M.gguf": ["mradermacher/Nemo-12B-Marlin-v2-i1-GGUF", MessagesFormatterType.MISTRAL],
21
+ "Evolved-Llama3-8B.i1-Q5_K_M.gguf": ["mradermacher/Evolved-Llama3-8B-i1-GGUF", MessagesFormatterType.LLAMA_3],
22
  "DarkIdol-Llama-3.1-8B-Instruct-1.2-Uncensored-Q5_K_M.gguf": ["bartowski/DarkIdol-Llama-3.1-8B-Instruct-1.2-Uncensored-GGUF", MessagesFormatterType.LLAMA_3],
23
  "Llama-3-Swallow-8B-Instruct-v0.1.Q5_K_M.gguf": ["YukiTomita-CC/Llama-3-Swallow-8B-Instruct-v0.1-IMat-GGUF_dolly-15k-ja-prompt", MessagesFormatterType.ALPACA],
24
  "natsumura-storytelling-rp-1.0-llama-3.1-8B.Q5_K_M.gguf": ["tohur/natsumura-storytelling-rp-1.0-llama-3.1-8b-GGUF", MessagesFormatterType.LLAMA_3],
 
705
  return ""
706
  prompts = []
707
  if dolphin_sysprompt_mode == "Japanese to Danbooru Dictionary" and is_japanese(raw_prompt):
708
+ prompts = list_uniq(jatags_to_danbooru_tags(to_list_ja(raw_prompt)) + ["nsfw", "explicit", "rating_explicit"])
709
  else:
710
+ prompts = list_uniq(to_list(raw_prompt) + ["nsfw", "explicit", "rating_explicit"])
711
  return ", ".join(prompts)