Update ui_client.py
ui_client.py  (+16 -16)
@@ -44,7 +44,7 @@ def convert_char_voice_map_to_md(char_voice_map):
 def generate_script_fn(instruction, _state: gr.State):
     try:
         session_id = _state['session_id']
-        api_key = …
+        api_key = utils.get_api_key()
         json_script = generate_json_file(session_id, instruction, api_key)
         table_text = convert_json_to_md(json_script)
     except Exception as e:
@@ -56,8 +56,8 @@ def generate_script_fn(instruction, _state: gr.State):
         _state,
         gr.Button.update(interactive=False),
         gr.Button.update(interactive=True),
-        gr.Button.update(interactive=…
-        gr.Button.update(interactive=…
+        gr.Button.update(interactive=True),
+        gr.Button.update(interactive=True),
     ]
 
     _state = {
@@ -78,7 +78,8 @@ def generate_script_fn(instruction, _state: gr.State):
 def generate_audio_fn(state):
     btn_state = gr.Button.update(interactive=True)
     try:
-        …
+        api_key = utils.get_api_key()
+        audio_path, char_voice_map = generate_audio(**state, api_key=api_key)
         table_text = convert_char_voice_map_to_md(char_voice_map)
         # TODO: output char_voice_map to a table
         return [
@@ -108,8 +109,7 @@ def clear_fn(state):
     if DELETE_FILE_WHEN_DO_CLEAR:
         shutil.rmtree('output', ignore_errors=True)
     state = {'session_id': pipeline.init_session()}
-    return [gr.…
-            gr.Markdown.update(value=''),
+    return [gr.Markdown.update(value=''),
             gr.Textbox.update(value=''),
             gr.Video.update(value=None),
             gr.Markdown.update(value=''),
@@ -398,17 +398,17 @@ with gr.Blocks(css=css) as interface:
 
     system_voice_presets = get_system_voice_presets()
     # State
-    ui_state = gr.State(value={'session_id': pipeline.init_session()…
+    ui_state = gr.State(value={'session_id': pipeline.init_session()})
     selected_voice_presets = gr.State(value={'selected_voice_preset': None})
     added_voice_preset_state = gr.State(value={'added_file': None, 'count': 0})
     # UI Component
-    gr.Markdown(
-        """
-        How can I access GPT-4? <a href="https://platform.openai.com/account/api-keys">[Ref1]</a><a href="https://help.openai.com/en/articles/7102672-how-can-i-access-gpt-4">[Ref2]</a>
-        """
-    )
-    key_text_input = gr.Textbox(label='Please Enter OPENAI Key for accessing GPT-4 API', lines=1, placeholder="OPENAI Key here.",
-                                value=utils.get_key())
+    # gr.Markdown(
+    #     """
+    #     How can I access GPT-4? <a href="https://platform.openai.com/account/api-keys">[Ref1]</a><a href="https://help.openai.com/en/articles/7102672-how-can-i-access-gpt-4">[Ref2]</a>
+    #     """
+    # )
+    # key_text_input = gr.Textbox(label='Please Enter OPENAI Key for accessing GPT-4 API', lines=1, placeholder="OPENAI Key here.",
+    #                             value=utils.get_key())
     text_input_value = '' if DEBUG is False else "an audio introduction to quantum mechanics"
 
     text_input = gr.Textbox(
@@ -481,7 +481,7 @@ with gr.Blocks(css=css) as interface:
     )
 
     # events
-    key_text_input.change(fn=set_openai_key, inputs=[key_text_input, ui_state], outputs=[key_text_input])
+    # key_text_input.change(fn=set_openai_key, inputs=[key_text_input, ui_state], outputs=[key_text_input])
     text_input.change(fn=textbox_listener, inputs=[text_input], outputs=[generate_script_btn])
     generate_audio_btn.click(
         fn=generate_audio_fn,
@@ -511,7 +511,7 @@ with gr.Blocks(css=css) as interface:
         ]
     )
     clear_btn.click(fn=clear_fn, inputs=ui_state,
-                    outputs=[…
+                    outputs=[char_voice_map_markdown, text_input, audio_output, audio_script_markdown, generate_audio_btn, generate_script_btn,
                              ui_state, voice_presets_df, del_voice_btn,
                              vp_text_id, vp_text_desc, vp_file])
     generate_script_btn.click(
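Both handlers now obtain the OpenAI key through utils.get_api_key() instead of reading it from the removed key_text_input textbox. The helper itself is not part of this diff; a minimal sketch, assuming it simply reads the key from an environment variable (the variable name OPENAI_KEY and the error handling are assumptions, not taken from the repo), might look like:

```python
import os

# Hypothetical sketch only: the real utils.get_api_key() is not shown in this
# diff. Reading the key from an environment variable is an assumption.
def get_api_key() -> str:
    api_key = os.environ.get('OPENAI_KEY', '').strip()
    if not api_key:
        raise EnvironmentError('OPENAI_KEY is not set; export it before launching the app.')
    return api_key
```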
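The clear_fn change and the clear_btn.click change go together: in the Gradio 3-era Blocks API this file uses (gr.Button.update and friends), the list a handler returns is mapped positionally onto the components named in outputs=[...], so the return list and the outputs list must keep the same length and order. A small self-contained illustration of that pattern (the component names below are made up, not taken from ui_client.py):

```python
import gradio as gr  # Gradio 3.x, matching the gr.*.update() calls in this diff

def clear_demo(_):
    # One return value per component listed in `outputs`, in the same order.
    return [
        gr.Textbox.update(value=''),          # -> text_box
        gr.Markdown.update(value=''),         # -> status_md
        gr.Button.update(interactive=False),  # -> run_btn
    ]

with gr.Blocks() as demo:
    text_box = gr.Textbox(label='Input')
    status_md = gr.Markdown()
    run_btn = gr.Button('Run')
    clear_btn = gr.Button('Clear')
    clear_btn.click(fn=clear_demo, inputs=[text_box],
                    outputs=[text_box, status_md, run_btn])

demo.launch()
```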