import os

import openai
import gradio as gr
import numpy as np
import tensorflow as tf
from transformers import BertTokenizerFast, TFBertForSequenceClassification, TextClassificationPipeline

# Load the fine-tuned Chinese sentiment-analysis model and its tokenizer once at startup.
model_path = "leadingbridge/sentiment-analysis"
tokenizer = BertTokenizerFast.from_pretrained(model_path)
model = TFBertForSequenceClassification.from_pretrained(
    model_path,
    id2label={0: 'negative', 1: 'positive'},
)

# Build the classification pipeline once so every request reuses the loaded model.
sentiment_pipeline = TextClassificationPipeline(model=model, tokenizer=tokenizer)


def sentiment_analysis(text):
    # Classify the input text as positive or negative.
    return sentiment_pipeline(text)


def openai_chatbot(prompt):
    # Read the API key from the environment instead of hardcoding it in the source.
    openai.api_key = os.environ["OPENAI_API_KEY"]

    # Set up the model and prompt, then generate a completion.
    model_engine = "text-davinci-003"
    completion = openai.Completion.create(
        engine=model_engine,
        prompt=prompt,
        max_tokens=1024,
        n=1,
        stop=None,
        temperature=0.5,
    )
    response = completion.choices[0].text
    return f'🤖 {response}'


# Gradio UI: one tab per model, each with its own button, input box, and output box.
with gr.Blocks() as demo:
    gr.Markdown("Choose the Chinese NLP model you want to use.")

    with gr.Tab("Sentiment Analysis"):
        sentiment_button = gr.Button("Proceed")
        sentiment_input = gr.Textbox(placeholder="Enter a positive or negative sentence here...")
        sentiment_output = gr.Textbox(label="Sentiment Analysis")
        sentiment_button.click(fn=sentiment_analysis, inputs=sentiment_input, outputs=sentiment_output)

    with gr.Tab("General Chatbot"):
        chatbot_button = gr.Button("Proceed")
        chatbot_input = gr.Textbox(placeholder="Enter any topic you would like to discuss in Chinese")
        chatbot_output = gr.Textbox(label="Chatbot Response")
        chatbot_button.click(fn=openai_chatbot, inputs=chatbot_input, outputs=chatbot_output)

demo.launch(inline=False)