File size: 6,857 Bytes
083c4a0
2ae71de
 
 
 
 
2e004f5
2ae71de
 
 
 
2e004f5
2ae71de
2e004f5
 
 
 
 
 
 
 
 
2ae71de
 
 
 
 
 
 
 
 
 
 
2e004f5
2ae71de
 
 
 
 
 
 
 
2e004f5
2ae71de
 
 
 
 
 
 
 
 
 
 
 
 
 
2e004f5
2ae71de
 
 
2e004f5
2ae71de
 
2e004f5
 
2e1747d
 
 
 
 
 
 
 
 
 
 
1f154d2
2e1747d
 
 
2e004f5
 
 
 
 
 
 
 
 
 
 
 
 
 
1f154d2
2e1747d
 
 
2e004f5
 
 
 
 
 
 
 
 
1f154d2
2e1747d
 
 
2e004f5
 
 
 
 
 
 
 
1f154d2
2e1747d
 
 
2e004f5
 
 
 
 
 
 
 
1f154d2
2e1747d
 
 
2e004f5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1f154d2
2e1747d
 
 
2e004f5
 
 
 
 
 
 
 
 
1f154d2
2e004f5
 
 
 
083c4a0
 
 
2e004f5
 
 
 
 
 
083c4a0
2e004f5
8fff274
 
 
083c4a0
 
 
2ae71de
2e004f5
2ae71de
2e004f5
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
import gradio as gr
import requests
import time
from ast import literal_eval
from datetime import datetime


def to_md(text):
    """Render plain text as inline HTML by turning newlines into ``<br />`` tags."""
    return text.replace("\n", "<br />")


def infer(
        prompt,
        model_name,
        max_new_tokens=10,
        temperature=0.1,
        top_p=1.0,
        top_k=40,
        num_completions=1,
        seed=42,
        stop="\n"
):
    """Query the Together inference API and return the generated completion text.

    Parameters
    ----------
    prompt : str
        The text prompt; an empty prompt is replaced by a single space
        because the API requires non-empty input.
    model_name : str
        UI-facing model name; translated to the API identifier via
        ``model_name_map``.
    max_new_tokens, temperature, top_p, top_k : numeric
        Standard sampling parameters; validated to the ranges exposed by
        the UI sliders.
    num_completions, seed : int
        Accepted for interface compatibility but currently NOT forwarded
        to the API request.
    stop : str
        ``;``-separated list of stop sequences.

    Returns
    -------
    str
        The generated text, truncated at the first occurrence of any
        stop sequence.
    """
    # Map UI-facing model names to the identifiers the API expects.
    model_name_map = {
        "GPT-JT-6B-v1": "Together-gpt-JT-6B-v1",
    }
    max_new_tokens = int(max_new_tokens)
    num_completions = int(num_completions)
    temperature = float(temperature)
    top_p = float(top_p)
    top_k = int(top_k)
    # The UI passes multiple stop sequences as a single ';'-separated string.
    stop = stop.split(";")

    assert 1 <= max_new_tokens <= 256
    assert 1 <= num_completions <= 5
    assert 0.0 <= temperature <= 10.0
    assert 0.0 <= top_p <= 1.0
    assert 1 <= top_k <= 1000

    if temperature == 0.0:
        # Exactly-zero temperature is replaced with a near-greedy value.
        temperature = 0.01
    if prompt == "":
        prompt = " "
    my_post_dict = {
        # Fix: previously hard-coded to "Together-gpt-JT-6B-v1", ignoring
        # the model the user selected; fall back to it for unknown names.
        "model": model_name_map.get(model_name, "Together-gpt-JT-6B-v1"),
        "prompt": prompt,
        "top_p": top_p,
        "top_k": top_k,
        "temperature": temperature,
        "max_tokens": max_new_tokens,
        "stop": stop,
    }
    print(f"send: {datetime.now()}")
    response = requests.get("https://staging.together.xyz/api/inference", params=my_post_dict).json()
    generated_text = response['output']['choices'][0]['text']
    print(f"recv: {datetime.now()}")

    # Client-side truncation: cut at each stop sequence in turn, which
    # leaves the text up to the leftmost occurrence of any of them.
    for stop_word in stop:
        if stop_word != '' and stop_word in generated_text:
            generated_text = generated_text[:generated_text.find(stop_word)]

    return generated_text


# Pre-filled demo examples for the Gradio interface. Each row matches the
# interface inputs in order: [prompt, model_name, max_new_tokens,
# temperature, top_p, top_k].
examples = [
    [
        # Question Answering
        '''Please answer the following question:

Question: What is the capital of Canada?
Answer: Ottawa

Question: What is the currency of Switzerland?
Answer: Swiss franc

Question: In which country is Wisconsin located?
Answer:''', "GPT-JT-6B-v1", 5, 0.0, 1.0, 40],
    [
        # Sentiment Analysis
        '''Label the tweets as either "positive", "negative", "mixed", or "neutral":

Tweet: I can say that there isn't anything I would change.
Label: positive

Tweet: I'm not sure about this.
Label: neutral

Tweet: I liked some parts but I didn't like other parts.
Label: mixed

Tweet: I think the background image could have been better.
Label: negative

Tweet: I really like it.
Label:''', "GPT-JT-6B-v1", 2, 0.0, 1.0, 40],
    [
        # Topic Classification
        '''Given a news article, classify its topic.
Possible labels: 1. World 2. Sports 3. Business 4. Sci/Tech

Article: A nearby star thought to harbor comets and asteroids now appears to be home to planets, too.
Label: Sci/Tech

Article: Soaring crude prices plus worries about the economy and the outlook for earnings are expected to hang over the stock market next week during the depth of the summer doldrums.
Label: Business

Article: Murtagh a stickler for success Northeastern field hockey coach Cheryl Murtagh doesn't want the glare of the spotlight that shines on her to detract from a team that has been the America East champion for the past three years and has been to the NCAA tournament 13 times.
Label:''', "GPT-JT-6B-v1", 5, 0.0, 1.0, 40],
    [
        # Paraphrasing
        '''Paraphrase the given sentence into a different sentence.

Input: Can you recommend some upscale restaurants in New York?
Output: What upscale restaurants do you recommend in New York?

Input: What are the famous places we should not miss in Paris?
Output: Recommend some of the best places to visit in Paris?

Input: Could you recommend some hotels that have cheap price in Zurich?
Output:''', "GPT-JT-6B-v1", 20, 0.8, 1.0, 40],
    [
        # Text Summarization
        '''Given a review from Amazon's food products, the task is to generate a short summary of the given review in the input.

Input: I have bought several of the Vitality canned dog food products and have found them all to be of good quality. The product looks more like a stew than a processed meat and it smells better. My Labrador is finicky and she appreciates this product better than most.
Output: Good Quality Dog Food

Input: Product arrived labeled as Jumbo Salted Peanuts...the peanuts were actually small sized unsalted. Not sure if this was an error or if the vendor intended to represent the product as 'Jumbo'.
Output: Not as Advertised

Input: My toddler loves this game to a point where he asks for it. That's a big thing for me. Secondly, no glitching unlike one of their competitors (PlayShifu). Any tech I don’t have to reach out to support for help is a good tech for me. I even enjoy some of the games and activities in this. Overall, this is a product that shows that the developers took their time and made sure people would not be asking for refund. I’ve become bias regarding this product and honestly I look forward to buying more of this company’s stuff. Please keep up the great work.
Output:''', "GPT-JT-6B-v1", 10, 0.0, 1.0, 40],
    [
        # Word Sense Disambiguation
        '''Identify which sense of a word is meant in a given context.

Context: The river overflowed the bank.
Word: bank
Sense: river bank

Context: A mouse takes much more room than a trackball.
Word: mouse
Sense: computer mouse

Context: The bank will not be accepting cash on Saturdays.
Word: bank
Sense: commercial (finance) banks

Context: Bill killed the project
Word: kill
Sense:''', "GPT-JT-6B-v1", 10, 0.0, 1.0, 40],
    [
        # Natural Language Interface
        '''Given a pair of sentences, choose whether the two sentences agree (entailment)/disagree (contradiction) with each other.
Possible labels: 1. entailment 2. contradiction

Sentence 1: The skier was on the edge of the ramp. Sentence 2: The skier was dressed in winter clothes.
Label: entailment

Sentence 1: The boy skated down the staircase railing. Sentence 2: The boy is a newbie skater.
Label: contradiction

Sentence 1: Two middle-aged people stand by a golf hole. Sentence 2: A couple riding in a golf cart.
Label:''', "GPT-JT-6B-v1", 2, 0.0, 1.0, 40]
]


def main():
    """Build the Gradio interface around :func:`infer` and launch it."""
    iface = gr.Interface(
        fn=infer,
        inputs=[
            gr.Textbox(lines=20),  # prompt
            gr.Dropdown(["GPT-JT-6B-v1"]),  # model_name
            gr.Slider(1, 256, value=200),  # max_tokens
            gr.Slider(0.0, 10.0, value=0.1),  # temperature
            gr.Slider(0.0, 1.0, value=0.9),  # top_p
            # Fix: minimum was 0, but infer() asserts 1 <= top_k <= 1000,
            # so sliding to 0 crashed the request with an AssertionError.
            gr.Slider(1, 1000, value=40)  # top_k
        ],
        outputs=gr.Textbox(lines=7),
        examples=examples,
        title="GPT-JT Demo (Gradio port)",
        description='''This is a port of the Streamlit [GPT-JT demo](https://huggingface.co/spaces/togethercomputer/GPT-JT) by [Together](https://huggingface.co/togethercomputer) to Gradio'''
    )

    iface.launch(debug=True)


# Launch the demo only when executed as a script, not when imported.
if __name__ == '__main__':
    main()