kix-intl committed on
Commit
f3ce705
1 Parent(s): 6f5d8ea

add app.py

Browse files
Files changed (2) hide show
  1. app.py +41 -0
  2. requirements.txt +3 -0
app.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import torch
2
+ from transformers import DistilBertTokenizer, DistilBertForSequenceClassification
3
+ import gradio as gr
4
+
5
+ # モデルとトークナイザーのロード
6
+ model_name = "kix-intl/elon-musk-detector" # あなたのモデル名に置き換えてください
7
+ tokenizer = DistilBertTokenizer.from_pretrained(model_name)
8
+ model = DistilBertForSequenceClassification.from_pretrained(model_name)
9
+
10
+ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
11
+ model.to(device)
12
+
13
def classify_tweet(tweet):
    """Classify a tweet as written by Elon Musk or not.

    The tweet is tokenized (truncated/padded to 128 tokens), scored by the
    module-level `model` on `device`, and the argmax class is reported
    together with its softmax probability as a human-readable string.
    """
    encoded = tokenizer(
        tweet,
        return_tensors="pt",
        truncation=True,
        padding=True,
        max_length=128,
    ).to(device)

    # No gradients needed for pure inference.
    with torch.no_grad():
        logits = model(**encoded).logits

    probs = torch.softmax(logits, dim=1)
    label = torch.argmax(probs, dim=1).item()
    score = probs[0][label].item()

    # Class 1 corresponds to "written by Elon Musk".
    if label == 1:
        return f"Elon Musk (Confidence: {score:.2f})"
    return f"Not Elon Musk (Confidence: {score:.2f})"
25
+
26
# Assemble the Gradio front end: one text box in, one label string out.
demo = gr.Interface(
    fn=classify_tweet,
    inputs=gr.Textbox(lines=2, placeholder="Enter a tweet here..."),
    outputs=gr.Textbox(),
    title="Elon Musk Tweet Classifier",
    description="This model classifies whether a given tweet is likely to be written by Elon Musk or not.",
    examples=[
        ["Tesla's new model is amazing!"],
        ["Just had a great coffee at my favorite local café."],
        ["Sending astronauts to Mars is the next big challenge for humanity."],
    ],
)

# Start the web server (required when hosting on Hugging Face Spaces).
demo.launch()
requirements.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ gradio
2
+ torch
3
+ transformers