File size: 1,403 Bytes
04cbe77
19cb6e6
838ec20
a28e4c5
 
838ec20
4b8add8
838ec20
 
28d4a48
838ec20
 
 
398a30c
838ec20
 
 
8ff6443
398a30c
838ec20
 
 
04cbe77
 
838ec20
 
 
 
 
7786a59
838ec20
 
 
 
 
398a30c
838ec20
19cb6e6
838ec20
 
 
 
 
 
0440734
838ec20
19cb6e6
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
import numpy as np
import gradio as gr
import tensorflow as tf
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import Tokenizer
import pickle

# Load the trained Keras classifier from disk.
# NOTE(review): expects 'doctor_ai_model.h5' in the current working directory.
model = load_model('doctor_ai_model.h5')

# Load the tokenizer fitted at training time (text -> integer sequences).
# SECURITY: pickle.load executes arbitrary code from the file — only load
# artifacts produced by your own training pipeline, never untrusted files.
with open('tokenizer.pkl', 'rb') as f:
    tokenizer = pickle.load(f)

# Load the label encoder (maps predicted class indices back to label strings).
with open('label_encoder.pkl', 'rb') as f:
    label_encoder = pickle.load(f)

def chatbot(input_text):
    """Classify a medical question and return the predicted category label.

    Parameters
    ----------
    input_text : str
        Raw user question entered in the Gradio textbox.

    Returns
    -------
    str
        The decoded category label, or the string "Unknown label" if the
        predicted index falls outside the label encoder's known classes.
    """
    # Tokenize the single input as a one-item batch.
    sequences = tokenizer.texts_to_sequences([input_text])
    # NOTE(review): pad_sequences is called without maxlen, so a lone input
    # is only padded to its own length. If the model was trained with a
    # fixed sequence length, pass maxlen=<training length> here — confirm
    # against the training pipeline.
    input_tensor = tf.keras.preprocessing.sequence.pad_sequences(sequences)

    # Predict class probabilities for the batch of one.
    response = model.predict(input_tensor)
    print("Model output probabilities:", response)

    # Index of the highest-probability class (shape (1,) array).
    predicted_label = np.argmax(response, axis=1)

    # Guard against an index the encoder does not know about.
    if predicted_label[0] < len(label_encoder.classes_):
        # inverse_transform returns an array; unwrap its single element.
        return label_encoder.inverse_transform(predicted_label)[0]

    # BUG FIX: the original did `decoded_label = "Unknown label"` and then
    # returned decoded_label[0], which yielded only the first character "U".
    return "Unknown label"

# Assemble the Gradio web UI: a single text input routed through chatbot(),
# with the predicted category shown in a text output.
ui_title = "Doctor AI Chatbot"
ui_description = "Enter a medical-related question to get answers based on trained categories."

iface = gr.Interface(
    fn=chatbot,
    inputs="text",
    outputs="text",
    title=ui_title,
    description=ui_description,
)

# Start the local web server and serve the interface.
iface.launch()