szili2011 committed
Commit: b694fa5
Parent(s): 321461b

Update app.py

Files changed (1): app.py (+18, -32)
app.py CHANGED
@@ -1,47 +1,33 @@
  import numpy as np
- import gradio as gr
- import tensorflow as tf
- from tensorflow.keras.models import load_model
- from tensorflow.keras.preprocessing.text import Tokenizer
  import pickle

- # Load the model
  model = load_model('doctor_ai_model.h5')
-
- # Load the tokenizer
  with open('tokenizer.pkl', 'rb') as f:
      tokenizer = pickle.load(f)
-
- # Load the label encoder
  with open('label_encoder.pkl', 'rb') as f:
      label_encoder = pickle.load(f)

- def chatbot(input_text):
-     # Tokenize and pad the input
-     sequences = tokenizer.texts_to_sequences([input_text])
-     input_tensor = tf.keras.preprocessing.sequence.pad_sequences(sequences)
-
-     # Make a prediction
-     response = model.predict(input_tensor)
-     print("Model output probabilities:", response)
-
-     # Get predicted label
-     predicted_label = np.argmax(response, axis=1)
-
-     # Handle unknown labels
-     if predicted_label[0] < len(label_encoder.classes_):
-         decoded_label = label_encoder.inverse_transform(predicted_label)
-     else:
-         decoded_label = "Unknown label"

      return decoded_label[0]

- # Create a Gradio interface
- iface = gr.Interface(fn=chatbot,
-                      inputs="text",
-                      outputs="text",
-                      title="Doctor AI Chatbot",
-                      description="Enter a medical-related question to get answers based on trained categories.")

  # Launch the interface
  iface.launch()
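The removed chatbot() above padded with pad_sequences(sequences) and no explicit maxlen, so the padded width simply followed the longest sequence in the single-item batch and would usually not match the fixed input length the model was trained on; the updated file below pins maxlen=100 instead. A minimal sketch of the difference, assuming the Keras 2.x pad_sequences that app.py imports:

# Illustration only: padding width with vs. without an explicit maxlen
from keras.preprocessing.sequence import pad_sequences

seqs = [[4, 17, 9]]                            # one tokenized question, three tokens
print(pad_sequences(seqs).shape)               # (1, 3)   -- width follows the input
print(pad_sequences(seqs, maxlen=100).shape)   # (1, 100) -- fixed width the model expects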
 
  import numpy as np
  import pickle
+ from keras.preprocessing.text import Tokenizer
+ from keras.preprocessing.sequence import pad_sequences
+ from sklearn.preprocessing import LabelEncoder
+ from keras.models import load_model
+ import gradio as gr

+ # Load model and encoders
  model = load_model('doctor_ai_model.h5')
  with open('tokenizer.pkl', 'rb') as f:
      tokenizer = pickle.load(f)
  with open('label_encoder.pkl', 'rb') as f:
      label_encoder = pickle.load(f)

+ # Function to get response from model
+ def get_response(input_text):
+     # Preprocess input text
+     input_sequences = tokenizer.texts_to_sequences([input_text])
+     input_tensor = pad_sequences(input_sequences, maxlen=100)  # Adjust maxlen as necessary

+     # Make prediction
+     predicted_label = model.predict(input_tensor)
+     decoded_label = label_encoder.inverse_transform([np.argmax(predicted_label)])
+
      return decoded_label[0]

+ # Create Gradio interface
+ iface = gr.Interface(fn=get_response, inputs="text", outputs="text", title="Doctor AI",
+                      description="Ask your health-related questions and get advice!")

  # Launch the interface
  iface.launch()
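If the model was trained on sequences of a fixed length, the hard-coded maxlen=100 has to match it; when in doubt, the expected length can often be read off the loaded model rather than guessed. A hedged sketch (assumes the model has a single input of shape (None, seq_len); verify against the actual doctor_ai_model.h5):

# Possible alternative to the hard-coded maxlen=100 inside get_response()
seq_len = model.input_shape[1]                      # e.g. (None, 100) -> 100
input_tensor = pad_sequences(input_sequences, maxlen=seq_len)

A one-line smoke test just above iface.launch() also confirms that the pickled tokenizer, label encoder and model load and predict together (the question is a made-up example):

print("Smoke test:", get_response("I have a sore throat and a mild fever"))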