Spaces: Runtime error
Update app.py
app.py CHANGED
@@ -1,52 +1,38 @@
-import gradio as gr
-import tensorflow as tf
-import numpy as np
 import pickle
 
-#
-model_path =
-
-label_encoder_path = "label_encoder.pkl"
-
-# Load the trained model
-try:
-    model = tf.keras.models.load_model(model_path)
-except Exception as e:
-    print(f"Error loading model: {e}")
-
-# Load the tokenizer and label encoder
-with open(tokenizer_path, 'rb') as handle:
-    tokenizer = pickle.load(handle)
 
-
-
 
-# Define the
-
-
-        # Tokenize the input question
-        seq = tokenizer.texts_to_sequences([question])
-        padded_seq = tf.keras.preprocessing.sequence.pad_sequences(seq, maxlen=27)  # Adjust maxlen to match your model
-
-        # Make prediction
-        prediction = model.predict(padded_seq)
-
-        # Convert prediction to label
-        predicted_label = label_encoder.inverse_transform(np.argmax(prediction, axis=1))
-
-        return predicted_label[0]
-    except Exception as e:
-        return f"Error during prediction: {e}"
 
-# Define the
-
-
-
-
-
-
-)
 
-#
-if __name__ ==
-
 import pickle
+import tensorflow as tf
+from tensorflow import keras
+from fastapi import FastAPI
+from pydantic import BaseModel
+import uvicorn
 
+# Load the model
+model_path = 'doctor_ai_model.h5'
+model = keras.models.load_model(model_path)
 
+# Create FastAPI app
+app = FastAPI()
 
+# Define the request model
+class InputData(BaseModel):
+    input_data: list
 
+# Define the prediction endpoint
+@app.post('/predict')
+async def predict(data: InputData):
+    # Prepare input data
+    input_array = tf.convert_to_tensor(data.input_data)
+
+    # Check if input shape matches the model's input shape
+    expected_shape = (None, 27)
+    if input_array.shape[1] != expected_shape[1]:
+        return {'error': f'Input data must have shape: {expected_shape}'}
+
+    # Make a prediction
+    prediction = model.predict(tf.expand_dims(input_array, axis=0))  # Expand dims to match batch size
+    predicted_class = tf.argmax(prediction, axis=1).numpy().tolist()
+
+    return {'predicted_class': predicted_class}
 
+# Start the FastAPI server (this will run offline)
+if __name__ == '__main__':
+    uvicorn.run(app, host='127.0.0.1', port=8000)  # Use localhost for offline mode
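
One detail worth noting in the new handler: the shape check requires a rank-2 input (it reads input_array.shape[1]), but tf.expand_dims(input_array, axis=0) then adds another leading axis, so a (1, 27) payload reaches model.predict as (1, 1, 27). If the model was trained on (batch, 27) inputs, as maxlen=27 in the removed Gradio code suggests, that extra axis could contribute to the runtime error shown above. A minimal sketch of an alternative, assuming the model's input shape really is (None, 27):

    # Sketch only; assumes the model expects inputs of shape (None, 27).
    input_array = tf.convert_to_tensor(data.input_data, dtype=tf.float32)

    # Promote a single flat sample [f1, ..., f27] to a batch of one.
    if len(input_array.shape) == 1:
        input_array = tf.expand_dims(input_array, axis=0)

    if input_array.shape[1] != 27:
        return {'error': 'Input data must have shape (batch, 27)'}

    # The tensor is already batched, so no further expand_dims is needed.
    prediction = model.predict(input_array)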
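
For completeness, a hypothetical client call against the endpoint as written (this assumes the server is reachable on localhost:8000; requests is an assumed client-side dependency, not imported by app.py). Note also that binding to 127.0.0.1 keeps the server local to the machine; a hosted environment such as Spaces generally needs 0.0.0.0 and the platform's expected port.

    import requests  # hypothetical client dependency, not part of app.py

    # A batch of one sample with 27 placeholder features, matching the
    # endpoint's check that input_array.shape[1] == 27.
    payload = {'input_data': [[0.0] * 27]}

    response = requests.post('http://127.0.0.1:8000/predict', json=payload)
    print(response.json())  # {'predicted_class': [...]} on success, {'error': ...} otherwise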