Update app.py
app.py CHANGED
@@ -1,10 +1,11 @@
 import streamlit as st
 import tensorflow as tf
+from tensorflow.keras.layers import InputLayer
 from PIL import Image
 import numpy as np
 # from .modules.tools import Analysing_image, text_to_speech
 from gtts import gTTS
-from transformers import BioGptTokenizer, AutoModelForCausalLM, pipeline
+from transformers import BioGptTokenizer, AutoModelForCausalLM, pipeline, TFAutoModel
 from deep_translator import GoogleTranslator
 import tempfile
 import os
@@ -101,7 +102,17 @@ with tab1:
         st.info("Please upload an image to classify.")
     else:
         st.info("Using Pretrained model")
-        model = tf.keras.models.load_model("./models/medical_classifier.h5")
+        # model = tf.keras.models.load_model("./models/medical_classifier.h5")
+        model = TFAutoModel.from_pretrained('./models/medical_classifier.h5')
+        config = model.config
+        if 'batch_shape' in config:
+            config['input_shape'] = config.pop('batch_shape')[1:]  # Remove batch size
+
+        # Create the input layer with the corrected configuration
+        input_layer = InputLayer(**model.config)
+        # Rebuild the model (if necessary)
+        model.build(input_shape=model.config['input_shape'])
+
         if image_file is not None:
            predict_class = Analysing_image(st, model, image_file)
         else:
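
For reference, a minimal standalone sketch of the same 'batch_shape' fix done purely through tf.keras rather than the TFAutoModel route this commit takes: it assumes the .h5 was written by a newer Keras whose InputLayer config stores 'batch_shape' instead of 'batch_input_shape', and PatchedInputLayer is an illustrative name, not part of this repository.

    import tensorflow as tf
    from tensorflow.keras.layers import InputLayer

    class PatchedInputLayer(InputLayer):
        # Translate the newer 'batch_shape' config key into the
        # 'batch_input_shape' kwarg that older tf.keras InputLayer expects.
        def __init__(self, batch_shape=None, **kwargs):
            if batch_shape is not None and "batch_input_shape" not in kwargs:
                kwargs["batch_input_shape"] = tuple(batch_shape)
            super().__init__(**kwargs)

    # Route InputLayer deserialization through the patched class while loading.
    model = tf.keras.models.load_model(
        "./models/medical_classifier.h5",
        custom_objects={"InputLayer": PatchedInputLayer},
        compile=False,
    )

The object returned here is a plain Keras model, so the existing Analysing_image(st, model, image_file) call should not need to change.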