import gradio as gr
import numpy as np
import tensorflow as tf
from tensorflow.keras.preprocessing import image  # img_to_array for PIL -> numpy conversion
import matplotlib.pyplot as plt
# Load the trained Keras model from disk (HDF5 format).
new_model = tf.keras.models.load_model('modelo_entrenado.h5')
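# Startup sanity check (a small sketch, not in the original script): print the model's
# expected input shape; the preprocessing below assumes 48x48 grayscale (48, 48, 1).
print("Model input shape:", new_model.input_shape)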
# The seven emotion classes, in the order the model outputs them.
objects = ('angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral')
y_pos = np.arange(len(objects))
def emotion_analysis(emotions):
    """Plot the predicted probability of each emotion as a bar chart."""
    plt.bar(y_pos, emotions, align='center', alpha=0.9)
    plt.tick_params(axis='x', which='both', pad=10, width=4, length=10)
    plt.xticks(y_pos, objects)
    plt.ylabel('percentage')
    plt.title('emotion')
    plt.show()
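
# Note: plt.show() only renders on the machine running the server, not in the browser.
# A sketch of an alternative (assumption: not wired into the interface below) that
# returns the figure so it could be sent to a Gradio plot output instead:
def emotion_figure(emotions):
    fig, ax = plt.subplots()
    ax.bar(y_pos, emotions, align='center', alpha=0.9)
    ax.set_xticks(y_pos)
    ax.set_xticklabels(objects)
    ax.set_ylabel('percentage')
    ax.set_title('emotion')
    return fig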
def predict_image(pic):
    """Predict the facial expression for a PIL image uploaded through Gradio."""
    # The model expects a single-channel 48x48 input, so convert the upload to grayscale.
    x = image.img_to_array(pic.convert('L'))
    x = np.expand_dims(x, axis=0)
    x = x / 255.0
    x = x.reshape(-1, 48, 48, 1)
    custom = new_model.predict(x)
    emotion_analysis(custom[0])
    # The class with the highest probability is the predicted expression.
    ind = int(np.argmax(custom[0]))
    return 'Expression Prediction: ' + objects[ind]
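
# Optional local sanity check (a minimal sketch; FER_SELFTEST is a hypothetical
# environment variable and the flat gray image is synthetic, used only to exercise
# the pipeline end to end before launching the web UI):
import os
from PIL import Image
if os.environ.get("FER_SELFTEST"):
    dummy_face = Image.new("L", (48, 48), color=128)  # blank 48x48 grayscale test image
    print(predict_image(dummy_face))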
iface = gr.Interface(
    predict_image,
    [
        gr.inputs.Image(source="upload", type="pil", shape=(48, 48))
    ],
    "text",
    interpretation="default",
    title='FER',
    description='Facial Expression Recognition: upload a face photo and the model predicts the expression.',
    theme='grass'
)
iface.launch()