KikoDM committed on
Commit a5e0c71 · 1 Parent(s): 6fea8d4

Create app.py

Files changed (1)
  1. app.py +79 -0
app.py ADDED
@@ -0,0 +1,79 @@
+ import gradio as gr
+ import pandas as pd
+ import numpy as np
+ import os
+ import cv2
+ from tqdm import tqdm
+ import tensorflow as tf
+ from tensorflow import keras
+ from tensorflow.keras.preprocessing import image
+ from tensorflow.keras.preprocessing.image import ImageDataGenerator
+ from skimage import io
+ import matplotlib.pyplot as plt
+
+ # Load the trained facial-expression recognition model from 'my_model'
+ new_model = tf.keras.models.load_model('my_model')
+ objects = ('angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral')
+ y_pos = np.arange(len(objects))
+ print(y_pos)
+
+
+
+ def emotion_analysis(emotions):
+     objects = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral']
+     y_pos = np.arange(len(objects))
+     plt.bar(y_pos, emotions, align='center', alpha=0.9)  # one bar per emotion class
+     plt.tick_params(axis='x', which='both', pad=10, width=4, length=10)
+     plt.xticks(y_pos, objects)
+     plt.ylabel('percentage')
+     plt.title('emotion')
+     plt.show()
+
+
+ def predict_image(pic):
+     # Load the uploaded image as 48x48 grayscale, the input size the model expects
+     img = image.load_img(pic, grayscale=True, target_size=(48, 48))
+     show_img = image.load_img(pic, grayscale=False, target_size=(200, 200))
+     x = image.img_to_array(img)
+     x = np.expand_dims(x, axis=0)
+
+     x /= 255  # scale pixel values to [0, 1]
+
+     custom = new_model.predict(x)
+
+     emotion_analysis(custom[0])
+
+     x = np.array(x, 'float32')
+     x = x.reshape([48, 48])
+
+     plt.gray()
+     plt.imshow(show_img)
+     plt.show()
+
+     m = 0.000000000000000000001  # running maximum of the class probabilities
+     a = custom[0]
+     for i in range(0, len(a)):
+         if a[i] > m:
+             m = a[i]
+             ind = i  # index of the most probable emotion
+
+     return 'Expression Prediction: ' + objects[ind]
+
+ iface = gr.Interface(
+     predict_image,
+     [
+         # type="filepath" hands predict_image a path it can pass to load_img
+         gr.inputs.Image(shape=None, image_mode="RGB", invert_colors=False,
+                         source="upload", tool="editor", type="filepath",
+                         label=None, optional=False)
+     ],
+     "text",
+     interpretation="default",
+     title='FER',
+     description='El ',
+     theme='grass'
+ )
+
+
+
+ iface.launch()