Skip to content
Snippets Groups Projects
Commit bb86864d authored by Timothé Boulet's avatar Timothé Boulet :alien:
Browse files

bugfix

parent b943fe14
No related branches found
No related tags found
No related merge requests found
%% Cell type:code id: tags:
```
#@title Imports
#%load_ext autoreload #Need to uncomment for import sometime, dont understand
#Tensorflow :
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import datasets, layers, models, losses
import tensorflow_datasets as tfds
#from google.colab import files
#Others :
from matplotlib import image
import os
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
import random as rd
import cv2
import csv
#Data loaders :
from loadFer2013DS import *
from loadRavdessDS import *
from loadExpWDS import *
from loadAffwildDS import *
#Others
from utils import *
from config import *
```
%% Output
INFO:tensorflow:Enabling eager execution
INFO:tensorflow:Enabling v2 tensorshape
INFO:tensorflow:Enabling resource variables
INFO:tensorflow:Enabling tensor equality
INFO:tensorflow:Enabling control flow v2
WARNING:tensorflow:SavedModel saved prior to TF 2.5 detected when loading Keras model. Please ensure that you are saving the model with model.save() or tf.keras.models.save_model(), *NOT* tf.saved_model.save(). To confirm, there should be a file named "keras_metadata.pb" in the SavedModel directory.
Model used: firstModel
%% Cell type:code id: tags:
```
# Load each dataset's image (X*) and label (Y*) arrays from disk, then
# concatenate them into one global dataset.
# NOTE: np.save (used to create these files, see the next cell) appends
# ".npy" to the target path, so the files on disk are e.g.
# "data/array/Xf.npy" — np.load needs the full filename.
print("Array loading...")
Xf = np.load("data/array/Xf.npy")  # fer2013
Xe = np.load("data/array/Xe.npy")  # expW
Xa = np.load("data/array/Xa.npy")  # affwild
Xr = np.load("data/array/Xr.npy")  # ravdess
Yf = np.load("data/array/Yf.npy")
Ye = np.load("data/array/Ye.npy")
Ya = np.load("data/array/Ya.npy")
Yr = np.load("data/array/Yr.npy")
print("Concatenation...")
X = np.concatenate([Xf, Xa, Xe, Xr])
Y = np.concatenate([Yf, Ya, Ye, Yr])
```
%% Cell type:code id: tags:
```
# Save X and Y directly (only do this if there is enough RAM).
# np.save appends ".npy", producing "data/array/X.npy" and "data/array/Y.npy".
np.save("data/array/X", X)
np.save("data/array/Y", Y)

def loadData():
    """Reload the full concatenated dataset (X, Y) saved above.

    Returns:
        tuple[np.ndarray, np.ndarray]: the image array and label array.
    """
    # np.save appended ".npy" above; np.load requires the full filename.
    return np.load("data/array/X.npy"), np.load("data/array/Y.npy")
```
%% Cell type:code id: tags:
```
#@title Visualisation de chaque dataset
# For each source dataset, print its shapes and show a 5x5 grid of random
# samples titled with their emotion label.
# NOTE(review): `afficher` and `emotions` come from the project's
# utils/config wildcard imports — presumably an image-display helper and a
# label-index-to-name mapping; verify against those modules.
for X, Y, name in zip([Xf, Xr, Xe, Xa], [Yf, Yr, Ye, Ya], ["fer2013", "ravdess", "expW", "affwild"]):
    N = 5
    M = 5
    print("Dataset:", name)
    # Fixed typo in the printed message ("La bels:" -> "Labels:").
    print("Images:", X.shape, "Labels:", Y.shape)
    plt.figure()
    for i in range(N * M):
        # Skip empty datasets: randrange(0) would raise ValueError.
        if X.shape[0] == 0: continue
        k = rd.randrange(X.shape[0])
        plt.subplot(N, M, i + 1)
        plt.xticks([])
        plt.yticks([])
        plt.grid(False)
        afficher(X[k])
        plt.title(emotions[int(Y[k])])
    plt.show()
```
%% Cell type:code id: tags:
```
# Preview of the merged train/test split: print the array shapes, then show
# a 5x5 grid of randomly drawn training images titled with their emotion.
# NOTE(review): X_train/Y_train/X_test/Y_test are not defined in this
# notebook's visible cells — presumably provided by a wildcard import.
print("X_train:", X_train.shape)
print("Y_train:", Y_train.shape)
print("\nX_test:", X_test.shape)
print("Y_test:", Y_test.shape)

N, M = 5, 5
plt.figure()
for cell_index in range(N * M):
    sample = rd.randrange(X_train.shape[0])
    plt.subplot(N, M, cell_index + 1)
    plt.xticks([])
    plt.yticks([])
    plt.grid(False)
    afficher(X_train[sample])
    plt.title(emotions[int(Y_train[sample])])
plt.show()
```
%% Cell type:code id: tags:
```
#@title Hyperparamètres
# Training hyperparameters, bound in a single tuple assignment:
#   epochs          — passes over the training set
#   batch_size      — samples per gradient update
#   validation_size — fraction of the data held out for validation
epochs, batch_size, validation_size = 2, 128, 0.1
```
%% Cell type:code id: tags:
```
# Categorical labels: convert the integer emotion labels to one-hot vectors,
# as required by the categorical_crossentropy loss used at compile time.
Ycat = keras.utils.to_categorical(Y)
# NOTE(review): X_train and Y_train_cat are not defined in any visible cell —
# presumably left over from an earlier notebook version or provided by a
# wildcard import; verify, otherwise these two prints raise NameError.
print("X", X_train.shape)
print("Y", Y_train_cat.shape)
print("X", X.shape)
print("Y", Ycat.shape)
```
%% Cell type:code id: tags:
```
# MODEL
class MyModel(keras.Sequential):
    """CNN emotion classifier built as a keras Sequential model.

    Architecture: light augmentation (random contrast + horizontal flip),
    four Conv2D/MaxPooling/BatchNormalization stages with growing filter
    counts (32 -> 64 -> 128 -> 256), then two dropout-regularized Dense
    layers and a 7-way softmax output (one unit per emotion class).

    The inline shape comments (e.g. ``48*48 *1``) track the spatial size and
    channel count of the feature maps between stages, assuming 48x48
    single-channel input — TODO confirm against the actual `input_shape`.
    """

    def __init__(self, input_shape):
        # `input_shape`: shape of one input image, forwarded to the first
        # Conv2D layer (e.g. (48, 48, 1) per the shape comments below).
        super(MyModel, self).__init__()
        # Pre-processing / augmentation layers (active during training)
        self.add(keras.layers.experimental.preprocessing.RandomContrast(factor=(0.5,0.5)))
        self.add(keras.layers.experimental.preprocessing.RandomFlip(mode="horizontal"))
        #48*48 *1
        self.add(keras.layers.Conv2D(32, kernel_size = (3, 3), activation = 'relu', input_shape = input_shape))
        self.add(keras.layers.MaxPooling2D(pool_size = 2))
        self.add(keras.layers.BatchNormalization())
        #23*23 *32
        self.add(keras.layers.Conv2D(64, kernel_size = (3, 3), activation = 'relu'))
        self.add(keras.layers.MaxPooling2D(pool_size = 2))
        self.add(keras.layers.BatchNormalization())
        #10*10 *64
        self.add(keras.layers.Conv2D(128, kernel_size = (3, 3), activation = 'relu'))
        self.add(keras.layers.MaxPooling2D(pool_size = 2))
        self.add(keras.layers.BatchNormalization())
        #4*4 *128
        self.add(keras.layers.Conv2D(256, kernel_size = (3, 3), activation = 'relu'))
        self.add(keras.layers.MaxPooling2D(pool_size = 2))
        self.add(keras.layers.BatchNormalization())
        #1*1 *256
        self.add(keras.layers.Flatten())
        self.add(keras.layers.Dense(128, activation = 'relu'))
        self.add(keras.layers.Dropout(0.2))
        self.add(keras.layers.Dense(64, activation = 'relu'))
        self.add(keras.layers.Dropout(0.2))
        #self.add(keras.layers.BatchNormalization())
        self.add(keras.layers.Dense(7, activation = 'softmax'))
        #7

    def predir(self, monImage):
        """Return the class-probability vector for a single image."""
        # Wrap the image in a batch of 1, then unwrap the single prediction.
        return self.predict(np.array([monImage]))[0,:]

    def compile_o(self):
        """Compile with the Adam optimizer, categorical cross-entropy loss
        (expects one-hot labels) and accuracy as the reported metric."""
        self.compile(optimizer = 'adam', loss=losses.categorical_crossentropy, metrics = ['accuracy'])

# NOTE(review): `input_shape` is not defined in any visible cell —
# presumably it comes from `from config import *`; verify.
myModel = MyModel(input_shape)
myModel.compile_o()
```
%% Cell type:code id: tags:
```
# Sanity check: display one training image and print the model's predicted
# class-probability vector for it.
theImage = X_train[0]
afficher(theImage)
# `predir` is a method of MyModel, not a free function: calling
# `predir(myModel, theImage)` raises NameError, so invoke it on the instance.
print(myModel.predir(theImage))
```
%% Cell type:code id: tags:
```
# Train the model and plot the learning curves.
# Two fixes:
# - `validation_rate` is not a keras.Model.fit argument and raises a
#   TypeError; the correct keyword is `validation_split`.
# - The model is compiled with categorical_crossentropy, which requires the
#   one-hot labels `Ycat` (built earlier), not the integer labels `Y`.
# NOTE(review): epochs=5 / split=0.05 differ from the declared
# hyperparameters (epochs, validation_size) — consider unifying.
history = myModel.fit(X, Ycat, epochs=5, validation_split=0.05)

# Plot train vs. validation accuracy over the epochs.
plt.plot(history.history['accuracy'], label='accuracy')
plt.plot(history.history['val_accuracy'], label='val_accuracy')
plt.legend()
# Start the y-axis at the worst observed accuracy so the curves fill the plot.
plt.ylim([min(history.history['val_accuracy']+history.history['accuracy']), 1])
plt.show()
```
%% Cell type:code id: tags:
```
# Persist the trained model under "exp904" so it can be reloaded later with
# keras.models.load_model (saved in TF's default SavedModel directory format,
# per the loader warning shown in the imports cell's output).
myModel.save('exp904')
```
%% Cell type:code id: tags:
```
```
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment