From ff093af6891544d3d45939c6a75e4c8dc063ab8c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Timoth=C3=A9=20Boulet?= <timothe.boulet@student-cs.fr>
Date: Wed, 16 Jun 2021 14:34:49 +0200
Subject: [PATCH] improve game!

---
 __pycache__/faceAnalysis.cpython-39.pyc | Bin 630 -> 640 bytes
 __pycache__/game.cpython-39.pyc         | Bin 0 -> 2074 bytes
 __pycache__/utils.cpython-39.pyc        | Bin 2290 -> 2279 bytes
 __pycache__/videoCapture.cpython-39.pyc | Bin 0 -> 545 bytes
 faceAnalysis.py                         |   3 +-
 game.py                                 | 122 +++++++++++++++++-------
 main.py                                 |   6 ++
 videoCapture.py                         |  30 +++---
 8 files changed, 114 insertions(+), 47 deletions(-)
 create mode 100644 __pycache__/game.cpython-39.pyc
 create mode 100644 __pycache__/videoCapture.cpython-39.pyc
 create mode 100644 main.py

diff --git a/__pycache__/faceAnalysis.cpython-39.pyc b/__pycache__/faceAnalysis.cpython-39.pyc
index 9da9aaba520f5915277c7b1f95b1d04246a2ea10..368b1bcddbeb049eab5511f2cb30b5052084d6ac 100644
GIT binary patch
delta 325
zcmeyy(!k1_$ji&cz`(%3`|e~y`9xmX`XB}dhE#?WhGs@ah7`sWrX0p7rWD2$<{ah_
z5T7N7C6_gdHJ2@lEtfrtosl7xBa3qZR|;z?yEwx_#whMoh7`6`_GTtVh7|S`juwU}
zo)o5F22IXV2?hoR=ZwUn^wiwcyb^_!Qia_7l*21>QuXxoUV@z9r^$GWapIH?(J0=$
z{GwdPyp*8S;>@blA{GV)hN#Jgj6Uo<3=9mvtehveFnZ{M*u`MCGjlO<F>*0-Fc$GK
zFfeHH-eN5%%FHXd#hO=|TTofV&cMJh`5&WPNEA~^S`>G2Vp(cRd~SY9YR)Z|oczR;
vA~psF2C&v5keNsXF9QR^Ee@O9{FKt1R68aH28Lpg3JyjP6k!%%;$i{-{4z-4

delta 305
zcmZo*{l>zZ$ji&cz`($8g=<QJ$wXe+geV3Eh7^Vr#vFzy2+bJ91m-hGF{d!3Fy*j>
zfW(<|SaaE;*mBvU*mF6eI2ajH8L~JRaHX)Ma)>i5WQ^iY<w;>p<!EMNWJqC4VQ*oG
z;!R-;X3*rAIDeApE#AERqFl$kl%Uk&%&OEP76t}}Ta(=wec1UJ7#Mz8B~PBo=wZao
zz`#%pb{sPo6Bi>FBL`!V00RSqCf_aA?9`&f;#;hFrMU%_MVuh*%*ka&llhqBqHeL}
z=BK3Q6yM^?$xlp)2Xl%zKuTB(iZb&`z-APIEJ7mqLGm0nx%nxjIjMF`Ae%udI2a*N
Kgh_yriwOV`vO`w@

diff --git a/__pycache__/game.cpython-39.pyc b/__pycache__/game.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c404eea4190b7533106a8631dbc60cf05a1a5d5b
GIT binary patch
literal 2074
zcmYe~<>g{vU|{fhe=?z+kAdMah=Yt-7#J8F7#J9e#TXbEQW#PgTNt7kQrJ^iQ`lM<
zqZm_If*CY9k{OY-a4;}1fOI*7bj5*mF{Uu&Fw`=pfM~{ArWD2$rW~eR=2~V(h8m^?
zEGdjB%nO-nS!$STSR@&0S!-Bp7_ym)97<SgSeqGx8B$nkSj8DqSj8D?*=iUUu+^|F
zWUOVcVXt9LV+v-_Wb=CovR#w$7DsAseo1D2Uhzvd1_p*I-ju|WME&C2%$(H9V*M%>
zy@I@SP4-*N$z?{j*oqSKQu1@7*pl<}(lXPFm>3usqBuc<MTvRoske9_+Jc~}GINVk
z6H_!fqWGXzgyg&Ar=+JAfjnBo%D}(?F@Xyz?Uz(^iyg|2;)j^!nVXoN8jx6$af=Ni
zu#%yOlYxQZmyxqoOlWaxQE^O3W^R5-Oj=@cW@3(RYDGa&YH@L9ex7bhYDsD`$Y(L>
ziMgqI1(ii04}t={SP0~IW)4OUMjj>}Mh->+Mjl2kMzH)#IR*xX=OB3oh6nb3$so5v
zcnl1n*aTS)j-(nU1_p5CFw}w~gCU18m#LPO5hBK1%LbNXsb#NWS-_gYxR9}yBZaY+
zvxa>E%R+`)E-1|nrFoz<ZwXrpQ!`@>Q!QUDe+}OP_Js_!0_hC3f;AiqI2JNYU@GFP
z;aI@AkRgVtR;X6EMyR-B0#hMh4O<p-jc^Kc3QI3jtq8=d8XgIT8m4TPqAN9=DXiuU
zwVWkvHT>viL)60E!;_~}!(Ss(#!&PQB2&X9!BE4P%~JF(j{_o7%Tpr&b{Ag_%L1+x
zwi@AuOpFXQEDN}6_)^$QcxpJC8O0e=IBHn3cvDzYI8(S5vcu)MOQ1fj;g(<ktK)&H
z<Atl^O5v;F1Dna5!cfXMfw5=`#O4}a35FVm8fH+UmS)IiLCDnd)(9@(s}Wwv2)2no
zg>@lg3cmwGi9m`#Gh;T>1jeFGpl~Y8s$p4xEGr0={Q(Li3nIg)FbnD)ZjgJjS&Fit
zW^*hMT*v@+fe?r;5l#`R;hfEoB3#2Tn;}JHE^9N$eGp$U6;+f7r-*_@1YH<nb!tUx
zSQl{Bh=TpVx<DkI0m>4Ev&2BAPGBnZN?{0Q&=m740wv~K3?Z6~QM^T|C8b4qu8_R^
z5+qT|&cMLnoS1ufWl>@kdvJ1oQL2@KCeup|P{!kDU|`T>1{(k-UjAobV0a12&{gaq
znYpP&V5K4;!4D9n0w8T5bHVDP*n+`DL=i~%7GvQp#)47>1_lPN)KZ0#)S}$Xyu&LM
z6cxhqON$i}%TlWpGIKLa4zE<G;#Gj=@M4A3JcTN5h2qrY{JfOZVg<!2%dq^+<V=Oa
z($t*7($r#wf{gr<{9=WIqRisdVuhUiqGE-VRE4tqlA=_Fg4Ck4{G!~%yyR2`TTM-I
zaG??<n44Hqkdt4MlbNJfPzhp1u|a8`OmI<Bl%JehTzreABr`WPiYF{HB{knUv7n^1
zC>5OjZ?Poj7gR=ZWET4uq~@ij++qQhVNq<Exy2dz<x%Vfr6nP$6(zTri%W{4MBM!S
zLgGDKgMvL=BjbZTeFJ=4BW|%J78IoBr9`opCuWv-r&iu#&M!*2#a@(}lbTqZ8YP&L
zT3k|;U+I{W6P}rul3!kYi?yIAGp{6yt)e6|IlDNDtr9}BfUMLMyTwtElUNCglw16n
zd1aY-$(czxscuDyxv96fQ%d5CQVUBni&9f=apc8ALJSfX%mq0mQB0WyMWA9Uia9y4
zAd0mZlnSD_L7oRCh|Iin2*)!ovm`SyCrS(~lbM&C>R*zPS_Dl_#ZkN<ai7HEk^oSY
z6oG21B2e;*;)RsTeyOD;MTt34Y!C-TF&Cwl++s}wxiN|d7U4db#U)X!U=`pp972F<
z(pxN`A{vyGit|8)Eei)Ds95G;=3-=F0u|F7j2z&CnuU!EB*VnVEC4FJ89{PfOcIPt
z%p7bWJqnB(j67^yj8cpoj9knTjBHF22zAT?jBFs&n8X;lAUYYjK*hc?D1oySaf1>#
p4~+2BWCACMB9JV^rJ$l8!UDUQ!zMRBr8Fni4wNa1L0N}`9{`hb8u9=D

literal 0
HcmV?d00001

diff --git a/__pycache__/utils.cpython-39.pyc b/__pycache__/utils.cpython-39.pyc
index e89e1b3c25003428354d2326b863319855205de8..a5d9d659083885aff989b884eaf6c8e88c6cd46f 100644
GIT binary patch
delta 306
zcmew)_*{@Tk(ZZ?fq{V`=gf(O#T$9685x}?cQR&6u`n<&I5RLX6w5F$Fw`)VFf3rK
zVN7RS$k;qtfJuUpX|fTMH8XRO@Z<uf1&mRX1(}l>1t*s>XEG{GzRzso5XI`4mtIsE
z#qN?>oL*X762;<{nphOY>XBGbP#MJ>oR|{D9$Z>blv$h_#qO6{T2hplqse%SH8U?I
zwL+8W*5q&&YewbClUY(21t<St36s`fU|`T>c|L0<2<)^k(qv#@fD)TCS=$*I1t))G
zQ)ASaEWw_|s6M%w{h+oHNQ=NN*1Xc(g32O6kOnaZ28JlX+{BWCocxlU%p|>nN)Rh*
haz2NFlrTtz0*Fus5g;3jKxP#2O-^JHo_vNw4FE%tQosNJ

delta 302
zcmaDZ_(_mAk(ZZ?fq{X6Lw!m@#YWz0M#kjHos8KN_j3x%F)%RHFf3pwVO+pe!<f#v
zka6K;J|+n+=3DFqMX4#7$t9E3nXH+aibN)-GA&@dHJOb$nNetR7IP+}(&Q`577n*q
z9rMzQDsQp7WEQ8F7MI*&aZ61sy2a{|SWr-Ti#a$k<raHzX+cqDaq2C0ztqx_qQo3c
z##^kJc`2zCnoLoXeORm+wI_G5q%aCie#;UjZNR|5pvm%l)=UuCX<uZ>z`y_{Hpj8H
zGcpQIe#WN8XfT<VJ&RF)as~TAZ7Yx#fm^J3rMU%_MM5AA5)2Fsw*+$&OA2!GOL8)k
g^a?6LtXq>)I1EHZKq{0#gf@r(*;r&Xc_oJ(0QquRUjP6A

diff --git a/__pycache__/videoCapture.cpython-39.pyc b/__pycache__/videoCapture.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8b4efb37fafe3a967bb28295d3809bcd65d02650
GIT binary patch
literal 545
zcmYe~<>g{vU|^W|?qmWhGXuk85C<7EF)%PVFfcF_^Dr<lq%fo~wlG97q%Z|DXfh`w
zt7BzgU|?ZjU~mShtpKTIOkv1jsAWh2(Tuf>H4F=wQWzI9)-u&FEnr^AP?S=`w18zH
zLkv?bb1h2^bFoQbN)6)z#u}D|jEoF53=3FOm}*!SGJ)A_Da;EQQ&=1rO4w6an;Em2
zilhot3Z-h87H}+N0IT2x(J2hU44Q0yFF_6~<zrxA@XSq2PgN)>O3W-dyfXDABLf4&
z%l`}v47V5yHMwpvCzl!B;>iT52`I`>PAx9J#S@m9lA7<FSWr@0lzNLJv)I2NH7_;g
z7E4iTV#+PH%-rIP{PJ7u<%yXk-l>(hnDdKLZm}1o=A<STr`{4wNi8lZ%CB_H$qCQQ
zOUW-U)?_UL1<Wm`%z|6Y$%zHGn2S<NZn35nCFZ8CWGE75U|{%V<!lubTAW%`98;2+
zn_m)>mYAHGn4_CoQBagxT%4Jor<;;mlA2tSnV%O^hU7ZEg34Pw$O6oupqBvootcA?
zgNciYgOP`kjfsnihe?c)g^7c)NR)wr!A}!HgDpoQL_op9VUwGmQks)$2Quq3$S44M
C4vppj

literal 0
HcmV?d00001

diff --git a/faceAnalysis.py b/faceAnalysis.py
index 22660aa..845f556 100644
--- a/faceAnalysis.py
+++ b/faceAnalysis.py
@@ -1,7 +1,6 @@
 #Objective of this file is to analyse a face
-import keras
+print("Chargement du modèle...")
 import numpy as np
-import cv2
 from utils import *
 from config import emotions, input_shape, modelName
 
diff --git a/game.py b/game.py
index 9ae9591..0249138 100644
--- a/game.py
+++ b/game.py
@@ -1,15 +1,10 @@
-#Use your camera for processing the video. Stop by pressing Q
-import cv2
-import imageProcess as ip
-import faceAnalysis as fa
-import random
-from config import emotions
+def smileyRandom(emotionToDodge):
+    #Return a random smiley and the associated emotion (guaranteed different from emotionToDodge)
 
-cap = cv2.VideoCapture(0)   #0 means we capture the first camera, your webcam probably
-score = 0
-N = 15
+    import cv2
+    import random
+    from config import emotions
 
-def smileyRandom(emotionToDodge):
     emotionNbr = random.randrange(0,6)
     emotion = emotions[emotionNbr]
     if emotion == emotionToDodge: return smileyRandom(emotion)
@@ -17,32 +12,95 @@ def smileyRandom(emotionToDodge):
     smiley = cv2.imread(smileyImagePath)
     return smiley, emotion
 
-smiley, emotion = smileyRandom("")
 
-while cap.isOpened():		 #or while 1. cap.isOpened() is false if there is a problem
-    ret, frame = cap.read()  #Read next video frame, stop if frame not well read
-    if not ret: break
-    
-    emotionsList = ip.imageProcess(frame, returnEmotion=True)
-    
-    if emotion in emotionsList: #If emotion recognized, increase score, reset smiley to mimick and write "GG!"
-        score += 1
-        cv2.putText(smiley, "Emotion reconnue !", (50,50), cv2.FONT_HERSHEY_SIMPLEX, 1, (0,255,0), 2)
-        cv2.imshow("Smiley", smiley)         
-        smiley, emotion = smileyRandom(emotion)
+def game(playTime=30, invincibleFrame=0.5, dt_required=0.5, n_photos=None):
+    #Play one game for playTime seconds. invincibleFrame: cooldown (s) after scoring, dt_required: time (s) the target emotion must be held to score, n_photos: number of snapshots shown at the end (None to disable).
+
+    #Use your camera to process the video stream. Stop early by pressing Q.
+    import cv2
+    import matplotlib.pyplot as plt
+    import imageProcess as ip
+    import time
+
+
+    cap = cv2.VideoCapture(0)   #0 selects the first camera, probably your webcam
+    score = 0
+
+    timeScoring = time.time()             #last time the score changed (start of the invincible frame)
+    timeInitial = time.time()             #start of the game
+    timeSinceOtherEmotions = time.time()  #last time the target emotion was not being recognized
+    timeLastPhoto = time.time()           #last time a snapshot was saved
+
+    smiley, emotion = smileyRandom("")
+    smileyNeutral = smiley.copy()
+    photos = []                           #snapshots taken during the game, shown at the end
+
+
+
+
+
+    while cap.isOpened():		 #cap.isOpened() is False if the camera could not be opened
+        ret, frame = cap.read()  #Read the next video frame; stop if it could not be read
+        if not ret: break
+        
+        emotionsList = ip.imageProcess(frame, returnEmotion=True)
+        
+        
+        if time.time()-timeSinceOtherEmotions > dt_required:    #If the target emotion has been held for dt_required seconds, increase the score and draw a new smiley
+            score += 1
+            smiley, emotion = smileyRandom(emotion)
+            smileyNeutral = smiley.copy()
+            timeScoring = time.time()
+            timeSinceOtherEmotions = time.time()
+        
+        elif emotion in emotionsList and time.time()-timeScoring>invincibleFrame: #If the target emotion is recognized and the invincible frame has passed, let the hold timer keep running
+            pass
+
+        else:
+            timeSinceOtherEmotions = time.time()    #Emotion not held (or still in the invincible frame): reset the hold timer
+
+
+
+
+        #Reset the smiley, draw the score and timer, and show both windows
+        smiley = smileyNeutral.copy()
+        cv2.imshow("Caméra", frame)  			#Show the camera feed of you making faces
+        cv2.putText(smiley, "Score: "+str(score), (40,40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0,0,255), 2)
+        cv2.putText(smiley, "Timer: "+str(time.time()-timeInitial), (20,240), cv2.FONT_HERSHEY_SIMPLEX, 1, (255,0,0), 2)
+        cv2.imshow("Smiley", smiley)            #Show the smiley to mimic
+
+
+
 
+        #Periodically save a snapshot to show at the end:
+        if n_photos is not None:
+            if time.time()-timeLastPhoto > playTime/(n_photos+1):
+                timeLastPhoto = time.time()
+                photos.append(frame)
 
-    cv2.imshow("Caméra", frame)  			#Show you making emotional faces
-    cv2.putText(smiley, "Score: "+str(score), (40,40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0,0,255), 2)
-    cv2.imshow("Smiley", smiley)            #Show the smiley to mimic
+        key = cv2.waitKey(1) & 0xFF     #Read the keyboard once per frame; stop the game if Q is pressed or the play time is exceeded
+        if key == ord('q'):             #If you press Q, stop the loop and so the capture
+            break
 
-    if cv2.waitKey(1) & 0xFF == ord('q'):			#If you press Q, stop the while and so the capture
-        break   
+        elif key == ord('p'):           #If you press P, skip the current smiley but lose a point
+            score -= 1
+            smiley, emotion = smileyRandom(emotion)
+            smileyNeutral = smiley.copy()
+            timeScoring = time.time()
+            timeSinceOtherEmotions = time.time()
+        
 
-    elif cv2.waitKey(1) & 0xFF == ord('p'):			#If you press P, pass the smiley but lower your score
-        score -= 1
-        smiley, emotion = smileyRandom(emotion)
+        elif time.time() - timeInitial > playTime:      #Stop when the play time is over
+            break
 
+    cap.release()
+    cv2.destroyAllWindows()
 
-cap.release()
-cv2.destroyAllWindows()
\ No newline at end of file
+    print(f"Jeu terminé ! Vous avez imité {score} émotions en {playTime} secondes !")
+    if n_photos is not None:
+        print("Voici quelques photos prises lors de votre performance =)")
+        for photo in photos:
+            plt.imshow(photo)
+            plt.xticks([])
+            plt.yticks([])
+            plt.show()
\ No newline at end of file
diff --git a/main.py b/main.py
new file mode 100644
index 0000000..4ca5558
--- /dev/null
+++ b/main.py
@@ -0,0 +1,6 @@
+from game import game
+from videoCapture import videoCapture
+
+game(playTime=300, invincibleFrame=5, dt_required=0.5, n_photos=5)
+
+#videoCapture()
\ No newline at end of file
diff --git a/videoCapture.py b/videoCapture.py
index f2b8ce9..6c27ee3 100644
--- a/videoCapture.py
+++ b/videoCapture.py
@@ -1,19 +1,23 @@
-#Use your camera for processing the video. Stop by pressing Q
-import cv2
-import imageProcess as ip
 
-cap = cv2.VideoCapture(0)   #0 means we capture the first camera, your webcam probably
 
-while cap.isOpened():		 #or while 1. cap.isOpened() is false if there is a problem
-    ret, frame = cap.read()  #Read next video frame, stop if frame not well read
-    if not ret: break
+def videoCapture():
 
-    ip.imageProcess(frame)                          #Process frame
+    #Use your camera to process the video stream. Stop by pressing Q.
+    import cv2
+    import imageProcess as ip
 
-    cv2.imshow("Image traitée", frame)  			#Show processed image in a window
+    cap = cv2.VideoCapture(0)   #0 selects the first camera, probably your webcam
 
-    if cv2.waitKey(1) & 0xFF == ord('q'):			#If you press Q, stop the while and so the capture
-        break       
+    while cap.isOpened():		 #cap.isOpened() is False if the camera could not be opened
+        ret, frame = cap.read()  #Read the next video frame; stop if it could not be read
+        if not ret: break
 
-cap.release()
-cv2.destroyAllWindows()
\ No newline at end of file
+        ip.imageProcess(frame)                          #Process frame
+
+        cv2.imshow("Image traitée", frame)  			#Show processed image in a window
+
+        if cv2.waitKey(1) & 0xFF == ord('q'):			#If you press Q, stop the loop and so the capture
+            break
+
+    cap.release()
+    cv2.destroyAllWindows()
\ No newline at end of file
-- 
GitLab