author    Jules Laplace <julescarbon@gmail.com>    2018-12-14 02:31:14 +0100
committer Jules Laplace <julescarbon@gmail.com>    2018-12-14 02:31:14 +0100
commit    45e0625bcbc2c7f041b8c5d177c5dcf487f07d26 (patch)
tree      728353b00677a865679e72429dfe6b200dc57100 /scraper/reports/pdf_unknown_trigram.html
parent    3ab28a3ff3d0e1b71f123e38ce3d0df42caddc7c (diff)
new reports
Diffstat (limited to 'scraper/reports/pdf_unknown_trigram.html')
-rw-r--r--  scraper/reports/pdf_unknown_trigram.html  |  2
1 file changed, 1 insertion, 1 deletion
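
The changed file is a single minified HTML line: a two-column table listing each trigram the scraper did not recognize together with its occurrence count, sorted by count in descending order. As a rough illustration only (the function names trigrams and write_report and the known-trigram set are hypothetical; this is not the repository's actual scraper code), a report of this shape could be produced along these lines:

    # Hypothetical sketch: count word trigrams not in a known set and
    # write them to an HTML table like pdf_unknown_trigram.html.
    import re
    from collections import Counter

    def trigrams(text):
        """Lowercase, keep letter runs only, and yield 3-word windows."""
        words = re.findall(r"[a-z]+", text.lower())
        for i in range(len(words) - 2):
            yield " ".join(words[i:i + 3])

    def write_report(texts, known, path="pdf_unknown_trigram.html"):
        """Count trigrams absent from `known` and emit an HTML report."""
        counts = Counter(t for text in texts
                         for t in trigrams(text) if t not in known)
        rows = "".join("<tr><td>{}</td><td>{}</td></tr>".format(t, n)
                       for t, n in counts.most_common())
        html = ("<!doctype html><html><head><meta charset='utf-8'>"
                "<title>PDF Report: Unknown Trigrams</title>"
                "<link rel='stylesheet' href='reports.css'></head><body>"
                "<h2>PDF Report: Unknown Trigrams</h2>"
                "<table border='1' cellpadding='3' cellspacing='3'>"
                + rows + "</table></body></html>")
        with open(path, "w") as f:
            f.write(html)

Because the report is regenerated and rewritten wholesale, any recount replaces the entire table, which is why the diff below shows the whole file as a one-line change.
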
diff --git a/scraper/reports/pdf_unknown_trigram.html b/scraper/reports/pdf_unknown_trigram.html
index e75d48b3..1ea7b358 100644
--- a/scraper/reports/pdf_unknown_trigram.html
+++ b/scraper/reports/pdf_unknown_trigram.html
@@ -1 +1 @@
-<!doctype html><html><head><meta charset='utf-8'><title>PDF Report: Unknown Trigrams</title><link rel='stylesheet' href='reports.css'></head><body><h2>PDF Report: Unknown Trigrams</h2><table border='1' cellpadding='3' cellspacing='3'><tr><td>of computer science</td><td>646</td></tr><tr><td>department of computer</td><td>501</td></tr><tr><td>computer science and</td><td>312</td></tr><tr><td>institute of technology</td><td>224</td></tr><tr><td>facial expression recognition</td><td>208</td></tr><tr><td>science and engineering</td><td>187</td></tr><tr><td>science and technology</td><td>185</td></tr><tr><td>department of electrical</td><td>179</td></tr><tr><td>carnegie mellon university</td><td>161</td></tr><tr><td>university of california</td><td>144</td></tr><tr><td>for face recognition</td><td>142</td></tr><tr><td>university of technology</td><td>137</td></tr><tr><td>school of computer</td><td>136</td></tr><tr><td>of electrical and</td><td>135</td></tr><tr><td>senior member ieee</td><td>133</td></tr><tr><td>for the degree</td><td>129</td></tr><tr><td>academy of sciences</td><td>128</td></tr><tr><td>of electrical engineering</td><td>125</td></tr><tr><td>member ieee and</td><td>121</td></tr><tr><td>and computer engineering</td><td>112</td></tr><tr><td>of science and</td><td>110</td></tr><tr><td>electrical and computer</td><td>108</td></tr><tr><td>student member ieee</td><td>107</td></tr><tr><td>in the wild</td><td>104</td></tr><tr><td>in this paper</td><td>102</td></tr><tr><td>of the requirements</td><td>90</td></tr><tr><td>doctor of philosophy</td><td>89</td></tr><tr><td>state of the</td><td>86</td></tr><tr><td>of the art</td><td>81</td></tr><tr><td>journal of computer</td><td>81</td></tr><tr><td>and computer science</td><td>78</td></tr><tr><td>dx doi org</td><td>76</td></tr><tr><td>http dx doi</td><td>73</td></tr><tr><td>one of the</td><td>73</td></tr><tr><td>of computer engineering</td><td>71</td></tr><tr><td>requirements for the</td><td>71</td></tr><tr><td>of engineering and</td><td>69</td></tr><tr><td>university of science</td><td>69</td></tr><tr><td>in partial ful</td><td>69</td></tr><tr><td>university of maryland</td><td>68</td></tr><tr><td>college of engineering</td><td>67</td></tr><tr><td>electrical engineering and</td><td>66</td></tr><tr><td>engineering and computer</td><td>65</td></tr><tr><td>partial ful llment</td><td>65</td></tr><tr><td>of hong kong</td><td>64</td></tr><tr><td>proceedings of the</td><td>63</td></tr><tr><td>university of hong</td><td>63</td></tr><tr><td>department of information</td><td>62</td></tr><tr><td>of chinese academy</td><td>62</td></tr><tr><td>face veri cation</td><td>61</td></tr><tr><td>arti cial intelligence</td><td>60</td></tr><tr><td>the requirements for</td><td>58</td></tr><tr><td>should be addressed</td><td>57</td></tr><tr><td>submitted in partial</td><td>57</td></tr><tr><td>facial expression analysis</td><td>56</td></tr><tr><td>face recognition using</td><td>56</td></tr><tr><td>computer science department</td><td>55</td></tr><tr><td>computer vision and</td><td>55</td></tr><tr><td>dept of computer</td><td>55</td></tr><tr><td>of information technology</td><td>54</td></tr><tr><td>for facial expression</td><td>54</td></tr><tr><td>department of psychology</td><td>54</td></tr><tr><td>of information science</td><td>54</td></tr><tr><td>computer science university</td><td>52</td></tr><tr><td>pittsburgh pa usa</td><td>51</td></tr><tr><td>based on the</td><td>51</td></tr><tr><td>the chinese university</td><td>50</td></tr><tr><td>of facial 
expressions</td><td>49</td></tr><tr><td>llment of the</td><td>49</td></tr><tr><td>imperial college london</td><td>47</td></tr><tr><td>for action recognition</td><td>47</td></tr><tr><td>creative commons attribution</td><td>45</td></tr><tr><td>department of computing</td><td>44</td></tr><tr><td>and information engineering</td><td>44</td></tr><tr><td>of sciences beijing</td><td>44</td></tr><tr><td>human computer interaction</td><td>44</td></tr><tr><td>school of electrical</td><td>43</td></tr><tr><td>department of informatics</td><td>42</td></tr><tr><td>expression recognition using</td><td>42</td></tr><tr><td>will be inserted</td><td>42</td></tr><tr><td>inserted by the</td><td>42</td></tr><tr><td>by the editor</td><td>42</td></tr><tr><td>received date accepted</td><td>42</td></tr><tr><td>date accepted date</td><td>42</td></tr><tr><td>sciences beijing china</td><td>42</td></tr><tr><td>of the face</td><td>41</td></tr><tr><td>school of information</td><td>41</td></tr><tr><td>of maryland college</td><td>40</td></tr><tr><td>maryland college park</td><td>40</td></tr><tr><td>computer science engineering</td><td>40</td></tr><tr><td>of pattern recognition</td><td>40</td></tr><tr><td>convolutional neural networks</td><td>39</td></tr><tr><td>engineering and technology</td><td>39</td></tr><tr><td>of electronics and</td><td>39</td></tr><tr><td>in computer vision</td><td>39</td></tr><tr><td>of computer applications</td><td>39</td></tr><tr><td>of this work</td><td>38</td></tr><tr><td>of computer and</td><td>38</td></tr><tr><td>of face recognition</td><td>38</td></tr><tr><td>university of chinese</td><td>38</td></tr><tr><td>school of computing</td><td>38</td></tr><tr><td>university of surrey</td><td>38</td></tr><tr><td>information science and</td><td>38</td></tr><tr><td>university of illinois</td><td>37</td></tr><tr><td>is an open</td><td>37</td></tr><tr><td>an open access</td><td>37</td></tr><tr><td>the creative commons</td><td>37</td></tr><tr><td>all rights reserved</td><td>37</td></tr><tr><td>faculty of engineering</td><td>37</td></tr><tr><td>university of amsterdam</td><td>37</td></tr><tr><td>submitted to the</td><td>36</td></tr><tr><td>invariant face recognition</td><td>36</td></tr><tr><td>faculty of electrical</td><td>36</td></tr><tr><td>of the most</td><td>36</td></tr><tr><td>department of electronics</td><td>36</td></tr><tr><td>michigan state university</td><td>36</td></tr><tr><td>of information engineering</td><td>36</td></tr><tr><td>university of texas</td><td>35</td></tr><tr><td>laboratory of pattern</td><td>35</td></tr><tr><td>face recognition with</td><td>35</td></tr><tr><td>center for research</td><td>35</td></tr><tr><td>university of thessaloniki</td><td>34</td></tr><tr><td>face recognition system</td><td>34</td></tr><tr><td>open access article</td><td>34</td></tr><tr><td>the original work</td><td>34</td></tr><tr><td>university of oxford</td><td>34</td></tr><tr><td>college of computer</td><td>34</td></tr><tr><td>automatic facial expression</td><td>34</td></tr><tr><td>of california san</td><td>34</td></tr><tr><td>california san diego</td><td>34</td></tr><tr><td>university of southern</td><td>33</td></tr><tr><td>of southern california</td><td>33</td></tr><tr><td>robust face recognition</td><td>33</td></tr><tr><td>to cite this</td><td>33</td></tr><tr><td>provided the original</td><td>33</td></tr><tr><td>institute of computing</td><td>33</td></tr><tr><td>cas beijing china</td><td>33</td></tr><tr><td>https doi org</td><td>32</td></tr><tr><td>master of science</td><td>32</td></tr><tr><td>based face 
recognition</td><td>32</td></tr><tr><td>work is properly</td><td>32</td></tr><tr><td>science and information</td><td>32</td></tr><tr><td>institute of automation</td><td>32</td></tr><tr><td>due to the</td><td>32</td></tr><tr><td>journal of engineering</td><td>32</td></tr><tr><td>face detection and</td><td>31</td></tr><tr><td>in face recognition</td><td>31</td></tr><tr><td>luc van gool</td><td>31</td></tr><tr><td>institute of information</td><td>31</td></tr><tr><td>institute of science</td><td>31</td></tr><tr><td>under the creative</td><td>31</td></tr><tr><td>reproduction in any</td><td>31</td></tr><tr><td>max planck institute</td><td>31</td></tr><tr><td>university of singapore</td><td>31</td></tr><tr><td>has been accepted</td><td>31</td></tr><tr><td>been accepted for</td><td>31</td></tr><tr><td>access article distributed</td><td>30</td></tr><tr><td>article distributed under</td><td>30</td></tr><tr><td>distributed under the</td><td>30</td></tr><tr><td>permits unrestricted use</td><td>30</td></tr><tr><td>in any medium</td><td>30</td></tr><tr><td>any medium provided</td><td>30</td></tr><tr><td>medium provided the</td><td>30</td></tr><tr><td>planck institute for</td><td>30</td></tr><tr><td>university of central</td><td>30</td></tr><tr><td>university college london</td><td>30</td></tr><tr><td>of intelligent information</td><td>29</td></tr><tr><td>which permits unrestricted</td><td>29</td></tr><tr><td>unrestricted use distribution</td><td>29</td></tr><tr><td>use distribution and</td><td>29</td></tr><tr><td>distribution and reproduction</td><td>29</td></tr><tr><td>and information technology</td><td>29</td></tr><tr><td>it has been</td><td>29</td></tr><tr><td>convolutional neural network</td><td>29</td></tr><tr><td>university of washington</td><td>29</td></tr><tr><td>journal of advanced</td><td>28</td></tr><tr><td>institute for informatics</td><td>28</td></tr><tr><td>of psychology university</td><td>28</td></tr><tr><td>cambridge ma usa</td><td>28</td></tr><tr><td>department of engineering</td><td>28</td></tr><tr><td>of computing technology</td><td>28</td></tr><tr><td>university of london</td><td>28</td></tr><tr><td>university beijing china</td><td>28</td></tr><tr><td>lab of intelligent</td><td>27</td></tr><tr><td>in computer science</td><td>27</td></tr><tr><td>image and video</td><td>27</td></tr><tr><td>article id pages</td><td>27</td></tr><tr><td>commons attribution license</td><td>27</td></tr><tr><td>is properly cited</td><td>27</td></tr><tr><td>of automation chinese</td><td>27</td></tr><tr><td>automation chinese academy</td><td>27</td></tr><tr><td>can be used</td><td>27</td></tr><tr><td>part of the</td><td>27</td></tr><tr><td>support vector machine</td><td>27</td></tr><tr><td>face recognition under</td><td>27</td></tr><tr><td>universit at unchen</td><td>27</td></tr><tr><td>deep neural networks</td><td>27</td></tr><tr><td>of central florida</td><td>27</td></tr><tr><td>computer and information</td><td>27</td></tr><tr><td>and pattern recognition</td><td>26</td></tr><tr><td>of facial expression</td><td>26</td></tr><tr><td>recognition in the</td><td>26</td></tr><tr><td>of information and</td><td>26</td></tr><tr><td>department of mathematics</td><td>26</td></tr><tr><td>intelligent information processing</td><td>25</td></tr><tr><td>illinois at urbana</td><td>25</td></tr><tr><td>at urbana champaign</td><td>25</td></tr><tr><td>journal on image</td><td>25</td></tr><tr><td>university of massachusetts</td><td>25</td></tr><tr><td>faces in the</td><td>25</td></tr><tr><td>university of 
pittsburgh</td><td>25</td></tr><tr><td>latex class files</td><td>25</td></tr><tr><td>image processing and</td><td>25</td></tr><tr><td>and signal processing</td><td>25</td></tr><tr><td>of technology sydney</td><td>25</td></tr><tr><td>head pose estimation</td><td>25</td></tr><tr><td>university of tokyo</td><td>25</td></tr><tr><td>national taiwan university</td><td>25</td></tr><tr><td>recognition of facial</td><td>25</td></tr><tr><td>online at www</td><td>24</td></tr><tr><td>center for automation</td><td>24</td></tr><tr><td>for automation research</td><td>24</td></tr><tr><td>computer vision center</td><td>24</td></tr><tr><td>whether they are</td><td>24</td></tr><tr><td>may come from</td><td>24</td></tr><tr><td>dept of electrical</td><td>24</td></tr><tr><td>in this work</td><td>24</td></tr><tr><td>computing technology cas</td><td>24</td></tr><tr><td>technology cas beijing</td><td>24</td></tr><tr><td>speech and signal</td><td>24</td></tr><tr><td>and electronic engineering</td><td>24</td></tr><tr><td>of massachusetts amherst</td><td>24</td></tr><tr><td>journal of latex</td><td>24</td></tr><tr><td>of latex class</td><td>24</td></tr><tr><td>pose invariant face</td><td>23</td></tr><tr><td>technology of china</td><td>23</td></tr><tr><td>cite this version</td><td>23</td></tr><tr><td>hal is multi</td><td>23</td></tr><tr><td>is multi disciplinary</td><td>23</td></tr><tr><td>multi disciplinary open</td><td>23</td></tr><tr><td>disciplinary open access</td><td>23</td></tr><tr><td>rchive for the</td><td>23</td></tr><tr><td>for the deposit</td><td>23</td></tr><tr><td>the deposit and</td><td>23</td></tr><tr><td>deposit and dissemination</td><td>23</td></tr><tr><td>dissemination of sci</td><td>23</td></tr><tr><td>research documents whether</td><td>23</td></tr><tr><td>documents whether they</td><td>23</td></tr><tr><td>they are pub</td><td>23</td></tr><tr><td>lished or not</td><td>23</td></tr><tr><td>or not the</td><td>23</td></tr><tr><td>not the documents</td><td>23</td></tr><tr><td>the documents may</td><td>23</td></tr><tr><td>documents may come</td><td>23</td></tr><tr><td>teaching and research</td><td>23</td></tr><tr><td>and research institutions</td><td>23</td></tr><tr><td>institutions in france</td><td>23</td></tr><tr><td>broad or from</td><td>23</td></tr><tr><td>or from public</td><td>23</td></tr><tr><td>public or private</td><td>23</td></tr><tr><td>or private research</td><td>23</td></tr><tr><td>private research centers</td><td>23</td></tr><tr><td>archive ouverte pluridisciplinaire</td><td>23</td></tr><tr><td>ouverte pluridisciplinaire hal</td><td>23</td></tr><tr><td>pluridisciplinaire hal est</td><td>23</td></tr><tr><td>et la diffusion</td><td>23</td></tr><tr><td>diffusion de documents</td><td>23</td></tr><tr><td>de niveau recherche</td><td>23</td></tr><tr><td>niveau recherche publi</td><td>23</td></tr><tr><td>publics ou priv</td><td>23</td></tr><tr><td>hindawi publishing corporation</td><td>23</td></tr><tr><td>et al this</td><td>23</td></tr><tr><td>have been proposed</td><td>23</td></tr><tr><td>of engineering science</td><td>23</td></tr><tr><td>class files vol</td><td>23</td></tr><tr><td>queen mary university</td><td>23</td></tr><tr><td>such as the</td><td>23</td></tr><tr><td>information engineering the</td><td>23</td></tr><tr><td>engineering the chinese</td><td>23</td></tr><tr><td>computer vision lab</td><td>22</td></tr><tr><td>video classi cation</td><td>22</td></tr><tr><td>of computer vision</td><td>22</td></tr><tr><td>paper we propose</td><td>22</td></tr><tr><td>facial emotion 
recognition</td><td>22</td></tr><tr><td>this article was</td><td>22</td></tr><tr><td>www frontiersin org</td><td>22</td></tr><tr><td>university of pennsylvania</td><td>22</td></tr><tr><td>processing of chinese</td><td>22</td></tr><tr><td>university of toronto</td><td>22</td></tr><tr><td>the proposed method</td><td>22</td></tr><tr><td>amsterdam the netherlands</td><td>22</td></tr><tr><td>nanyang technological university</td><td>22</td></tr><tr><td>of california berkeley</td><td>22</td></tr><tr><td>electrical computer engineering</td><td>21</td></tr><tr><td>link to publication</td><td>21</td></tr><tr><td>facial action unit</td><td>21</td></tr><tr><td>of advanced technology</td><td>21</td></tr><tr><td>institute carnegie mellon</td><td>21</td></tr><tr><td>face recognition based</td><td>21</td></tr><tr><td>end to end</td><td>21</td></tr><tr><td>to improve the</td><td>21</td></tr><tr><td>department of electronic</td><td>21</td></tr><tr><td>electrical and electronic</td><td>21</td></tr><tr><td>this article has</td><td>21</td></tr><tr><td>article has been</td><td>21</td></tr><tr><td>university of twente</td><td>20</td></tr><tr><td>institute of engineering</td><td>20</td></tr><tr><td>principal component analysis</td><td>20</td></tr><tr><td>mathematics and computer</td><td>20</td></tr><tr><td>active appearance models</td><td>20</td></tr><tr><td>some of the</td><td>20</td></tr><tr><td>institutes of advanced</td><td>20</td></tr><tr><td>springer science business</td><td>20</td></tr><tr><td>science business media</td><td>20</td></tr><tr><td>local binary pattern</td><td>20</td></tr><tr><td>gender classi cation</td><td>20</td></tr><tr><td>in real time</td><td>20</td></tr><tr><td>texas at austin</td><td>20</td></tr><tr><td>research in computer</td><td>20</td></tr><tr><td>facial landmark localization</td><td>20</td></tr><tr><td>and communication engineering</td><td>20</td></tr><tr><td>pattern analysis and</td><td>20</td></tr><tr><td>shanghai jiao tong</td><td>20</td></tr><tr><td>degree of doctor</td><td>20</td></tr><tr><td>in recent years</td><td>20</td></tr><tr><td>in future issue</td><td>20</td></tr><tr><td>issue of this</td><td>20</td></tr><tr><td>of this journal</td><td>20</td></tr><tr><td>accepted for publication</td><td>20</td></tr><tr><td>of electrical computer</td><td>19</td></tr><tr><td>mellon university pittsburgh</td><td>19</td></tr><tr><td>australian national university</td><td>19</td></tr><tr><td>use of this</td><td>19</td></tr><tr><td>of this material</td><td>19</td></tr><tr><td>material is permitted</td><td>19</td></tr><tr><td>servers or lists</td><td>19</td></tr><tr><td>school of engineering</td><td>19</td></tr><tr><td>university of michigan</td><td>19</td></tr><tr><td>as conference paper</td><td>19</td></tr><tr><td>paper at iclr</td><td>19</td></tr><tr><td>robotics institute carnegie</td><td>19</td></tr><tr><td>local binary patterns</td><td>19</td></tr><tr><td>prof dr ing</td><td>19</td></tr><tr><td>idiap research institute</td><td>19</td></tr><tr><td>of sciences cas</td><td>19</td></tr><tr><td>of the twenty</td><td>19</td></tr><tr><td>international joint conference</td><td>19</td></tr><tr><td>show that the</td><td>19</td></tr><tr><td>human action recognition</td><td>19</td></tr><tr><td>ieee international conference</td><td>19</td></tr><tr><td>for face detection</td><td>19</td></tr><tr><td>on pattern analysis</td><td>19</td></tr><tr><td>analysis and machine</td><td>19</td></tr><tr><td>and machine intelligence</td><td>19</td></tr><tr><td>jiao tong university</td><td>19</td></tr><tr><td>of 
the same</td><td>19</td></tr><tr><td>university of posts</td><td>19</td></tr><tr><td>of posts and</td><td>19</td></tr><tr><td>department of statistics</td><td>19</td></tr><tr><td>de la torre</td><td>18</td></tr><tr><td>follow this and</td><td>18</td></tr><tr><td>this and additional</td><td>18</td></tr><tr><td>and additional works</td><td>18</td></tr><tr><td>accepted for inclusion</td><td>18</td></tr><tr><td>terms of use</td><td>18</td></tr><tr><td>this material for</td><td>18</td></tr><tr><td>material for advertising</td><td>18</td></tr><tr><td>redistribution to servers</td><td>18</td></tr><tr><td>vol no august</td><td>18</td></tr><tr><td>this is the</td><td>18</td></tr><tr><td>university of oulu</td><td>18</td></tr><tr><td>analysis of facial</td><td>18</td></tr><tr><td>article was submitted</td><td>18</td></tr><tr><td>university of cambridge</td><td>18</td></tr><tr><td>simon fraser university</td><td>18</td></tr><tr><td>tel aviv university</td><td>18</td></tr><tr><td>the robotics institute</td><td>18</td></tr><tr><td>university of north</td><td>18</td></tr><tr><td>university of wollongong</td><td>18</td></tr><tr><td>brought to you</td><td>17</td></tr><tr><td>to you for</td><td>17</td></tr><tr><td>you for free</td><td>17</td></tr><tr><td>for free and</td><td>17</td></tr><tr><td>free and open</td><td>17</td></tr><tr><td>and open access</td><td>17</td></tr><tr><td>ieee personal use</td><td>17</td></tr><tr><td>in partial fulfillment</td><td>17</td></tr><tr><td>of mathematics and</td><td>17</td></tr><tr><td>on image and</td><td>17</td></tr><tr><td>erik learned miller</td><td>17</td></tr><tr><td>research center for</td><td>17</td></tr><tr><td>face recognition systems</td><td>17</td></tr><tr><td>we propose novel</td><td>17</td></tr><tr><td>results show that</td><td>17</td></tr><tr><td>xi an china</td><td>17</td></tr><tr><td>of computing and</td><td>17</td></tr><tr><td>facebook ai research</td><td>17</td></tr><tr><td>universit degli studi</td><td>17</td></tr><tr><td>to this work</td><td>17</td></tr><tr><td>dept of cse</td><td>17</td></tr><tr><td>face recognition from</td><td>17</td></tr><tr><td>sun yat sen</td><td>17</td></tr><tr><td>and electrical engineering</td><td>17</td></tr><tr><td>on computer vision</td><td>17</td></tr><tr><td>and computer vision</td><td>17</td></tr><tr><td>new collective works</td><td>17</td></tr><tr><td>collective works for</td><td>17</td></tr><tr><td>transactions on pattern</td><td>17</td></tr><tr><td>to whom correspondence</td><td>17</td></tr><tr><td>posts and telecommunications</td><td>17</td></tr><tr><td>section of the</td><td>17</td></tr><tr><td>of north carolina</td><td>17</td></tr><tr><td>th international conference</td><td>17</td></tr><tr><td>and information sciences</td><td>17</td></tr><tr><td>linear discriminant analysis</td><td>17</td></tr><tr><td>journal of information</td><td>17</td></tr><tr><td>frontiers in psychology</td><td>17</td></tr><tr><td>neural networks for</td><td>16</td></tr><tr><td>works at http</td><td>16</td></tr><tr><td>by an authorized</td><td>16</td></tr><tr><td>in accordance with</td><td>16</td></tr><tr><td>in other works</td><td>16</td></tr><tr><td>http hdl handle</td><td>16</td></tr><tr><td>hdl handle net</td><td>16</td></tr><tr><td>fulfillment of the</td><td>16</td></tr><tr><td>ming hsuan yang</td><td>16</td></tr><tr><td>and video processing</td><td>16</td></tr><tr><td>for more information</td><td>16</td></tr><tr><td>for face veri</td><td>16</td></tr><tr><td>expression recognition based</td><td>16</td></tr><tr><td>and intelligent 
systems</td><td>16</td></tr><tr><td>image classi cation</td><td>16</td></tr><tr><td>most of the</td><td>16</td></tr><tr><td>real world applications</td><td>16</td></tr><tr><td>face recognition has</td><td>16</td></tr><tr><td>from face images</td><td>16</td></tr><tr><td>face identi cation</td><td>16</td></tr><tr><td>is an important</td><td>16</td></tr><tr><td>on artificial intelligence</td><td>16</td></tr><tr><td>of the university</td><td>16</td></tr><tr><td>of electronic and</td><td>16</td></tr><tr><td>wang member ieee</td><td>16</td></tr><tr><td>and anil jain</td><td>16</td></tr><tr><td>and rama chellappa</td><td>16</td></tr><tr><td>and engineering university</td><td>16</td></tr><tr><td>electronics and communication</td><td>16</td></tr><tr><td>of electronic engineering</td><td>16</td></tr><tr><td>face recognition and</td><td>16</td></tr><tr><td>whom correspondence should</td><td>16</td></tr><tr><td>the hong kong</td><td>16</td></tr><tr><td>department of cse</td><td>16</td></tr><tr><td>hong kong china</td><td>16</td></tr><tr><td>publication in future</td><td>16</td></tr><tr><td>this journal but</td><td>16</td></tr><tr><td>journal but has</td><td>16</td></tr><tr><td>but has not</td><td>16</td></tr><tr><td>has not been</td><td>16</td></tr><tr><td>not been fully</td><td>16</td></tr><tr><td>been fully edited</td><td>16</td></tr><tr><td>fully edited content</td><td>16</td></tr><tr><td>edited content may</td><td>16</td></tr><tr><td>content may change</td><td>16</td></tr><tr><td>may change prior</td><td>16</td></tr><tr><td>prior to final</td><td>16</td></tr><tr><td>to final publication</td><td>16</td></tr><tr><td>science and software</td><td>15</td></tr><tr><td>and software engineering</td><td>15</td></tr><tr><td>and computer sciences</td><td>15</td></tr><tr><td>http www eecs</td><td>15</td></tr><tr><td>university of new</td><td>15</td></tr><tr><td>of the facial</td><td>15</td></tr><tr><td>uc san diego</td><td>15</td></tr><tr><td>access by the</td><td>15</td></tr><tr><td>an authorized administrator</td><td>15</td></tr><tr><td>university of bristol</td><td>15</td></tr><tr><td>creating new collective</td><td>15</td></tr><tr><td>facial landmark detection</td><td>15</td></tr><tr><td>laboratory of intelligent</td><td>15</td></tr><tr><td>www intechopen com</td><td>15</td></tr><tr><td>conference on artificial</td><td>15</td></tr><tr><td>for arti cial</td><td>15</td></tr><tr><td>feature extraction and</td><td>15</td></tr><tr><td>microsoft research asia</td><td>15</td></tr><tr><td>and mobile computing</td><td>15</td></tr><tr><td>the fact that</td><td>15</td></tr><tr><td>expression recognition with</td><td>15</td></tr><tr><td>and facial expression</td><td>15</td></tr><tr><td>zhang member ieee</td><td>15</td></tr><tr><td>for face alignment</td><td>15</td></tr><tr><td>of the data</td><td>15</td></tr><tr><td>of the journal</td><td>15</td></tr><tr><td>of the main</td><td>15</td></tr><tr><td>on the other</td><td>15</td></tr><tr><td>the other hand</td><td>15</td></tr><tr><td>for vision speech</td><td>15</td></tr><tr><td>vision speech and</td><td>15</td></tr><tr><td>this work was</td><td>15</td></tr><tr><td>deep convolutional neural</td><td>15</td></tr><tr><td>yat sen university</td><td>15</td></tr><tr><td>face recognition via</td><td>15</td></tr><tr><td>engineering national university</td><td>15</td></tr><tr><td>of california riverside</td><td>15</td></tr><tr><td>in this chapter</td><td>15</td></tr><tr><td>video based face</td><td>15</td></tr><tr><td>and machine learning</td><td>15</td></tr><tr><td>faculty of 
computer</td><td>15</td></tr><tr><td>conference on computer</td><td>15</td></tr><tr><td>conference on machine</td><td>15</td></tr><tr><td>authors contributed equally</td><td>15</td></tr><tr><td>classi cation and</td><td>15</td></tr><tr><td>et al and</td><td>15</td></tr><tr><td>based on facial</td><td>15</td></tr><tr><td>on arti cial</td><td>15</td></tr><tr><td>center for excellence</td><td>15</td></tr><tr><td>excellence in brain</td><td>15</td></tr><tr><td>science and intelligence</td><td>15</td></tr><tr><td>and intelligence technology</td><td>15</td></tr><tr><td>rio de janeiro</td><td>15</td></tr><tr><td>california at berkeley</td><td>14</td></tr><tr><td>based facial expression</td><td>14</td></tr><tr><td>detection and tracking</td><td>14</td></tr><tr><td>advertising or promotional</td><td>14</td></tr><tr><td>work in other</td><td>14</td></tr><tr><td>must be obtained</td><td>14</td></tr><tr><td>computer vision laboratory</td><td>14</td></tr><tr><td>to facial expression</td><td>14</td></tr><tr><td>university of barcelona</td><td>14</td></tr><tr><td>information and communication</td><td>14</td></tr><tr><td>shih fu chang</td><td>14</td></tr><tr><td>et al eurasip</td><td>14</td></tr><tr><td>al eurasip journal</td><td>14</td></tr><tr><td>vision and pattern</td><td>14</td></tr><tr><td>engineering and information</td><td>14</td></tr><tr><td>science and mobile</td><td>14</td></tr><tr><td>int comput vis</td><td>14</td></tr><tr><td>expression recognition and</td><td>14</td></tr><tr><td>issn volume issue</td><td>14</td></tr><tr><td>haz kemal ekenel</td><td>14</td></tr><tr><td>equally to this</td><td>14</td></tr><tr><td>university of rochester</td><td>14</td></tr><tr><td>of this paper</td><td>14</td></tr><tr><td>centre for vision</td><td>14</td></tr><tr><td>in this study</td><td>14</td></tr><tr><td>west virginia university</td><td>14</td></tr><tr><td>for large scale</td><td>14</td></tr><tr><td>in the context</td><td>14</td></tr><tr><td>tsinghua university beijing</td><td>14</td></tr><tr><td>eth zurich switzerland</td><td>14</td></tr><tr><td>university of nottingham</td><td>14</td></tr><tr><td>expressions of emotion</td><td>14</td></tr><tr><td>state key laboratory</td><td>14</td></tr><tr><td>university of trento</td><td>14</td></tr><tr><td>these authors contributed</td><td>14</td></tr><tr><td>to deal with</td><td>14</td></tr><tr><td>forbes ave pittsburgh</td><td>14</td></tr><tr><td>hong kong polytechnic</td><td>14</td></tr><tr><td>semi supervised learning</td><td>14</td></tr><tr><td>institute of computer</td><td>14</td></tr><tr><td>component of this</td><td>14</td></tr><tr><td>department of psychiatry</td><td>14</td></tr><tr><td>research on intelligent</td><td>14</td></tr><tr><td>on intelligent perception</td><td>14</td></tr><tr><td>intelligent perception and</td><td>14</td></tr><tr><td>perception and computing</td><td>14</td></tr><tr><td>in brain science</td><td>14</td></tr><tr><td>brain science and</td><td>14</td></tr><tr><td>report no ucb</td><td>13</td></tr><tr><td>no ucb eecs</td><td>13</td></tr><tr><td>www eecs berkeley</td><td>13</td></tr><tr><td>eecs berkeley edu</td><td>13</td></tr><tr><td>berkeley edu pubs</td><td>13</td></tr><tr><td>edu pubs techrpts</td><td>13</td></tr><tr><td>pubs techrpts eecs</td><td>13</td></tr><tr><td>techrpts eecs html</td><td>13</td></tr><tr><td>performance of face</td><td>13</td></tr><tr><td>we show that</td><td>13</td></tr><tr><td>university shanghai china</td><td>13</td></tr><tr><td>and ioannis pitas</td><td>13</td></tr><tr><td>peer reviewed 
version</td><td>13</td></tr><tr><td>or promotional purposes</td><td>13</td></tr><tr><td>works for resale</td><td>13</td></tr><tr><td>resale or redistribution</td><td>13</td></tr><tr><td>journal of science</td><td>13</td></tr><tr><td>university of defense</td><td>13</td></tr><tr><td>anil jain fellow</td><td>13</td></tr><tr><td>jain fellow ieee</td><td>13</td></tr><tr><td>more information please</td><td>13</td></tr><tr><td>entific research documents</td><td>13</td></tr><tr><td>scientifiques de niveau</td><td>13</td></tr><tr><td>publi ou non</td><td>13</td></tr><tr><td>manant des tablissements</td><td>13</td></tr><tr><td>des tablissements enseignement</td><td>13</td></tr><tr><td>recherche fran ais</td><td>13</td></tr><tr><td>ais ou trangers</td><td>13</td></tr><tr><td>ou trangers des</td><td>13</td></tr><tr><td>trangers des laboratoires</td><td>13</td></tr><tr><td>face recognition algorithms</td><td>13</td></tr><tr><td>the face recognition</td><td>13</td></tr><tr><td>recognition in videos</td><td>13</td></tr><tr><td>institute of advanced</td><td>13</td></tr><tr><td>of advanced computer</td><td>13</td></tr><tr><td>for real time</td><td>13</td></tr><tr><td>to solve the</td><td>13</td></tr><tr><td>of oulu finland</td><td>13</td></tr><tr><td>support vector machines</td><td>13</td></tr><tr><td>johns hopkins university</td><td>13</td></tr><tr><td>engineering research and</td><td>13</td></tr><tr><td>detection and recognition</td><td>13</td></tr><tr><td>according to the</td><td>13</td></tr><tr><td>submitted for the</td><td>13</td></tr><tr><td>of the image</td><td>13</td></tr><tr><td>college of information</td><td>13</td></tr><tr><td>technology chinese academy</td><td>13</td></tr><tr><td>itet eth zurich</td><td>13</td></tr><tr><td>methods have been</td><td>13</td></tr><tr><td>computer vision group</td><td>13</td></tr><tr><td>neural network for</td><td>13</td></tr><tr><td>queen university belfast</td><td>13</td></tr><tr><td>university of notre</td><td>13</td></tr><tr><td>of notre dame</td><td>13</td></tr><tr><td>of engineering technology</td><td>13</td></tr><tr><td>human robot interaction</td><td>13</td></tr><tr><td>information technology and</td><td>13</td></tr><tr><td>on facial expression</td><td>13</td></tr><tr><td>author to whom</td><td>13</td></tr><tr><td>be addressed mail</td><td>13</td></tr><tr><td>for classi cation</td><td>13</td></tr><tr><td>berkeley ca usa</td><td>13</td></tr><tr><td>kong polytechnic university</td><td>13</td></tr><tr><td>rensselaer polytechnic institute</td><td>13</td></tr><tr><td>of michigan ann</td><td>13</td></tr><tr><td>of biomedical engineering</td><td>13</td></tr><tr><td>unconstrained face recognition</td><td>13</td></tr><tr><td>electrical and electronics</td><td>13</td></tr><tr><td>for all other</td><td>13</td></tr><tr><td>classi cation with</td><td>13</td></tr><tr><td>der technischen universit</td><td>13</td></tr><tr><td>zur erlangung des</td><td>13</td></tr><tr><td>of face images</td><td>13</td></tr><tr><td>politehnica of bucharest</td><td>13</td></tr><tr><td>of the proposed</td><td>13</td></tr><tr><td>human activity recognition</td><td>13</td></tr><tr><td>university istanbul turkey</td><td>13</td></tr><tr><td>af nity matrix</td><td>13</td></tr><tr><td>recognition in video</td><td>12</td></tr><tr><td>yu gang jiang</td><td>12</td></tr><tr><td>of nebraska lincoln</td><td>12</td></tr><tr><td>if you believe</td><td>12</td></tr><tr><td>version of the</td><td>12</td></tr><tr><td>application to face</td><td>12</td></tr><tr><td>of defense 
technology</td><td>12</td></tr><tr><td>computer engineering department</td><td>12</td></tr><tr><td>polytechnique ed erale</td><td>12</td></tr><tr><td>the author and</td><td>12</td></tr><tr><td>hal id hal</td><td>12</td></tr><tr><td>university nanjing china</td><td>12</td></tr><tr><td>is the author</td><td>12</td></tr><tr><td>and face recognition</td><td>12</td></tr><tr><td>because of the</td><td>12</td></tr><tr><td>advanced computer science</td><td>12</td></tr><tr><td>real time face</td><td>12</td></tr><tr><td>illumination invariant face</td><td>12</td></tr><tr><td>electronics and information</td><td>12</td></tr><tr><td>refers to the</td><td>12</td></tr><tr><td>and classi cation</td><td>12</td></tr><tr><td>processing and analysis</td><td>12</td></tr><tr><td>facial expressions are</td><td>12</td></tr><tr><td>shiguang shan xilin</td><td>12</td></tr><tr><td>shan xilin chen</td><td>12</td></tr><tr><td>of new york</td><td>12</td></tr><tr><td>work was supported</td><td>12</td></tr><tr><td>supported by the</td><td>12</td></tr><tr><td>recognition under varying</td><td>12</td></tr><tr><td>pattern recognition and</td><td>12</td></tr><tr><td>video face recognition</td><td>12</td></tr><tr><td>computer engineering national</td><td>12</td></tr><tr><td>classi cation using</td><td>12</td></tr><tr><td>the facial expression</td><td>12</td></tr><tr><td>low rank representation</td><td>12</td></tr><tr><td>received april accepted</td><td>12</td></tr><tr><td>based on local</td><td>12</td></tr><tr><td>facial expression classification</td><td>12</td></tr><tr><td>university belfast research</td><td>12</td></tr><tr><td>belfast research portal</td><td>12</td></tr><tr><td>taiwan university taipei</td><td>12</td></tr><tr><td>university taipei taiwan</td><td>12</td></tr><tr><td>permission from ieee</td><td>12</td></tr><tr><td>reuse of any</td><td>12</td></tr><tr><td>university of ljubljana</td><td>12</td></tr><tr><td>shown in figure</td><td>12</td></tr><tr><td>this paper presents</td><td>12</td></tr><tr><td>there are many</td><td>12</td></tr><tr><td>in revised form</td><td>12</td></tr><tr><td>of facial images</td><td>12</td></tr><tr><td>hong kong university</td><td>12</td></tr><tr><td>re identi cation</td><td>12</td></tr><tr><td>michigan ann arbor</td><td>12</td></tr><tr><td>conference on arti</td><td>12</td></tr><tr><td>to the same</td><td>12</td></tr><tr><td>facial action units</td><td>12</td></tr><tr><td>arizona state university</td><td>12</td></tr><tr><td>islamic azad university</td><td>12</td></tr><tr><td>information please contact</td><td>12</td></tr><tr><td>natural language processing</td><td>12</td></tr><tr><td>we use the</td><td>12</td></tr><tr><td>of the human</td><td>12</td></tr><tr><td>of arti cial</td><td>12</td></tr><tr><td>university of groningen</td><td>12</td></tr><tr><td>heterogeneous face recognition</td><td>12</td></tr><tr><td>louis philippe morency</td><td>12</td></tr><tr><td>philadelphia pa usa</td><td>12</td></tr><tr><td>final publication citation</td><td>12</td></tr><tr><td>houston tx usa</td><td>11</td></tr><tr><td>this work for</td><td>11</td></tr><tr><td>and maja pantic</td><td>11</td></tr><tr><td>explore bristol research</td><td>11</td></tr><tr><td>obtained from the</td><td>11</td></tr><tr><td>to the department</td><td>11</td></tr><tr><td>to face recognition</td><td>11</td></tr><tr><td>erale de lausanne</td><td>11</td></tr><tr><td>shenzhen key lab</td><td>11</td></tr><tr><td>received december accepted</td><td>11</td></tr><tr><td>and jeffrey cohn</td><td>11</td></tr><tr><td>watson research 
center</td><td>11</td></tr><tr><td>for age estimation</td><td>11</td></tr><tr><td>in real world</td><td>11</td></tr><tr><td>biometrics and security</td><td>11</td></tr><tr><td>faculty of information</td><td>11</td></tr><tr><td>published as conference</td><td>11</td></tr><tr><td>of engineering research</td><td>11</td></tr><tr><td>ability to recognize</td><td>11</td></tr><tr><td>on face recognition</td><td>11</td></tr><tr><td>expression recognition system</td><td>11</td></tr><tr><td>computer engineering university</td><td>11</td></tr><tr><td>terms of the</td><td>11</td></tr><tr><td>that has been</td><td>11</td></tr><tr><td>can be found</td><td>11</td></tr><tr><td>citation for published</td><td>11</td></tr><tr><td>it is not</td><td>11</td></tr><tr><td>electrical engineering university</td><td>11</td></tr><tr><td>stony brook university</td><td>11</td></tr><tr><td>ieee and anil</td><td>11</td></tr><tr><td>in the eld</td><td>11</td></tr><tr><td>have been developed</td><td>11</td></tr><tr><td>of computing imperial</td><td>11</td></tr><tr><td>computing imperial college</td><td>11</td></tr><tr><td>in the scene</td><td>11</td></tr><tr><td>of applied sciences</td><td>11</td></tr><tr><td>school of electronic</td><td>11</td></tr><tr><td>electronic and information</td><td>11</td></tr><tr><td>chen change loy</td><td>11</td></tr><tr><td>research showcase cmu</td><td>11</td></tr><tr><td>fr ed eric</td><td>11</td></tr><tr><td>ed eric jurie</td><td>11</td></tr><tr><td>faculty of science</td><td>11</td></tr><tr><td>in any current</td><td>11</td></tr><tr><td>state university east</td><td>11</td></tr><tr><td>university east lansing</td><td>11</td></tr><tr><td>lansing mi usa</td><td>11</td></tr><tr><td>university of houston</td><td>11</td></tr><tr><td>machine learning techniques</td><td>11</td></tr><tr><td>that the proposed</td><td>11</td></tr><tr><td>www mdpi com</td><td>11</td></tr><tr><td>mdpi com journal</td><td>11</td></tr><tr><td>computational intelligence and</td><td>11</td></tr><tr><td>dept of computing</td><td>11</td></tr><tr><td>box thessaloniki greece</td><td>11</td></tr><tr><td>school of automation</td><td>11</td></tr><tr><td>in the face</td><td>11</td></tr><tr><td>university of wisconsin</td><td>11</td></tr><tr><td>brain and cognitive</td><td>11</td></tr><tr><td>cial intelligence ijcai</td><td>11</td></tr><tr><td>in partial satisfaction</td><td>11</td></tr><tr><td>volume issue may</td><td>11</td></tr><tr><td>in human computer</td><td>11</td></tr><tr><td>information about this</td><td>11</td></tr><tr><td>university of colorado</td><td>11</td></tr><tr><td>erlangung des akademischen</td><td>11</td></tr><tr><td>des akademischen grades</td><td>11</td></tr><tr><td>the main paper</td><td>11</td></tr><tr><td>hand over face</td><td>11</td></tr><tr><td>tadas baltru saitis</td><td>11</td></tr><tr><td>ur elektrotechnik und</td><td>11</td></tr><tr><td>elektrotechnik und informationstechnik</td><td>11</td></tr><tr><td>computer science the</td><td>11</td></tr><tr><td>publication citation information</td><td>11</td></tr><tr><td>citation information doi</td><td>11</td></tr><tr><td>of advanced research</td><td>10</td></tr><tr><td>and information systems</td><td>10</td></tr><tr><td>to publication record</td><td>10</td></tr><tr><td>take down policy</td><td>10</td></tr><tr><td>you believe that</td><td>10</td></tr><tr><td>science and research</td><td>10</td></tr><tr><td>for pose invariant</td><td>10</td></tr><tr><td>deep face recognition</td><td>10</td></tr><tr><td>pattern recognition 
casia</td><td>10</td></tr><tr><td>learning for face</td><td>10</td></tr><tr><td>ouvertes fr hal</td><td>10</td></tr><tr><td>southeast university nanjing</td><td>10</td></tr><tr><td>in pattern recognition</td><td>10</td></tr><tr><td>ibm watson research</td><td>10</td></tr><tr><td>of advanced industrial</td><td>10</td></tr><tr><td>in the literature</td><td>10</td></tr><tr><td>representation of the</td><td>10</td></tr><tr><td>vision center uab</td><td>10</td></tr><tr><td>center for biometrics</td><td>10</td></tr><tr><td>for biometrics and</td><td>10</td></tr><tr><td>and security research</td><td>10</td></tr><tr><td>is licensed under</td><td>10</td></tr><tr><td>creativecommons org licenses</td><td>10</td></tr><tr><td>received june accepted</td><td>10</td></tr><tr><td>have shown that</td><td>10</td></tr><tr><td>regression for face</td><td>10</td></tr><tr><td>jean luc dugelay</td><td>10</td></tr><tr><td>computer interaction hci</td><td>10</td></tr><tr><td>is that the</td><td>10</td></tr><tr><td>additional key words</td><td>10</td></tr><tr><td>key words and</td><td>10</td></tr><tr><td>words and phrases</td><td>10</td></tr><tr><td>of our method</td><td>10</td></tr><tr><td>facial expression and</td><td>10</td></tr><tr><td>and physical sciences</td><td>10</td></tr><tr><td>because of its</td><td>10</td></tr><tr><td>such as face</td><td>10</td></tr><tr><td>college of engg</td><td>10</td></tr><tr><td>for published version</td><td>10</td></tr><tr><td>of the author</td><td>10</td></tr><tr><td>of singapore singapore</td><td>10</td></tr><tr><td>enti research documents</td><td>10</td></tr><tr><td>ques de niveau</td><td>10</td></tr><tr><td>es ou non</td><td>10</td></tr><tr><td>emanant des etablissements</td><td>10</td></tr><tr><td>des etablissements enseignement</td><td>10</td></tr><tr><td>recherche fran cais</td><td>10</td></tr><tr><td>cais ou etrangers</td><td>10</td></tr><tr><td>ou etrangers des</td><td>10</td></tr><tr><td>etrangers des laboratoires</td><td>10</td></tr><tr><td>de minas gerais</td><td>10</td></tr><tr><td>received march accepted</td><td>10</td></tr><tr><td>real time facial</td><td>10</td></tr><tr><td>in facial expression</td><td>10</td></tr><tr><td>retained by the</td><td>10</td></tr><tr><td>has been made</td><td>10</td></tr><tr><td>on automatic face</td><td>10</td></tr><tr><td>from ieee must</td><td>10</td></tr><tr><td>obtained for all</td><td>10</td></tr><tr><td>republishing this material</td><td>10</td></tr><tr><td>promotional purposes creating</td><td>10</td></tr><tr><td>machine intelligence vol</td><td>10</td></tr><tr><td>advanced technology chinese</td><td>10</td></tr><tr><td>detection in the</td><td>10</td></tr><tr><td>college of technology</td><td>10</td></tr><tr><td>to the faculty</td><td>10</td></tr><tr><td>forbes avenue pittsburgh</td><td>10</td></tr><tr><td>in the image</td><td>10</td></tr><tr><td>science and applications</td><td>10</td></tr><tr><td>facial image analysis</td><td>10</td></tr><tr><td>and technology tsinghua</td><td>10</td></tr><tr><td>technology tsinghua university</td><td>10</td></tr><tr><td>the art methods</td><td>10</td></tr><tr><td>to predict the</td><td>10</td></tr><tr><td>association for computational</td><td>10</td></tr><tr><td>for computational linguistics</td><td>10</td></tr><tr><td>robotics and intelligent</td><td>10</td></tr><tr><td>of wisconsin madison</td><td>10</td></tr><tr><td>department of biomedical</td><td>10</td></tr><tr><td>visual geometry group</td><td>10</td></tr><tr><td>business media llc</td><td>10</td></tr><tr><td>and arti 
cial</td><td>10</td></tr><tr><td>structure of the</td><td>10</td></tr><tr><td>action coding system</td><td>10</td></tr><tr><td>in the training</td><td>10</td></tr><tr><td>is de ned</td><td>10</td></tr><tr><td>shown in the</td><td>10</td></tr><tr><td>due to its</td><td>10</td></tr><tr><td>accepted june published</td><td>10</td></tr><tr><td>https hal archives</td><td>10</td></tr><tr><td>hal archives ouvertes</td><td>10</td></tr><tr><td>of automatic control</td><td>10</td></tr><tr><td>the most popular</td><td>10</td></tr><tr><td>beijing jiaotong university</td><td>10</td></tr><tr><td>park md usa</td><td>10</td></tr><tr><td>seattle wa usa</td><td>10</td></tr><tr><td>in this section</td><td>10</td></tr><tr><td>deep learning for</td><td>10</td></tr><tr><td>the human face</td><td>10</td></tr><tr><td>pose and illumination</td><td>10</td></tr><tr><td>study of the</td><td>10</td></tr><tr><td>automatic face recognition</td><td>10</td></tr><tr><td>component analysis pca</td><td>10</td></tr><tr><td>at ur elektrotechnik</td><td>10</td></tr><tr><td>generative adversarial networks</td><td>10</td></tr><tr><td>white rose research</td><td>10</td></tr><tr><td>technological university singapore</td><td>10</td></tr><tr><td>in the main</td><td>10</td></tr><tr><td>metric learning for</td><td>10</td></tr><tr><td>people with schizophrenia</td><td>10</td></tr><tr><td>nec laboratories america</td><td>9</td></tr><tr><td>texas at arlington</td><td>9</td></tr><tr><td>acm reference format</td><td>9</td></tr><tr><td>republic of korea</td><td>9</td></tr><tr><td>for video classi</td><td>9</td></tr><tr><td>is made available</td><td>9</td></tr><tr><td>nature of the</td><td>9</td></tr><tr><td>any copyrighted component</td><td>9</td></tr><tr><td>and research ijsr</td><td>9</td></tr><tr><td>on systems man</td><td>9</td></tr><tr><td>man and cybernetics</td><td>9</td></tr><tr><td>and cybernetics part</td><td>9</td></tr><tr><td>rwth aachen university</td><td>9</td></tr><tr><td>mit media lab</td><td>9</td></tr><tr><td>university of the</td><td>9</td></tr><tr><td>provided by the</td><td>9</td></tr><tr><td>at the same</td><td>9</td></tr><tr><td>the experimental results</td><td>9</td></tr><tr><td>ying li tian</td><td>9</td></tr><tr><td>fellow ieee and</td><td>9</td></tr><tr><td>jean philippe thiran</td><td>9</td></tr><tr><td>onoma de barcelona</td><td>9</td></tr><tr><td>action unit recognition</td><td>9</td></tr><tr><td>advanced industrial science</td><td>9</td></tr><tr><td>industrial science and</td><td>9</td></tr><tr><td>show that our</td><td>9</td></tr><tr><td>electrical and information</td><td>9</td></tr><tr><td>expression recognition from</td><td>9</td></tr><tr><td>novel method for</td><td>9</td></tr><tr><td>action classi cation</td><td>9</td></tr><tr><td>expression analysis and</td><td>9</td></tr><tr><td>work is licensed</td><td>9</td></tr><tr><td>cite this article</td><td>9</td></tr><tr><td>high dimensional data</td><td>9</td></tr><tr><td>for robust face</td><td>9</td></tr><tr><td>faculty of informatics</td><td>9</td></tr><tr><td>of technology and</td><td>9</td></tr><tr><td>liu member ieee</td><td>9</td></tr><tr><td>journal of emerging</td><td>9</td></tr><tr><td>emotional facial expressions</td><td>9</td></tr><tr><td>in the case</td><td>9</td></tr><tr><td>on the face</td><td>9</td></tr><tr><td>facial expressions and</td><td>9</td></tr><tr><td>robust facial expression</td><td>9</td></tr><tr><td>paper we present</td><td>9</td></tr><tr><td>in the presence</td><td>9</td></tr><tr><td>from the 
same</td><td>9</td></tr><tr><td>mitsubishi electric research</td><td>9</td></tr><tr><td>federal de minas</td><td>9</td></tr><tr><td>www elsevier com</td><td>9</td></tr><tr><td>facial feature detection</td><td>9</td></tr><tr><td>center for cognitive</td><td>9</td></tr><tr><td>networks for facial</td><td>9</td></tr><tr><td>vision and machine</td><td>9</td></tr><tr><td>state key lab</td><td>9</td></tr><tr><td>recognition using local</td><td>9</td></tr><tr><td>content in the</td><td>9</td></tr><tr><td>academia sinica taipei</td><td>9</td></tr><tr><td>sinica taipei taiwan</td><td>9</td></tr><tr><td>facial feature tracking</td><td>9</td></tr><tr><td>albert ali salah</td><td>9</td></tr><tr><td>department of ece</td><td>9</td></tr><tr><td>on machine vision</td><td>9</td></tr><tr><td>machine vision applications</td><td>9</td></tr><tr><td>key laboratory for</td><td>9</td></tr><tr><td>unsupervised domain adaptation</td><td>9</td></tr><tr><td>is permitted permission</td><td>9</td></tr><tr><td>permitted permission from</td><td>9</td></tr><tr><td>uses in any</td><td>9</td></tr><tr><td>current or future</td><td>9</td></tr><tr><td>reprinting republishing this</td><td>9</td></tr><tr><td>lists or reuse</td><td>9</td></tr><tr><td>action recognition with</td><td>9</td></tr><tr><td>in the past</td><td>9</td></tr><tr><td>for object detection</td><td>9</td></tr><tr><td>face recognition techniques</td><td>9</td></tr><tr><td>tel aviv israel</td><td>9</td></tr><tr><td>over the past</td><td>9</td></tr><tr><td>allen institute for</td><td>9</td></tr><tr><td>com journal sensors</td><td>9</td></tr><tr><td>department of mechanical</td><td>9</td></tr><tr><td>identi cation and</td><td>9</td></tr><tr><td>school of medicine</td><td>9</td></tr><tr><td>for emotion recognition</td><td>9</td></tr><tr><td>degree of master</td><td>9</td></tr><tr><td>has been shown</td><td>9</td></tr><tr><td>ijacsa international journal</td><td>9</td></tr><tr><td>of bucharest romania</td><td>9</td></tr><tr><td>for feature extraction</td><td>9</td></tr><tr><td>the graduate school</td><td>9</td></tr><tr><td>pose estimation and</td><td>9</td></tr><tr><td>associate professor department</td><td>9</td></tr><tr><td>of brain and</td><td>9</td></tr><tr><td>the twenty sixth</td><td>9</td></tr><tr><td>twenty sixth international</td><td>9</td></tr><tr><td>sixth international joint</td><td>9</td></tr><tr><td>facial action coding</td><td>9</td></tr><tr><td>king saud university</td><td>9</td></tr><tr><td>transactions on affective</td><td>9</td></tr><tr><td>on affective computing</td><td>9</td></tr><tr><td>under varying illumination</td><td>9</td></tr><tr><td>all other uses</td><td>9</td></tr><tr><td>center for machine</td><td>9</td></tr></table></body></html> \ No newline at end of file
+<!doctype html><html><head><meta charset='utf-8'><title>PDF Report: Unknown Trigrams</title><link rel='stylesheet' href='reports.css'></head><body><h2>PDF Report: Unknown Trigrams</h2><table border='1' cellpadding='3' cellspacing='3'><tr><td>of computer science</td><td>1786</td></tr><tr><td>department of computer</td><td>1320</td></tr><tr><td>computer science and</td><td>820</td></tr><tr><td>institute of technology</td><td>755</td></tr><tr><td>science and technology</td><td>526</td></tr><tr><td>science and engineering</td><td>500</td></tr><tr><td>university of california</td><td>485</td></tr><tr><td>department of electrical</td><td>469</td></tr><tr><td>school of computer</td><td>424</td></tr><tr><td>university of technology</td><td>411</td></tr><tr><td>carnegie mellon university</td><td>381</td></tr><tr><td>re identi cation</td><td>380</td></tr><tr><td>department of psychology</td><td>364</td></tr><tr><td>of electrical engineering</td><td>360</td></tr><tr><td>for the degree</td><td>355</td></tr><tr><td>senior member ieee</td><td>350</td></tr><tr><td>of electrical and</td><td>338</td></tr><tr><td>of science and</td><td>329</td></tr><tr><td>academy of sciences</td><td>326</td></tr><tr><td>member ieee and</td><td>315</td></tr><tr><td>electrical and computer</td><td>287</td></tr><tr><td>and computer engineering</td><td>287</td></tr><tr><td>dx doi org</td><td>284</td></tr><tr><td>facial expression recognition</td><td>280</td></tr><tr><td>in this paper</td><td>279</td></tr><tr><td>http dx doi</td><td>278</td></tr><tr><td>for face recognition</td><td>272</td></tr><tr><td>and computer science</td><td>272</td></tr><tr><td>person re identi</td><td>265</td></tr><tr><td>student member ieee</td><td>256</td></tr><tr><td>of the requirements</td><td>254</td></tr><tr><td>doctor of philosophy</td><td>247</td></tr><tr><td>should be addressed</td><td>246</td></tr><tr><td>proceedings of the</td><td>231</td></tr><tr><td>university of science</td><td>227</td></tr><tr><td>state of the</td><td>223</td></tr><tr><td>journal of computer</td><td>221</td></tr><tr><td>of the art</td><td>211</td></tr><tr><td>requirements for the</td><td>211</td></tr><tr><td>engineering and computer</td><td>205</td></tr><tr><td>electrical engineering and</td><td>201</td></tr><tr><td>one of the</td><td>197</td></tr><tr><td>computer science university</td><td>194</td></tr><tr><td>arti cial intelligence</td><td>189</td></tr><tr><td>college of engineering</td><td>185</td></tr><tr><td>the requirements for</td><td>179</td></tr><tr><td>in partial ful</td><td>175</td></tr><tr><td>university of hong</td><td>169</td></tr><tr><td>of hong kong</td><td>169</td></tr><tr><td>dept of computer</td><td>168</td></tr><tr><td>of computer engineering</td><td>167</td></tr><tr><td>all rights reserved</td><td>166</td></tr><tr><td>partial ful llment</td><td>164</td></tr><tr><td>of engineering and</td><td>159</td></tr><tr><td>of psychology university</td><td>154</td></tr><tr><td>based on the</td><td>153</td></tr><tr><td>face recognition using</td><td>153</td></tr><tr><td>in the wild</td><td>150</td></tr><tr><td>computer science department</td><td>150</td></tr><tr><td>max planck institute</td><td>147</td></tr><tr><td>submitted in partial</td><td>144</td></tr><tr><td>planck institute for</td><td>143</td></tr><tr><td>of electronics and</td><td>139</td></tr><tr><td>to cite this</td><td>138</td></tr><tr><td>computer vision and</td><td>137</td></tr><tr><td>of chinese academy</td><td>134</td></tr><tr><td>department of information</td><td>133</td></tr><tr><td>of 
information technology</td><td>130</td></tr><tr><td>www frontiersin org</td><td>129</td></tr><tr><td>the chinese university</td><td>128</td></tr><tr><td>as conference paper</td><td>128</td></tr><tr><td>school of information</td><td>126</td></tr><tr><td>university of maryland</td><td>126</td></tr><tr><td>of computer applications</td><td>124</td></tr><tr><td>paper at iclr</td><td>124</td></tr><tr><td>submitted to the</td><td>123</td></tr><tr><td>of computer and</td><td>122</td></tr><tr><td>of this work</td><td>121</td></tr><tr><td>convolutional neural networks</td><td>119</td></tr><tr><td>is an open</td><td>119</td></tr><tr><td>of pattern recognition</td><td>119</td></tr><tr><td>and information engineering</td><td>119</td></tr><tr><td>autism spectrum disorders</td><td>119</td></tr><tr><td>department of psychiatry</td><td>119</td></tr><tr><td>an open access</td><td>118</td></tr><tr><td>creative commons attribution</td><td>118</td></tr><tr><td>llment of the</td><td>118</td></tr><tr><td>of information science</td><td>117</td></tr><tr><td>by the editor</td><td>114</td></tr><tr><td>university of texas</td><td>113</td></tr><tr><td>luc van gool</td><td>113</td></tr><tr><td>open access article</td><td>113</td></tr><tr><td>autism spectrum disorder</td><td>112</td></tr><tr><td>will be inserted</td><td>112</td></tr><tr><td>inserted by the</td><td>112</td></tr><tr><td>school of electrical</td><td>111</td></tr><tr><td>of the face</td><td>111</td></tr><tr><td>school of computing</td><td>109</td></tr><tr><td>department of electronics</td><td>108</td></tr><tr><td>in computer vision</td><td>107</td></tr><tr><td>institute of automation</td><td>106</td></tr><tr><td>this article was</td><td>106</td></tr><tr><td>of sciences beijing</td><td>106</td></tr><tr><td>https doi org</td><td>105</td></tr><tr><td>human pose estimation</td><td>105</td></tr><tr><td>university of oxford</td><td>105</td></tr><tr><td>the creative commons</td><td>104</td></tr><tr><td>it has been</td><td>104</td></tr><tr><td>provided the original</td><td>102</td></tr><tr><td>of southern california</td><td>102</td></tr><tr><td>engineering and technology</td><td>102</td></tr><tr><td>university of illinois</td><td>101</td></tr><tr><td>due to the</td><td>101</td></tr><tr><td>sciences beijing china</td><td>101</td></tr><tr><td>et al and</td><td>100</td></tr><tr><td>university of london</td><td>99</td></tr><tr><td>convolutional neural network</td><td>99</td></tr><tr><td>university of chinese</td><td>98</td></tr><tr><td>university of singapore</td><td>98</td></tr><tr><td>received date accepted</td><td>98</td></tr><tr><td>date accepted date</td><td>98</td></tr><tr><td>institute for informatics</td><td>98</td></tr><tr><td>the original work</td><td>97</td></tr><tr><td>in any medium</td><td>96</td></tr><tr><td>whether they are</td><td>96</td></tr><tr><td>computer vision center</td><td>96</td></tr><tr><td>department of computing</td><td>95</td></tr><tr><td>reproduction in any</td><td>95</td></tr><tr><td>laboratory of pattern</td><td>95</td></tr><tr><td>imperial college london</td><td>94</td></tr><tr><td>of california san</td><td>94</td></tr><tr><td>information science and</td><td>93</td></tr><tr><td>or not the</td><td>93</td></tr><tr><td>may come from</td><td>93</td></tr><tr><td>of face recognition</td><td>93</td></tr><tr><td>association for computational</td><td>93</td></tr><tr><td>for computational linguistics</td><td>93</td></tr><tr><td>california san diego</td><td>93</td></tr><tr><td>public or private</td><td>92</td></tr><tr><td>distribution and 
reproduction</td><td>92</td></tr><tr><td>medium provided the</td><td>92</td></tr><tr><td>work is properly</td><td>92</td></tr><tr><td>california los angeles</td><td>92</td></tr><tr><td>of the most</td><td>92</td></tr><tr><td>cite this version</td><td>91</td></tr><tr><td>hal is multi</td><td>91</td></tr><tr><td>is multi disciplinary</td><td>91</td></tr><tr><td>multi disciplinary open</td><td>91</td></tr><tr><td>disciplinary open access</td><td>91</td></tr><tr><td>rchive for the</td><td>91</td></tr><tr><td>for the deposit</td><td>91</td></tr><tr><td>the deposit and</td><td>91</td></tr><tr><td>deposit and dissemination</td><td>91</td></tr><tr><td>dissemination of sci</td><td>91</td></tr><tr><td>research documents whether</td><td>91</td></tr><tr><td>documents whether they</td><td>91</td></tr><tr><td>they are pub</td><td>91</td></tr><tr><td>lished or not</td><td>91</td></tr><tr><td>not the documents</td><td>91</td></tr><tr><td>the documents may</td><td>91</td></tr><tr><td>documents may come</td><td>91</td></tr><tr><td>teaching and research</td><td>91</td></tr><tr><td>and research institutions</td><td>91</td></tr><tr><td>institutions in france</td><td>91</td></tr><tr><td>broad or from</td><td>91</td></tr><tr><td>or from public</td><td>91</td></tr><tr><td>or private research</td><td>91</td></tr><tr><td>private research centers</td><td>91</td></tr><tr><td>archive ouverte pluridisciplinaire</td><td>91</td></tr><tr><td>ouverte pluridisciplinaire hal</td><td>91</td></tr><tr><td>pluridisciplinaire hal est</td><td>91</td></tr><tr><td>et la diffusion</td><td>91</td></tr><tr><td>diffusion de documents</td><td>91</td></tr><tr><td>de niveau recherche</td><td>91</td></tr><tr><td>niveau recherche publi</td><td>91</td></tr><tr><td>publics ou priv</td><td>91</td></tr><tr><td>university of michigan</td><td>91</td></tr><tr><td>access article distributed</td><td>90</td></tr><tr><td>article distributed under</td><td>90</td></tr><tr><td>distributed under the</td><td>90</td></tr><tr><td>any medium provided</td><td>90</td></tr><tr><td>university of amsterdam</td><td>90</td></tr><tr><td>face recognition system</td><td>89</td></tr><tr><td>frontiers in psychology</td><td>89</td></tr><tr><td>university of washington</td><td>89</td></tr><tr><td>with autism spectrum</td><td>88</td></tr><tr><td>use distribution and</td><td>88</td></tr><tr><td>of facial expressions</td><td>88</td></tr><tr><td>university beijing china</td><td>88</td></tr><tr><td>latex class files</td><td>88</td></tr><tr><td>school of engineering</td><td>87</td></tr><tr><td>permits unrestricted use</td><td>87</td></tr><tr><td>has been accepted</td><td>87</td></tr><tr><td>journal of latex</td><td>87</td></tr><tr><td>of latex class</td><td>87</td></tr><tr><td>part of the</td><td>86</td></tr><tr><td>which permits unrestricted</td><td>86</td></tr><tr><td>under the creative</td><td>86</td></tr><tr><td>pittsburgh pa usa</td><td>85</td></tr><tr><td>image classi cation</td><td>85</td></tr><tr><td>facebook ai research</td><td>85</td></tr><tr><td>such as the</td><td>85</td></tr><tr><td>been accepted for</td><td>85</td></tr><tr><td>school of medicine</td><td>85</td></tr><tr><td>for more information</td><td>84</td></tr><tr><td>university of toronto</td><td>84</td></tr><tr><td>michigan state university</td><td>83</td></tr><tr><td>automation chinese academy</td><td>83</td></tr><tr><td>deep neural networks</td><td>83</td></tr><tr><td>computer science engineering</td><td>83</td></tr><tr><td>class files vol</td><td>83</td></tr><tr><td>university college 
london</td><td>83</td></tr><tr><td>university of central</td><td>82</td></tr><tr><td>of automation chinese</td><td>82</td></tr><tr><td>section of the</td><td>82</td></tr><tr><td>face veri cation</td><td>82</td></tr><tr><td>faculty of electrical</td><td>82</td></tr><tr><td>technology of china</td><td>81</td></tr><tr><td>school of psychology</td><td>81</td></tr><tr><td>university of southern</td><td>81</td></tr><tr><td>faculty of engineering</td><td>81</td></tr><tr><td>et al this</td><td>80</td></tr><tr><td>unrestricted use distribution</td><td>80</td></tr><tr><td>institute of science</td><td>80</td></tr><tr><td>and pattern recognition</td><td>80</td></tr><tr><td>department of mathematics</td><td>79</td></tr><tr><td>shanghai jiao tong</td><td>79</td></tr><tr><td>master of science</td><td>79</td></tr><tr><td>for facial expression</td><td>78</td></tr><tr><td>jiao tong university</td><td>78</td></tr><tr><td>of california los</td><td>78</td></tr><tr><td>can be used</td><td>77</td></tr><tr><td>of information engineering</td><td>77</td></tr><tr><td>queen mary university</td><td>76</td></tr><tr><td>in partial fulfillment</td><td>76</td></tr><tr><td>computer and information</td><td>76</td></tr><tr><td>center for research</td><td>76</td></tr><tr><td>department of engineering</td><td>76</td></tr><tr><td>human computer interaction</td><td>76</td></tr><tr><td>article id pages</td><td>75</td></tr><tr><td>and information technology</td><td>75</td></tr><tr><td>review as conference</td><td>75</td></tr><tr><td>college of computer</td><td>74</td></tr><tr><td>in computer science</td><td>74</td></tr><tr><td>university of surrey</td><td>74</td></tr><tr><td>children with autism</td><td>74</td></tr><tr><td>is properly cited</td><td>73</td></tr><tr><td>individuals with autism</td><td>73</td></tr><tr><td>of central florida</td><td>72</td></tr><tr><td>dept of electrical</td><td>72</td></tr><tr><td>facial expression analysis</td><td>72</td></tr><tr><td>fulfillment of the</td><td>72</td></tr><tr><td>cambridge ma usa</td><td>72</td></tr><tr><td>journal of advanced</td><td>71</td></tr><tr><td>and electronic engineering</td><td>71</td></tr><tr><td>department of informatics</td><td>71</td></tr><tr><td>springer science business</td><td>70</td></tr><tr><td>science business media</td><td>70</td></tr><tr><td>illinois at urbana</td><td>70</td></tr><tr><td>commons attribution license</td><td>70</td></tr><tr><td>department of electronic</td><td>70</td></tr><tr><td>based face recognition</td><td>70</td></tr><tr><td>of engineering science</td><td>70</td></tr><tr><td>end to end</td><td>70</td></tr><tr><td>of california berkeley</td><td>70</td></tr><tr><td>at urbana champaign</td><td>69</td></tr><tr><td>australian national university</td><td>69</td></tr><tr><td>of electronic engineering</td><td>69</td></tr><tr><td>visual question answering</td><td>69</td></tr><tr><td>institute of information</td><td>69</td></tr><tr><td>of information and</td><td>69</td></tr><tr><td>article was submitted</td><td>68</td></tr><tr><td>to whom correspondence</td><td>68</td></tr><tr><td>university of wisconsin</td><td>68</td></tr><tr><td>individuals with asd</td><td>68</td></tr><tr><td>in face recognition</td><td>67</td></tr><tr><td>electrical and electronic</td><td>67</td></tr><tr><td>on computer vision</td><td>67</td></tr><tr><td>of maryland college</td><td>66</td></tr><tr><td>maryland college park</td><td>66</td></tr><tr><td>journal of engineering</td><td>66</td></tr><tr><td>robust face recognition</td><td>66</td></tr><tr><td>university of 
north</td><td>66</td></tr><tr><td>in revised form</td><td>66</td></tr><tr><td>for action recognition</td><td>65</td></tr><tr><td>science and information</td><td>65</td></tr><tr><td>whom correspondence should</td><td>65</td></tr><tr><td>link to publication</td><td>64</td></tr><tr><td>hindawi publishing corporation</td><td>64</td></tr><tr><td>image and video</td><td>64</td></tr><tr><td>detection and tracking</td><td>64</td></tr><tr><td>of the journal</td><td>64</td></tr><tr><td>pattern analysis and</td><td>64</td></tr><tr><td>and communication engineering</td><td>64</td></tr><tr><td>of the same</td><td>63</td></tr><tr><td>of intelligent information</td><td>63</td></tr><tr><td>idiap research institute</td><td>63</td></tr><tr><td>computer vision laboratory</td><td>62</td></tr><tr><td>school of electronic</td><td>62</td></tr><tr><td>vol no august</td><td>62</td></tr><tr><td>national taiwan university</td><td>62</td></tr><tr><td>accepted for publication</td><td>62</td></tr><tr><td>state key laboratory</td><td>61</td></tr><tr><td>on the other</td><td>61</td></tr><tr><td>in this work</td><td>61</td></tr><tr><td>nanyang technological university</td><td>61</td></tr><tr><td>university of new</td><td>60</td></tr><tr><td>computer vision lab</td><td>60</td></tr><tr><td>spectrum disorder asd</td><td>60</td></tr><tr><td>university of pennsylvania</td><td>60</td></tr><tr><td>ieee international conference</td><td>60</td></tr><tr><td>of north carolina</td><td>60</td></tr><tr><td>university of tokyo</td><td>59</td></tr><tr><td>we show that</td><td>59</td></tr><tr><td>entific research documents</td><td>59</td></tr><tr><td>scientifiques de niveau</td><td>59</td></tr><tr><td>publi ou non</td><td>59</td></tr><tr><td>manant des tablissements</td><td>59</td></tr><tr><td>des tablissements enseignement</td><td>59</td></tr><tr><td>recherche fran ais</td><td>59</td></tr><tr><td>ais ou trangers</td><td>59</td></tr><tr><td>ou trangers des</td><td>59</td></tr><tr><td>trangers des laboratoires</td><td>59</td></tr><tr><td>degree of doctor</td><td>59</td></tr><tr><td>paper we propose</td><td>59</td></tr><tr><td>magnetic resonance imaging</td><td>59</td></tr><tr><td>faces in the</td><td>58</td></tr><tr><td>face recognition with</td><td>58</td></tr><tr><td>institute of computing</td><td>58</td></tr><tr><td>new york university</td><td>57</td></tr><tr><td>university shanghai china</td><td>57</td></tr><tr><td>and electrical engineering</td><td>57</td></tr><tr><td>international joint conference</td><td>57</td></tr><tr><td>principal component analysis</td><td>57</td></tr><tr><td>and computer vision</td><td>57</td></tr><tr><td>of wisconsin madison</td><td>57</td></tr><tr><td>conference on computer</td><td>57</td></tr><tr><td>research in computer</td><td>57</td></tr><tr><td>online at www</td><td>56</td></tr><tr><td>version of the</td><td>56</td></tr><tr><td>expression recognition using</td><td>56</td></tr><tr><td>amsterdam the netherlands</td><td>56</td></tr><tr><td>image processing and</td><td>56</td></tr><tr><td>face detection and</td><td>55</td></tr><tr><td>to this work</td><td>55</td></tr><tr><td>generative adversarial networks</td><td>55</td></tr><tr><td>and signal processing</td><td>55</td></tr><tr><td>university of pittsburgh</td><td>55</td></tr><tr><td>face recognition based</td><td>55</td></tr><tr><td>this article has</td><td>55</td></tr><tr><td>ming hsuan yang</td><td>54</td></tr><tr><td>research center for</td><td>54</td></tr><tr><td>terms of use</td><td>54</td></tr><tr><td>have been 
proposed</td><td>54</td></tr><tr><td>sun yat sen</td><td>54</td></tr><tr><td>in individuals with</td><td>54</td></tr><tr><td>johns hopkins university</td><td>54</td></tr><tr><td>article has been</td><td>54</td></tr><tr><td>the proposed method</td><td>53</td></tr><tr><td>of electronic and</td><td>53</td></tr><tr><td>engineering the chinese</td><td>53</td></tr><tr><td>in the context</td><td>53</td></tr><tr><td>of machine learning</td><td>53</td></tr><tr><td>zero shot learning</td><td>53</td></tr><tr><td>show that the</td><td>53</td></tr><tr><td>vision and pattern</td><td>53</td></tr><tr><td>multi target tracking</td><td>53</td></tr><tr><td>support vector machine</td><td>53</td></tr><tr><td>this is the</td><td>52</td></tr><tr><td>university of edinburgh</td><td>52</td></tr><tr><td>of this material</td><td>52</td></tr><tr><td>creativecommons org licenses</td><td>52</td></tr><tr><td>use of this</td><td>52</td></tr><tr><td>of mathematics and</td><td>52</td></tr><tr><td>yat sen university</td><td>52</td></tr><tr><td>university of massachusetts</td><td>52</td></tr><tr><td>invariant face recognition</td><td>52</td></tr><tr><td>and machine intelligence</td><td>52</td></tr><tr><td>open access books</td><td>52</td></tr><tr><td>functional magnetic resonance</td><td>52</td></tr><tr><td>cas beijing china</td><td>52</td></tr><tr><td>of the university</td><td>52</td></tr><tr><td>neural networks for</td><td>51</td></tr><tr><td>the other hand</td><td>51</td></tr><tr><td>computer vision group</td><td>51</td></tr><tr><td>of the twenty</td><td>51</td></tr><tr><td>microsoft research asia</td><td>51</td></tr><tr><td>person re identification</td><td>51</td></tr><tr><td>published as conference</td><td>51</td></tr><tr><td>analysis and machine</td><td>51</td></tr><tr><td>issue of this</td><td>51</td></tr><tr><td>of this journal</td><td>51</td></tr><tr><td>has not been</td><td>51</td></tr><tr><td>texas at austin</td><td>50</td></tr><tr><td>lab of intelligent</td><td>50</td></tr><tr><td>intelligent information processing</td><td>50</td></tr><tr><td>follow this and</td><td>50</td></tr><tr><td>this and additional</td><td>50</td></tr><tr><td>and additional works</td><td>50</td></tr><tr><td>university of posts</td><td>50</td></tr><tr><td>supported by the</td><td>50</td></tr><tr><td>xi an china</td><td>50</td></tr><tr><td>in future issue</td><td>50</td></tr><tr><td>department of statistics</td><td>50</td></tr><tr><td>universit de montr</td><td>49</td></tr><tr><td>and engineering university</td><td>49</td></tr><tr><td>university of twente</td><td>49</td></tr><tr><td>of posts and</td><td>49</td></tr><tr><td>university of southampton</td><td>49</td></tr><tr><td>some of the</td><td>49</td></tr><tr><td>can be found</td><td>49</td></tr><tr><td>this work was</td><td>49</td></tr><tr><td>electronics and communication</td><td>49</td></tr><tr><td>university of cambridge</td><td>49</td></tr><tr><td>universit at unchen</td><td>49</td></tr><tr><td>multi object tracking</td><td>48</td></tr><tr><td>of the proposed</td><td>48</td></tr><tr><td>for object detection</td><td>48</td></tr><tr><td>journal on image</td><td>48</td></tr><tr><td>hal id hal</td><td>48</td></tr><tr><td>of technology sydney</td><td>48</td></tr><tr><td>paper under double</td><td>48</td></tr><tr><td>under double blind</td><td>48</td></tr><tr><td>double blind review</td><td>48</td></tr><tr><td>author to whom</td><td>48</td></tr><tr><td>be addressed mail</td><td>48</td></tr><tr><td>of computing technology</td><td>48</td></tr><tr><td>department of 
cse</td><td>48</td></tr><tr><td>in autism spectrum</td><td>48</td></tr><tr><td>additional key words</td><td>47</td></tr><tr><td>key words and</td><td>47</td></tr><tr><td>words and phrases</td><td>47</td></tr><tr><td>of computer vision</td><td>47</td></tr><tr><td>http creativecommons org</td><td>47</td></tr><tr><td>brought to you</td><td>47</td></tr><tr><td>institute carnegie mellon</td><td>47</td></tr><tr><td>speech and signal</td><td>47</td></tr><tr><td>the present study</td><td>47</td></tr><tr><td>cite this article</td><td>47</td></tr><tr><td>recognition in the</td><td>47</td></tr><tr><td>institute of computer</td><td>47</td></tr><tr><td>in recent years</td><td>47</td></tr><tr><td>the public portal</td><td>47</td></tr><tr><td>universit degli studi</td><td>47</td></tr><tr><td>according to the</td><td>47</td></tr><tr><td>most of the</td><td>46</td></tr><tr><td>to you for</td><td>46</td></tr><tr><td>you for free</td><td>46</td></tr><tr><td>for free and</td><td>46</td></tr><tr><td>free and open</td><td>46</td></tr><tr><td>and open access</td><td>46</td></tr><tr><td>more information please</td><td>46</td></tr><tr><td>information please contact</td><td>46</td></tr><tr><td>information and communication</td><td>46</td></tr><tr><td>of the human</td><td>46</td></tr><tr><td>of psychology and</td><td>46</td></tr><tr><td>xi an jiaotong</td><td>46</td></tr><tr><td>simon fraser university</td><td>46</td></tr><tr><td>of computing and</td><td>46</td></tr><tr><td>brain and cognitive</td><td>46</td></tr><tr><td>of california riverside</td><td>46</td></tr><tr><td>of facial expression</td><td>45</td></tr><tr><td>th international conference</td><td>45</td></tr><tr><td>face recognition and</td><td>45</td></tr><tr><td>on pattern analysis</td><td>45</td></tr><tr><td>for large scale</td><td>45</td></tr><tr><td>the fact that</td><td>45</td></tr><tr><td>local binary pattern</td><td>45</td></tr><tr><td>in real time</td><td>45</td></tr><tr><td>deep convolutional neural</td><td>44</td></tr><tr><td>if you believe</td><td>44</td></tr><tr><td>seoul national university</td><td>44</td></tr><tr><td>in psychology www</td><td>44</td></tr><tr><td>psychology www frontiersin</td><td>44</td></tr><tr><td>posts and telecommunications</td><td>44</td></tr><tr><td>www intechopen com</td><td>44</td></tr><tr><td>in which the</td><td>44</td></tr><tr><td>zur erlangung des</td><td>44</td></tr><tr><td>eth zurich switzerland</td><td>44</td></tr><tr><td>on arti cial</td><td>44</td></tr><tr><td>www mdpi com</td><td>44</td></tr><tr><td>mdpi com journal</td><td>44</td></tr><tr><td>authors contributed equally</td><td>44</td></tr><tr><td>citation for published</td><td>43</td></tr><tr><td>retained by the</td><td>43</td></tr><tr><td>and computer sciences</td><td>43</td></tr><tr><td>terms of the</td><td>43</td></tr><tr><td>university of oulu</td><td>43</td></tr><tr><td>works at http</td><td>43</td></tr><tr><td>by an authorized</td><td>43</td></tr><tr><td>mathematics and computer</td><td>43</td></tr><tr><td>www tandfonline com</td><td>43</td></tr><tr><td>for intelligent systems</td><td>43</td></tr><tr><td>head pose estimation</td><td>43</td></tr><tr><td>tsinghua university beijing</td><td>43</td></tr><tr><td>university of trento</td><td>42</td></tr><tr><td>and software engineering</td><td>42</td></tr><tr><td>for arti cial</td><td>42</td></tr><tr><td>accepted for inclusion</td><td>42</td></tr><tr><td>an authorized administrator</td><td>42</td></tr><tr><td>face recognition under</td><td>42</td></tr><tr><td>http www 
tandfonline</td><td>42</td></tr><tr><td>an jiaotong university</td><td>42</td></tr><tr><td>classi cation and</td><td>42</td></tr><tr><td>of the main</td><td>42</td></tr><tr><td>to improve the</td><td>42</td></tr><tr><td>equally to this</td><td>42</td></tr><tr><td>university of rochester</td><td>42</td></tr><tr><td>department of ece</td><td>42</td></tr><tr><td>we use the</td><td>42</td></tr><tr><td>wang member ieee</td><td>42</td></tr><tr><td>mellon university pittsburgh</td><td>41</td></tr><tr><td>for published version</td><td>41</td></tr><tr><td>well as the</td><td>41</td></tr><tr><td>university of thessaloniki</td><td>41</td></tr><tr><td>be used for</td><td>41</td></tr><tr><td>material is permitted</td><td>41</td></tr><tr><td>and information sciences</td><td>41</td></tr><tr><td>the face recognition</td><td>41</td></tr><tr><td>research showcase cmu</td><td>41</td></tr><tr><td>in children with</td><td>41</td></tr><tr><td>the eye region</td><td>41</td></tr><tr><td>facial emotion recognition</td><td>41</td></tr><tr><td>of psychiatry and</td><td>41</td></tr><tr><td>differences in the</td><td>41</td></tr><tr><td>int comput vis</td><td>40</td></tr><tr><td>saarland informatics campus</td><td>40</td></tr><tr><td>in accordance with</td><td>40</td></tr><tr><td>excellence in brain</td><td>40</td></tr><tr><td>brain science and</td><td>40</td></tr><tr><td>it is not</td><td>40</td></tr><tr><td>of the image</td><td>40</td></tr><tr><td>servers or lists</td><td>40</td></tr><tr><td>of massachusetts amherst</td><td>40</td></tr><tr><td>laboratory of intelligent</td><td>40</td></tr><tr><td>for real time</td><td>40</td></tr><tr><td>and face recognition</td><td>40</td></tr><tr><td>robotics institute carnegie</td><td>40</td></tr><tr><td>face recognition has</td><td>40</td></tr><tr><td>gender classi cation</td><td>40</td></tr><tr><td>university of adelaide</td><td>40</td></tr><tr><td>seattle wa usa</td><td>40</td></tr><tr><td>version of record</td><td>39</td></tr><tr><td>on artificial intelligence</td><td>39</td></tr><tr><td>center for excellence</td><td>39</td></tr><tr><td>in brain science</td><td>39</td></tr><tr><td>science and intelligence</td><td>39</td></tr><tr><td>and intelligence technology</td><td>39</td></tr><tr><td>this work for</td><td>39</td></tr><tr><td>journal of information</td><td>39</td></tr><tr><td>in the same</td><td>39</td></tr><tr><td>faculty of computer</td><td>39</td></tr><tr><td>face recognition systems</td><td>39</td></tr><tr><td>this article should</td><td>39</td></tr><tr><td>information engineering the</td><td>39</td></tr><tr><td>computing technology cas</td><td>39</td></tr><tr><td>the author published</td><td>39</td></tr><tr><td>of the data</td><td>39</td></tr><tr><td>in this study</td><td>39</td></tr><tr><td>but has not</td><td>39</td></tr><tr><td>content may change</td><td>39</td></tr><tr><td>may change prior</td><td>39</td></tr><tr><td>in the past</td><td>39</td></tr><tr><td>of electronic science</td><td>38</td></tr><tr><td>you believe that</td><td>38</td></tr><tr><td>university of western</td><td>38</td></tr><tr><td>on image and</td><td>38</td></tr><tr><td>in the literature</td><td>38</td></tr><tr><td>https hal archives</td><td>38</td></tr><tr><td>hal archives ouvertes</td><td>38</td></tr><tr><td>redistribution to servers</td><td>38</td></tr><tr><td>journal of science</td><td>38</td></tr><tr><td>to the department</td><td>38</td></tr><tr><td>modena and reggio</td><td>38</td></tr><tr><td>and intelligent systems</td><td>38</td></tr><tr><td>is an 
important</td><td>38</td></tr><tr><td>correspondence concerning this</td><td>38</td></tr><tr><td>concerning this article</td><td>38</td></tr><tr><td>linear discriminant analysis</td><td>38</td></tr><tr><td>school of electronics</td><td>38</td></tr><tr><td>of the amygdala</td><td>38</td></tr><tr><td>of brain and</td><td>38</td></tr><tr><td>publication in future</td><td>38</td></tr><tr><td>this journal but</td><td>38</td></tr><tr><td>journal but has</td><td>38</td></tr><tr><td>not been fully</td><td>38</td></tr><tr><td>been fully edited</td><td>38</td></tr><tr><td>fully edited content</td><td>38</td></tr><tr><td>edited content may</td><td>38</td></tr><tr><td>prior to final</td><td>38</td></tr><tr><td>to final publication</td><td>38</td></tr><tr><td>et al the</td><td>38</td></tr><tr><td>transactions on pattern</td><td>38</td></tr><tr><td>the graduate school</td><td>38</td></tr><tr><td>conference on arti</td><td>38</td></tr><tr><td>received june accepted</td><td>37</td></tr><tr><td>university of waterloo</td><td>37</td></tr><tr><td>conference on artificial</td><td>37</td></tr><tr><td>in the public</td><td>37</td></tr><tr><td>we found that</td><td>37</td></tr><tr><td>object detection and</td><td>37</td></tr><tr><td>republic of korea</td><td>37</td></tr><tr><td>feature extraction and</td><td>37</td></tr><tr><td>ouvertes fr hal</td><td>37</td></tr><tr><td>school of informatics</td><td>37</td></tr><tr><td>http hdl handle</td><td>37</td></tr><tr><td>hdl handle net</td><td>37</td></tr><tr><td>computer engineering department</td><td>37</td></tr><tr><td>hal id tel</td><td>37</td></tr><tr><td>ouvertes fr tel</td><td>37</td></tr><tr><td>work was supported</td><td>37</td></tr><tr><td>received april accepted</td><td>37</td></tr><tr><td>at chapel hill</td><td>37</td></tr><tr><td>oxford university press</td><td>37</td></tr><tr><td>technology cas beijing</td><td>37</td></tr><tr><td>automatic facial expression</td><td>37</td></tr><tr><td>angeles ca usa</td><td>37</td></tr><tr><td>recognition of facial</td><td>37</td></tr><tr><td>respect to the</td><td>37</td></tr><tr><td>science and software</td><td>36</td></tr><tr><td>institute for computer</td><td>36</td></tr><tr><td>of electrical computer</td><td>36</td></tr><tr><td>electrical computer engineering</td><td>36</td></tr><tr><td>edinburgh research explorer</td><td>36</td></tr><tr><td>journal of experimental</td><td>36</td></tr><tr><td>is the author</td><td>36</td></tr><tr><td>ieee personal use</td><td>36</td></tr><tr><td>university of modena</td><td>36</td></tr><tr><td>of modena and</td><td>36</td></tr><tr><td>of new york</td><td>36</td></tr><tr><td>conference on machine</td><td>36</td></tr><tr><td>that the proposed</td><td>36</td></tr><tr><td>of applied sciences</td><td>36</td></tr><tr><td>carolina at chapel</td><td>36</td></tr><tr><td>published by oxford</td><td>36</td></tr><tr><td>by oxford university</td><td>36</td></tr><tr><td>based on their</td><td>36</td></tr><tr><td>processing of chinese</td><td>36</td></tr><tr><td>university of nottingham</td><td>36</td></tr><tr><td>spectrum disorders asd</td><td>36</td></tr><tr><td>university of florida</td><td>36</td></tr><tr><td>cial intelligence ijcai</td><td>36</td></tr><tr><td>de montr eal</td><td>35</td></tr><tr><td>computer graphics and</td><td>35</td></tr><tr><td>university of electronic</td><td>35</td></tr><tr><td>electronic science and</td><td>35</td></tr><tr><td>take down policy</td><td>35</td></tr><tr><td>results suggest that</td><td>35</td></tr><tr><td>access by the</td><td>35</td></tr><tr><td>this 
material for</td><td>35</td></tr><tr><td>tel aviv university</td><td>35</td></tr><tr><td>of software engineering</td><td>35</td></tr><tr><td>for face detection</td><td>35</td></tr><tr><td>of singapore singapore</td><td>35</td></tr><tr><td>for human pose</td><td>35</td></tr><tr><td>prof dr ing</td><td>35</td></tr><tr><td>of arti cial</td><td>35</td></tr><tr><td>for zero shot</td><td>35</td></tr><tr><td>in signal processing</td><td>35</td></tr><tr><td>classi cation using</td><td>35</td></tr><tr><td>key laboratory for</td><td>35</td></tr><tr><td>of notre dame</td><td>35</td></tr><tr><td>the robotics institute</td><td>35</td></tr><tr><td>permission to make</td><td>34</td></tr><tr><td>to make digital</td><td>34</td></tr><tr><td>acm reference format</td><td>34</td></tr><tr><td>institute of engineering</td><td>34</td></tr><tr><td>university of bonn</td><td>34</td></tr><tr><td>for visual question</td><td>34</td></tr><tr><td>pose invariant face</td><td>34</td></tr><tr><td>university of barcelona</td><td>34</td></tr><tr><td>local binary patterns</td><td>34</td></tr><tr><td>polytechnique ed erale</td><td>34</td></tr><tr><td>advance access publication</td><td>34</td></tr><tr><td>des akademischen grades</td><td>34</td></tr><tr><td>mitsubishi electric research</td><td>34</td></tr><tr><td>onoma de barcelona</td><td>34</td></tr><tr><td>have shown that</td><td>34</td></tr><tr><td>results show that</td><td>34</td></tr><tr><td>the main paper</td><td>34</td></tr><tr><td>school of automation</td><td>34</td></tr><tr><td>of psychiatry university</td><td>34</td></tr><tr><td>dept of cse</td><td>34</td></tr><tr><td>computer engineering university</td><td>34</td></tr><tr><td>university of colorado</td><td>34</td></tr><tr><td>ministry of education</td><td>34</td></tr><tr><td>original research article</td><td>34</td></tr><tr><td>children with asd</td><td>34</td></tr><tr><td>due to its</td><td>34</td></tr><tr><td>for semantic segmentation</td><td>33</td></tr><tr><td>business media new</td><td>33</td></tr><tr><td>media new york</td><td>33</td></tr><tr><td>for the publications</td><td>33</td></tr><tr><td>the publications made</td><td>33</td></tr><tr><td>publications made accessible</td><td>33</td></tr><tr><td>or other copyright</td><td>33</td></tr><tr><td>it is condition</td><td>33</td></tr><tr><td>condition of accessing</td><td>33</td></tr><tr><td>publications that users</td><td>33</td></tr><tr><td>that users recognise</td><td>33</td></tr><tr><td>users recognise and</td><td>33</td></tr><tr><td>legal requirements associated</td><td>33</td></tr><tr><td>california at berkeley</td><td>33</td></tr><tr><td>copies are not</td><td>33</td></tr><tr><td>made or distributed</td><td>33</td></tr><tr><td>deep neural network</td><td>33</td></tr><tr><td>material for advertising</td><td>33</td></tr><tr><td>work in other</td><td>33</td></tr><tr><td>face recognition from</td><td>33</td></tr><tr><td>li fei fei</td><td>33</td></tr><tr><td>in the face</td><td>33</td></tr><tr><td>erale de lausanne</td><td>33</td></tr><tr><td>https tel archives</td><td>33</td></tr><tr><td>tel archives ouvertes</td><td>33</td></tr><tr><td>in the scene</td><td>33</td></tr><tr><td>states of america</td><td>33</td></tr><tr><td>electronics and information</td><td>33</td></tr><tr><td>license which permits</td><td>33</td></tr><tr><td>received december accepted</td><td>33</td></tr><tr><td>erlangung des akademischen</td><td>33</td></tr><tr><td>on face recognition</td><td>33</td></tr><tr><td>has been shown</td><td>33</td></tr><tr><td>electrical engineering 
university</td><td>33</td></tr><tr><td>journal on advances</td><td>33</td></tr><tr><td>electronic and information</td><td>33</td></tr><tr><td>neural network for</td><td>33</td></tr><tr><td>university of notre</td><td>33</td></tr><tr><td>image to image</td><td>33</td></tr><tr><td>the hong kong</td><td>33</td></tr><tr><td>peer reviewed version</td><td>32</td></tr><tr><td>by the legal</td><td>32</td></tr><tr><td>the legal requirements</td><td>32</td></tr><tr><td>with these rights</td><td>32</td></tr><tr><td>use is granted</td><td>32</td></tr><tr><td>uc san diego</td><td>32</td></tr><tr><td>of the creative</td><td>32</td></tr><tr><td>the th international</td><td>32</td></tr><tr><td>and video processing</td><td>32</td></tr><tr><td>works for resale</td><td>32</td></tr><tr><td>in other works</td><td>32</td></tr><tr><td>must be obtained</td><td>32</td></tr><tr><td>rwth aachen university</td><td>32</td></tr><tr><td>of advanced technology</td><td>32</td></tr><tr><td>this research was</td><td>32</td></tr><tr><td>support vector machines</td><td>32</td></tr><tr><td>of advanced computer</td><td>32</td></tr><tr><td>human action recognition</td><td>32</td></tr><tr><td>of computing science</td><td>32</td></tr><tr><td>enti research documents</td><td>32</td></tr><tr><td>ques de niveau</td><td>32</td></tr><tr><td>es ou non</td><td>32</td></tr><tr><td>emanant des etablissements</td><td>32</td></tr><tr><td>des etablissements enseignement</td><td>32</td></tr><tr><td>recherche fran cais</td><td>32</td></tr><tr><td>cais ou etrangers</td><td>32</td></tr><tr><td>ou etrangers des</td><td>32</td></tr><tr><td>etrangers des laboratoires</td><td>32</td></tr><tr><td>com journal sensors</td><td>32</td></tr><tr><td>for permissions please</td><td>32</td></tr><tr><td>accepted june published</td><td>32</td></tr><tr><td>new collective works</td><td>32</td></tr><tr><td>collective works for</td><td>32</td></tr><tr><td>these authors contributed</td><td>32</td></tr><tr><td>component of this</td><td>32</td></tr><tr><td>for ef cient</td><td>31</td></tr><tr><td>recognition system based</td><td>31</td></tr><tr><td>investigate your claim</td><td>31</td></tr><tr><td>or classroom use</td><td>31</td></tr><tr><td>are not made</td><td>31</td></tr><tr><td>that copies are</td><td>31</td></tr><tr><td>commercial advantage and</td><td>31</td></tr><tr><td>advantage and that</td><td>31</td></tr><tr><td>of the facial</td><td>31</td></tr><tr><td>multi task learning</td><td>31</td></tr><tr><td>of western australia</td><td>31</td></tr><tr><td>under the terms</td><td>31</td></tr><tr><td>and or other</td><td>31</td></tr><tr><td>or promotional purposes</td><td>31</td></tr><tr><td>and facial expression</td><td>31</td></tr><tr><td>and reggio emilia</td><td>31</td></tr><tr><td>in the image</td><td>31</td></tr><tr><td>for vision speech</td><td>31</td></tr><tr><td>electronics and computer</td><td>31</td></tr><tr><td>the author and</td><td>31</td></tr><tr><td>at the same</td><td>31</td></tr><tr><td>expressions of emotion</td><td>31</td></tr><tr><td>in the human</td><td>31</td></tr><tr><td>refers to the</td><td>31</td></tr><tr><td>song chun zhu</td><td>31</td></tr><tr><td>there has been</td><td>31</td></tr><tr><td>amit roy chowdhury</td><td>31</td></tr><tr><td>www elsevier com</td><td>31</td></tr><tr><td>york ny usa</td><td>31</td></tr><tr><td>electrical and electronics</td><td>31</td></tr><tr><td>hong kong polytechnic</td><td>31</td></tr><tr><td>for all other</td><td>31</td></tr><tr><td>of advanced research</td><td>30</td></tr><tr><td>for computer 
graphics</td><td>30</td></tr><tr><td>other copyright owners</td><td>30</td></tr><tr><td>we will remove</td><td>30</td></tr><tr><td>will remove access</td><td>30</td></tr><tr><td>the work immediately</td><td>30</td></tr><tr><td>http www eecs</td><td>30</td></tr><tr><td>to deal with</td><td>30</td></tr><tr><td>or distributed for</td><td>30</td></tr><tr><td>digital or hard</td><td>30</td></tr><tr><td>or hard copies</td><td>30</td></tr><tr><td>provided that copies</td><td>30</td></tr><tr><td>or commercial advantage</td><td>30</td></tr><tr><td>and luc van</td><td>30</td></tr><tr><td>pattern recognition and</td><td>30</td></tr><tr><td>video classi cation</td><td>30</td></tr><tr><td>italiano di tecnologia</td><td>30</td></tr><tr><td>resale or redistribution</td><td>30</td></tr><tr><td>science and research</td><td>30</td></tr><tr><td>of technology and</td><td>30</td></tr><tr><td>vision speech and</td><td>30</td></tr><tr><td>in the present</td><td>30</td></tr><tr><td>business media llc</td><td>30</td></tr><tr><td>natural language processing</td><td>30</td></tr><tr><td>this paper presents</td><td>30</td></tr><tr><td>stony brook university</td><td>30</td></tr><tr><td>boston ma usa</td><td>30</td></tr><tr><td>center for biometrics</td><td>30</td></tr><tr><td>of michigan ann</td><td>30</td></tr><tr><td>facial expressions are</td><td>30</td></tr><tr><td>college of information</td><td>30</td></tr><tr><td>of the association</td><td>30</td></tr><tr><td>the association for</td><td>30</td></tr><tr><td>in this chapter</td><td>30</td></tr><tr><td>received in revised</td><td>30</td></tr><tr><td>for fine grained</td><td>30</td></tr><tr><td>university of munich</td><td>30</td></tr><tr><td>classi cation with</td><td>30</td></tr><tr><td>hong kong china</td><td>30</td></tr><tr><td>science and applications</td><td>30</td></tr><tr><td>graphics and vision</td><td>29</td></tr><tr><td>and we will</td><td>29</td></tr><tr><td>eecs berkeley edu</td><td>29</td></tr><tr><td>of experimental psychology</td><td>29</td></tr><tr><td>work for personal</td><td>29</td></tr><tr><td>of engineering research</td><td>29</td></tr><tr><td>ibm watson research</td><td>29</td></tr><tr><td>engineering research and</td><td>29</td></tr><tr><td>for the purpose</td><td>29</td></tr><tr><td>of the paper</td><td>29</td></tr><tr><td>advertising or promotional</td><td>29</td></tr><tr><td>creating new collective</td><td>29</td></tr><tr><td>facial action unit</td><td>29</td></tr><tr><td>saarbr ucken germany</td><td>29</td></tr><tr><td>to face recognition</td><td>29</td></tr><tr><td>face recognition algorithms</td><td>29</td></tr><tr><td>institutes of advanced</td><td>29</td></tr><tr><td>technology chinese academy</td><td>29</td></tr><tr><td>fr ed eric</td><td>29</td></tr><tr><td>watson research center</td><td>29</td></tr><tr><td>american psychological association</td><td>29</td></tr><tr><td>advanced computer science</td><td>29</td></tr><tr><td>web of science</td><td>29</td></tr><tr><td>chen change loy</td><td>29</td></tr><tr><td>universitat aut onoma</td><td>29</td></tr><tr><td>department of mechanical</td><td>29</td></tr><tr><td>biometrics and security</td><td>29</td></tr><tr><td>contents lists available</td><td>29</td></tr><tr><td>michigan ann arbor</td><td>29</td></tr><tr><td>emotional facial expressions</td><td>29</td></tr><tr><td>an important role</td><td>29</td></tr><tr><td>school of software</td><td>29</td></tr><tr><td>based image retrieval</td><td>29</td></tr><tr><td>in the presence</td><td>29</td></tr><tr><td>der technischen 
universit</td><td>29</td></tr><tr><td>advances in signal</td><td>29</td></tr><tr><td>institute of psychology</td><td>29</td></tr><tr><td>and arti cial</td><td>29</td></tr><tr><td>for autism research</td><td>29</td></tr><tr><td>intelligent perception and</td><td>29</td></tr><tr><td>of california davis</td><td>29</td></tr><tr><td>received may accepted</td><td>29</td></tr><tr><td>features of the</td><td>29</td></tr><tr><td>theory of mind</td><td>29</td></tr><tr><td>the twenty sixth</td><td>29</td></tr><tr><td>twenty sixth international</td><td>29</td></tr><tr><td>sixth international joint</td><td>29</td></tr><tr><td>to the faculty</td><td>28</td></tr><tr><td>university of queensland</td><td>28</td></tr><tr><td>university of bristol</td><td>28</td></tr><tr><td>houston tx usa</td><td>28</td></tr><tr><td>report no ucb</td><td>28</td></tr><tr><td>no ucb eecs</td><td>28</td></tr><tr><td>www eecs berkeley</td><td>28</td></tr><tr><td>berkeley edu pubs</td><td>28</td></tr><tr><td>edu pubs techrpts</td><td>28</td></tr><tr><td>pubs techrpts eecs</td><td>28</td></tr><tr><td>techrpts eecs html</td><td>28</td></tr><tr><td>new york usa</td><td>28</td></tr><tr><td>new south wales</td><td>28</td></tr><tr><td>fellow ieee and</td><td>28</td></tr><tr><td>in the paper</td><td>28</td></tr><tr><td>and research ijsr</td><td>28</td></tr><tr><td>information technology and</td><td>28</td></tr><tr><td>university of freiburg</td><td>28</td></tr><tr><td>we propose novel</td><td>28</td></tr><tr><td>degree of master</td><td>28</td></tr><tr><td>to this article</td><td>28</td></tr><tr><td>detection and recognition</td><td>28</td></tr><tr><td>and mobile computing</td><td>28</td></tr><tr><td>of our books</td><td>28</td></tr><tr><td>our books indexed</td><td>28</td></tr><tr><td>of science core</td><td>28</td></tr><tr><td>science core collection</td><td>28</td></tr><tr><td>core collection bkci</td><td>28</td></tr><tr><td>in publishing with</td><td>28</td></tr><tr><td>jean marc odobez</td><td>28</td></tr><tr><td>published version apa</td><td>28</td></tr><tr><td>university of york</td><td>28</td></tr><tr><td>deep learning for</td><td>28</td></tr><tr><td>for biometrics and</td><td>28</td></tr><tr><td>and security research</td><td>28</td></tr><tr><td>university of british</td><td>28</td></tr><tr><td>www pnas org</td><td>28</td></tr><tr><td>human robot interaction</td><td>28</td></tr><tr><td>philadelphia pa usa</td><td>28</td></tr><tr><td>machine learning research</td><td>28</td></tr><tr><td>face to face</td><td>28</td></tr><tr><td>in an image</td><td>28</td></tr><tr><td>pose estimation and</td><td>28</td></tr><tr><td>school of economics</td><td>28</td></tr><tr><td>to the same</td><td>28</td></tr><tr><td>out of the</td><td>28</td></tr><tr><td>faculty of science</td><td>28</td></tr><tr><td>terms and conditions</td><td>28</td></tr><tr><td>year of publication</td><td>27</td></tr><tr><td>breaches copyright please</td><td>27</td></tr><tr><td>providing details and</td><td>27</td></tr><tr><td>personal or classroom</td><td>27</td></tr><tr><td>de la torre</td><td>27</td></tr><tr><td>in the last</td><td>27</td></tr><tr><td>engineering national university</td><td>27</td></tr><tr><td>school of eecs</td><td>27</td></tr><tr><td>of the author</td><td>27</td></tr><tr><td>engineering and information</td><td>27</td></tr><tr><td>because of the</td><td>27</td></tr><tr><td>et al eurasip</td><td>27</td></tr><tr><td>al eurasip journal</td><td>27</td></tr><tr><td>role in the</td><td>27</td></tr><tr><td>automatic face 
recognition</td><td>27</td></tr><tr><td>assistant professor department</td><td>27</td></tr><tr><td>pattern recognition institute</td><td>27</td></tr><tr><td>are among the</td><td>27</td></tr><tr><td>and brain sciences</td><td>27</td></tr><tr><td>institutes of health</td><td>27</td></tr><tr><td>of british columbia</td><td>27</td></tr><tr><td>permissions please email</td><td>27</td></tr><tr><td>conflict of interest</td><td>27</td></tr><tr><td>is that the</td><td>27</td></tr><tr><td>of this paper</td><td>27</td></tr><tr><td>facial expressions and</td><td>27</td></tr><tr><td>centre for vision</td><td>27</td></tr><tr><td>multiple object tracking</td><td>27</td></tr><tr><td>meeting of the</td><td>27</td></tr><tr><td>center for cognitive</td><td>27</td></tr><tr><td>cole polytechnique rale</td><td>27</td></tr><tr><td>of oriented gradients</td><td>27</td></tr><tr><td>department of automation</td><td>27</td></tr><tr><td>research on intelligent</td><td>27</td></tr><tr><td>on intelligent perception</td><td>27</td></tr><tr><td>perception and computing</td><td>27</td></tr><tr><td>university of sydney</td><td>27</td></tr><tr><td>for object recognition</td><td>27</td></tr><tr><td>reuse of any</td><td>27</td></tr><tr><td>dept of ece</td><td>27</td></tr><tr><td>to image translation</td><td>27</td></tr><tr><td>of this article</td><td>27</td></tr><tr><td>and information science</td><td>27</td></tr><tr><td>kong polytechnic university</td><td>27</td></tr><tr><td>recurrent neural networks</td><td>27</td></tr><tr><td>rale de lausanne</td><td>27</td></tr><tr><td>by the author</td><td>26</td></tr><tr><td>requirements associated with</td><td>26</td></tr><tr><td>access to the</td><td>26</td></tr><tr><td>semi supervised learning</td><td>26</td></tr><tr><td>part of this</td><td>26</td></tr><tr><td>center for automation</td><td>26</td></tr><tr><td>for automation research</td><td>26</td></tr><tr><td>the full citation</td><td>26</td></tr><tr><td>nature of the</td><td>26</td></tr><tr><td>allen institute for</td><td>26</td></tr><tr><td>et al for</td><td>26</td></tr><tr><td>committee on graduate</td><td>26</td></tr><tr><td>application to face</td><td>26</td></tr><tr><td>the relationship between</td><td>26</td></tr><tr><td>from single image</td><td>26</td></tr><tr><td>the human visual</td><td>26</td></tr><tr><td>university of the</td><td>26</td></tr><tr><td>shih fu chang</td><td>26</td></tr><tr><td>provided by the</td><td>26</td></tr><tr><td>link to this</td><td>26</td></tr><tr><td>saarbr cken germany</td><td>26</td></tr><tr><td>over the past</td><td>26</td></tr><tr><td>in the form</td><td>26</td></tr></table></body></html> \ No newline at end of file