diff --git a/02. Face and Audio Recognition using Siamese Networks/2.4 Face Recognition Using Siamese Network.ipynb b/02. Face and Audio Recognition using Siamese Networks/2.4 Face Recognition Using Siamese Network.ipynb
index c3e6f2864..7a3748f3c 100644
--- a/02. Face and Audio Recognition using Siamese Networks/2.4 Face Recognition Using Siamese Network.ipynb
+++ b/02. Face and Audio Recognition using Siamese Networks/2.4 Face Recognition Using Siamese Network.ipynb
@@ -81,26 +81,19 @@
    "metadata": {
     "scrolled": false
    },
-   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "Using TensorFlow backend.\n"
-     ]
-    }
-   ],
+   "outputs": [],
    "source": [
    "import re\n",
    "import numpy as np\n",
    "from PIL import Image\n",
+   "import tensorflow as tf\n",
    "\n",
    "from sklearn.model_selection import train_test_split\n",
-   "from keras import backend as K\n",
-   "from keras.layers import Activation\n",
-   "from keras.layers import Input, Lambda, Dense, Dropout, Convolution2D, MaxPooling2D, Flatten\n",
-   "from keras.models import Sequential, Model\n",
-   "from keras.optimizers import RMSprop"
+   "from tensorflow.keras import backend as K\n",
+   "from tensorflow.keras.layers import Activation\n",
+   "from tensorflow.keras.layers import Input, Lambda, Dense, Dropout, Conv2D, MaxPooling2D, Flatten\n",
+   "from tensorflow.keras.models import Sequential, Model\n",
+   "from tensorflow.keras.optimizers import RMSprop"
    ]
   },
   {
@@ -113,9 +106,7 @@
   {
    "cell_type": "code",
    "execution_count": 2,
-   "metadata": {
-    "collapsed": true
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
    "def read_image(filename, byteorder='>'):\n",
@@ -156,7 +147,7 @@
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAFwAAABwCAAAAAC26kjJ... [several thousand unchanged base64 characters of the 92x112 ORL face PNG, elided] ...RLwAAAAASUVORK5CYII=\n",
      "text/plain": [
-      "<PIL.PpmImagePlugin.PpmImageFile image mode=L size=92x112 at 0x...>"
+      "<PIL.PpmImagePlugin.PpmImageFile image mode=L size=92x112 at 0x...>"
      ]
     },
     "execution_count": 3,
@@ -178,9 +169,7 @@
   {
    "cell_type": "code",
    "execution_count": 4,
-   "metadata": {
-    "collapsed": true
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
    "img = read_image('data/orl_faces/s1/1.pgm')"
   ]
   },
@@ -220,9 +209,7 @@
   {
    "cell_type": "code",
    "execution_count": 6,
-   "metadata": {
-    "collapsed": true
- }, + "metadata": {}, "outputs": [], "source": [ "size = 2\n", @@ -313,9 +300,7 @@ { "cell_type": "code", "execution_count": 7, - "metadata": { - "collapsed": true - }, + "metadata": {}, "outputs": [], "source": [ "X, Y = get_data(size, total_sample_size)" @@ -344,6 +329,27 @@ { "cell_type": "code", "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(20000, 2, 56, 46, 1)" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "X = np.transpose(X, [0, 1, 3, 4, 2])\n", + "X.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 10, "metadata": { "scrolled": true }, @@ -354,7 +360,7 @@ "(20000, 1)" ] }, - "execution_count": 9, + "execution_count": 10, "metadata": {}, "output_type": "execute_result" } @@ -372,10 +378,8 @@ }, { "cell_type": "code", - "execution_count": 10, - "metadata": { - "collapsed": true - }, + "execution_count": 11, + "metadata": {}, "outputs": [], "source": [ "x_train, x_test, y_train, y_test = train_test_split(X, Y, test_size=.25)" @@ -390,10 +394,8 @@ }, { "cell_type": "code", - "execution_count": 11, - "metadata": { - "collapsed": true - }, + "execution_count": 12, + "metadata": {}, "outputs": [], "source": [ "def build_base_network(input_shape):\n", @@ -405,16 +407,15 @@ " \n", " \n", " #convolutional layer 1\n", - " seq.add(Convolution2D(nb_filter[0], kernel_size, kernel_size, input_shape=input_shape,\n", - " border_mode='valid', dim_ordering='th'))\n", + " seq.add(Conv2D(nb_filter[0], (kernel_size,kernel_size), input_shape=input_shape, padding='valid'))\n", " seq.add(Activation('relu'))\n", " seq.add(MaxPooling2D(pool_size=(2, 2))) \n", " seq.add(Dropout(.25))\n", " \n", " #convolutional layer 2\n", - " seq.add(Convolution2D(nb_filter[1], kernel_size, kernel_size, border_mode='valid', dim_ordering='th'))\n", + " seq.add(Conv2D(nb_filter[1], (kernel_size,kernel_size), padding='valid'))\n", " seq.add(Activation('relu'))\n", - " seq.add(MaxPooling2D(pool_size=(2, 2), dim_ordering='th')) \n", + " seq.add(MaxPooling2D(pool_size=(2, 2))) \n", " seq.add(Dropout(.25))\n", "\n", " #flatten \n", @@ -422,7 +423,7 @@ " seq.add(Dense(128, activation='relu'))\n", " seq.add(Dropout(0.1))\n", " seq.add(Dense(50, activation='relu'))\n", - " return seq\n" + " return seq" ] }, { @@ -434,10 +435,8 @@ }, { "cell_type": "code", - "execution_count": 12, - "metadata": { - "collapsed": true - }, + "execution_count": 13, + "metadata": {}, "outputs": [], "source": [ "input_dim = x_train.shape[2:]\n", @@ -447,11 +446,18 @@ }, { "cell_type": "code", - "execution_count": 13, - "metadata": { - "collapsed": true - }, - "outputs": [], + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-09-19 01:18:32.222226: I tensorflow/core/platform/cpu_feature_guard.cc:142] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: AVX2 FMA\n", + "To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.\n" + ] + } + ], "source": [ "base_network = build_base_network(input_dim)\n", "feat_vecs_a = base_network(img_a)\n", @@ -467,10 +473,8 @@ }, { "cell_type": "code", - "execution_count": 14, - "metadata": { - "collapsed": true - }, + "execution_count": 15, + "metadata": {}, "outputs": [], "source": [ "def euclidean_distance(vects):\n", @@ -485,10 +489,8 @@ }, { "cell_type": "code", - "execution_count": 
15, - "metadata": { - "collapsed": true - }, + "execution_count": 16, + "metadata": {}, "outputs": [], "source": [ "distance = Lambda(euclidean_distance, output_shape=eucl_dist_output_shape)([feat_vecs_a, feat_vecs_b])" @@ -503,10 +505,8 @@ }, { "cell_type": "code", - "execution_count": 16, - "metadata": { - "collapsed": true - }, + "execution_count": 17, + "metadata": {}, "outputs": [], "source": [ "epochs = 13\n", @@ -515,13 +515,11 @@ }, { "cell_type": "code", - "execution_count": 17, - "metadata": { - "collapsed": true - }, + "execution_count": 18, + "metadata": {}, "outputs": [], "source": [ - "model = Model(input=[img_a, img_b], output=distance)" + "model = Model(inputs=[img_a, img_b], outputs=distance)" ] }, { @@ -533,10 +531,8 @@ }, { "cell_type": "code", - "execution_count": 18, - "metadata": { - "collapsed": true - }, + "execution_count": 19, + "metadata": {}, "outputs": [], "source": [ "def contrastive_loss(y_true, y_pred):\n", @@ -546,10 +542,8 @@ }, { "cell_type": "code", - "execution_count": 19, - "metadata": { - "collapsed": true - }, + "execution_count": 20, + "metadata": {}, "outputs": [], "source": [ "model.compile(loss=contrastive_loss, optimizer=rms)" @@ -557,10 +551,8 @@ }, { "cell_type": "code", - "execution_count": 20, - "metadata": { - "collapsed": true - }, + "execution_count": 21, + "metadata": {}, "outputs": [], "source": [ "img_1 = x_train[:, 0]\n", @@ -569,56 +561,71 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 22, + "metadata": {}, + "outputs": [], + "source": [ + "#x_train[:, 0].shape" + ] + }, + { + "cell_type": "code", + "execution_count": 23, "metadata": {}, "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-09-19 01:18:33.046387: I tensorflow/compiler/mlir/mlir_graph_optimization_pass.cc:185] None of the MLIR Optimization Passes are enabled (registered 2)\n" + ] + }, { "name": "stdout", "output_type": "stream", "text": [ - "Train on 11250 samples, validate on 3750 samples\n", "Epoch 1/13\n", - " - 60s - loss: 0.2179 - val_loss: 0.2156\n", + "88/88 - 8s - loss: 0.2073 - val_loss: 0.2179\n", "Epoch 2/13\n", - " - 53s - loss: 0.1520 - val_loss: 0.2102\n", + "88/88 - 8s - loss: 0.1357 - val_loss: 0.1868\n", "Epoch 3/13\n", - " - 53s - loss: 0.1190 - val_loss: 0.1545\n", + "88/88 - 8s - loss: 0.1109 - val_loss: 0.1320\n", "Epoch 4/13\n", - " - 55s - loss: 0.0959 - val_loss: 0.1705\n", + "88/88 - 8s - loss: 0.0927 - val_loss: 0.1299\n", "Epoch 5/13\n", - " - 52s - loss: 0.0801 - val_loss: 0.1181\n", + "88/88 - 8s - loss: 0.0773 - val_loss: 0.1156\n", "Epoch 6/13\n", - " - 52s - loss: 0.0684 - val_loss: 0.0821\n", + "88/88 - 8s - loss: 0.0653 - val_loss: 0.0701\n", "Epoch 7/13\n", - " - 52s - loss: 0.0591 - val_loss: 0.0762\n", + "88/88 - 8s - loss: 0.0566 - val_loss: 0.0585\n", "Epoch 8/13\n", - " - 52s - loss: 0.0526 - val_loss: 0.0655\n", + "88/88 - 7s - loss: 0.0514 - val_loss: 0.0455\n", "Epoch 9/13\n", - " - 52s - loss: 0.0475 - val_loss: 0.0662\n", + "88/88 - 8s - loss: 0.0452 - val_loss: 0.0347\n", "Epoch 10/13\n", - " - 52s - loss: 0.0444 - val_loss: 0.0469\n", + "88/88 - 8s - loss: 0.0407 - val_loss: 0.0334\n", "Epoch 11/13\n", - " - 52s - loss: 0.0408 - val_loss: 0.0478\n", + "88/88 - 8s - loss: 0.0369 - val_loss: 0.0286\n", "Epoch 12/13\n", - " - 52s - loss: 0.0381 - val_loss: 0.0498\n", + "88/88 - 8s - loss: 0.0341 - val_loss: 0.0247\n", "Epoch 13/13\n", - " - 54s - loss: 0.0356 - val_loss: 0.0363\n" + "88/88 - 8s - loss: 0.0313 - val_loss: 0.0191\n" ] }, { "data": { "text/plain": [ - "" 
+ "" ] }, - "execution_count": 21, + "execution_count": 23, "metadata": {}, "output_type": "execute_result" } ], "source": [ "model.fit([img_1, img2], y_train, validation_split=.25,\n", - " batch_size=128, verbose=2, nb_epoch=epochs)" + " batch_size=128, verbose=2, epochs=epochs)" ] }, { @@ -630,10 +637,8 @@ }, { "cell_type": "code", - "execution_count": 22, - "metadata": { - "collapsed": true - }, + "execution_count": 24, + "metadata": {}, "outputs": [], "source": [ "pred = model.predict([x_test[:, 0], x_test[:, 1]])" @@ -641,10 +646,8 @@ }, { "cell_type": "code", - "execution_count": 23, - "metadata": { - "collapsed": true - }, + "execution_count": 25, + "metadata": {}, "outputs": [], "source": [ "def compute_accuracy(predictions, labels):\n", @@ -660,7 +663,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 26, "metadata": { "scrolled": true }, @@ -668,10 +671,10 @@ { "data": { "text/plain": [ - "0.9258126195028681" + "0.9595375722543352" ] }, - "execution_count": 24, + "execution_count": 26, "metadata": {}, "output_type": "execute_result" } @@ -679,25 +682,32 @@ "source": [ "compute_accuracy(pred, y_test)" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { "kernelspec": { - "display_name": "Python [default]", + "display_name": "Python 3 (ipykernel)", "language": "python", - "name": "python2" + "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", - "version": 2 + "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", - "pygments_lexer": "ipython2", - "version": "2.7.11" + "pygments_lexer": "ipython3", + "version": "3.8.8" } }, "nbformat": 4,